Kirill Zhukov
5a590cbadb
[ALBS-1077] Now we delete build if it was deleted from ALBS Bugfix 'Key error' when db_port/db_host is not set Bugfix update_builds ignoring opldest_to_update attribute
231 lines
9.0 KiB
Python
231 lines
9.0 KiB
Python
from datetime import datetime
|
|
import logging
|
|
from urllib.parse import urljoin
|
|
from typing import Dict, List, Any, Optional
|
|
|
|
import requests
|
|
|
|
|
|
from .models.build import Build
|
|
from .models.build_task import BuildTask
|
|
from .models.build_node_stats import BuildNodeStats
|
|
from .models.build_stat import BuildStat
|
|
from .models.web_node_stats import WebNodeStats
|
|
from .models.test_task import TestTask
|
|
from .models.test_steps_stats import TestStepsStats
|
|
from .models.test_step_stat import TestStepStat
|
|
|
|
TZ_OFFSET = '+00:00'
|
|
|
|
|
|
class APIclient:
    """Client for working with the ALBS (AlmaLinux Build System) API."""

    def __init__(self, api_root: str, jwt: str, timeout: int):
        """
        api_root: base URL of the ALBS API
        jwt:      auth token (kept for endpoints that require it)
        timeout:  per-request timeout, in seconds
        """
        self.api_root = api_root
        self.jwt = jwt
        self.timeout = timeout
def get_builds(self, page_num: int = 1) -> List[Build]:
|
|
ep = '/api/v1/builds'
|
|
url = urljoin(self.api_root, ep)
|
|
params = {'pageNumber': page_num}
|
|
headers = {'accept': 'appilication/json'}
|
|
|
|
response = requests.get(
|
|
url, params=params, headers=headers, timeout=self.timeout)
|
|
response.raise_for_status()
|
|
|
|
result = []
|
|
for b in response.json()['builds']:
|
|
try:
|
|
result.append(self._parse_build(b))
|
|
except Exception as err: # pylint: disable=broad-except
|
|
logging.error("Cant convert build JSON %s to Buildmodel: %s",
|
|
b, err, exc_info=True)
|
|
return result
|
|
|
|
def get_build(self, build_id: int) -> Optional[Build]:
|
|
'''
|
|
method returns None if build was deleted from ALBS
|
|
'''
|
|
ep = f'/api/v1/builds/{build_id}'
|
|
url = urljoin(self.api_root, ep)
|
|
headers = {'accept': 'application/json'}
|
|
response = requests.get(url, headers=headers, timeout=self.timeout)
|
|
|
|
if response.status_code == 404:
|
|
return None
|
|
|
|
response.raise_for_status()
|
|
return self._parse_build(response.json())
|
|
|
|
def __parse_build_node_stats(self, stats: Dict) -> BuildNodeStats:
|
|
logging.debug('raw json: %s', stats)
|
|
|
|
keys = ['build_all', 'build_binaries', 'build_packages', 'build_srpm', 'build_node_task',
|
|
'cas_notarize_artifacts', 'cas_source_authenticate', 'git_checkout', 'upload']
|
|
params = {}
|
|
for k in keys:
|
|
try:
|
|
params[k] = BuildStat(
|
|
start_ts=datetime.fromisoformat(
|
|
stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
|
|
end_ts=datetime.fromisoformat(
|
|
stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
|
|
except KeyError:
|
|
params[k] = BuildStat()
|
|
build_node_stats = BuildNodeStats(**params)
|
|
logging.debug('BuildNodeStats: %s', build_node_stats)
|
|
return build_node_stats
|
|
|
|
def __parse_web_node_stats(self, stats: Dict) -> WebNodeStats:
|
|
keys = ['build_done', 'logs_processing',
|
|
'packages_processing', 'multilib_processing']
|
|
params = {}
|
|
logging.debug('raw json: %s', stats)
|
|
for k in keys:
|
|
try:
|
|
params[k] = BuildStat(
|
|
start_ts=datetime.fromisoformat(
|
|
stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
|
|
end_ts=datetime.fromisoformat(
|
|
stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
|
|
except KeyError:
|
|
params[k] = BuildStat()
|
|
web_node_stats = WebNodeStats(**params)
|
|
logging.debug('WebNodeStats %s', web_node_stats)
|
|
return web_node_stats
|
|
|
|
    def _parse_build_tasks(self, tasks_json: Dict, build_id: int) -> List[BuildTask]:
        """
        Convert the raw 'tasks' array of a build into BuildTask models.

        A task that fails to parse is logged and skipped, so one bad task
        does not discard the whole build.
        Returns tasks sorted by id, newest (highest id) first.
        """
        result = []
        for task in tasks_json:
            try:
                started_at = datetime.fromisoformat(
                    task['started_at']+TZ_OFFSET) if task['started_at'] else None
                finished_at = datetime.fromisoformat(
                    task['finished_at']+TZ_OFFSET) if task['finished_at'] else None
                # task name is derived from the git repository name in the ref URL
                name = task['ref']['url'].split('/')[-1].replace('.git', '')
                if not task['performance_stats']:
                    logging.warning(
                        "no perfomance_stats for build_id: %s, build_task_id: %s", build_id, task['id'])
                    # substitute empty stats so the parsers below still
                    # produce (empty) BuildNodeStats / WebNodeStats models
                    stats: dict[str, Any] = {
                        'build_node_stats': {}, 'build_done_stats': {}}
                else:
                    stats = task['performance_stats'][0]['statistics']

                params = {'id': task['id'],
                          'name': name,
                          'build_id': build_id,
                          'started_at': started_at,
                          'finished_at': finished_at,
                          'arch': task['arch'],
                          'status_id': task['status'],
                          'build_node_stats': self.__parse_build_node_stats(stats['build_node_stats']),
                          'web_node_stats': self.__parse_web_node_stats(stats['build_done_stats'])}
                result.append(BuildTask(**params))
            except Exception as err:  # pylint: disable=broad-except
                logging.error("Cant convert build_task JSON %s (build_id %s) to BuildTask model: %s",
                              task, build_id, err, exc_info=True)

        # newest tasks first
        result.sort(key=lambda x: x.id, reverse=True)
        return result
|
|
|
|
def _parse_build(self, build_json: Dict) -> Build:
|
|
url = f"https://build.almalinux.org/build/{build_json['id']}"
|
|
created_at = datetime.fromisoformat(build_json['created_at']+TZ_OFFSET)
|
|
finished_at = datetime.fromisoformat(
|
|
build_json['finished_at']+TZ_OFFSET) if build_json['finished_at'] else None
|
|
build_tasks = self._parse_build_tasks(
|
|
build_json['tasks'], build_json['id'])
|
|
|
|
params = {
|
|
'id': build_json['id'],
|
|
'url': url,
|
|
'created_at': created_at,
|
|
'finished_at': finished_at,
|
|
'build_tasks': build_tasks}
|
|
|
|
return Build(**params)
|
|
|
|
def get_test_tasks(self, build_task_id: int) -> List[TestTask]:
|
|
result: List[TestTask] = []
|
|
revision = 1
|
|
while True:
|
|
ep = f'/api/v1/tests/{build_task_id}/{revision}'
|
|
url = urljoin(self.api_root, ep)
|
|
headers = {'accept': 'application/json'}
|
|
|
|
response = requests.get(
|
|
url, headers=headers, timeout=self.timeout)
|
|
response.raise_for_status()
|
|
raw_tasks = response.json()
|
|
if len(raw_tasks) == 0:
|
|
break
|
|
result = result + self.__parse_test_tasks(raw_tasks, build_task_id)
|
|
revision += 1
|
|
return result
|
|
|
|
def __parse_test_tasks(self, raw_tasks: List[Dict[str, Any]],
|
|
build_task_id: int) -> List[TestTask]:
|
|
result: List[TestTask] = []
|
|
for task in raw_tasks:
|
|
if task['alts_response']:
|
|
try:
|
|
stats_raw = task['alts_response']['stats']
|
|
except KeyError:
|
|
steps_stats = None
|
|
else:
|
|
steps_stats = self.__parse_test_steps_stats(stats_raw)
|
|
else:
|
|
steps_stats = None
|
|
params = {
|
|
'id': task['id'],
|
|
'build_task_id': build_task_id,
|
|
'revision': task['revision'],
|
|
'status': task['status'],
|
|
'package_fullname': '_'.join([task['package_name'],
|
|
task['package_version'],
|
|
task['package_release']]),
|
|
'started_at': self.__get_test_task_started_at(
|
|
steps_stats) if steps_stats else None,
|
|
'steps_stats': steps_stats
|
|
}
|
|
|
|
result.append(TestTask(**params))
|
|
return result
|
|
|
|
def __parse_test_steps_stats(self, stats_raw: Dict[str, Any]) -> TestStepsStats:
|
|
teast_steps_params = {}
|
|
for field_name in TestStepsStats.__fields__.keys():
|
|
try:
|
|
p = stats_raw[field_name]
|
|
except KeyError:
|
|
continue
|
|
# there are must be a better way
|
|
for k in ['start_ts', 'finish_ts']:
|
|
if k in p:
|
|
p[k] = datetime.fromisoformat(p[k]+TZ_OFFSET)
|
|
teast_steps_params[field_name] = TestStepStat(**p)
|
|
return TestStepsStats(**teast_steps_params)
|
|
|
|
def __get_test_task_started_at(self, stats: TestStepsStats) -> Optional[datetime]:
|
|
"""
|
|
getting started_at attribute for test by using oldest start_ts timestamp
|
|
among all test tasks steps
|
|
"""
|
|
if not stats:
|
|
return None
|
|
|
|
start_ts = None
|
|
for field_name in stats.__fields__.keys():
|
|
stat: TestStepStat = getattr(stats, field_name)
|
|
if not stat:
|
|
continue
|
|
if not start_ts or start_ts > stat.start_ts:
|
|
start_ts = stat.start_ts
|
|
|
|
return start_ts
|