Merge pull request 'ALBS-1026: add statistics for each build_task step' (#1) from ALBS-1026 into main

Reviewed-on: #1
This commit is contained in:
kzhukov 2023-03-15 10:25:52 +00:00
commit 1f98c072f9
36 changed files with 8055 additions and 534 deletions

View File

@ -1,115 +0,0 @@
from datetime import datetime
from typing import Union, Dict
import psycopg2
from .models.build_db import BuildDB
from .models.build_task_db import BuildTaskDB
from .models.db_config import DbConfig
class DB():
def __init__(self, config: DbConfig):
self.__conn = psycopg2.connect(database=config.name,
host=config.host,
user=config.username,
password=config.password,
port=config.port)
def close_conn(self):
self.__conn.close()
def __del__(self):
self.close_conn()
def insert_build(self, build: BuildDB):
sql = '''
INSERT INTO builds(id, url, created_at, finished_at)
VALUES (%s, %s, %s, %s);
'''
cur = self.__conn.cursor()
cur.execute(sql, (build.id, build.url,
build.created_at, build.finished_at))
self.__conn.commit()
def insert_buildtask(self, build_task: BuildTaskDB):
sql = '''
INSERT INTO build_tasks(id, name, build_id, arch_id, started_at, finished_at, status_id)
VALUES (%s, %s, %s, %s, %s, %s, %s);
'''
cur = self.__conn.cursor()
cur.execute(sql, (build_task.id, build_task.name, build_task.build_id, build_task.arch_id,
build_task.started_at, build_task.finished_at, build_task.status_id))
self.__conn.commit()
def get_latest_build_id(self) -> Union[int, None]:
sql = "SELECT id from builds ORDER BY id DESC LIMIT 1;"
cur = self.__conn.cursor()
cur.execute(sql)
val = cur.fetchone()
if not val:
return None
return int(val[0])
def cleanup_builds(self, oldest_to_keep: datetime) -> int:
params = (int(oldest_to_keep.timestamp()),)
sql = "DELETE FROM builds WHERE created_at < %s;"
cur = self.__conn.cursor()
cur.execute(sql, params)
self.__conn.commit()
return cur.rowcount
def get_unfinished_builds(self) -> Dict[int, Dict[int, int]]:
"""
Getting the list of unfinished builds and build_tasks
Dict[build_id, Dict[build_task_id, task_status_id]]
"""
res: Dict[int, Dict[int, int]] = {}
# getting unfinished builds
sql = 'SELECT id FROM builds where finished_at is NULL;'
cur = self.__conn.cursor()
cur.execute(sql)
for row in cur.fetchall():
res[row[0]] = {}
# getting list of unfinished tasks
sql = 'SELECT id, build_id, status_id FROM build_tasks WHERE status_id < 2;'
cur = self.__conn.cursor()
cur.execute(sql)
for row in cur.fetchall():
build_task_id: int = row[0]
build_id: int = row[1]
status_id: int = row[2]
try:
res[build_id][build_task_id] = status_id
except KeyError:
res[build_id] = {build_task_id: status_id}
return res
def update_build(self, build: BuildDB):
sql = '''
UPDATE builds
SET finished_at = %s
WHERE id = %s;
'''
cur = self.__conn.cursor()
cur.execute(sql, (build.finished_at, build.id))
self.__conn.commit()
def update_build_task(self, build: BuildTaskDB):
sql = '''
UPDATE build_tasks
SET status_id = %s,
started_at = %s,
finished_at = %s
WHERE id = %s;
'''
cur = self.__conn.cursor()
cur.execute(sql, (build.status_id, build.started_at,
build.finished_at, build.id))
self.__conn.commit()

View File

@ -1,17 +0,0 @@
from enum import IntEnum
class ArchEnum(IntEnum):
i686 = 0
x86_64 = 1
aarch64 = 2
ppc64le = 3
s390x = 4
class BuildTaskEnum(IntEnum):
idle = 0
started = 1
completed = 2
failed = 3
excluded = 4

View File

@ -1,92 +0,0 @@
-- builds
DROP TABLE IF EXISTS builds CASCADE;
CREATE TABLE builds (
id INTEGER PRIMARY KEY,
url VARCHAR(50) NOT NULL,
created_at REAL NOT NULL,
finished_at REAL
);
CREATE INDEX IF NOT EXISTS builds_created_at
ON builds(created_at);
CREATE INDEX IF NOT EXISTS builds_finished_at
ON builds(finished_at);
-- build_task_status_enum
DROP TABLE IF EXISTS build_task_status_enum CASCADE;
CREATE TABLE IF NOT EXISTS build_task_status_enum(
id INTEGER PRIMARY KEY,
value VARCHAR(15)
);
INSERT INTO build_task_status_enum (id, value)
VALUES
(0, 'idle'),
(1, 'started'),
(2, 'completed'),
(3, 'failed'),
(4, 'excluded');
-- arch_enum
DROP TABLE IF EXISTS arch_enum CASCADE;
CREATE TABLE arch_enum(
id INTEGER PRIMARY KEY,
value VARCHAR(15)
);
INSERT INTO arch_enum(id, value)
VALUES
(0, 'i686'),
(1, 'x86_64'),
(2, 'aarch64'),
(3, 'ppc64le'),
(4, 's390x');
-- build_tasks
DROP TABLE IF EXISTS build_tasks CASCADE;
CREATE TABLE build_tasks (
id INTEGER PRIMARY KEY,
name VARCHAR(50) NOT NULL,
build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
arch_id INTEGER REFERENCES arch_enum(id) ON DELETE SET NULL,
status_id INTEGER REFERENCES build_task_status_enum(id) ON DELETE SET NULL,
started_at REAL,
finished_at REAL
);
CREATE INDEX build_tasks_build_id
ON build_tasks(build_id);
CREATE INDEX build_tasks_started_at
ON build_tasks(started_at);
CREATE INDEX build_tasks_finished_at
ON build_tasks(finished_at);
-- sign_tasks
DROP TABLE IF EXISTS sign_tasks CASCADE;
CREATE TABLE sign_tasks (
id INTEGER PRIMARY KEY,
build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
buildtask_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
started_at REAL,
finished_at REAL
);
CREATE INDEX sign_tasks_build_id
ON sign_tasks(build_id);
CREATE INDEX sign_tasks_buildtask_id
ON sign_tasks(buildtask_id);
CREATE INDEX sign_tasks_started_at
ON sign_tasks(started_at);
CREATE INDEX sign_tasks_finished_at
ON sign_tasks(finished_at);

View File

@ -1,2 +0,0 @@
0.1.0 (2023-03-01)
First version

View File

@ -1,14 +1,15 @@
from datetime import datetime
import logging
from urllib.parse import urljoin
from typing import Dict, List
import requests
from .models.build import Build
from .models.build_task import BuildTask
from .models.build_node_stats import BuildNodeStats
from .models.build_stat import BuildStat
from .models.web_node_stats import WebNodeStats
TZ_OFFSET = '+00:00'
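# ALBS returns naive ISO-8601 timestamps; appending TZ_OFFSET makes
# datetime.fromisoformat() produce timezone-aware UTC values, e.g.
# datetime.fromisoformat('2023-03-01T10:00:00' + TZ_OFFSET)
# -> datetime(2023, 3, 1, 10, 0, tzinfo=timezone.utc)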
@ -51,23 +52,68 @@ class APIclient():
response.raise_for_status()
return self._parse_build(response.json())
def __parse_build_node_stats(self, stats: Dict) -> BuildNodeStats:
logging.debug('raw json: %s', stats)
keys = ['build_all', 'build_binaries', 'build_packages', 'build_srpm', 'build_node_task',
'cas_notarize_artifacts', 'cas_source_authenticate', 'git_checkout', 'upload']
params = {}
for k in keys:
try:
params[k] = BuildStat(
start_ts=datetime.fromisoformat(
stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
end_ts=datetime.fromisoformat(
stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
except KeyError:
params[k] = BuildStat()
build_node_stats = BuildNodeStats(**params)
logging.debug('BuildNodeStats: %s', build_node_stats)
return build_node_stats
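# Illustrative only: the `stats` payload this method expects looks roughly like
# {
#     "build_srpm": {"start_ts": "2023-03-01T10:00:00", "end_ts": "2023-03-01T10:05:41"},
#     "git_checkout": {"start_ts": None, "end_ts": None},
#     ...
# }
# Keys missing from the payload fall back to an empty BuildStat().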
def __parse_web_node_stats(self, stats: Dict) -> WebNodeStats:
keys = ['build_done', 'logs_processing',
'packages_processing', 'multilib_processing']
params = {}
logging.debug('raw json: %s', stats)
for k in keys:
try:
params[k] = BuildStat(
start_ts=datetime.fromisoformat(
stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
end_ts=datetime.fromisoformat(
stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
except KeyError:
params[k] = BuildStat()
web_node_stats = WebNodeStats(**params)
logging.debug('WebNodeStats %s', web_node_stats)
return web_node_stats
def _parse_build_tasks(self, tasks_json: Dict, build_id: int) -> List[BuildTask]:
result = []
for task in tasks_json:
try:
started_at = datetime.fromisoformat(
task['started_at']+TZ_OFFSET) if task['started_at'] else None
finished_at = datetime.fromisoformat(
task['finished_at']+TZ_OFFSET) if task['finished_at'] else None
name = task['ref']['url'].split('/')[-1].replace('.git', '')
if not task['performance_stats']:
logging.warning(
"no perfomance_stats for build_id: %s, build_task_id: %s", build_id, task['id'])
stats = {'build_node_stats': {}, 'build_done_stats': {}}
else:
stats = task['performance_stats'][0]['statistics']
params = {'id': task['id'],
'name': name,
'build_id': build_id,
'started_at': started_at,
'finished_at': finished_at,
'arch': task['arch'],
'status_id': task['status'],
'build_node_stats': self.__parse_build_node_stats(stats['build_node_stats']),
'web_node_stats': self.__parse_web_node_stats(stats['build_done_stats'])}
result.append(BuildTask(**params))
except Exception as err: # pylint: disable=broad-except
logging.error("Cant convert build_task JSON %s (build_id %s) to BuildTask model: %s",
@ -79,8 +125,8 @@ class APIclient():
def _parse_build(self, build_json: Dict) -> Build:
url = f"https://build.almalinux.org/build/{build_json['id']}"
created_at = datetime.fromisoformat(build_json['created_at']+TZ_OFFSET)
finished_at = datetime.fromisoformat(
build_json['finished_at']+TZ_OFFSET) if build_json['finished_at'] else None
build_tasks = self._parse_build_tasks(
build_json['tasks'], build_json['id'])

View File

@ -0,0 +1,42 @@
# pylint: disable=invalid-name
from enum import IntEnum
# supported schema version
DB_SCHEMA_VER = 2
# ENUMS
class ArchEnum(IntEnum):
i686 = 0
x86_64 = 1
aarch64 = 2
ppc64le = 3
s390x = 4
class BuildTaskEnum(IntEnum):
idle = 0
started = 1
completed = 2
failed = 3
excluded = 4
class WebNodeStatsEnum(IntEnum):
build_done = 0
logs_processing = 1
packages_processing = 2
multilib_processing = 3
class BuildNodeStatsEnum(IntEnum):
upload = 0
build_all = 1
build_srpm = 2
git_checkout = 3
build_binaries = 4
build_packages = 5
build_node_task = 6
cas_notarize_artifacts = 7
cas_source_authenticate = 8
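# These IntEnums mirror the *_enum lookup tables created by the SQL migrations,
# so a stat name maps directly to its database id, e.g.:
# BuildNodeStatsEnum['git_checkout'].value -> 3
# ArchEnum.x86_64.value -> 1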

View File

@ -0,0 +1,231 @@
from datetime import datetime
from typing import Union, Dict, List, Optional
import logging
import psycopg2
from .models.build_db import BuildDB
from .models.build_task_db import BuildTaskDB
from .models.build_node_stat_db import BuildNodeStatDB
from .models.db_config import DbConfig
from .models.web_node_stat_db import WebNodeStatDB
class DB():
def __init__(self, config: DbConfig):
self.__conn = psycopg2.connect(database=config.name,
host=config.host,
user=config.username,
password=config.password,
port=config.port)
def close_conn(self):
self.__conn.close()
def __del__(self):
self.close_conn()
def insert_build(self, build: BuildDB):
sql = '''
INSERT INTO builds(id, url, created_at, finished_at)
VALUES (%s, %s, %s, %s);
'''
cur = self.__conn.cursor()
cur.execute(sql, (build.id, build.url,
build.created_at, build.finished_at))
self.__conn.commit()
def insert_buildtask(self, build_task: BuildTaskDB, web_node_stats: List[WebNodeStatDB],
build_node_stats: List[BuildNodeStatDB]):
cur = self.__conn.cursor()
# inserting build_task
sql = '''
INSERT INTO build_tasks(id, name, build_id, arch_id, started_at, finished_at, status_id)
VALUES (%s, %s, %s, %s, %s, %s, %s);
'''
cur.execute(sql, (build_task.id, build_task.name, build_task.build_id, build_task.arch_id,
build_task.started_at, build_task.finished_at, build_task.status_id))
# inserting web node stats
for stat in web_node_stats:
# do not insert empty stats
if stat.start_ts is None:
continue
sql = '''
INSERT INTO web_node_stats (build_task_id, stat_name_id, start_ts, end_ts)
VALUES (%s, %s, %s, %s);
'''
cur.execute(sql, (stat.build_task_id, stat.stat_name_id,
stat.start_ts, stat.end_ts))
logging.debug('raw SQL query: %s', cur.query)
self.__conn.commit()
# inserting build node stats
for stat in build_node_stats:
# do not insert empty stats
if stat.start_ts is None:
continue
sql = '''
INSERT INTO build_node_stats(build_task_id, stat_name_id, start_ts, end_ts)
VALUES (%s, %s, %s, %s);
'''
cur.execute(sql, (stat.build_task_id, stat.stat_name_id,
stat.start_ts, stat.end_ts))
logging.debug('raw SQL query: %s', cur.query)
# committing changes
self.__conn.commit()
def get_latest_build_id(self) -> Union[int, None]:
sql = "SELECT id from builds ORDER BY id DESC LIMIT 1;"
cur = self.__conn.cursor()
cur.execute(sql)
val = cur.fetchone()
if not val:
return None
return int(val[0])
def cleanup_builds(self, oldest_to_keep: datetime) -> int:
params = (int(oldest_to_keep.timestamp()),)
sql = "DELETE FROM builds WHERE created_at < %s;"
cur = self.__conn.cursor()
cur.execute(sql, params)
self.__conn.commit()
return cur.rowcount
def get_unfinished_builds(self) -> Dict[int, Dict[int, int]]:
"""
Getting the list of unfinished builds and build_tasks
Dict[build_id, Dict[build_task_id, task_status_id]]
"""
res: Dict[int, Dict[int, int]] = {}
# getting unfinished builds
sql = 'SELECT id FROM builds where finished_at is NULL;'
cur = self.__conn.cursor()
cur.execute(sql)
for row in cur.fetchall():
res[row[0]] = {}
# getting list of unfinished tasks
sql = 'SELECT id, build_id, status_id FROM build_tasks WHERE status_id < 2;'
cur = self.__conn.cursor()
cur.execute(sql)
for row in cur.fetchall():
build_task_id: int = row[0]
build_id: int = row[1]
status_id: int = row[2]
try:
res[build_id][build_task_id] = status_id
except KeyError:
res[build_id] = {build_task_id: status_id}
return res
def update_build(self, build: BuildDB):
sql = '''
UPDATE builds
SET finished_at = %s
WHERE id = %s;
'''
cur = self.__conn.cursor()
cur.execute(sql, (build.finished_at, build.id))
self.__conn.commit()
def update_build_task(self, build_task: BuildTaskDB,
web_node_stats: List[WebNodeStatDB],
build_node_stats: List[BuildNodeStatDB]):
cur = self.__conn.cursor()
sql = '''
UPDATE build_tasks
SET status_id = %s,
started_at = %s,
finished_at = %s
WHERE id = %s;
'''
cur.execute(sql, (build_task.status_id, build_task.started_at,
build_task.finished_at, build_task.id))
logging.debug('raw SQL query: %s', cur.query)
# updating web_node_stats
for stat in web_node_stats:
logging.debug(
'updating web_node_stats %s build_task %s', stat.stat_name_id, build_task.id)
if self.stat_exists(build_task_id=stat.build_task_id,
stat_name_id=stat.stat_name_id,
table_name='web_node_stats'):
sql = '''
UPDATE web_node_stats
SET start_ts = %(start_ts)s, end_ts = %(end_ts)s
WHERE build_task_id = %(build_task_id)s AND stat_name_id = %(stat_name_id)s
'''
else:
sql = '''
INSERT INTO web_node_stats(build_task_id, stat_name_id, start_ts, end_ts)
VALUES (%(build_task_id)s, %(stat_name_id)s, %(start_ts)s, %(end_ts)s);
'''
params = {'build_task_id': build_task.id,
'stat_name_id': stat.stat_name_id,
'start_ts': stat.start_ts,
'end_ts': stat.end_ts}
cur.execute(sql, params)
logging.debug('raw SQL query: %s', cur.query)
# updating build_node_stats
for stat in build_node_stats:
logging.debug(
'updating build_node_stats %s build_task %s', stat.stat_name_id, build_task.id)
if self.stat_exists(build_task_id=stat.build_task_id,
stat_name_id=stat.stat_name_id,
table_name='build_node_stats'):
sql = '''
UPDATE build_node_stats
SET start_ts = %(start_ts)s, end_ts = %(end_ts)s
WHERE build_task_id = %(build_task_id)s AND stat_name_id = %(stat_name_id)s
'''
else:
sql = '''
INSERT INTO build_node_stats(build_task_id, stat_name_id, start_ts, end_ts)
VALUES (%(build_task_id)s, %(stat_name_id)s, %(start_ts)s, %(end_ts)s);
'''
params = {'build_task_id': build_task.id,
'stat_name_id': stat.stat_name_id,
'start_ts': stat.start_ts,
'end_ts': stat.end_ts}
cur.execute(sql, params)
logging.debug('raw SQL query: %s', cur.query)
# committing changes
self.__conn.commit()
def get_db_schema_version(self) -> Optional[int]:
sql = '''
SELECT *
FROM schema_version
LIMIT 1;
'''
cur = self.__conn.cursor()
cur.execute(sql)
val = cur.fetchone()
if not val:
return None
return int(val[0])
def stat_exists(self, build_task_id: int, stat_name_id: int, table_name: str) -> bool:
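# NOTE: table_name is interpolated into the query rather than parameterized;
# callers in this module pass only the internal constants
# 'web_node_stats' and 'build_node_stats'.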
sql = f'''
SELECT COUNT(build_task_id)
FROM {table_name}
WHERE build_task_id = %s AND stat_name_id = %s;
'''
cur = self.__conn.cursor()
cur.execute(sql, (build_task_id, stat_name_id))
val = int(cur.fetchone()[0])
return val == 1

View File

@ -4,7 +4,7 @@ import logging
from typing import List, Dict
from ..models.extractor_config import ExtractorConfig
from ..const import BuildTaskEnum
from ..models.build import BuildTask
from ..db import DB
from ..api_client import APIclient
@ -12,6 +12,7 @@ from ..api_client import APIclient
class Extractor:
def __init__(self, config: ExtractorConfig, api: APIclient, db: DB):
self.start_from = config.start_from
self.oldest_build_age = config.oldest_build_age
self.api = api
self.db = db
@ -21,7 +22,7 @@ class Extractor:
page_num = 1
last_build_id = self.db.get_latest_build_id()
if not last_build_id:
last_build_id = self.start_from - 1
logging.info("last_build_id: %s", last_build_id)
stop = False
@ -34,11 +35,25 @@ class Extractor:
stop = True
break
# inserting build, its build tasks and build task statistics
logging.info("inserting %s", build.id)
try:
self.db.insert_build(build.as_db_model())
except Exception as error: # pylint: disable=broad-except
logging.error('failed to insert build %d: %s',
build.id, error, exc_info=True)
continue
for build_task in build.build_tasks:
try:
self.db.insert_buildtask(build_task.as_db_model(),
build_task.web_node_stats.as_db_model(
build_task.id),
build_task.build_node_stats.as_db_model(
build_task.id))
except Exception as error: # pylint: disable=broad-except
logging.error('failed to insert build task %d: %s',
build_task.id, error, exc_info=True)
build_count += 1
page_num += 1
return build_count
@ -49,25 +64,31 @@ class Extractor:
removed_count = self.db.cleanup_builds(self.oldest_build_age)
logging.info('removed %d entries', removed_count)
def __update_build_tasks(self, build_tasks: List[BuildTask],
build_tasks_status_db: Dict[int, int]):
for b in build_tasks:
if b.status_id != build_tasks_status_db[b.id]:
logging.info('build: %s, build task %d status has changed %s -> %s. Updating DB',
b.build_id,
b.id, BuildTaskEnum(
build_tasks_status_db[b.id]).name,
BuildTaskEnum(b.status_id).name)
try:
self.db.update_build_task(b.as_db_model(),
b.web_node_stats.as_db_model(
b.id),
b.build_node_stats.as_db_model(b.id))
except Exception as err: # pylint: disable=broad-except
logging.error(
'build: %d, failed to update build task %d: %s',
b.build_id, b.id, err, exc_info=True)
else:
logging.info(
'build: %d, build task %d was updated', b.build_id, b.id)
else:
logging.info(
"build_task %d is still %s. Skipping", b.id, BuildTaskEnum(b.status_id).name)
"build: %d, build_task %d is still %s. Skipping",
b.build_id, b.id, BuildTaskEnum(b.status_id).name)
def update_builds(self):
logging.info('Getting list of tasks from DB')
@ -80,7 +101,7 @@ class Extractor:
logging.info('Updating build tasks')
build_tasks_to_check = [
b for b in build.build_tasks if b.id in build_tasks_db]
self.__update_build_tasks(
build_tasks_to_check, build_tasks_db)
if build.finished_at:

View File

@ -1,6 +1,7 @@
from datetime import datetime, timedelta
import logging
from logging.handlers import RotatingFileHandler
import sys
import time
import yaml
@ -8,6 +9,7 @@ import yaml
# pylint: disable=relative-beyond-top-level
from ..api_client import APIclient
from ..db import DB
from ..const import DB_SCHEMA_VER
from .extractor import Extractor
from ..models.extractor_config import ExtractorConfig
from ..models.db_config import DbConfig
@ -46,11 +48,24 @@ def start(yml_path: str):
# configuring logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(funcName)s() %(message)s',
handlers=[RotatingFileHandler(config.log_file,
maxBytes=10000000,
backupCount=3)])
# some pre-flight checks
db = DB(config.db_config)
cur_version = db.get_db_schema_version()
if not cur_version:
logging.error(
"Can't get DB schema version. Make sure that the schema_version table exists")
sys.exit(1)
if cur_version != DB_SCHEMA_VER:
logging.error('unsupported DB schema: want %s, have %s',
DB_SCHEMA_VER, cur_version)
sys.exit(1)
db.close_conn()
while True:
logging.info('Starting extraction process')
api = APIclient(api_root=config.albs_url,

View File

@ -0,0 +1,12 @@
from pydantic import BaseModel # pylint: disable=no-name-in-module
from typing import Optional
class BuildNodeStatDB(BaseModel):
"""
Build node stat as it is sent to/received from the database
"""
build_task_id: int
stat_name_id: int
start_ts: Optional[float] = None
end_ts: Optional[float] = None

View File

@ -0,0 +1,41 @@
from typing import List
from pydantic import BaseModel # pylint: disable=no-name-in-module
from .build_stat import BuildStat
from .build_node_stat_db import BuildNodeStatDB
from ..const import BuildNodeStatsEnum
class BuildNodeStats(BaseModel):
"""
Represents build statistics for build node
"""
build_all: BuildStat
build_binaries: BuildStat
build_packages: BuildStat
build_srpm: BuildStat
build_node_task: BuildStat
cas_notarize_artifacts: BuildStat
cas_source_authenticate: BuildStat
git_checkout: BuildStat
upload: BuildStat
def as_db_model(self, build_task_id: int) -> List[BuildNodeStatDB]:
result = []
for field_name in self.__fields__.keys():
stats: BuildStat = getattr(self, field_name)
start_ts = stats.start_ts.timestamp() \
if stats.start_ts else None
end_ts = stats.end_ts.timestamp() \
if stats.end_ts else None
stat_name_id = BuildNodeStatsEnum[field_name].value
build_node_stat_db = BuildNodeStatDB(build_task_id=build_task_id,
stat_name_id=stat_name_id,
start_ts=start_ts,
end_ts=end_ts)
result.append(build_node_stat_db)
return result
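A minimal usage sketch (illustrative values; assumes the model classes above are imported) showing how a parsed BuildNodeStats flattens into per-stat rows for the DB layer:

from datetime import datetime, timezone

stats = BuildNodeStats(
    build_all=BuildStat(start_ts=datetime(2023, 3, 1, 10, 0, tzinfo=timezone.utc),
                        end_ts=datetime(2023, 3, 1, 10, 9, tzinfo=timezone.utc)),
    build_binaries=BuildStat(), build_packages=BuildStat(), build_srpm=BuildStat(),
    build_node_task=BuildStat(), cas_notarize_artifacts=BuildStat(),
    cas_source_authenticate=BuildStat(), git_checkout=BuildStat(), upload=BuildStat())
rows = stats.as_db_model(build_task_id=4501)
# rows[0] == BuildNodeStatDB(build_task_id=4501, stat_name_id=1,
#                            start_ts=1677664800.0, end_ts=1677665340.0)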

View File

@ -0,0 +1,15 @@
"""
Module for BuildStat model
"""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel # pylint: disable=no-name-in-module
class BuildStat(BaseModel):
"""
BuildStat represents particular build statistic
"""
start_ts: Optional[datetime] = None
end_ts: Optional[datetime] = None

View File

@ -0,0 +1,16 @@
"""
Module for BuildStatDB model
"""
from pydantic import BaseModel # pylint: disable=no-name-in-module
class BuildStatDB(BaseModel):
"""
Represents a build stat as it is sent to/received from the database
"""
build_task_id: int
stat_name_id: int
start_ts: float
end_ts: float

View File

@ -4,7 +4,9 @@ from typing import Optional
from pydantic import BaseModel # pylint: disable=no-name-in-module
from .build_task_db import BuildTaskDB
from .build_node_stats import BuildNodeStats
from ..const import ArchEnum
from .web_node_stats import WebNodeStats
class BuildTask(BaseModel):
@ -15,6 +17,8 @@ class BuildTask(BaseModel):
started_at: Optional[datetime] = None
finished_at: Optional[datetime] = None
status_id: int
build_node_stats: BuildNodeStats
web_node_stats: WebNodeStats
def as_db_model(self) -> BuildTaskDB:
started_at = self.started_at.timestamp() \

View File

@ -10,6 +10,7 @@ ALBS_URL_DEFAULT = 'https://build.almalinux.org'
LOG_FILE_DEFAULT = '/tmp/extractor.log'
API_DEFAULT = 30
SCRAPE_INTERVAL_DEFAULT = 3600
START_FROM_DEFAULT = 5808
class ExtractorConfig(BaseModel):
@ -21,7 +22,7 @@ class ExtractorConfig(BaseModel):
albs_url: HttpUrl = Field(description='ALBS root URL',
default=ALBS_URL_DEFAULT)
oldest_build_age: datetime = \
Field(description='oldest build age to store')
jwt: str = Field(description='ALBS JWT token')
db_config: DbConfig = Field(description="database configuration")
api_timeout: int = Field(
@ -29,3 +30,5 @@ class ExtractorConfig(BaseModel):
default=API_DEFAULT)
scrape_interval: int = Field(description='how often (in seconds) we will extract data from ALBS',
default=SCRAPE_INTERVAL_DEFAULT)
start_from: int = Field(description='build id to start populating empty db with',
default=START_FROM_DEFAULT)

View File

@ -0,0 +1,13 @@
from pydantic import BaseModel # pylint: disable=no-name-in-module
from typing import Optional
class WebNodeStatDB(BaseModel):
"""
Represents a WebNodeStat as it is sent to/received from the database
"""
build_task_id: int
stat_name_id: int
start_ts: Optional[float] = None
end_ts: Optional[float] = None

View File

@ -0,0 +1,36 @@
from typing import List
from pydantic import BaseModel # pylint: disable=no-name-in-module
from .build_stat import BuildStat
from .web_node_stat_db import WebNodeStatDB
from ..const import WebNodeStatsEnum
class WebNodeStats(BaseModel):
"""
Represents build statistics for web node
"""
build_done: BuildStat
logs_processing: BuildStat
packages_processing: BuildStat
multilib_processing: BuildStat
def as_db_model(self, build_task_id: int) -> List[WebNodeStatDB]:
result = []
for field_name in self.__fields__.keys():
stats: BuildStat = getattr(self, field_name)
start_ts = stats.start_ts.timestamp() \
if stats.start_ts else None
end_ts = stats.end_ts.timestamp() \
if stats.end_ts else None
stat_name_id = WebNodeStatsEnum[field_name].value
web_node_stat_db = WebNodeStatDB(build_task_id=build_task_id,
stat_name_id=stat_name_id,
start_ts=start_ts,
end_ts=end_ts)
result.append(web_node_stat_db)
return result

View File

@ -55,4 +55,9 @@ data_store_days: 30
# sleep time in seconds between data extraction
# required: no
# default: 3600
scrape_interval: 3600
# build_id to start populating empty db with
# required: false
# default: 5808 (first build with correct metrics)
start_from:

View File

@ -0,0 +1,167 @@
BEGIN;
-- builds
CREATE TABLE builds (
id INTEGER PRIMARY KEY,
url VARCHAR(50) NOT NULL,
created_at DOUBLE PRECISION NOT NULL,
finished_at DOUBLE PRECISION
);
CREATE INDEX IF NOT EXISTS builds_created_at
ON builds(created_at);
CREATE INDEX IF NOT EXISTS builds_finished_at
ON builds(finished_at);
-- build_task_status_enum
CREATE TABLE IF NOT EXISTS build_task_status_enum(
id INTEGER PRIMARY KEY,
value VARCHAR(15)
);
INSERT INTO build_task_status_enum (id, value)
VALUES
(0, 'idle'),
(1, 'started'),
(2, 'completed'),
(3, 'failed'),
(4, 'excluded');
-- arch_enum
CREATE TABLE arch_enum(
id INTEGER PRIMARY KEY,
value VARCHAR(15)
);
INSERT INTO arch_enum(id, value)
VALUES
(0, 'i686'),
(1, 'x86_64'),
(2, 'aarch64'),
(3, 'ppc64le'),
(4, 's390x');
-- web_node_stats_enum
CREATE TABLE web_node_stats_enum (
id INTEGER PRIMARY KEY,
value VARCHAR(50)
);
INSERT INTO web_node_stats_enum (id, value)
VALUES
(0, 'build_done'),
(1, 'logs_processing'),
(2, 'packages_processing');
-- build_node_stats_enum
CREATE TABLE build_node_stats_enum(
id INTEGER PRIMARY KEY,
value VARCHAR(50)
);
INSERT INTO build_node_stats_enum (id, value)
VALUES
(0, 'upload'),
(1, 'build_all'),
(2, 'build_srpm'),
(3, 'git_checkout'),
(4, 'build_binaries'),
(5, 'build_packages'),
(6, 'build_node_task'),
(7, 'cas_notarize_artifacts'),
(8, 'cas_source_authenticate');
-- build_tasks
CREATE TABLE build_tasks (
id INTEGER PRIMARY KEY,
name VARCHAR(50) NOT NULL,
build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
arch_id INTEGER REFERENCES arch_enum(id) ON DELETE SET NULL,
status_id INTEGER REFERENCES build_task_status_enum(id) ON DELETE SET NULL,
started_at DOUBLE PRECISION,
finished_at DOUBLE PRECISION
);
CREATE INDEX build_tasks_build_id
ON build_tasks(build_id);
CREATE INDEX build_tasks_started_at
ON build_tasks(started_at);
CREATE INDEX build_tasks_finished_at
ON build_tasks(finished_at);
-- web_node_stats
CREATE TABLE web_node_stats (
build_task_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
stat_name_id INTEGER REFERENCES web_node_stats_enum(id) ON DELETE SET NULL,
start_ts DOUBLE PRECISION,
end_ts DOUBLE PRECISION
);
CREATE INDEX web_node_stats_build_task_id
ON web_node_stats(build_task_id);
CREATE INDEX web_node_stats_start_ts
ON web_node_stats(start_ts);
CREATE INDEX web_node_stats_end_ts
ON web_node_stats(end_ts);
-- build_node_stats
CREATE TABLE build_node_stats (
build_task_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
stat_name_id INTEGER REFERENCES build_node_stats_enum(id) ON DELETE SET NULL,
start_ts DOUBLE PRECISION,
end_ts DOUBLE PRECISION
);
CREATE INDEX build_node_stats_build_task_id
ON build_node_stats(build_task_id);
CREATE INDEX build_node_stats_build_start_ts
ON build_node_stats(start_ts);
CREATE INDEX build_node_stats_build_end_ts
ON build_node_stats(end_ts);
-- sign_tasks
CREATE TABLE sign_tasks (
id INTEGER PRIMARY KEY,
build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
buildtask_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
started_at DOUBLE PRECISION,
finished_at DOUBLE PRECISION
);
CREATE INDEX sign_tasks_build_id
ON sign_tasks(build_id);
CREATE INDEX sign_tasks_buildtask_id
ON sign_tasks(buildtask_id);
CREATE INDEX sign_tasks_started_at
ON sign_tasks(started_at);
CREATE INDEX sign_tasks_finished_at
ON sign_tasks(finished_at);
-- schema_version
CREATE TABLE schema_version (
version INTEGER
);
INSERT INTO schema_version (version)
VALUES (1);
COMMIT;
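A hedged consumption example (database credentials are placeholders) showing how the new stats tables answer "how long did each build-node step take"; the Grafana dashboards below run essentially the same join:

import psycopg2

conn = psycopg2.connect(database='albs_analytics', host='localhost',
                        user='albs', password='secret', port=5432)  # placeholders
cur = conn.cursor()
cur.execute('''
    SELECT e.value AS step, s.end_ts - s.start_ts AS duration_sec
    FROM build_node_stats AS s
    JOIN build_node_stats_enum AS e ON s.stat_name_id = e.id
    WHERE s.build_task_id = %s AND s.end_ts IS NOT NULL
    ORDER BY s.start_ts;
''', (4501,))  # build_task_id is illustrative
for step, duration in cur.fetchall():
    print(f'{step}: {duration:.1f}s')
conn.close()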

View File

@ -0,0 +1,17 @@
BEGIN;
INSERT INTO web_node_stats_enum (id, value)
VALUES
(3, 'multilib_processing');
ALTER TABLE web_node_stats
ADD CONSTRAINT web_node_stats_unique UNIQUE (build_task_id, stat_name_id);
ALTER TABLE build_node_stats
ADD CONSTRAINT build_node_stats_unique UNIQUE (build_task_id, stat_name_id);
UPDATE schema_version
SET version = 2;
COMMIT;
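The unique constraints added above make (build_task_id, stat_name_id) a natural conflict target, so the stat_exists() check plus separate INSERT/UPDATE in DB.update_build_task could alternatively collapse into a single upsert; a sketch, assuming an open psycopg2 cursor and the same params dict used there:

sql = '''
    INSERT INTO web_node_stats(build_task_id, stat_name_id, start_ts, end_ts)
    VALUES (%(build_task_id)s, %(stat_name_id)s, %(start_ts)s, %(end_ts)s)
    ON CONFLICT (build_task_id, stat_name_id)
    DO UPDATE SET start_ts = EXCLUDED.start_ts, end_ts = EXCLUDED.end_ts;
'''
cur.execute(sql, params)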

View File

@ -0,0 +1,7 @@
0.1.0 (2023-03-01)
First version
0.2.0 (in progress)
- New parameter start_from
- Moved to double precision for timestamps
- Added metrics for build steps

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,980 @@
{
"__inputs": [
{
"name": "DS_POSTGRESQL",
"label": "PostgreSQL",
"description": "",
"type": "datasource",
"pluginId": "postgres",
"pluginName": "PostgreSQL"
},
{
"name": "DS_EXPRESSION",
"label": "Expression",
"description": "",
"type": "datasource",
"pluginId": "__expr__"
}
],
"__elements": {},
"__requires": [
{
"type": "datasource",
"id": "__expr__",
"version": "1.0.0"
},
{
"type": "panel",
"id": "bargauge",
"name": "Bar gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.3.2"
},
{
"type": "datasource",
"id": "postgres",
"name": "PostgreSQL",
"version": "1.0.0"
},
{
"type": "panel",
"id": "table",
"name": "Table",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": false,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "duration"
},
"properties": [
{
"id": "unit",
"value": "s"
}
]
},
{
"matcher": {
"id": "byName",
"options": "build_id"
},
"properties": [
{
"id": "links",
"value": [
{
"title": "",
"url": "/d/dmVtrz-4k/build-details?orgId=1&var-build_id=${__value.raw}\n\n"
}
]
}
]
}
]
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 5,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true
},
"pluginVersion": "9.3.2",
"targets": [
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT\n bt.id,\n bt.name,\n bt.build_id,\n arch_enum.value AS arch,\n status_enum.value AS status,\n bt.started_at * 1000 AS started,\n bt.finished_at * 1000 AS finished,\n bt.finished_at - bt.started_at AS duration \nFROM build_tasks AS bt\nJOIN build_task_status_enum AS status_enum ON bt.status_id = status_enum.id\nJOIN arch_enum on bt.arch_id = arch_enum.id\nWHERE bt.id = $build_task;",
"refId": "A",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"urlPath": ""
}
],
"title": "Task summary",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "time",
"targetField": "started"
},
{
"destinationType": "time",
"targetField": "finished"
}
],
"fields": {}
}
}
],
"type": "table"
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"fieldConfig": {
"defaults": {
"color": {
"fixedColor": "#61a69d",
"mode": "fixed"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "s"
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "wait time"
},
"properties": [
{
"id": "color",
"value": {
"fixedColor": "dark-red",
"mode": "fixed"
}
}
]
}
]
},
"gridPos": {
"h": 9,
"w": 24,
"x": 0,
"y": 3
},
"id": 12,
"options": {
"displayMode": "gradient",
"minVizHeight": 10,
"minVizWidth": 0,
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showUnfilled": false
},
"pluginVersion": "9.3.2",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as git_checkout\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 3",
"refId": "git_checkout",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as source_authenticate\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 8;",
"refId": "cas_source_authenticate",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as build_srpm\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 2",
"refId": "build_srpm",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as build_binaries\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 4",
"refId": "build_binaries",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as notarize_artifacts\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 7;",
"refId": "cas_notarize_artifacts",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as logs_processing\nFROM web_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 1",
"refId": "logs_processing",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": false,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as packages_processing\nFROM web_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 2",
"refId": "packages_processing",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as upload\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 0",
"refId": "upload",
"sql": {
"columns": [
{
"parameters": [
{
"name": "start_ts",
"type": "functionParameter"
}
],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "build_node_stats",
"urlPath": ""
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": true,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as build_node_task\nFROM build_node_stats \nWHERE build_task_id = $build_task and stat_name_id = 6",
"refId": "build_node_task",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": true,
"rawQuery": true,
"rawSql": "SELECT end_ts - start_ts as web_node_task\nFROM web_node_stats\nWHERE build_task_id = $build_task and stat_name_id = 0",
"refId": "web_node_task",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"format": "table",
"hide": true,
"rawQuery": true,
"rawSql": "SELECT\n finished_at - started_at AS duration \nFROM build_tasks \nWHERE id = $build_task;",
"refId": "task_diration",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
}
},
{
"datasource": {
"type": "__expr__",
"uid": "${DS_EXPRESSION}"
},
"expression": "$task_diration - $build_node_task - $web_node_task",
"hide": false,
"refId": "wait time",
"type": "math"
}
],
"title": "Task duration",
"type": "bargauge"
},
{
"collapsed": true,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 12
},
"id": 8,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"filterable": true,
"inspect": true
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "duration"
},
"properties": [
{
"id": "unit",
"value": "s"
}
]
},
{
"matcher": {
"id": "byName",
"options": "finished"
},
"properties": [
{
"id": "custom.width",
"value": 235
}
]
},
{
"matcher": {
"id": "byName",
"options": "started"
},
"properties": [
{
"id": "custom.width",
"value": 205
}
]
}
]
},
"gridPos": {
"h": 5,
"w": 24,
"x": 0,
"y": 13
},
"id": 2,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"frameIndex": 1,
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "started"
}
]
},
"pluginVersion": "9.3.2",
"targets": [
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT \n enum.value,\n stats.start_ts * 1000 AS started,\n stats.end_ts * 1000 AS finished,\n stats.end_ts - stats.start_ts AS duration\nFROM web_node_stats AS stats\nINNER JOIN web_node_stats_enum AS enum\n ON stats.stat_name_id = enum.id\nWHERE build_task_id = $build_task",
"refId": "web node stats",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"urlPath": ""
}
],
"title": "Web node stats",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "time",
"targetField": "started"
},
{
"destinationType": "time",
"targetField": "finished"
}
],
"fields": {}
}
}
],
"type": "table"
},
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"filterable": true,
"inspect": true
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "duration"
},
"properties": [
{
"id": "unit",
"value": "s"
}
]
},
{
"matcher": {
"id": "byName",
"options": "started"
},
"properties": [
{
"id": "custom.width",
"value": 200
}
]
}
]
},
"gridPos": {
"h": 11,
"w": 24,
"x": 0,
"y": 18
},
"id": 3,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"frameIndex": 1,
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "duration"
}
]
},
"pluginVersion": "9.3.2",
"targets": [
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT enum.value,\n stats.start_ts * 1000 AS started,\n stats.end_ts * 1000 AS finished,\n stats.end_ts - stats.start_ts AS duration\nFROM build_node_stats AS stats\nINNER JOIN build_node_stats_enum AS enum\n ON stats.stat_name_id = enum.id\nWHERE build_task_id = $build_task",
"refId": "web node stats",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"urlPath": ""
}
],
"title": "Build node stats",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "time",
"targetField": "started"
},
{
"destinationType": "time",
"targetField": "finished"
}
],
"fields": {}
}
}
],
"type": "table"
}
],
"title": "Details",
"type": "row"
}
],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"current": {},
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"definition": "SELECT id from build_tasks ORDER BY id",
"hide": 0,
"includeAll": false,
"multi": false,
"name": "build_task",
"options": [],
"query": "SELECT id from build_tasks ORDER BY id",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"type": "query"
}
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Build task details",
"uid": "vtQClqxVk",
"version": 37,
"weekStart": ""
}

View File

@ -1,280 +0,0 @@
{
"__inputs": [
{
"name": "DS_POSTGRESQL",
"label": "PostgreSQL",
"description": "",
"type": "datasource",
"pluginId": "postgres",
"pluginName": "PostgreSQL"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.3.2"
},
{
"type": "datasource",
"id": "postgres",
"name": "PostgreSQL",
"version": "1.0.0"
},
{
"type": "panel",
"id": "table",
"name": "Table",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "id"
},
"properties": [
{
"id": "custom.width",
"value": 54
}
]
},
{
"matcher": {
"id": "byName",
"options": "created_at"
},
"properties": [
{
"id": "custom.width",
"value": 226
}
]
},
{
"matcher": {
"id": "byName",
"options": "finished_at"
},
"properties": [
{
"id": "custom.width",
"value": 209
}
]
},
{
"matcher": {
"id": "byName",
"options": "finished"
},
"properties": [
{
"id": "custom.width",
"value": 187
}
]
},
{
"matcher": {
"id": "byName",
"options": "created"
},
"properties": [
{
"id": "custom.width",
"value": 213
}
]
},
{
"matcher": {
"id": "byName",
"options": "url"
},
"properties": [
{
"id": "custom.width",
"value": 279
}
]
}
]
},
"gridPos": {
"h": 12,
"w": 24,
"x": 0,
"y": 0
},
"id": 2,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "duration (h)"
}
]
},
"pluginVersion": "9.3.2",
"targets": [
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"hide": false,
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT id, url, created_at * 1000 as created, finished_at * 1000 as finished, (finished_at - created_at) / (60*60) as duration\nFROM builds\nWHERE $__unixEpochFilter(created_at) AND finished_at IS NOT NULL",
"refId": "A",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"urlPath": ""
}
],
"title": "Finished builds",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "time",
"targetField": "created"
},
{
"destinationType": "time",
"targetField": "finished"
}
],
"fields": {}
}
},
{
"id": "organize",
"options": {
"excludeByName": {},
"indexByName": {},
"renameByName": {
"duration": "duration (h)"
}
}
}
],
"type": "table"
}
],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-3h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "albs_analytics",
"uid": "02mg4oxVk",
"version": 1,
"weekStart": ""
}