ALBS-1026: add statistics for each build_task step #1

Merged
kzhukov merged 12 commits from ALBS-1026 into main 2023-03-15 10:25:53 +00:00
31 changed files with 1920 additions and 338 deletions
Showing only changes of commit d737bae353

View File

@ -1,17 +0,0 @@
from enum import IntEnum


class ArchEnum(IntEnum):
    i686 = 0
    x86_64 = 1
    aarch64 = 2
    ppc64le = 3
    s390x = 4


class BuildTaskEnum(IntEnum):
    idle = 0
    started = 1
    completed = 2
    failed = 3
    excluded = 4

View File

@ -1,2 +0,0 @@
0.1.0 (2023-03-01)
First version

View File

@ -1,14 +1,15 @@
 from datetime import datetime
 import logging
 from urllib.parse import urljoin
 from typing import Dict, List
+import requests
 from .models.build import Build
 from .models.build_task import BuildTask
-import requests
+from .models.build_node_stats import BuildNodeStats
+from .models.build_stat import BuildStat
+from .models.web_node_stats import WebNodeStats

 TZ_OFFSET = '+00:00'
@ -51,23 +52,61 @@ class APIclient():
         response.raise_for_status()
         return self._parse_build(response.json())

+    def __parse_build_node_stats(self, stats: Dict) -> BuildNodeStats:
+        keys = ['build_all', 'build_binaries', 'build_packages', 'build_srpm', 'build_node_task',
+                'cas_notarize_artifacts', 'cas_source_authenticate', 'git_checkout', 'upload']
+        params = {}
+        for k in keys:
+            try:
+                params[k] = BuildStat(
+                    start_ts=datetime.fromisoformat(
+                        stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
+                    end_ts=datetime.fromisoformat(
+                        stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
+            except KeyError:
+                params[k] = BuildStat()
+        return BuildNodeStats(**params)
+
+    def __parse_web_node_stats(self, stats: Dict) -> WebNodeStats:
+        keys = ['build_done', 'logs_processing', 'packages_processing']
+        params = {}
+        for k in keys:
+            try:
+                params[k] = BuildStat(
+                    start_ts=datetime.fromisoformat(
+                        stats[k]['start_ts']+TZ_OFFSET) if stats[k]['start_ts'] else None,
+                    end_ts=datetime.fromisoformat(
+                        stats[k]['end_ts']+TZ_OFFSET) if stats[k]['end_ts'] else None)
+            except KeyError:
+                params[k] = BuildStat()
+        return WebNodeStats(**params)
+
     def _parse_build_tasks(self, tasks_json: Dict, build_id: int) -> List[BuildTask]:
         result = []
         for task in tasks_json:
             try:
-                started_at = datetime.fromisoformat(
-                    task['started_at']+TZ_OFFSET) \
-                    if task['started_at'] else None
-                finished_at = datetime.fromisoformat(task['finished_at']+TZ_OFFSET) \
-                    if task['finished_at'] else None
+                started_at = datetime.fromisoformat(
+                    task['started_at']+TZ_OFFSET) if task['started_at'] else None
+                finished_at = datetime.fromisoformat(
+                    task['finished_at']+TZ_OFFSET) if task['finished_at'] else None
                 name = task['ref']['url'].split('/')[-1].replace('.git', '')
+                if not task['performance_stats']:
+                    logging.warning(
+                        "no performance_stats for build_id: %s, build_task_id: %s",
+                        build_id, task['id'])
+                    stats = {'build_node_stats': {}, 'build_done_stats': {}}
+                else:
+                    stats = task['performance_stats'][0]['statistics']
                 params = {'id': task['id'],
                           'name': name,
                           'build_id': build_id,
                           'started_at': started_at,
                           'finished_at': finished_at,
                           'arch': task['arch'],
-                          'status_id': task['status']}
+                          'status_id': task['status'],
+                          'build_node_stats': self.__parse_build_node_stats(stats['build_node_stats']),
+                          'web_node_stats': self.__parse_web_node_stats(stats['build_done_stats'])}
                 result.append(BuildTask(**params))
             except Exception as err:  # pylint: disable=broad-except
                 logging.error("Cant convert build_task JSON %s (build_id %s) to BuildTask model: %s",
@ -79,8 +118,8 @@ class APIclient():
     def _parse_build(self, build_json: Dict) -> Build:
         url = f"https://build.almalinux.org/build/{build_json['id']}"
         created_at = datetime.fromisoformat(build_json['created_at']+TZ_OFFSET)
-        finished_at = datetime.fromisoformat(build_json['finished_at']+TZ_OFFSET) \
-            if build_json['finished_at'] else None
+        finished_at = datetime.fromisoformat(
+            build_json['finished_at']+TZ_OFFSET) if build_json['finished_at'] else None
         build_tasks = self._parse_build_tasks(
             build_json['tasks'], build_json['id'])
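For context, a minimal sketch of the build_task JSON shape that _parse_build_tasks consumes, inferred from the parsing code above. The key names follow __parse_build_node_stats/__parse_web_node_stats; the id and timestamp values are invented for illustration.

# Hypothetical build_task JSON fragment (values invented for illustration).
task_fragment = {
    "id": 12345,
    "performance_stats": [
        {
            "statistics": {
                "build_node_stats": {
                    # each step holds naive ISO timestamps (or None);
                    # the parser appends TZ_OFFSET before fromisoformat()
                    "build_srpm": {"start_ts": "2023-03-01T10:00:00",
                                   "end_ts": "2023-03-01T10:02:30"},
                    "git_checkout": {"start_ts": None, "end_ts": None},
                    # ... plus build_all, build_binaries, build_packages,
                    # build_node_task, cas_notarize_artifacts,
                    # cas_source_authenticate, upload
                },
                "build_done_stats": {
                    "build_done": {"start_ts": "2023-03-01T10:05:00",
                                   "end_ts": "2023-03-01T10:05:10"},
                    # ... plus logs_processing, packages_processing
                },
            }
        }
    ],
}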

View File

@ -1,11 +1,13 @@
 from datetime import datetime
-from typing import Union, Dict
+from typing import Union, Dict, List
 import psycopg2
 from .models.build_db import BuildDB
 from .models.build_task_db import BuildTaskDB
+from .models.build_node_stat_db import BuildNodeStatDB
 from .models.db_config import DbConfig
+from .models.web_node_stat_db import WebNodeStatDB


 class DB():
@ -33,15 +35,37 @@ class DB():
                           build.created_at, build.finished_at))
         self.__conn.commit()

-    def insert_buildtask(self, build_task: BuildTaskDB):
+    def insert_buildtask(self, build_task: BuildTaskDB, web_node_stats: List[WebNodeStatDB],
+                         build_node_stats: List[BuildNodeStatDB]):
+        cur = self.__conn.cursor()
+        # inserting build_task
         sql = '''
         INSERT INTO build_tasks(id, name, build_id, arch_id, started_at, finished_at, status_id)
         VALUES (%s, %s, %s, %s, %s, %s, %s);
         '''
-        cur = self.__conn.cursor()
         cur.execute(sql, (build_task.id, build_task.name, build_task.build_id, build_task.arch_id,
                           build_task.started_at, build_task.finished_at, build_task.status_id))
+        # inserting web node stats
+        for stat in web_node_stats:
+            sql = '''
+            INSERT INTO web_node_stats (build_task_id, stat_name_id, start_ts, end_ts)
+            VALUES (%s, %s, %s, %s);
+            '''
+            cur.execute(sql, (stat.build_task_id, stat.stat_name_id,
+                              stat.start_ts, stat.end_ts))
+        # inserting build node stats
+        for stat in build_node_stats:
+            sql = '''
+            INSERT INTO build_node_stats(build_task_id, stat_name_id, start_ts, end_ts)
+            VALUES (%s, %s, %s, %s);
+            '''
+            cur.execute(sql, (stat.build_task_id, stat.stat_name_id,
+                              stat.start_ts, stat.end_ts))
+        # committing changes
         self.__conn.commit()
     def get_latest_build_id(self) -> Union[int, None]:
@ -101,7 +125,12 @@ class DB():
         cur.execute(sql, (build.finished_at, build.id))
         self.__conn.commit()

-    def update_build_task(self, build: BuildTaskDB):
+    def update_build_task(self, build_task: BuildTaskDB,
+                          web_node_stats: List[WebNodeStatDB],
+                          build_node_stats: List[BuildNodeStatDB]):
+        cur = self.__conn.cursor()
+        # updating build_task
         sql = '''
         UPDATE build_tasks
         SET status_id = %s,
@ -109,7 +138,28 @@ class DB():
             finished_at = %s
         WHERE id = %s;
         '''
-        cur = self.__conn.cursor()
-        cur.execute(sql, (build.status_id, build.started_at,
-                          build.finished_at, build.id))
+        cur.execute(sql, (build_task.status_id, build_task.started_at,
+                          build_task.finished_at, build_task.id))
+        # updating web_node_stats
+        for stat in web_node_stats:
+            sql = '''
+            UPDATE web_node_stats
+            SET start_ts = %s,
+                end_ts = %s
+            WHERE build_task_id = %s
+              AND stat_name_id = %s;
+            '''
+            cur.execute(sql, (stat.start_ts, stat.end_ts,
+                              stat.build_task_id, stat.stat_name_id))
+        # updating build_node_stats
+        for stat in build_node_stats:
+            sql = '''
+            UPDATE build_node_stats
+            SET start_ts = %s,
+                end_ts = %s
+            WHERE build_task_id = %s
+              AND stat_name_id = %s;
+            '''
+            cur.execute(sql, (stat.start_ts, stat.end_ts,
+                              stat.build_task_id, stat.stat_name_id))
+        # committing changes
         self.__conn.commit()
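A minimal usage sketch (not part of the commit) tying these two methods to the models added below; it assumes db is a connected DB() instance and task is a parsed BuildTask.

# Sketch only; this mirrors what the Extractor does with these methods.
web_stats = task.web_node_stats.as_db_model(task.id)       # List[WebNodeStatDB]
build_stats = task.build_node_stats.as_db_model(task.id)   # List[BuildNodeStatDB]
db.insert_buildtask(task.as_db_model(), web_stats, build_stats)
# Later, when the task's status or stats change:
db.update_build_task(task.as_db_model(), web_stats, build_stats)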

View File

@ -12,6 +12,7 @@ from ..api_client import APIclient
 class Extractor:
     def __init__(self, config: ExtractorConfig, api: APIclient, db: DB):
+        self.start_from = config.start_from
         self.oldest_build_age = config.oldest_build_age
         self.api = api
         self.db = db
@ -21,7 +22,7 @@ class Extractor:
         page_num = 1
         last_build_id = self.db.get_latest_build_id()
         if not last_build_id:
-            last_build_id = 0
+            last_build_id = self.start_from
         logging.info("last_build_id: %s", last_build_id)
         stop = False
@ -34,11 +35,25 @@ class Extractor:
                     stop = True
                     break

-                # inserting build and build tasks
+                # inserting build, build tasks and build task statistics
                 logging.info("inserting %s", build.id)
-                self.db.insert_build(build.as_db_model())
+                try:
+                    self.db.insert_build(build.as_db_model())
+                except Exception as error:  # pylint: disable=broad-except
+                    logging.error('failed to insert build %d: %s',
+                                  build.id, error, exc_info=True)
+                    continue
                 for build_task in build.build_tasks:
-                    self.db.insert_buildtask(build_task.as_db_model())
+                    try:
+                        self.db.insert_buildtask(build_task.as_db_model(),
+                                                 build_task.web_node_stats.as_db_model(
+                                                     build_task.id),
+                                                 build_task.build_node_stats.as_db_model(
+                                                     build_task.id))
+                    except Exception as error:  # pylint: disable=broad-except
+                        logging.error('failed to insert build task %d: %s',
+                                      build_task.id, error, exc_info=True)
                 build_count += 1
             page_num += 1
         return build_count
@ -49,25 +64,31 @@ class Extractor:
         removed_count = self.db.cleanup_builds(self.oldest_build_age)
         logging.info('removed %d entries', removed_count)

-    def __update_build_tasks_statuses(self, build_tasks: List[BuildTask],
-                                      build_tasks_status_db: Dict[int, int]):
+    def __update_build_tasks(self, build_tasks: List[BuildTask],
+                             build_tasks_status_db: Dict[int, int]):
         for b in build_tasks:
             if b.status_id != build_tasks_status_db[b.id]:
-                logging.info('build taks %d status have changed %s -> %s. Updating DB',
+                logging.info('build: %s, build task %d status has changed %s -> %s. Updating DB',
+                             b.build_id,
                              b.id, BuildTaskEnum(
                                  build_tasks_status_db[b.id]).name,
                              BuildTaskEnum(b.status_id).name)
                 try:
-                    self.db.update_build_task(b.as_db_model())
+                    self.db.update_build_task(b.as_db_model(),
+                                              b.web_node_stats.as_db_model(
+                                                  b.id),
+                                              b.build_node_stats.as_db_model(b.id))
                 except Exception as err:  # pylint: disable=broad-except
                     logging.error(
-                        'failed to update build task %d: %s',
-                        b.id, err, exc_info=True)
+                        'build: %d, failed to update build task %d: %s',
+                        b.build_id, b.id, err, exc_info=True)
                 else:
-                    logging.info('build task %d was updated', b.id)
+                    logging.info(
+                        'build: %d, build task %d was updated', b.build_id, b.id)
             else:
                 logging.info(
-                    "build_task %d is still %s. Skipping", b.id, BuildTaskEnum(b.status_id).name)
+                    "build: %d, build_task %d is still %s. Skipping",
+                    b.build_id, b.id, BuildTaskEnum(b.status_id).name)
     def update_builds(self):
         logging.info('Getting list of tasks from DB')
@ -80,7 +101,7 @@ class Extractor:
             logging.info('Updating build tasks')
             build_tasks_to_check = [
                 b for b in build.build_tasks if b.id in build_tasks_db]
-            self.__update_build_tasks_statuses(
+            self.__update_build_tasks(
                 build_tasks_to_check, build_tasks_db)
             if build.finished_at:

View File

@ -0,0 +1,12 @@
from typing import Optional

from pydantic import BaseModel  # pylint: disable=no-name-in-module


class BuildNodeStatDB(BaseModel):
    """
    Build node stat as it is sent to/received from the database
    """
    build_task_id: int
    stat_name_id: int
    start_ts: Optional[float] = None
    end_ts: Optional[float] = None

View File

@ -0,0 +1,41 @@
from typing import List

from pydantic import BaseModel  # pylint: disable=no-name-in-module

from .build_stat import BuildStat
from .build_node_stat_db import BuildNodeStatDB
from .enums import BuildNodeStatsEnum


class BuildNodeStats(BaseModel):
    """
    Represents build statistics for the build node
    """
    build_all: BuildStat
    build_binaries: BuildStat
    build_packages: BuildStat
    build_srpm: BuildStat
    build_node_task: BuildStat
    cas_notarize_artifacts: BuildStat
    cas_source_authenticate: BuildStat
    git_checkout: BuildStat
    upload: BuildStat

    def as_db_model(self, build_task_id: int) -> List[BuildNodeStatDB]:
        result = []
        for field_name in self.__fields__.keys():
            stats: BuildStat = getattr(self, field_name)
            start_ts = stats.start_ts.timestamp() \
                if stats.start_ts else None
            end_ts = stats.end_ts.timestamp() \
                if stats.end_ts else None
            stat_name_id = BuildNodeStatsEnum[field_name].value
            build_node_stat_db = BuildNodeStatDB(build_task_id=build_task_id,
                                                 stat_name_id=stat_name_id,
                                                 start_ts=start_ts,
                                                 end_ts=end_ts)
            result.append(build_node_stat_db)
        return result
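To illustrate the conversion (a sketch with invented timestamps): every BuildStat field becomes one BuildNodeStatDB row, with datetimes flattened to epoch floats and field names mapped to ids through BuildNodeStatsEnum.

# All nine fields are required, so steps without data get empty BuildStat()s.
from datetime import datetime

stats = BuildNodeStats(
    build_all=BuildStat(start_ts=datetime(2023, 3, 1, 10, 0),
                        end_ts=datetime(2023, 3, 1, 10, 15)),
    build_binaries=BuildStat(), build_packages=BuildStat(),
    build_srpm=BuildStat(), build_node_task=BuildStat(),
    cas_notarize_artifacts=BuildStat(), cas_source_authenticate=BuildStat(),
    git_checkout=BuildStat(), upload=BuildStat(),
)
rows = stats.as_db_model(build_task_id=1)
# rows[0].stat_name_id == BuildNodeStatsEnum.build_all.value == 1
# rows[0].start_ts == datetime(2023, 3, 1, 10, 0).timestamp()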

View File

@ -0,0 +1,15 @@
"""
Module for BuildStat model
"""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel # pylint: disable=no-name-in-module
class BuildStat(BaseModel):
"""
BuildStat represents particular build statistic
"""
start_ts: Optional[datetime] = None
end_ts: Optional[datetime] = None

View File

@ -0,0 +1,16 @@
"""
Module for BuildStatDB model
"""
from pydantic import BaseModel # pylint: disable=no-name-in-module
class BuildStatDB(BaseModel):
"""
Represents build stat as it send to/received from database
"""
build_task_id: int
stat_name_id: int
start_ts: float
end_ts: float

View File

@ -1,10 +1,12 @@
 from datetime import datetime
-from typing import Optional
+from typing import Optional, Tuple
 from pydantic import BaseModel  # pylint: disable=no-name-in-module
 from .build_task_db import BuildTaskDB
+from .build_node_stats import BuildNodeStats
 from .enums import ArchEnum
+from .web_node_stats import WebNodeStats


 class BuildTask(BaseModel):
@ -15,6 +17,8 @@ class BuildTask(BaseModel):
     started_at: Optional[datetime] = None
     finished_at: Optional[datetime] = None
     status_id: int
+    build_node_stats: BuildNodeStats
+    web_node_stats: WebNodeStats

     def as_db_model(self) -> BuildTaskDB:
         started_at = self.started_at.timestamp() \

View File

@ -0,0 +1,37 @@
# pylint: disable=invalid-name
from enum import IntEnum


class ArchEnum(IntEnum):
    i686 = 0
    x86_64 = 1
    aarch64 = 2
    ppc64le = 3
    s390x = 4


class BuildTaskEnum(IntEnum):
    idle = 0
    started = 1
    completed = 2
    failed = 3
    excluded = 4


class WebNodeStatsEnum(IntEnum):
    build_done = 0
    logs_processing = 1
    packages_processing = 2


class BuildNodeStatsEnum(IntEnum):
    upload = 0
    build_all = 1
    build_srpm = 2
    git_checkout = 3
    build_binaries = 4
    build_packages = 5
    build_node_task = 6
    cas_notarize_artifacts = 7
    cas_source_authenticate = 8

View File

@ -10,6 +10,7 @@ ALBS_URL_DEFAULT = 'https://build.almalinux.org'
 LOG_FILE_DEFAULT = '/tmp/extractor.log'
 API_DEFAULT = 30
 SCRAPE_INTERVAL_DEFAULT = 3600
+START_FROM_DEFAULT = 5808


 class ExtractorConfig(BaseModel):
@ -21,7 +22,7 @@ class ExtractorConfig(BaseModel):
     albs_url: HttpUrl = Field(description='ALBS root URL',
                               default=ALBS_URL_DEFAULT)
     oldest_build_age: datetime = \
-        Field(description='oldest build age to extract and store')
+        Field(description='oldest build age to store')
     jwt: str = Field(description='ALBS JWT token')
     db_config: DbConfig = Field(description="database configuration")
     api_timeout: int = Field(
@ -29,3 +30,5 @@ class ExtractorConfig(BaseModel):
         default=API_DEFAULT)
     scrape_interval: int = Field(description='how often (in seconds) we will extract data from ALBS',
                                  default=SCRAPE_INTERVAL_DEFAULT)
+    start_from: int = Field(description='build id to start populating an empty db with',
+                            default=START_FROM_DEFAULT)

View File

@ -0,0 +1,13 @@
from typing import Optional

from pydantic import BaseModel  # pylint: disable=no-name-in-module


class WebNodeStatDB(BaseModel):
    """
    Represents a WebNodeStat as it is sent to/received from the database
    """
    build_task_id: int
    stat_name_id: int
    start_ts: Optional[float] = None
    end_ts: Optional[float] = None

View File

@ -0,0 +1,35 @@
from typing import List

from pydantic import BaseModel  # pylint: disable=no-name-in-module

from .build_stat import BuildStat
from .web_node_stat_db import WebNodeStatDB
from .enums import WebNodeStatsEnum


class WebNodeStats(BaseModel):
    """
    Represents build statistics for the web node
    """
    build_done: BuildStat
    logs_processing: BuildStat
    packages_processing: BuildStat

    def as_db_model(self, build_task_id: int) -> List[WebNodeStatDB]:
        result = []
        for field_name in self.__fields__.keys():
            stats: BuildStat = getattr(self, field_name)
            start_ts = stats.start_ts.timestamp() \
                if stats.start_ts else None
            end_ts = stats.end_ts.timestamp() \
                if stats.end_ts else None
            stat_name_id = WebNodeStatsEnum[field_name].value
            web_node_stat_db = WebNodeStatDB(build_task_id=build_task_id,
                                             stat_name_id=stat_name_id,
                                             start_ts=start_ts,
                                             end_ts=end_ts)
            result.append(web_node_stat_db)
        return result

View File

@ -55,4 +55,9 @@ data_store_days: 30
 # sleep time in seconds between data extraction
 # required: no
 # default: 3600
 scrape_interval: 3600

+# build_id to start populating an empty db with
+# required: no
+# default: 5808 (first build with correct metrics)
+start_from: 5808

View File

@ -1,5 +1,6 @@
+BEGIN;
+
 -- builds
-DROP TABLE IF EXISTS builds CASCADE;
 CREATE TABLE builds (
     id INTEGER PRIMARY KEY,
     url VARCHAR(50) NOT NULL,
@ -16,7 +17,6 @@ ON builds(finished_at);
 -- build_taks_enum
-DROP TABLE IF EXISTS build_task_status_enum CASCADE;
 CREATE TABLE IF NOT EXISTS build_task_status_enum(
     id INTEGER PRIMARY KEY,
     value VARCHAR(15)
@ -32,7 +32,6 @@ VALUES
 -- arch_enum
-DROP TABLE IF EXISTS arch_enum CASCADE;
 CREATE TABLE arch_enum(
     id INTEGER PRIMARY KEY,
     value VARCHAR(15)
@ -47,8 +46,39 @@ VALUES
     (4, 's390x');

+-- web_node_stats_enum
+CREATE TABLE web_node_stats_enum (
+    id INTEGER PRIMARY KEY,
+    value VARCHAR(50)
+);
+
+INSERT INTO web_node_stats_enum (id, value)
+VALUES
+    (0, 'build_done'),
+    (1, 'logs_processing'),
+    (2, 'packages_processing');
+
+-- build_node_stats_enum
+CREATE TABLE build_node_stats_enum(
+    id INTEGER PRIMARY KEY,
+    value VARCHAR(50)
+);
+
+INSERT INTO build_node_stats_enum (id, value)
+VALUES
+    (0, 'upload'),
+    (1, 'build_all'),
+    (2, 'build_srpm'),
+    (3, 'git_checkout'),
+    (4, 'build_binaries'),
+    (5, 'build_packages'),
+    (6, 'build_node_task'),
+    (7, 'cas_notarize_artifacts'),
+    (8, 'cas_source_authenticate');
+
 -- build_tasks
-DROP TABLE IF EXISTS build_tasks CASCADE;
 CREATE TABLE build_tasks (
     id INTEGER PRIMARY KEY,
     name VARCHAR(50) NOT NULL,
@ -69,8 +99,43 @@ CREATE INDEX build_tasks_finished_at
 ON build_tasks(finished_at);

+-- web_node_stats
+CREATE TABLE web_node_stats (
+    build_task_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
+    stat_name_id INTEGER REFERENCES web_node_stats_enum(id) ON DELETE SET NULL,
+    start_ts REAL,
+    end_ts REAL
+);
+
+CREATE INDEX web_node_stats_build_task_id
+ON web_node_stats(build_task_id);
+
+CREATE INDEX web_node_stats_start_ts
+ON web_node_stats(start_ts);
+
+CREATE INDEX web_node_stats_end_ts
+ON web_node_stats(end_ts);
+
+-- build_node_stats
+CREATE TABLE build_node_stats (
+    build_task_id INTEGER REFERENCES build_tasks(id) ON DELETE CASCADE,
+    stat_name_id INTEGER REFERENCES build_node_stats_enum(id) ON DELETE SET NULL,
+    start_ts REAL,
+    end_ts REAL
+);
+
+CREATE INDEX build_node_stats_build_task_id
+ON build_node_stats(build_task_id);
+
+CREATE INDEX build_node_stats_build_start_ts
+ON build_node_stats(start_ts);
+
+CREATE INDEX build_node_stats_build_end_ts
+ON build_node_stats(end_ts);
+
 -- sign_tasks
-DROP TABLE IF EXISTS sign_tasks CASCADE;
 CREATE TABLE sign_tasks (
     id INTEGER PRIMARY KEY,
     build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
@ -90,3 +155,14 @@ ON sign_tasks(started_at);
 CREATE INDEX sign_tasks_finished_at
 ON sign_tasks(finished_at);

+-- schema_version
+CREATE TABLE schema_version (
+    version INTEGER
+);
+
+INSERT INTO schema_version (version)
+VALUES (1);
+
+COMMIT;
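As a usage sketch (not part of the migration): per-step durations can be read back by joining a stats table with its enum table, in the same style as the queries in db.py. conn is assumed to be an open psycopg2 connection and build_task_id an existing task id.

# Sketch: per-step durations for one build task from the new tables.
sql = '''
SELECT e.value AS step, s.end_ts - s.start_ts AS seconds
FROM build_node_stats s
JOIN build_node_stats_enum e ON e.id = s.stat_name_id
WHERE s.build_task_id = %s
  AND s.start_ts IS NOT NULL AND s.end_ts IS NOT NULL;
'''
cur = conn.cursor()
cur.execute(sql, (build_task_id,))
durations = cur.fetchall()  # e.g. [('build_srpm', 150.0), ...]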

View File

@ -0,0 +1,5 @@
0.1.0 (2023-03-01)
First version

0.2.0
New parameter start_from

File diff suppressed because it is too large

View File

@ -1,280 +0,0 @@
{
"__inputs": [
{
"name": "DS_POSTGRESQL",
"label": "PostgreSQL",
"description": "",
"type": "datasource",
"pluginId": "postgres",
"pluginName": "PostgreSQL"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.3.2"
},
{
"type": "datasource",
"id": "postgres",
"name": "PostgreSQL",
"version": "1.0.0"
},
{
"type": "panel",
"id": "table",
"name": "Table",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "id"
},
"properties": [
{
"id": "custom.width",
"value": 54
}
]
},
{
"matcher": {
"id": "byName",
"options": "created_at"
},
"properties": [
{
"id": "custom.width",
"value": 226
}
]
},
{
"matcher": {
"id": "byName",
"options": "finished_at"
},
"properties": [
{
"id": "custom.width",
"value": 209
}
]
},
{
"matcher": {
"id": "byName",
"options": "finished"
},
"properties": [
{
"id": "custom.width",
"value": 187
}
]
},
{
"matcher": {
"id": "byName",
"options": "created"
},
"properties": [
{
"id": "custom.width",
"value": 213
}
]
},
{
"matcher": {
"id": "byName",
"options": "url"
},
"properties": [
{
"id": "custom.width",
"value": 279
}
]
}
]
},
"gridPos": {
"h": 12,
"w": 24,
"x": 0,
"y": 0
},
"id": 2,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "duration (h)"
}
]
},
"pluginVersion": "9.3.2",
"targets": [
{
"cacheDurationSeconds": 300,
"datasource": {
"type": "postgres",
"uid": "${DS_POSTGRESQL}"
},
"editorMode": "code",
"fields": [
{
"jsonPath": ""
}
],
"format": "table",
"hide": false,
"method": "GET",
"queryParams": "",
"rawQuery": true,
"rawSql": "SELECT id, url, created_at * 1000 as created, finished_at * 1000 as finished, (finished_at - created_at) / (60*60) as duration\nFROM builds\nWHERE $__unixEpochFilter(created_at) AND finished_at IS NOT NULL",
"refId": "A",
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"urlPath": ""
}
],
"title": "Finished builds",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "time",
"targetField": "created"
},
{
"destinationType": "time",
"targetField": "finished"
}
],
"fields": {}
}
},
{
"id": "organize",
"options": {
"excludeByName": {},
"indexByName": {},
"renameByName": {
"duration": "duration (h)"
}
}
}
],
"type": "table"
}
],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-3h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "albs_analytics",
"uid": "02mg4oxVk",
"version": 1,
"weekStart": ""
}