From 97db2097d3d316e528d81eb310ea0cc638e1abcf Mon Sep 17 00:00:00 2001
From: Kirill Zhukov
Date: Wed, 1 Mar 2023 14:13:14 +0100
Subject: [PATCH] added name to build_task

added api_timeout and scrape_interval to scrape config
added requirements.txt
---
 build_analitycs/build_analytics/api_client.py           | 2 ++
 build_analitycs/build_analytics/db.py                   | 6 +++---
 build_analitycs/build_analytics/extractor/start.py      | 4 ++--
 build_analitycs/build_analytics/models/build_task.py    | 2 ++
 build_analitycs/build_analytics/models/build_task_db.py | 3 ++-
 .../build_analytics/models/extractor_config.py          | 4 ++--
 build_analitycs/db_schema/postgres.sql                  | 1 +
 build_analitycs/requirements.txt                        | 9 +++++++++
 8 files changed, 23 insertions(+), 8 deletions(-)
 create mode 100644 build_analitycs/requirements.txt

diff --git a/build_analitycs/build_analytics/api_client.py b/build_analitycs/build_analytics/api_client.py
index a8cd460..bcc60ad 100644
--- a/build_analitycs/build_analytics/api_client.py
+++ b/build_analitycs/build_analytics/api_client.py
@@ -60,7 +60,9 @@ class APIclient():
             if task['started_at'] else None
         finished_at = datetime.fromisoformat(task['finished_at']+TZ_OFFSET) \
             if task['finished_at'] else None
+        name = task['ref']['url'].split('/')[-1].replace('.git', '')
         params = {'id': task['id'],
+                  'name': name,
                   'build_id': build_id,
                   'started_at': started_at,
                   'finished_at': finished_at,
diff --git a/build_analitycs/build_analytics/db.py b/build_analitycs/build_analytics/db.py
index cb5632c..3c5e948 100644
--- a/build_analitycs/build_analytics/db.py
+++ b/build_analitycs/build_analytics/db.py
@@ -35,12 +35,12 @@ class DB():
 
     def insert_buildtask(self, build_task: BuildTaskDB):
         sql = '''
-            INSERT INTO build_tasks(id, build_id, arch_id, started_at, finished_at, status_id)
-            VALUES (%s, %s, %s, %s, %s, %s);
+            INSERT INTO build_tasks(id, name, build_id, arch_id, started_at, finished_at, status_id)
+            VALUES (%s, %s, %s, %s, %s, %s, %s);
         '''
 
         cur = self.__conn.cursor()
-        cur.execute(sql, (build_task.id, build_task.build_id, build_task.arch_id,
+        cur.execute(sql, (build_task.id, build_task.name, build_task.build_id, build_task.arch_id,
                           build_task.started_at, build_task.finished_at, build_task.status_id))
         self.__conn.commit()
 
diff --git a/build_analitycs/build_analytics/extractor/start.py b/build_analitycs/build_analytics/extractor/start.py
index f566c30..5467a1c 100644
--- a/build_analitycs/build_analytics/extractor/start.py
+++ b/build_analitycs/build_analytics/extractor/start.py
@@ -86,5 +86,5 @@ def start(yml_path: str):
         extractor.db.close_conn()
         logging.info("Extraction was finished")
 
-        logging.info("Sleeping for %d seconds", config.scrape_inteval)
-        time.sleep(config.scrape_inteval)
+        logging.info("Sleeping for %d seconds", config.scrape_interval)
+        time.sleep(config.scrape_interval)
diff --git a/build_analitycs/build_analytics/models/build_task.py b/build_analitycs/build_analytics/models/build_task.py
index 410e3b5..d4016d4 100644
--- a/build_analitycs/build_analytics/models/build_task.py
+++ b/build_analitycs/build_analytics/models/build_task.py
@@ -9,6 +9,7 @@ from .enums import ArchEnum
 
 class BuildTask(BaseModel):
     id: int
+    name: str
     build_id: int
     arch: str
     started_at: Optional[datetime] = None
@@ -22,6 +23,7 @@ class BuildTask(BaseModel):
             if self.finished_at else None
         params = {
             'id': self.id,
+            'name': self.name,
             'build_id': self.build_id,
             'arch_id': ArchEnum[self.arch].value,
             'started_at': started_at,
diff --git a/build_analitycs/build_analytics/models/build_task_db.py b/build_analitycs/build_analytics/models/build_task_db.py
index 16ccb13..2042100 100644
--- a/build_analitycs/build_analytics/models/build_task_db.py
+++ b/build_analitycs/build_analytics/models/build_task_db.py
@@ -1,5 +1,5 @@
 from typing import Optional
-from pydantic import BaseModel
+from pydantic import BaseModel  # pylint: disable=no-name-in-module
 
 
 class BuildTaskDB(BaseModel):
@@ -7,6 +7,7 @@ class BuildTaskDB(BaseModel):
     BuildTask as it received from/sent to database
     """
     id: int
+    name: str
     build_id: int
     arch_id: int
     started_at: Optional[float] = None
diff --git a/build_analitycs/build_analytics/models/extractor_config.py b/build_analitycs/build_analytics/models/extractor_config.py
index af314b3..8b8f58a 100644
--- a/build_analitycs/build_analytics/models/extractor_config.py
+++ b/build_analitycs/build_analytics/models/extractor_config.py
@@ -27,5 +27,5 @@ class ExtractorConfig(BaseModel):
     api_timeout: int = Field(
         description="max time in seconds to wait for API response",
         default=API_DEFAULT)
-    scrape_inteval: int = Field(description='how often (in seconds) we will extract data from ALBS',
-                                default=SCRAPE_INTERVAL_DEFAULT)
+    scrape_interval: int = Field(description='how often (in seconds) we will extract data from ALBS',
+                                 default=SCRAPE_INTERVAL_DEFAULT)
diff --git a/build_analitycs/db_schema/postgres.sql b/build_analitycs/db_schema/postgres.sql
index b332a40..036e972 100644
--- a/build_analitycs/db_schema/postgres.sql
+++ b/build_analitycs/db_schema/postgres.sql
@@ -51,6 +51,7 @@ VALUES
 DROP TABLE IF EXISTS build_tasks CASCADE;
 CREATE TABLE build_tasks (
     id INTEGER PRIMARY KEY,
+    name VARCHAR(50) NOT NULL,
     build_id INTEGER REFERENCES builds(id) ON DELETE CASCADE,
     arch_id INTEGER REFERENCES arch_enum(id) ON DELETE SET NULL,
     status_id INTEGER REFERENCES build_task_status_enum(id) ON DELETE SET NULL,
diff --git a/build_analitycs/requirements.txt b/build_analitycs/requirements.txt
new file mode 100644
index 0000000..b99d84a
--- /dev/null
+++ b/build_analitycs/requirements.txt
@@ -0,0 +1,9 @@
+certifi==2022.12.7
+charset-normalizer==3.0.1
+idna==3.4
+psycopg2-binary==2.9.5
+pydantic==1.10.5
+PyYAML==6.0
+requests==2.28.2
+typing_extensions==4.5.0
+urllib3==1.26.14
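
For readers skimming the patch, a minimal sketch of the name derivation that api_client.py now performs: the build task name is the last path component of the task's git ref URL, with the '.git' suffix stripped. The helper name and the example URL below are illustrative only and are not part of the patch.

    # Sketch only: mirrors the inline expression added to api_client.py,
    #   task['ref']['url'].split('/')[-1].replace('.git', '')
    # The function name and example URL are made up for illustration.
    def task_name_from_ref_url(url: str) -> str:
        return url.split('/')[-1].replace('.git', '')

    print(task_name_from_ref_url('https://git.almalinux.org/rpms/bash.git'))  # -> bash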