albs_analytics/build_analytics/build_analytics/models/extractor_config.py

from pathlib import Path
from pydantic import HttpUrl, Field, BaseModel # pylint: disable=no-name-in-module
from .db_config import DbConfig

# DEFAULTS
ALBS_URL_DEFAULT = 'https://build.almalinux.org'
LOG_FILE_DEFAULT = '/tmp/extractor.log'
API_TIMEOUT_DEFAULT = 30
SCRAPE_INTERVAL_DEFAULT = 3600
START_FROM_DEFAULT = 5808
OLDEST_TO_UPDATE_DAYS_DEFAULT = 7


class ExtractorConfig(BaseModel):
"""
config model for Extractor service
"""
    log_file: Path = Field(
        description='log file path',
        default=LOG_FILE_DEFAULT)
    albs_url: HttpUrl = Field(
        description='ALBS root URL',
        default=ALBS_URL_DEFAULT)
    data_store_days: int = Field(
        description='age (in days) of the oldest build to keep in the DB')
    jwt: str = Field(description='ALBS JWT token')
    db_config: DbConfig = Field(description='database configuration')
    api_timeout: int = Field(
        description='max time (in seconds) to wait for an API response',
        default=API_TIMEOUT_DEFAULT)
    scrape_interval: int = Field(
        description='interval (in seconds) between data extractions from ALBS',
        default=SCRAPE_INTERVAL_DEFAULT)
    start_from: int = Field(
        description='build ID to start populating an empty DB from',
        default=START_FROM_DEFAULT)
    oldest_to_update_days: int = Field(
        description='age (in days) of the oldest unfinished object '
                    '(build/task/step...) that we will still try to update',
        default=OLDEST_TO_UPDATE_DAYS_DEFAULT)
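

# A minimal usage sketch, assuming the service reads its settings from a
# YAML file. The loader below and the example document shape are
# illustrative, not part of the service API, and the db_config keys are
# whatever fields db_config.DbConfig actually declares.
def load_config(path: Path) -> ExtractorConfig:
    """
    Read a YAML file and validate it against ExtractorConfig.

    Expected document shape (db_config keys are illustrative):
        jwt: '<ALBS JWT token>'
        data_store_days: 90
        db_config:
            ...
    """
    import yaml  # PyYAML; a hypothetical dependency for this sketch

    with open(path, mode='r', encoding='utf-8') as config_file:
        raw = yaml.safe_load(config_file)
    # pydantic coerces types (e.g. albs_url -> HttpUrl) and applies the
    # defaults declared above for any field omitted from the file
    return ExtractorConfig(**raw)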