from datetime import datetime, timedelta
from pathlib import Path

from pydantic import HttpUrl, Field, BaseModel  # pylint: disable=no-name-in-module

from .db_config import DbConfig

# DEFAULTS
ALBS_URL_DEFAULT = 'https://build.almalinux.org'
LOG_FILE_DEFAULT = '/tmp/extractor.log'
API_TIMEOUT_DEFAULT = 30
SCRAPE_INTERVAL_DEFAULT = 3600
START_FROM_DEFAULT = 5808
# evaluated once at import time
OLDEST_TO_UPDATE_DEFAULT = datetime.now().astimezone() - timedelta(days=3)


class ExtractorConfig(BaseModel):
    """
    Config model for the Extractor service.
    """
    log_file: Path = Field(
        description='logfile path',
        default=LOG_FILE_DEFAULT)
    albs_url: HttpUrl = Field(
        description='ALBS root URL',
        default=ALBS_URL_DEFAULT)
    oldest_build_age: datetime = Field(
        description='oldest build age to store')
    jwt: str = Field(
        description='ALBS JWT token')
    db_config: DbConfig = Field(
        description='database configuration')
    api_timeout: int = Field(
        description='max time in seconds to wait for an API response',
        default=API_TIMEOUT_DEFAULT)
    scrape_interval: int = Field(
        description='how often (in seconds) data is extracted from ALBS',
        default=SCRAPE_INTERVAL_DEFAULT)
    start_from: int = Field(
        description='build ID to start populating an empty DB from',
        default=START_FROM_DEFAULT)
    oldest_to_update: datetime = Field(
        description='oldest unfinished object (build/task/step...) '
                    'that we will try to update',
        default=OLDEST_TO_UPDATE_DEFAULT)
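

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only). It shows how the model above could be
# instantiated; only the fields without a default are required. The keys
# inside `db_config` are hypothetical -- DbConfig is defined in .db_config
# and its real fields are not shown in this file, so pydantic will validate
# the nested dict against whatever fields DbConfig actually declares.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    config = ExtractorConfig(
        oldest_build_age=datetime.now().astimezone() - timedelta(days=30),
        jwt='<ALBS JWT token>',
        db_config={
            # hypothetical DbConfig fields; replace with the real ones
            'host': 'localhost',
            'port': 5432,
        },
    )
    # fields not passed above fall back to the DEFAULTS constants
    print(config.albs_url)         # https://build.almalinux.org
    print(config.scrape_interval)  # 3600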