diff --git a/pungi/scripts/create_extra_repo.py b/pungi/scripts/create_extra_repo.py
index ebee8007..34f36f6f 100644
--- a/pungi/scripts/create_extra_repo.py
+++ b/pungi/scripts/create_extra_repo.py
@@ -241,9 +241,9 @@ class CreateExtraRepo(PackagesGenerator):
         """
         Download all defined modularity packages and their data from
         a remote repo
-        :param modules_data: information about all of modules in a remote repo
+        :param modules_data: information about all modules in a remote repo
         :param repo_info: information about a remote repo
-        :param packages: information about all of packages (including
+        :param packages: information about all packages (including
                          modularity) in a remote repo
         """
         for module in modules_data:
@@ -264,7 +264,7 @@ class CreateExtraRepo(PackagesGenerator):
                 continue
             for rpm in module['data']['artifacts']['rpms']:
                 # Empty repo_info.packages means that we will download
-                # all of packages from repo including
+                # all packages from repo including
                 # the modularity packages
                 if not repo_info.packages:
                     break
@@ -290,10 +290,9 @@ class CreateExtraRepo(PackagesGenerator):
             repo_info=repo_info,
         )
         # parse the repodata (including modules.yaml.gz)
-        modules_data = self._parse_repomd_records(
+        modules_data = self._parse_module_repomd_record(
             repo_info=repo_info,
             repomd_records=repomd_records,
-            packages=packages,
         )
         # convert the packages dict to more usable form
         # for future checking that a rpm from the module's artifacts
diff --git a/pungi/scripts/create_packages_json.py b/pungi/scripts/create_packages_json.py
index a04cb412..6fb3e3ff 100644
--- a/pungi/scripts/create_packages_json.py
+++ b/pungi/scripts/create_packages_json.py
@@ -14,7 +14,7 @@ import os
 import re
 import tempfile
 from collections import defaultdict
-from typing import AnyStr, Dict, List, Optional
+from typing import AnyStr, Dict, List, Optional, Any, Iterator
 
 import binascii
 import createrepo_c as cr
@@ -23,7 +23,7 @@ import hawkey
 import requests
 import rpm
 import yaml
-from createrepo_c import Package
+from createrepo_c import Package, PackageIterator
 from dataclasses import dataclass
 
 
@@ -60,10 +60,10 @@ class RepoInfo:
     arch: AnyStr
     # Is a repo remote or local
     is_remote: bool
-    # Is an reference repository (usually it's a RHEL repo)
+    # Is a reference repository (usually it's a RHEL repo)
     # Layout of packages from such repository will be taken as example
     # Only layout of specific package (which don't exist
-    # in an reference repository) will be taken as example
+    # in a reference repository) will be taken as example
     is_reference: bool = False
 
 
@@ -77,6 +77,12 @@ class PackagesGenerator:
         self.repos = repos
         self.excluded_packages = excluded_packages
         self.included_packages = included_packages
+        self.tmp_files = []
+
+    def __del__(self):
+        for tmp_file in self.tmp_files:
+            if os.path.exists(tmp_file):
+                os.remove(tmp_file)
 
     @staticmethod
     def _warning_callback(warning_type, message):
@@ -119,7 +125,7 @@ class PackagesGenerator:
         Parse primary.xml.gz, take from it info about packages and put it to
         dict packages
         :param primary_file_path: path to local primary.xml.gz
-        :param packages: dictionary which will be contain info about packages
+        :param packages: dictionary which will contain info about packages
                          from repository
         """
         cr.xml_parse_primary(
@@ -140,7 +146,7 @@ class PackagesGenerator:
         Parse filelists.xml.gz, take from it info about packages and put it to
         dict packages
         :param filelists_file_path: path to local filelists.xml.gz
-        :param packages: dictionary which will be contain info about packages
+        :param packages: dictionary which will contain info about packages
                          from repository
         """
         cr.xml_parse_filelists(
@@ -161,7 +167,7 @@ class PackagesGenerator:
         Parse other.xml.gz, take from it info about packages and put it to
         dict packages
         :param other_file_path: path to local other.xml.gz
-        :param packages: dictionary which will be contain info about packages
+        :param packages: dictionary which will contain info about packages
                          from repository
         """
         cr.xml_parse_other(
@@ -178,11 +184,11 @@ class PackagesGenerator:
             cls,
             modules_file_path: AnyStr,
 
-    ) -> List[Dict]:
+    ) -> Iterator[Any]:
         """
         Parse modules.yaml.gz and returns parsed data
         :param modules_file_path: path to local modules.yaml.gz
-        :return: List of dict for an each modules in a repo
+        :return: iterator of dicts, one for each module in a repo
         """
 
         with open(modules_file_path, 'rb') as modules_file:
@@ -220,28 +226,23 @@ class PackagesGenerator:
             os.remove(repomd_file_path)
         return repomd_object.records
 
-    def _parse_repomd_records(
+    def _download_repomd_records(
             self,
             repo_info: RepoInfo,
             repomd_records: List[cr.RepomdRecord],
-            packages: Dict[AnyStr, cr.Package],
-    ) -> Optional[List[Dict]]:
+            repomd_records_dict: Dict[str, str],
+    ):
         """
-        Parse repomd records and extract from repodata file info about packages
+        Download repomd records
         :param repo_info: structure which contains info about a current repo
         :param repomd_records: list with repomd records
-        :param packages: dictionary which will be contain info about packages
-                         from repository
-        :return: List of dict for an each modules in a repo if it contains
-                 modules info otherwise returns None
+        :param repomd_records_dict: dict with paths to repodata files
         """
-        modules_data = []
         for repomd_record in repomd_records:
             if repomd_record.type not in (
                 'primary',
                 'filelists',
                 'other',
-                'modules',
             ):
                 continue
             repomd_record_file_path = os.path.join(
@@ -252,22 +253,35 @@ class PackagesGenerator:
             if repo_info.is_remote:
                 repomd_record_file_path = self.get_remote_file_content(
                     repomd_record_file_path)
-            if repomd_record.type == 'modules':
-                modules_data = self._parse_modules_file(
-                    repomd_record_file_path,
-                )
-            else:
-                parse_file_method = getattr(
-                    self,
-                    f'_parse_{repomd_record.type}_file'
-                )
-                parse_file_method(
-                    repomd_record_file_path,
-                    packages,
-                )
+            self.tmp_files.append(repomd_record_file_path)
+            repomd_records_dict[repomd_record.type] = repomd_record_file_path
+
+    def _parse_module_repomd_record(
+            self,
+            repo_info: RepoInfo,
+            repomd_records: List[cr.RepomdRecord],
+    ) -> List[Dict]:
+        """
+        Download and parse the modules repomd record
+        :param repo_info: structure which contains info about a current repo
+        :param repomd_records: list with repomd records
+        :return: list of dicts with the data of each module in a repo
+        """
+        for repomd_record in repomd_records:
+            if repomd_record.type != 'modules':
+                continue
+            repomd_record_file_path = os.path.join(
+                repo_info.path,
+                repo_info.folder,
+                repomd_record.location_href,
+            )
             if repo_info.is_remote:
-                os.remove(repomd_record_file_path)
-        return list(modules_data)
+                repomd_record_file_path = self.get_remote_file_content(
+                    repomd_record_file_path)
+            self.tmp_files.append(repomd_record_file_path)
+            return list(self._parse_modules_file(
+                repomd_record_file_path,
+            ))
 
     @staticmethod
     def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
@@ -307,16 +321,20 @@ class PackagesGenerator:
             'i686',
             'i386',
         ])
-        packages = {}  # type: Dict[AnyStr, cr.Package]
         repomd_records = self._get_repomd_records(
             repo_info=repo_info,
         )
-        self._parse_repomd_records(
-            repo_info=repo_info,
-            repomd_records=repomd_records,
-            packages=packages,
+        repomd_records_dict = {}  # type: Dict[str, str]
+        self._download_repomd_records(repo_info=repo_info,
+                                      repomd_records=repomd_records,
+                                      repomd_records_dict=repomd_records_dict)
+        packages_iterator = PackageIterator(
+            primary_path=repomd_records_dict['primary'],
+            filelists_path=repomd_records_dict['filelists'],
+            other_path=repomd_records_dict['other'],
+            warningcb=self._warning_callback,
         )
-        for package in packages.values():
+        for package in packages_iterator:
             if package.arch not in repo_arches:
                 package_arch = repo_info.arch
             else:
diff --git a/pungi/wrappers/kojimock.py b/pungi/wrappers/kojimock.py
index 56813158..af3ff48b 100644
--- a/pungi/wrappers/kojimock.py
+++ b/pungi/wrappers/kojimock.py
@@ -1,6 +1,6 @@
 import os
-import subprocess
 import time
+from pathlib import Path
 
 from attr import dataclass
 from kobo.rpmlib import parse_nvra
@@ -48,27 +48,28 @@ class KojiMock:
         self._modules_dir = modules_dir
         self._packages_dir = packages_dir
 
-    def _gather_modules(self, modules_dir):
+    @staticmethod
+    def _gather_modules(modules_dir):
         modules = {}
-        for arch in os.listdir(modules_dir):
-            arch_dir = os.path.join(
-                modules_dir,
-                arch,
+        for index, (f, arch) in enumerate(
+            (sub_path.name, sub_path.parent.name)
+            for path in Path(modules_dir).glob('*')
+            for sub_path in path.iterdir()
+        ):
+            parsed = parse_nvra(f)
+            modules[index] = Module(
+                name=parsed['name'],
+                nvr=f,
+                version=parsed['release'],
+                context=parsed['arch'],
+                stream=parsed['version'],
+                build_id=index,
+                arch=arch,
             )
-            for index, f in enumerate(os.listdir(arch_dir)):
-                parsed = parse_nvra(f)
-                modules[index] = Module(
-                    name=parsed['name'],
-                    nvr=f,
-                    version=parsed['release'],
-                    context=parsed['arch'],
-                    stream=parsed['version'],
-                    build_id=index,
-                    arch=arch,
-                )
         return modules
 
-    def getLastEvent(self, *args, **kwargs):
+    @staticmethod
+    def getLastEvent(*args, **kwargs):
         return {'id': LAST_EVENT_ID, 'ts': LAST_EVENT_TIME}
 
     def listTagged(self, tag_name, *args, **kwargs):
@@ -111,7 +112,8 @@ class KojiMock:
 
         return builds
 
-    def getFullInheritance(self, *args, **kwargs):
+    @staticmethod
+    def getFullInheritance(*args, **kwargs):
         """
         Unneeded because we use local storage.
         """
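
For reviewers unfamiliar with the streaming API that create_packages_json.py switches to: below is a minimal, hedged sketch of how createrepo_c's PackageIterator is typically driven over already-downloaded repodata files. The /tmp paths and the warning_callback helper are hypothetical placeholders, not part of this patch; the constructor keywords mirror the call used in the diff.

```python
# Minimal sketch: stream packages from repodata with createrepo_c's
# PackageIterator instead of accumulating them in an in-memory dict.
import createrepo_c as cr


def warning_callback(warning_type, message):
    # createrepo_c calls this for non-fatal parse problems; returning True
    # tells it to keep parsing.
    print(f"repodata warning {warning_type}: {message}")
    return True


packages_iterator = cr.PackageIterator(
    primary_path='/tmp/primary.xml.gz',        # hypothetical local paths
    filelists_path='/tmp/filelists.xml.gz',
    other_path='/tmp/other.xml.gz',
    warningcb=warning_callback,
)

# Each item is a createrepo_c.Package yielded lazily, so memory use stays
# flat even for large repositories.
for package in packages_iterator:
    print(package.name, package.arch, package.version)
```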