From 6d58bc2ed86f92e32ca2bb5a9fee9c7550a7744a Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Wed, 22 Mar 2023 15:56:58 +0200 Subject: [PATCH 1/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically - [Generator of packages.json] Replace using CLI by config.yaml - [Gather RPMs] os.path is replaced by Path --- pungi/scripts/create_packages_json.py | 141 ++++++++++---------------- pungi/scripts/gather_rpms.py | 38 +++---- 2 files changed, 68 insertions(+), 111 deletions(-) diff --git a/pungi/scripts/create_packages_json.py b/pungi/scripts/create_packages_json.py index 1274945f..acf1cb83 100644 --- a/pungi/scripts/create_packages_json.py +++ b/pungi/scripts/create_packages_json.py @@ -9,12 +9,22 @@ https://github.com/rpm-software-management/createrepo_c/blob/master/examples/pyt import argparse import gzip import json +import logging import lzma import os import re import tempfile from collections import defaultdict -from typing import AnyStr, Dict, List, Any, Iterator +from pathlib import Path +from typing import ( + AnyStr, + Dict, + List, + Any, + Iterator, + Optional, + Tuple, +) import binascii import createrepo_c as cr @@ -26,6 +36,8 @@ import yaml from createrepo_c import Package, PackageIterator from dataclasses import dataclass +logging.basicConfig(level=logging.INFO) + def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes): return binascii.hexlify(first_two_bytes) == initial_bytes @@ -330,7 +342,7 @@ class PackagesGenerator: for variant_name, variant_arch in package_dict['variants']: package_arch = package_dict['arch'] package = package_dict['package'] - package_name = package.name + package_name = f'{package.name}.{package_arch}' if any(re.search(excluded_package, package_name) for excluded_package in self.excluded_packages): continue @@ -370,73 +382,15 @@ class PackagesGenerator: def create_parser(): parser = argparse.ArgumentParser() parser.add_argument( - '--repo-path', - action='append', 
- help='Path to a folder with repofolders. E.g. "/var/repos" or ' - '"http://koji.cloudlinux.com/mirrors/rhel_mirror"', - required=True, - ) - parser.add_argument( - '--repo-folder', - action='append', - help='A folder which contains folder repodata . E.g. "baseos-stream"', - required=True, - ) - parser.add_argument( - '--repo-arch', - action='append', - help='What architecture packages a repository contains. E.g. "x86_64"', - required=True, - ) - parser.add_argument( - '--repo-name', - action='append', - help='Name of a repository. E.g. "AppStream"', - required=True, - ) - parser.add_argument( - '--is-remote', - action='append', - type=str, - help='A repository is remote or local', - choices=['yes', 'no'], - required=True, - ) - parser.add_argument( - '--is-reference', - action='append', - type=str, - help='A repository is used as reference for packages layout', - choices=['yes', 'no'], - required=True, - ) - parser.add_argument( - '--repo-type', - action='append', - type=str, - help='Packages from repository will be removed or added to variant', - choices=['present', 'absent'], - required=True, - ) - parser.add_argument( - '--excluded-packages', - nargs='+', - type=str, - default=[], - help='A list of globally excluded packages from generated json.' - 'All of list elements should be separated by space', - required=False, - ) - parser.add_argument( - '--included-packages', - nargs='+', - type=str, - default=[], - help='A list of globally included packages from generated json.' 
- 'All of list elements should be separated by space', + '-c', + '--config', + type=Path, + default=Path('config.yaml'), required=False, + help='Path to a config', ) parser.add_argument( + '-o', '--json-output-path', type=str, help='Full path to output json file', @@ -446,32 +400,43 @@ def create_parser(): return parser +def read_config(config_path: Path) -> Optional[Dict]: + if not config_path.exists(): + logging.error('A config by path "%s" does not exist', config_path) + exit(1) + with config_path.open('r') as config_fd: + return yaml.safe_load(config_fd) + + +def process_config(config_data: Dict) -> Tuple[ + List[RepoInfo], + List[str], + List[str], +]: + excluded_packages = config_data.get('excluded_packages', []) + included_packages = config_data.get('included_packages', []) + repos = [RepoInfo( + path=variant_repo['path'], + folder=variant_repo['folder'], + name=variant_name, + arch=variant_repo['arch'], + is_remote=variant_repo['remote'], + is_reference=variant_repo['reference'], + repo_type=variant_repo.get('repo_type', 'present'), + ) for variant_name, variant_repos in config_data['variants'].items() + for variant_repo in variant_repos] + return repos, excluded_packages, included_packages + + def cli_main(): args = create_parser().parse_args() - repos = [] - for repo_path, repo_folder, repo_name, \ - repo_arch, is_remote, is_reference, repo_type in zip( - args.repo_path, - args.repo_folder, - args.repo_name, - args.repo_arch, - args.is_remote, - args.is_reference, - args.repo_type, - ): - repos.append(RepoInfo( - path=repo_path, - folder=repo_folder, - name=repo_name, - arch=repo_arch, - is_remote=True if is_remote == 'yes' else False, - is_reference=True if is_reference == 'yes' else False, - repo_type=repo_type, - )) + repos, excluded_packages, included_packages = process_config( + config_data=read_config(args.config) + ) pg = PackagesGenerator( repos=repos, - excluded_packages=args.excluded_packages, - included_packages=args.included_packages, + 
excluded_packages=excluded_packages, + included_packages=included_packages, ) result = pg.generate_packages_json() with open(args.json_output_path, 'w') as packages_file: diff --git a/pungi/scripts/gather_rpms.py b/pungi/scripts/gather_rpms.py index 9a9f9a50..369e2e79 100644 --- a/pungi/scripts/gather_rpms.py +++ b/pungi/scripts/gather_rpms.py @@ -2,38 +2,32 @@ from argparse import ArgumentParser import os from typing import List +from pathlib import Path -from attr import dataclass +from dataclasses import dataclass from productmd.common import parse_nvra @dataclass class Package: nvra: str - path: str + path: Path -def search_rpms(top_dir) -> List[Package]: +def search_rpms(top_dir: Path) -> List[Package]: """ Search for all *.rpm files recursively in given top directory Returns: list: list of paths """ - rpms = [] - for root, dirs, files in os.walk(top_dir): - path = root.split(os.sep) - for file in files: - if not file.endswith('.rpm'): - continue - nvra, _ = os.path.splitext(file) - rpms.append( - Package(nvra=nvra, path=os.path.join('/', *path, file)) - ) - return rpms + return [Package( + nvra=path.stem, + path=path, + ) for path in top_dir.rglob('*.rpm')] -def copy_rpms(packages: List[Package], target_top_dir: str): +def copy_rpms(packages: List[Package], target_top_dir: Path): """ Search synced repos for rpms and prepare koji-like structure for pungi @@ -46,24 +40,22 @@ def copy_rpms(packages: List[Package], target_top_dir: str): """ for package in packages: info = parse_nvra(package.nvra) - - target_arch_dir = os.path.join(target_top_dir, info['arch']) + target_arch_dir = target_top_dir.joinpath(info['arch']) + target_file = target_arch_dir.joinpath(package.path.name) os.makedirs(target_arch_dir, exist_ok=True) - target_file = os.path.join(target_arch_dir, os.path.basename(package.path)) - - if not os.path.exists(target_file): + if not target_file.exists(): try: os.link(package.path, target_file) except OSError: # hardlink failed, try symlinking - 
os.symlink(package.path, target_file)
+                target_file.symlink_to(package.path)


 def cli_main():
     parser = ArgumentParser()

-    parser.add_argument('-p', '--path', required=True)
-    parser.add_argument('-t', '--target', required=True)
+    parser.add_argument('-p', '--path', required=True, type=Path)
+    parser.add_argument('-t', '--target', required=True, type=Path)

     namespace = parser.parse_args()

From 0747e967b00152aa5bb8996a7789ec9b37bd9d9b Mon Sep 17 00:00:00 2001
From: soksanichenko
Date: Thu, 23 Mar 2023 09:36:52 +0200
Subject: [PATCH 2/8] ALBS-987: Generate i686 and dev repositories with pungi
 on building new distr. version automatically

- Some refactoring
---
 pungi/wrappers/kojimock.py | 29 +++++------------------------
 1 file changed, 5 insertions(+), 24 deletions(-)

diff --git a/pungi/wrappers/kojimock.py b/pungi/wrappers/kojimock.py
index 1267438b..8f4de7ce 100644
--- a/pungi/wrappers/kojimock.py
+++ b/pungi/wrappers/kojimock.py
@@ -203,31 +203,12 @@ class KojiMock:
         packages = []

         # get all rpms in folder
-        rpms = search_rpms(self._packages_dir)
-        all_rpms = [package.path for package in rpms]
+        rpms = search_rpms(Path(self._packages_dir))

-        # get nvras for modular packages
-        nvras = set()
-        for module in self._modules.values():
-            path = os.path.join(
-                self._modules_dir,
-                module.arch,
-                module.nvr,
-            )
-            info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
-
-            for package in info.get_rpm_artifacts():
-                data = parse_nvra(package)
-                nvras.add((data['name'], data['version'], data['release'], data['arch']))
-
-        # and remove modular packages from global list
-        for rpm in all_rpms[:]:
-            data = parse_nvra(os.path.basename(rpm[:-4]))
-            if (data['name'], data['version'], data['release'], data['arch']) in nvras:
-                all_rpms.remove(rpm)
-
-        for rpm in all_rpms:
-            info = parse_nvra(os.path.basename(rpm))
+        for rpm in rpms:
+            info = parse_nvra(rpm.path.stem)
+            if 'module' in info['release']:
+                continue
             packages.append({
                 "build_id": RELEASE_BUILD_ID,
                 "name": 
info['name'], From 4b64d20826337b2f737c4bdb5aa78161df8aff01 Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Fri, 24 Mar 2023 12:45:28 +0200 Subject: [PATCH 3/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically - Path.rglob/glob doesn't work with symlinks (it's the bug and reported) - Refactoring --- pungi/scripts/create_packages_json.py | 128 ++++++++++++++++---------- pungi/scripts/gather_rpms.py | 7 +- 2 files changed, 84 insertions(+), 51 deletions(-) diff --git a/pungi/scripts/create_packages_json.py b/pungi/scripts/create_packages_json.py index acf1cb83..2f2bdcb1 100644 --- a/pungi/scripts/create_packages_json.py +++ b/pungi/scripts/create_packages_json.py @@ -15,6 +15,7 @@ import os import re import tempfile from collections import defaultdict +from itertools import tee from pathlib import Path from typing import ( AnyStr, @@ -27,6 +28,8 @@ from typing import ( ) import binascii +from urllib.parse import urljoin + import createrepo_c as cr import dnf.subject import hawkey @@ -98,6 +101,7 @@ class PackagesGenerator: included_packages: List[AnyStr], ): self.repos = repos + self.pkgs_iterators = dict() self.excluded_packages = excluded_packages self.included_packages = included_packages self.tmp_files = [] @@ -110,6 +114,19 @@ class PackagesGenerator: if os.path.exists(tmp_file): os.remove(tmp_file) + @staticmethod + def _get_full_repo_path(repo_info: RepoInfo): + if repo_info.is_remote: + return urljoin( + repo_info.path + '/', + repo_info.folder, + ) + else: + return os.path.join( + repo_info.path, + repo_info.folder + ) + @staticmethod def _warning_callback(warning_type, message): """ @@ -151,7 +168,7 @@ class PackagesGenerator: """ Parse modules.yaml.gz and returns parsed data :param modules_file_path: path to local modules.yaml.gz - :return: List of dict for each modules in a repo + :return: List of dict for each module in a repo """ with open(modules_file_path, 'rb') as modules_file: @@ -174,16 +191,22 
@@ class PackagesGenerator: :param repo_info: structure which contains info about a current repo :return: list with repomd records """ - repomd_file_path = os.path.join( - repo_info.path, - repo_info.folder, - 'repodata', - 'repomd.xml', - ) if repo_info.is_remote: + repomd_file_path = urljoin( + urljoin( + repo_info.path + '/', + repo_info.folder + ) + '/', + 'repodata/repomd.xml' + ) repomd_file_path = self.get_remote_file_content(repomd_file_path) else: - repomd_file_path = repomd_file_path + repomd_file_path = os.path.join( + repo_info.path, + repo_info.folder, + 'repodata', + 'repomd.xml', + ) repomd_object = self._parse_repomd(repomd_file_path) if repo_info.is_remote: os.remove(repomd_file_path) @@ -273,33 +296,43 @@ class PackagesGenerator: ) ) ) - all_packages = defaultdict(lambda: {'variants': list()}) + all_packages = defaultdict(lambda: { + 'variants': list(), + 'package_info': dict(), + }) for repo_info in sorted( self.repos, key=lambda i: i.repo_type, reverse=True, ): - repomd_records = self._get_repomd_records( - repo_info=repo_info, - ) - repomd_records_dict = {} # type: Dict[str, str] - self._download_repomd_records( - repo_info=repo_info, - repomd_records=repomd_records, - repomd_records_dict=repomd_records_dict, - ) - packages_iterator = PackageIterator( - primary_path=repomd_records_dict['primary'], - filelists_path=repomd_records_dict['filelists'], - other_path=repomd_records_dict['other'], - warningcb=self._warning_callback, - ) - for package in packages_iterator: + full_repo_path = self._get_full_repo_path(repo_info) + if full_repo_path in self.pkgs_iterators: + pkgs_iterator = tee(self.pkgs_iterators[full_repo_path]) + else: + repomd_records = self._get_repomd_records( + repo_info=repo_info, + ) + repomd_records_dict = {} # type: Dict[str, str] + self._download_repomd_records( + repo_info=repo_info, + repomd_records=repomd_records, + repomd_records_dict=repomd_records_dict, + ) + pkgs_iterator = PackageIterator( + 
primary_path=repomd_records_dict['primary'], + filelists_path=repomd_records_dict['filelists'], + other_path=repomd_records_dict['other'], + warningcb=self._warning_callback, + ) + self.pkgs_iterators[full_repo_path] = tee(pkgs_iterator) + for package in pkgs_iterator: if package.arch not in self.repo_arches[repo_info.arch]: package_arch = repo_info.arch else: package_arch = package.arch package_key = f'{package.name}.{package_arch}' + package_variants = all_packages[package_key]['variants'] + package_info = all_packages[package_key]['package_info'] if 'module' in package.release and not any( re.search(included_package, package.name) for included_package in self.included_packages @@ -307,41 +340,41 @@ class PackagesGenerator: # Even a module package will be added to packages.json if # it presents in the list of included packages continue - if package_key not in all_packages: - all_packages[package_key]['variants'].append( - (repo_info.name, repo_info.arch) - ) - all_packages[package_key]['arch'] = package_arch - all_packages[package_key]['package'] = package - all_packages[package_key]['type'] = repo_info.is_reference - elif repo_info.repo_type == 'absent' and (repo_info.name, repo_info.arch) in all_packages[package_key]['variants']: - all_packages[package_key]['variants'].remove((repo_info.name, repo_info.arch)) + if repo_info.repo_type == 'present' and not package_info: + package_variants.append((repo_info.name, repo_info.arch)) + package_info['arch'] = package_arch + package_info['package'] = package + package_info['type'] = repo_info.is_reference + elif repo_info.repo_type == 'absent' and \ + (repo_info.name, repo_info.arch) in package_variants: + package_variants.remove((repo_info.name, repo_info.arch)) # replace an older package if it's not reference or # a newer package is from reference repo - elif (not all_packages[package_key]['type'] or - all_packages[package_key]['type'] == + elif (not package_info['type'] or + package_info['type'] == 
repo_info.is_reference) and \ self.compare_pkgs_version( package, - all_packages[package_key]['package'] - ) > 0: + package_info['package'] + ) > 0 and repo_info.repo_type == 'present': all_packages[package_key]['variants'] = [ (repo_info.name, repo_info.arch) ] - all_packages[package_key]['arch'] = package_arch - all_packages[package_key]['package'] = package + package_info['arch'] = package_arch + package_info['package'] = package elif self.compare_pkgs_version( package, - all_packages[package_key]['package'] - ) == 0: - all_packages[package_key]['variants'].append( + package_info['package'] + ) == 0 and repo_info.repo_type == 'present': + package_variants.append( (repo_info.name, repo_info.arch) ) for package_dict in all_packages.values(): for variant_name, variant_arch in package_dict['variants']: - package_arch = package_dict['arch'] - package = package_dict['package'] + package_info = package_dict['package_info'] + package_arch = package_info['arch'] + package = package_info['package'] package_name = f'{package.name}.{package_arch}' if any(re.search(excluded_package, package_name) for excluded_package in self.excluded_packages): @@ -373,9 +406,8 @@ class PackagesGenerator: # pkgs_list.append(added_pkg) pkgs_list = packages_json[variant_name][ variant_arch][src_package_name] - added_pkg = f'{package_name}.{package_arch}' - if added_pkg not in pkgs_list: - pkgs_list.append(added_pkg) + if package_name not in pkgs_list: + pkgs_list.append(package_name) return packages_json diff --git a/pungi/scripts/gather_rpms.py b/pungi/scripts/gather_rpms.py index 369e2e79..1ce770c2 100644 --- a/pungi/scripts/gather_rpms.py +++ b/pungi/scripts/gather_rpms.py @@ -1,6 +1,7 @@ from argparse import ArgumentParser import os +from glob import iglob from typing import List from pathlib import Path @@ -22,9 +23,9 @@ def search_rpms(top_dir: Path) -> List[Package]: list: list of paths """ return [Package( - nvra=path.stem, - path=path, - ) for path in top_dir.rglob('*.rpm')] + 
nvra=Path(path).stem, + path=Path(path), + ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)] def copy_rpms(packages: List[Package], target_top_dir: Path): From 141d00e9419bc9d5c9ce43934a137aad8c7ec610 Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Fri, 24 Mar 2023 16:39:10 +0200 Subject: [PATCH 4/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically - More info about unsigned packages --- pungi/phases/pkgset/pkgsets.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/pungi/phases/pkgset/pkgsets.py b/pungi/phases/pkgset/pkgsets.py index c97d49fd..85ca2058 100644 --- a/pungi/phases/pkgset/pkgsets.py +++ b/pungi/phases/pkgset/pkgsets.py @@ -152,9 +152,16 @@ class PackageSetBase(kobo.log.LoggingBase): """ def nvr_formatter(package_info): - # joins NVR parts of the package with '-' character. - return "-".join( - (package_info["name"], package_info["version"], package_info["release"]) + if package_info['epoch'] is not None: + epoch_suffix = ':' + package_info['epoch'] + else: + epoch_suffix = '' + return ( + f"{package_info['name']}" + f"{epoch_suffix}-" + f"{package_info['version']}-" + f"{package_info['release']}." + f"{package_info['arch']}" ) def get_error(sigkeys, infos): From 596c5c0b7fb14224d2f446a440442bdddaf8224a Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Tue, 28 Mar 2023 12:58:08 +0300 Subject: [PATCH 5/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. 
version automatically - Refactoring - Some absent packages are in packages.json now --- pungi/scripts/create_packages_json.py | 319 ++++++++++++++------------ 1 file changed, 169 insertions(+), 150 deletions(-) diff --git a/pungi/scripts/create_packages_json.py b/pungi/scripts/create_packages_json.py index 2f2bdcb1..d26fbf16 100644 --- a/pungi/scripts/create_packages_json.py +++ b/pungi/scripts/create_packages_json.py @@ -25,19 +25,23 @@ from typing import ( Iterator, Optional, Tuple, + Union, ) import binascii from urllib.parse import urljoin -import createrepo_c as cr -import dnf.subject -import hawkey import requests import rpm import yaml -from createrepo_c import Package, PackageIterator -from dataclasses import dataclass +from createrepo_c import ( + Package, + PackageIterator, + Repomd, + RepomdRecord, +) +from dataclasses import dataclass, field +from kobo.rpmlib import parse_nvra logging.basicConfig(level=logging.INFO) @@ -66,23 +70,33 @@ class RepoInfo: # 'appstream', 'baseos', etc. # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are # using remote repo - path: AnyStr + path: str # name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc - folder: AnyStr - # name of repo. E.g. 'BaseOS', 'AppStream', etc - name: AnyStr - # architecture of repo. E.g. 'x86_64', 'i686', etc - arch: AnyStr + folder: str # Is a repo remote or local is_remote: bool # Is a reference repository (usually it's a RHEL repo) # Layout of packages from such repository will be taken as example - # Only layout of specific package (which don't exist + # Only layout of specific package (which doesn't exist # in a reference repository) will be taken as example is_reference: bool = False + # The packages from 'present' repo will be added to a variant. + # The packages from 'absent' repo will be removed from a variant. repo_type: str = 'present' +@dataclass +class VariantInfo: + # name of variant. E.g. 
'BaseOS', 'AppStream', etc + name: AnyStr + # architecture of variant. E.g. 'x86_64', 'i686', etc + arch: AnyStr + # The packages which will be not added to a variant + excluded_packages: List[str] = field(default_factory=list) + # Repos of a variant + repos: List[RepoInfo] = field(default_factory=list) + + class PackagesGenerator: repo_arches = defaultdict(lambda: list(('noarch',))) @@ -96,12 +110,12 @@ class PackagesGenerator: def __init__( self, - repos: List[RepoInfo], + variants: List[VariantInfo], excluded_packages: List[AnyStr], included_packages: List[AnyStr], ): - self.repos = repos - self.pkgs_iterators = dict() + self.variants = variants + self.pkgs = dict() self.excluded_packages = excluded_packages self.included_packages = included_packages self.tmp_files = [] @@ -152,12 +166,12 @@ class PackagesGenerator: return file_stream.name @staticmethod - def _parse_repomd(repomd_file_path: AnyStr) -> cr.Repomd: + def _parse_repomd(repomd_file_path: AnyStr) -> Repomd: """ Parse file repomd.xml and create object Repomd :param repomd_file_path: path to local repomd.xml """ - return cr.Repomd(repomd_file_path) + return Repomd(repomd_file_path) @classmethod def _parse_modules_file( @@ -185,7 +199,7 @@ class PackagesGenerator: def _get_repomd_records( self, repo_info: RepoInfo, - ) -> List[cr.RepomdRecord]: + ) -> List[RepomdRecord]: """ Get, parse file repomd.xml and extract from it repomd records :param repo_info: structure which contains info about a current repo @@ -215,7 +229,7 @@ class PackagesGenerator: def _download_repomd_records( self, repo_info: RepoInfo, - repomd_records: List[cr.RepomdRecord], + repomd_records: List[RepomdRecord], repomd_records_dict: Dict[str, str], ): """ @@ -245,13 +259,12 @@ class PackagesGenerator: def _parse_module_repomd_record( self, repo_info: RepoInfo, - repomd_records: List[cr.RepomdRecord], + repomd_records: List[RepomdRecord], ) -> List[Dict]: """ Download repomd records :param repo_info: structure which contains info about 
a current repo :param repomd_records: list with repomd records - :param repomd_records_dict: dict with paths to repodata files """ for repomd_record in repomd_records: if repomd_record.type != 'modules': @@ -283,132 +296,136 @@ class PackagesGenerator: ) return rpm.labelCompare(version_tuple_1, version_tuple_2) + def get_packages_iterator( + self, + repo_info: RepoInfo, + ) -> Union[PackageIterator, Iterator]: + full_repo_path = self._get_full_repo_path(repo_info) + if full_repo_path in self.pkgs: + return self.pkgs[full_repo_path] + else: + repomd_records = self._get_repomd_records( + repo_info=repo_info, + ) + repomd_records_dict = {} # type: Dict[str, str] + self._download_repomd_records( + repo_info=repo_info, + repomd_records=repomd_records, + repomd_records_dict=repomd_records_dict, + ) + pkgs_iterator = PackageIterator( + primary_path=repomd_records_dict['primary'], + filelists_path=repomd_records_dict['filelists'], + other_path=repomd_records_dict['other'], + warningcb=self._warning_callback, + ) + pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator) + return pkgs_iterator + + def get_package_arch( + self, + package: Package, + variant_arch: str, + ) -> str: + if package.arch in self.repo_arches[variant_arch]: + return package.arch + else: + return variant_arch + + def is_skipped_module_package(self, package: Package) -> bool: + # Even a module package will be added to packages.json if + # it presents in the list of included packages + return 'module' in package.release and not any( + re.search(included_package, package.name) + for included_package in self.included_packages + ) + + def is_excluded_package( + self, + package: Package, + variant_arch: str, + excluded_packages: List[str], + ) -> bool: + return any( + re.search( + excluded_pkg, + self.get_package_key(package, variant_arch), + ) for excluded_pkg in excluded_packages + ) + + @staticmethod + def get_source_rpm_name(package: Package) -> str: + source_rpm_nvra = 
parse_nvra(package.rpm_sourcerpm) + return source_rpm_nvra['name'] + + def get_package_key(self, package: Package, variant_arch: str) -> str: + return ( + f'{package.name}.' + f'{self.get_package_arch(package, variant_arch)}' + ) + def generate_packages_json( self ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]: """ Generate packages.json """ - packages_json = defaultdict( - lambda: defaultdict( - lambda: defaultdict( - list, - ) - ) - ) - all_packages = defaultdict(lambda: { + packages = defaultdict(lambda: defaultdict(lambda: { 'variants': list(), - 'package_info': dict(), - }) - for repo_info in sorted( - self.repos, - key=lambda i: i.repo_type, - reverse=True, - ): - full_repo_path = self._get_full_repo_path(repo_info) - if full_repo_path in self.pkgs_iterators: - pkgs_iterator = tee(self.pkgs_iterators[full_repo_path]) - else: - repomd_records = self._get_repomd_records( - repo_info=repo_info, - ) - repomd_records_dict = {} # type: Dict[str, str] - self._download_repomd_records( - repo_info=repo_info, - repomd_records=repomd_records, - repomd_records_dict=repomd_records_dict, - ) - pkgs_iterator = PackageIterator( - primary_path=repomd_records_dict['primary'], - filelists_path=repomd_records_dict['filelists'], - other_path=repomd_records_dict['other'], - warningcb=self._warning_callback, - ) - self.pkgs_iterators[full_repo_path] = tee(pkgs_iterator) - for package in pkgs_iterator: - if package.arch not in self.repo_arches[repo_info.arch]: - package_arch = repo_info.arch - else: - package_arch = package.arch - package_key = f'{package.name}.{package_arch}' - package_variants = all_packages[package_key]['variants'] - package_info = all_packages[package_key]['package_info'] - if 'module' in package.release and not any( - re.search(included_package, package.name) - for included_package in self.included_packages - ): - # Even a module package will be added to packages.json if - # it presents in the list of included packages - continue - if 
repo_info.repo_type == 'present' and not package_info: - package_variants.append((repo_info.name, repo_info.arch)) - package_info['arch'] = package_arch - package_info['package'] = package - package_info['type'] = repo_info.is_reference - elif repo_info.repo_type == 'absent' and \ - (repo_info.name, repo_info.arch) in package_variants: - package_variants.remove((repo_info.name, repo_info.arch)) - # replace an older package if it's not reference or - # a newer package is from reference repo - elif (not package_info['type'] or - package_info['type'] == - repo_info.is_reference) and \ - self.compare_pkgs_version( + })) + for variant_info in self.variants: + for repo_info in variant_info.repos: + is_reference = repo_info.is_reference + for package in self.get_packages_iterator(repo_info=repo_info): + if self.is_skipped_module_package(package): + continue + if self.is_excluded_package( package, - package_info['package'] - ) > 0 and repo_info.repo_type == 'present': - all_packages[package_key]['variants'] = [ - (repo_info.name, repo_info.arch) - ] - package_info['arch'] = package_arch - package_info['package'] = package - elif self.compare_pkgs_version( + variant_info.arch, + self.excluded_packages, + ): + continue + if self.is_excluded_package( package, - package_info['package'] - ) == 0 and repo_info.repo_type == 'present': - package_variants.append( - (repo_info.name, repo_info.arch) + variant_info.arch, + variant_info.excluded_packages, + ): + continue + package_key = self.get_package_key( + package, + variant_info.arch, ) - - for package_dict in all_packages.values(): - for variant_name, variant_arch in package_dict['variants']: - package_info = package_dict['package_info'] - package_arch = package_info['arch'] - package = package_info['package'] - package_name = f'{package.name}.{package_arch}' - if any(re.search(excluded_package, package_name) - for excluded_package in self.excluded_packages): - continue - src_package_name = dnf.subject.Subject( - 
package.rpm_sourcerpm,
-            ).get_nevra_possibilities(
-                forms=hawkey.FORM_NEVRA,
-            )
-            if len(src_package_name) > 1:
-                # We should stop utility if we can't get exact name of srpm
-                raise ValueError(
-                    'We can\'t get exact name of srpm '
-                    f'by its NEVRA "{package.rpm_sourcerpm}"'
-                )
-            else:
-                src_package_name = src_package_name[0].name
-            # TODO: for x86_64 + i686 in one packages.json
-            #  don't remove!
-            # if package.arch in self.addon_repos[variant_arch]:
-            #     arches = self.addon_repos[variant_arch] + [variant_arch]
-            # else:
-            #     arches = [variant_arch]
-            # for arch in arches:
-            #     pkgs_list = packages_json[variant_name][
-            #         arch][src_package_name]
-            #     added_pkg = f'{package_name}.{package_arch}'
-            #     if added_pkg not in pkgs_list:
-            #         pkgs_list.append(added_pkg)
-            pkgs_list = packages_json[variant_name][
-                variant_arch][src_package_name]
-            if package_name not in pkgs_list:
-                pkgs_list.append(package_name)
-        return packages_json
+                    source_rpm_name = self.get_source_rpm_name(package)
+                    package_info = packages[source_rpm_name][package_key]
+                    if 'is_reference' not in package_info:
+                        package_info['variants'].append(variant_info.name)
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif (not package_info['is_reference'] or
+                            package_info['is_reference'] == is_reference) and \
+                            self.compare_pkgs_version(
+                                package_1=package,
+                                package_2=package_info['package'],
+                            ) > 0:
+                        package_info['variants'] = [variant_info.name]
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif self.compare_pkgs_version(
+                        package_1=package,
+                        package_2=package_info['package'],
+                    ) == 0 and repo_info.repo_type != 'absent':
+                        package_info['variants'].append(variant_info.name)
+        result = defaultdict(lambda: defaultdict(
+            lambda: defaultdict(list),
+        ))
+        for variant_info in self.variants:
+            for source_rpm_name, packages_info in packages.items():
+                for package_key, package_info in packages_info.items():
+                    variant_pkgs = 
result[variant_info.name][variant_info.arch] + if variant_info.name not in package_info['variants']: + continue + variant_pkgs[source_rpm_name].append(package_key) + return result def create_parser(): @@ -441,32 +458,34 @@ def read_config(config_path: Path) -> Optional[Dict]: def process_config(config_data: Dict) -> Tuple[ - List[RepoInfo], + List[VariantInfo], List[str], List[str], ]: excluded_packages = config_data.get('excluded_packages', []) included_packages = config_data.get('included_packages', []) - repos = [RepoInfo( - path=variant_repo['path'], - folder=variant_repo['folder'], + variants = [VariantInfo( name=variant_name, - arch=variant_repo['arch'], - is_remote=variant_repo['remote'], - is_reference=variant_repo['reference'], - repo_type=variant_repo.get('repo_type', 'present'), - ) for variant_name, variant_repos in config_data['variants'].items() - for variant_repo in variant_repos] - return repos, excluded_packages, included_packages + arch=variant_info['arch'], + excluded_packages=variant_info.get('excluded_packages', []), + repos=[RepoInfo( + path=variant_repo['path'], + folder=variant_repo['folder'], + is_remote=variant_repo['remote'], + is_reference=variant_repo['reference'], + repo_type=variant_repo.get('repo_type', 'present'), + ) for variant_repo in variant_info['repos']] + ) for variant_name, variant_info in config_data['variants'].items()] + return variants, excluded_packages, included_packages def cli_main(): args = create_parser().parse_args() - repos, excluded_packages, included_packages = process_config( + variants, excluded_packages, included_packages = process_config( config_data=read_config(args.config) ) pg = PackagesGenerator( - repos=repos, + variants=variants, excluded_packages=excluded_packages, included_packages=included_packages, ) From 004fc4382faa8f37e8313ff4ccd3ece0f6cbd481 Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Wed, 29 Mar 2023 11:40:00 +0300 Subject: [PATCH 6/8] ALBS-987: Generate i686 and dev repositories with 
pungi on building new distr. version automatically - Review comments --- pungi/phases/pkgset/pkgsets.py | 3 +-- pungi/scripts/create_packages_json.py | 39 +++++++++++++-------------- 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/pungi/phases/pkgset/pkgsets.py b/pungi/phases/pkgset/pkgsets.py index 85ca2058..6e269099 100644 --- a/pungi/phases/pkgset/pkgsets.py +++ b/pungi/phases/pkgset/pkgsets.py @@ -152,10 +152,9 @@ class PackageSetBase(kobo.log.LoggingBase): """ def nvr_formatter(package_info): + epoch_suffix = '' if package_info['epoch'] is not None: epoch_suffix = ':' + package_info['epoch'] - else: - epoch_suffix = '' return ( f"{package_info['name']}" f"{epoch_suffix}-" diff --git a/pungi/scripts/create_packages_json.py b/pungi/scripts/create_packages_json.py index d26fbf16..620a0f2f 100644 --- a/pungi/scripts/create_packages_json.py +++ b/pungi/scripts/create_packages_json.py @@ -130,16 +130,16 @@ class PackagesGenerator: @staticmethod def _get_full_repo_path(repo_info: RepoInfo): + result = os.path.join( + repo_info.path, + repo_info.folder + ) if repo_info.is_remote: - return urljoin( + result = urljoin( repo_info.path + '/', repo_info.folder, ) - else: - return os.path.join( - repo_info.path, - repo_info.folder - ) + return result @staticmethod def _warning_callback(warning_type, message): @@ -205,6 +205,12 @@ class PackagesGenerator: :param repo_info: structure which contains info about a current repo :return: list with repomd records """ + repomd_file_path = os.path.join( + repo_info.path, + repo_info.folder, + 'repodata', + 'repomd.xml', + ) if repo_info.is_remote: repomd_file_path = urljoin( urljoin( @@ -214,13 +220,7 @@ class PackagesGenerator: 'repodata/repomd.xml' ) repomd_file_path = self.get_remote_file_content(repomd_file_path) - else: - repomd_file_path = os.path.join( - repo_info.path, - repo_info.folder, - 'repodata', - 'repomd.xml', - ) + repomd_object = self._parse_repomd(repomd_file_path) if repo_info.is_remote: 
os.remove(repomd_file_path) @@ -301,9 +301,8 @@ class PackagesGenerator: repo_info: RepoInfo, ) -> Union[PackageIterator, Iterator]: full_repo_path = self._get_full_repo_path(repo_info) - if full_repo_path in self.pkgs: - return self.pkgs[full_repo_path] - else: + pkgs_iterator = self.pkgs.get(full_repo_path) + if pkgs_iterator is None: repomd_records = self._get_repomd_records( repo_info=repo_info, ) @@ -320,17 +319,17 @@ class PackagesGenerator: warningcb=self._warning_callback, ) pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator) - return pkgs_iterator + return pkgs_iterator def get_package_arch( self, package: Package, variant_arch: str, ) -> str: + result = variant_arch if package.arch in self.repo_arches[variant_arch]: - return package.arch - else: - return variant_arch + result = package.arch + return result def is_skipped_module_package(self, package: Package) -> bool: # Even a module package will be added to packages.json if From 943fd8e77d19f6d0d39c1ba08d44a56182f55bd1 Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Thu, 30 Mar 2023 12:52:51 +0300 Subject: [PATCH 7/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. 
version automatically - Script `create extra repo` is fixed - Unittests are fixed --- pungi/scripts/create_extra_repo.py | 171 +++++++++++++++-------------- tests/test_create_extra_repo.py | 25 +++-- tests/test_create_packages_json.py | 26 +++-- 3 files changed, 123 insertions(+), 99 deletions(-) diff --git a/pungi/scripts/create_extra_repo.py b/pungi/scripts/create_extra_repo.py index e7133fe7..cab91844 100644 --- a/pungi/scripts/create_extra_repo.py +++ b/pungi/scripts/create_extra_repo.py @@ -5,35 +5,43 @@ import os import subprocess import tempfile from shutil import rmtree -from typing import AnyStr, List, Dict, Optional +from typing import ( + AnyStr, + List, + Dict, + Optional, +) import createrepo_c as cr import requests import yaml from dataclasses import dataclass, field -from .create_packages_json import PackagesGenerator, RepoInfo +from .create_packages_json import ( + PackagesGenerator, + RepoInfo, + VariantInfo, +) @dataclass -class ExtraRepoInfo(RepoInfo): +class ExtraVariantInfo(VariantInfo): modules: List[AnyStr] = field(default_factory=list) packages: List[AnyStr] = field(default_factory=list) - is_remote: bool = True class CreateExtraRepo(PackagesGenerator): def __init__( self, - repos: List[ExtraRepoInfo], + variants: List[ExtraVariantInfo], bs_auth_token: AnyStr, local_repository_path: AnyStr, clear_target_repo: bool = True, ): - self.repos = [] # type: List[ExtraRepoInfo] - super().__init__(repos, [], []) + self.variants = [] # type: List[ExtraVariantInfo] + super().__init__(variants, [], []) self.auth_headers = { 'Authorization': f'Bearer {bs_auth_token}', } @@ -92,7 +100,7 @@ class CreateExtraRepo(PackagesGenerator): arch: AnyStr, packages: Optional[List[AnyStr]] = None, modules: Optional[List[AnyStr]] = None, - ) -> List[ExtraRepoInfo]: + ) -> List[ExtraVariantInfo]: """ Get info about a BS repo and save it to an object of class ExtraRepoInfo @@ -110,7 +118,7 @@ class CreateExtraRepo(PackagesGenerator): api_uri = 'api/v1' bs_repo_suffix = 
'build_repos' - repos_info = [] + variants_info = [] # get the full info about a BS repo repo_request = requests.get( @@ -132,22 +140,26 @@ class CreateExtraRepo(PackagesGenerator): # skip repo with unsuitable architecture if architecture != arch: continue - repo_info = ExtraRepoInfo( - path=os.path.join( - bs_url, - bs_repo_suffix, - build_id, - platform_name, - ), - folder=architecture, + variant_info = ExtraVariantInfo( name=f'{build_id}-{platform_name}-{architecture}', arch=architecture, - is_remote=True, packages=packages, modules=modules, + repos=[ + RepoInfo( + path=os.path.join( + bs_url, + bs_repo_suffix, + build_id, + platform_name, + ), + folder=architecture, + is_remote=True, + ) + ] ) - repos_info.append(repo_info) - return repos_info + variants_info.append(variant_info) + return variants_info def _create_local_extra_repo(self): """ @@ -184,7 +196,7 @@ class CreateExtraRepo(PackagesGenerator): def _download_rpm_to_local_repo( self, package_location: AnyStr, - repo_info: ExtraRepoInfo, + repo_info: RepoInfo, ) -> None: """ Download a rpm package from a remote repo and save it to a local repo @@ -212,37 +224,38 @@ class CreateExtraRepo(PackagesGenerator): def _download_packages( self, packages: Dict[AnyStr, cr.Package], - repo_info: ExtraRepoInfo + variant_info: ExtraVariantInfo ): """ Download all defined packages from a remote repo :param packages: information about all packages (including modularity) in a remote repo - :param repo_info: information about a remote repo + :param variant_info: information about a remote variant """ for package in packages.values(): package_name = package.name # Skip a current package from a remote repo if we defined # the list packages and a current package doesn't belong to it - if repo_info.packages and \ - package_name not in repo_info.packages: + if variant_info.packages and \ + package_name not in variant_info.packages: continue - self._download_rpm_to_local_repo( - package_location=package.location_href, - 
repo_info=repo_info, - ) + for repo_info in variant_info.repos: + self._download_rpm_to_local_repo( + package_location=package.location_href, + repo_info=repo_info, + ) def _download_modules( self, modules_data: List[Dict], - repo_info: ExtraRepoInfo, + variant_info: ExtraVariantInfo, packages: Dict[AnyStr, cr.Package] ): """ Download all defined modularity packages and their data from a remote repo :param modules_data: information about all modules in a remote repo - :param repo_info: information about a remote repo + :param variant_info: information about a remote variant :param packages: information about all packages (including modularity) in a remote repo """ @@ -250,8 +263,8 @@ class CreateExtraRepo(PackagesGenerator): module_data = module['data'] # Skip a current module from a remote repo if we defined # the list modules and a current module doesn't belong to it - if repo_info.modules and \ - module_data['name'] not in repo_info.modules: + if variant_info.modules and \ + module_data['name'] not in variant_info.modules: continue # we should add info about a module if the local repodata # doesn't have it @@ -266,15 +279,16 @@ class CreateExtraRepo(PackagesGenerator): # Empty repo_info.packages means that we will download # all packages from repo including # the modularity packages - if not repo_info.packages: + if not variant_info.packages: break # skip a rpm if it doesn't belong to a processed repo if rpm not in packages: continue - self._download_rpm_to_local_repo( - package_location=packages[rpm].location_href, - repo_info=repo_info, - ) + for repo_info in variant_info.repos: + self._download_rpm_to_local_repo( + package_location=packages[rpm].location_href, + repo_info=repo_info, + ) def create_extra_repo(self): """ @@ -284,45 +298,34 @@ class CreateExtraRepo(PackagesGenerator): 3. 
Call `createrepo_c` which creates a local repo with the right repodata """ - for repo_info in self.repos: - packages = {} # type: Dict[AnyStr, cr.Package] - repomd_records = self._get_repomd_records( - repo_info=repo_info, - ) - repomd_records_dict = {} # type: Dict[str, str] - self._download_repomd_records( - repo_info=repo_info, - repomd_records=repomd_records, - repomd_records_dict=repomd_records_dict, - ) - packages_iterator = cr.PackageIterator( - primary_path=repomd_records_dict['primary'], - filelists_path=repomd_records_dict['filelists'], - other_path=repomd_records_dict['other'], - warningcb=self._warning_callback, - ) - # parse the repodata (including modules.yaml.gz) - modules_data = self._parse_module_repomd_record( - repo_info=repo_info, - repomd_records=repomd_records, - ) - # convert the packages dict to more usable form - # for future checking that a rpm from the module's artifacts - # belongs to a processed repository - packages = { - f'{package.name}-{package.epoch}:{package.version}-' - f'{package.release}.{package.arch}': - package for package in packages_iterator - } - self._download_modules( - modules_data=modules_data, - repo_info=repo_info, - packages=packages, - ) - self._download_packages( - packages=packages, - repo_info=repo_info, - ) + for variant_info in self.variants: + for repo_info in variant_info.repos: + repomd_records = self._get_repomd_records( + repo_info=repo_info, + ) + packages_iterator = self.get_packages_iterator(repo_info) + # parse the repodata (including modules.yaml.gz) + modules_data = self._parse_module_repomd_record( + repo_info=repo_info, + repomd_records=repomd_records, + ) + # convert the packages dict to more usable form + # for future checking that a rpm from the module's artifacts + # belongs to a processed repository + packages = { + f'{package.name}-{package.epoch}:{package.version}-' + f'{package.release}.{package.arch}': + package for package in packages_iterator + } + self._download_modules( + 
modules_data=modules_data, + variant_info=variant_info, + packages=packages, + ) + self._download_packages( + packages=packages, + variant_info=variant_info, + ) self._dump_local_modules_yaml() self._create_local_extra_repo() @@ -333,7 +336,6 @@ def create_parser(): parser.add_argument( '--bs-auth-token', help='Auth token for Build System', - required=True, ) parser.add_argument( '--local-repo-path', @@ -402,11 +404,16 @@ def cli_main(): packages = packages.split() if repo.startswith('http://'): repos_info.append( - ExtraRepoInfo( - path=repo, - folder=repo_folder, + ExtraVariantInfo( name=repo_folder, arch=repo_arch, + repos=[ + RepoInfo( + path=repo, + folder=repo_folder, + is_remote=True, + ) + ], modules=modules, packages=packages, ) @@ -422,7 +429,7 @@ def cli_main(): ) ) cer = CreateExtraRepo( - repos=repos_info, + variants=repos_info, bs_auth_token=args.bs_auth_token, local_repository_path=args.local_repo_path, clear_target_repo=args.clear_local_repo, diff --git a/tests/test_create_extra_repo.py b/tests/test_create_extra_repo.py index aa4d217c..05140124 100644 --- a/tests/test_create_extra_repo.py +++ b/tests/test_create_extra_repo.py @@ -5,7 +5,7 @@ from unittest import TestCase, mock, main import yaml -from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraRepoInfo +from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo FOLDER_WITH_TEST_DATA = os.path.join( os.path.dirname( @@ -114,14 +114,17 @@ data: ... 
""", Loader=yaml.BaseLoader) -TEST_REPO_INFO = ExtraRepoInfo( +TEST_REPO_INFO = RepoInfo( path=FOLDER_WITH_TEST_DATA, folder='test_repo', + is_remote=False, +) +TEST_VARIANT_INFO = ExtraVariantInfo( name='TestRepo', arch='x86_64', - is_remote=False, packages=[], modules=[], + repos=[TEST_REPO_INFO] ) BS_BUILD_INFO = { @@ -161,15 +164,19 @@ class TestCreteExtraRepo(TestCase): ) self.assertEqual( [ - ExtraRepoInfo( - path='https://build.cloudlinux.com/' - f'build_repos/{build_id}/fake_platform', - folder=arch, + ExtraVariantInfo( name=f'{build_id}-fake_platform-{arch}', arch=arch, - is_remote=True, packages=packages, modules=modules, + repos=[ + RepoInfo( + path='https://build.cloudlinux.com/' + f'build_repos/{build_id}/fake_platform', + folder=arch, + is_remote=True, + ) + ] ) ], repos_info, @@ -197,7 +204,7 @@ class TestCreteExtraRepo(TestCase): 'CreateExtraRepo._create_local_extra_repo' ) as mock__create_local_extra_repo: cer = CreateExtraRepo( - repos=[TEST_REPO_INFO], + variants=[TEST_VARIANT_INFO], bs_auth_token='fake_auth_token', local_repository_path='/path/to/local/repo', clear_target_repo=False, diff --git a/tests/test_create_packages_json.py b/tests/test_create_packages_json.py index 4855a8a4..562a477b 100644 --- a/tests/test_create_packages_json.py +++ b/tests/test_create_packages_json.py @@ -4,7 +4,11 @@ import os from collections import defaultdict from unittest import TestCase, mock, main -from pungi.scripts.create_packages_json import PackagesGenerator, RepoInfo +from pungi.scripts.create_packages_json import ( + PackagesGenerator, + RepoInfo, + VariantInfo, +) FOLDER_WITH_TEST_DATA = os.path.join( os.path.dirname( @@ -16,8 +20,6 @@ FOLDER_WITH_TEST_DATA = os.path.join( test_repo_info = RepoInfo( path=FOLDER_WITH_TEST_DATA, folder='test_repo', - name='TestRepo', - arch='x86_64', is_remote=False, is_reference=True, ) @@ -25,11 +27,19 @@ test_repo_info = RepoInfo( test_repo_info_2 = RepoInfo( path=FOLDER_WITH_TEST_DATA, folder='test_repo_2', - 
name='TestRepo2', - arch='x86_64', is_remote=False, is_reference=True, ) +variant_info_1 = VariantInfo( + name='TestRepo', + arch='x86_64', + repos=[test_repo_info] +) +variant_info_2 = VariantInfo( + name='TestRepo2', + arch='x86_64', + repos=[test_repo_info_2] +) class TestPackagesJson(TestCase): @@ -60,9 +70,9 @@ class TestPackagesJson(TestCase): def test_02_generate_additional_packages(self): pg = PackagesGenerator( - repos=[ - test_repo_info, - test_repo_info_2, + variants=[ + variant_info_1, + variant_info_2, ], excluded_packages=['zziplib-utils'], included_packages=['vim-file*'], From 6592735aecd44fde648527ffa3faf846e94391c3 Mon Sep 17 00:00:00 2001 From: soksanichenko Date: Thu, 30 Mar 2023 14:05:47 +0300 Subject: [PATCH 8/8] ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically - Unittests are fixed --- tests/test_gather_rpms.py | 104 ++++++++++++++++++++++++----------- tests/test_pkgset_pkgsets.py | 4 +- 2 files changed, 74 insertions(+), 34 deletions(-) diff --git a/tests/test_gather_rpms.py b/tests/test_gather_rpms.py index 9a6f7f2d..460349f3 100644 --- a/tests/test_gather_rpms.py +++ b/tests/test_gather_rpms.py @@ -15,10 +15,13 @@ class TestGatherRpms(TestCase): maxDiff = None FILES_TO_CREATE = [ - 'powertools/Packages/libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm', + 'powertools/Packages/libvirt-6.0.0-28.module_el' + '8.3.0+555+a55c8938.i686.rpm', 'powertools/Packages/libgit2-devel-0.26.8-2.el8.x86_64.rpm', - 'powertools/Packages/xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm', - 'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm', + 'powertools/Packages/xalan-j2-2.7.1-38.module_el' + '8.0.0+30+832da3a1.noarch.rpm', + 'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el' + '8.0.0+30+832da3a1.noarch.rpm', 'appstream/Packages/OpenEXR-devel-2.2.0-11.el8.i686.rpm', 'appstream/Packages/mingw-binutils-generic-2.30-1.el8.x86_64.rpm', 
'appstream/Packages/somenonrpm', @@ -30,30 +33,51 @@ class TestGatherRpms(TestCase): os.makedirs(PATH_TO_REPOS) for filepath in self.FILES_TO_CREATE: - os.makedirs(os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)), exist_ok=True) + os.makedirs( + os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)), + exist_ok=True, + ) open(os.path.join(PATH_TO_REPOS, filepath), 'w').close() def test_gather_rpms(self): self.assertEqual( [Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'), + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'libvirt-6.0.0-28.module_el' + f'8.3.0+555+a55c8938.i686.rpm' + )), Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'), - Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'), - Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch', - path='/path/to/repos/appstream/Packages/' - 'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'), + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'libgit2-devel-0.26.8-2.el8.x86_64.rpm' + )), + Package(nvra='xalan-j2-2.7.1-38.module_el' + '8.0.0+30+832da3a1.noarch', + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'xalan-j2-2.7.1-38.module_el' + f'8.0.0+30+832da3a1.noarch.rpm' + )), + Package(nvra='bnd-maven-plugin-3.5.0-4.module_el' + '8.0.0+30+832da3a1.noarch', + path=Path( + '/path/to/repos/appstream/Packages/' + 'bnd-maven-plugin-3.5.0-4.module_el' + '8.0.0+30+832da3a1.noarch.rpm' + )), Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686', - path=f'{PATH_TO_REPOS}/appstream/Packages/' - f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'), + path=Path( + f'{PATH_TO_REPOS}/appstream/Packages/' + f'OpenEXR-devel-2.2.0-11.el8.i686.rpm' + 
)), Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64', - path=f'{PATH_TO_REPOS}/appstream/Packages/' - f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm')], - search_rpms(PATH_TO_REPOS) + path=Path( + f'{PATH_TO_REPOS}/appstream/Packages/' + f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm' + )) + ], + search_rpms(Path(PATH_TO_REPOS)) ) def test_copy_rpms(self): @@ -61,23 +85,39 @@ class TestGatherRpms(TestCase): packages = [ Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'), + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'libvirt-6.0.0-28.module_el' + f'8.3.0+555+a55c8938.i686.rpm' + )), Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'), + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'libgit2-devel-0.26.8-2.el8.x86_64.rpm' + )), Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch', - path=f'{PATH_TO_REPOS}/powertools/Packages/' - f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'), - Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch', - path='/path/to/repos/appstream/Packages/' - 'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'), + path=Path( + f'{PATH_TO_REPOS}/powertools/Packages/' + f'xalan-j2-2.7.1-38.module_el' + f'8.0.0+30+832da3a1.noarch.rpm' + )), + Package(nvra='bnd-maven-plugin-3.5.0-4.module_el' + '8.0.0+30+832da3a1.noarch', + path=Path( + '/path/to/repos/appstream/Packages/' + 'bnd-maven-plugin-3.5.0-4.module_el' + '8.0.0+30+832da3a1.noarch.rpm' + )), Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686', - path=f'{PATH_TO_REPOS}/appstream/Packages/' - f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'), + path=Path( + f'{PATH_TO_REPOS}/appstream/Packages/' + f'OpenEXR-devel-2.2.0-11.el8.i686.rpm' + )), Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64', - 
path=f'{PATH_TO_REPOS}/appstream/Packages/' - f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm') + path=Path( + f'{PATH_TO_REPOS}/appstream/Packages/' + f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm' + )) ] copy_rpms(packages, target_path) diff --git a/tests/test_pkgset_pkgsets.py b/tests/test_pkgset_pkgsets.py index 8d029ce1..d0f46cc5 100644 --- a/tests/test_pkgset_pkgsets.py +++ b/tests/test_pkgset_pkgsets.py @@ -322,7 +322,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase): ) figure = re.compile( - r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501 + r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$", # noqa: E501 re.DOTALL, ) self.assertRegex(str(ctx.exception), figure) @@ -399,7 +399,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase): pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms) figure = re.compile( - r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501 + r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$", # noqa: E501 re.DOTALL, ) self.assertRegex(str(ctx.exception), figure)