ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically #15

Merged
soksanichenko merged 8 commits from ALBS-987 into al_master 2023-03-31 09:03:40 +00:00
9 changed files with 454 additions and 395 deletions

View File

@ -152,9 +152,15 @@ class PackageSetBase(kobo.log.LoggingBase):
""" """
def nvr_formatter(package_info): def nvr_formatter(package_info):
# joins NVR parts of the package with '-' character. epoch_suffix = ''
return "-".join( if package_info['epoch'] is not None:
(package_info["name"], package_info["version"], package_info["release"]) epoch_suffix = ':' + package_info['epoch']
soksanichenko marked this conversation as resolved
Review

unnecessary else statement; you can declare epoch_suffix before the if statement

unnecessary `else` statement; you can declare `epoch_suffix` before the `if` statement
return (
f"{package_info['name']}"
f"{epoch_suffix}-"
f"{package_info['version']}-"
f"{package_info['release']}."
f"{package_info['arch']}"
) )
def get_error(sigkeys, infos): def get_error(sigkeys, infos):

View File

@ -5,35 +5,43 @@ import os
import subprocess import subprocess
import tempfile import tempfile
from shutil import rmtree from shutil import rmtree
from typing import AnyStr, List, Dict, Optional from typing import (
AnyStr,
List,
Dict,
Optional,
)
import createrepo_c as cr import createrepo_c as cr
import requests import requests
import yaml import yaml
from dataclasses import dataclass, field from dataclasses import dataclass, field
from .create_packages_json import PackagesGenerator, RepoInfo from .create_packages_json import (
PackagesGenerator,
RepoInfo,
VariantInfo,
)
@dataclass @dataclass
class ExtraRepoInfo(RepoInfo): class ExtraVariantInfo(VariantInfo):
modules: List[AnyStr] = field(default_factory=list) modules: List[AnyStr] = field(default_factory=list)
packages: List[AnyStr] = field(default_factory=list) packages: List[AnyStr] = field(default_factory=list)
is_remote: bool = True
class CreateExtraRepo(PackagesGenerator): class CreateExtraRepo(PackagesGenerator):
def __init__( def __init__(
self, self,
repos: List[ExtraRepoInfo], variants: List[ExtraVariantInfo],
bs_auth_token: AnyStr, bs_auth_token: AnyStr,
local_repository_path: AnyStr, local_repository_path: AnyStr,
clear_target_repo: bool = True, clear_target_repo: bool = True,
): ):
self.repos = [] # type: List[ExtraRepoInfo] self.variants = [] # type: List[ExtraVariantInfo]
super().__init__(repos, [], []) super().__init__(variants, [], [])
self.auth_headers = { self.auth_headers = {
'Authorization': f'Bearer {bs_auth_token}', 'Authorization': f'Bearer {bs_auth_token}',
} }
@ -92,7 +100,7 @@ class CreateExtraRepo(PackagesGenerator):
arch: AnyStr, arch: AnyStr,
packages: Optional[List[AnyStr]] = None, packages: Optional[List[AnyStr]] = None,
modules: Optional[List[AnyStr]] = None, modules: Optional[List[AnyStr]] = None,
) -> List[ExtraRepoInfo]: ) -> List[ExtraVariantInfo]:
""" """
Get info about a BS repo and save it to Get info about a BS repo and save it to
an object of class ExtraRepoInfo an object of class ExtraRepoInfo
@ -110,7 +118,7 @@ class CreateExtraRepo(PackagesGenerator):
api_uri = 'api/v1' api_uri = 'api/v1'
bs_repo_suffix = 'build_repos' bs_repo_suffix = 'build_repos'
repos_info = [] variants_info = []
# get the full info about a BS repo # get the full info about a BS repo
repo_request = requests.get( repo_request = requests.get(
@ -132,22 +140,26 @@ class CreateExtraRepo(PackagesGenerator):
# skip repo with unsuitable architecture # skip repo with unsuitable architecture
if architecture != arch: if architecture != arch:
continue continue
repo_info = ExtraRepoInfo( variant_info = ExtraVariantInfo(
path=os.path.join(
bs_url,
bs_repo_suffix,
build_id,
platform_name,
),
folder=architecture,
name=f'{build_id}-{platform_name}-{architecture}', name=f'{build_id}-{platform_name}-{architecture}',
arch=architecture, arch=architecture,
is_remote=True,
packages=packages, packages=packages,
modules=modules, modules=modules,
repos=[
RepoInfo(
path=os.path.join(
bs_url,
bs_repo_suffix,
build_id,
platform_name,
),
folder=architecture,
is_remote=True,
)
]
) )
repos_info.append(repo_info) variants_info.append(variant_info)
return repos_info return variants_info
def _create_local_extra_repo(self): def _create_local_extra_repo(self):
""" """
@ -184,7 +196,7 @@ class CreateExtraRepo(PackagesGenerator):
def _download_rpm_to_local_repo( def _download_rpm_to_local_repo(
self, self,
package_location: AnyStr, package_location: AnyStr,
repo_info: ExtraRepoInfo, repo_info: RepoInfo,
) -> None: ) -> None:
""" """
Download a rpm package from a remote repo and save it to a local repo Download a rpm package from a remote repo and save it to a local repo
@ -212,37 +224,38 @@ class CreateExtraRepo(PackagesGenerator):
def _download_packages( def _download_packages(
self, self,
packages: Dict[AnyStr, cr.Package], packages: Dict[AnyStr, cr.Package],
repo_info: ExtraRepoInfo variant_info: ExtraVariantInfo
): ):
""" """
Download all defined packages from a remote repo Download all defined packages from a remote repo
:param packages: information about all packages (including :param packages: information about all packages (including
modularity) in a remote repo modularity) in a remote repo
:param repo_info: information about a remote repo :param variant_info: information about a remote variant
""" """
for package in packages.values(): for package in packages.values():
package_name = package.name package_name = package.name
# Skip a current package from a remote repo if we defined # Skip a current package from a remote repo if we defined
# the list packages and a current package doesn't belong to it # the list packages and a current package doesn't belong to it
if repo_info.packages and \ if variant_info.packages and \
package_name not in repo_info.packages: package_name not in variant_info.packages:
continue continue
self._download_rpm_to_local_repo( for repo_info in variant_info.repos:
package_location=package.location_href, self._download_rpm_to_local_repo(
repo_info=repo_info, package_location=package.location_href,
) repo_info=repo_info,
)
def _download_modules( def _download_modules(
self, self,
modules_data: List[Dict], modules_data: List[Dict],
repo_info: ExtraRepoInfo, variant_info: ExtraVariantInfo,
packages: Dict[AnyStr, cr.Package] packages: Dict[AnyStr, cr.Package]
): ):
""" """
Download all defined modularity packages and their data from Download all defined modularity packages and their data from
a remote repo a remote repo
:param modules_data: information about all modules in a remote repo :param modules_data: information about all modules in a remote repo
:param repo_info: information about a remote repo :param variant_info: information about a remote variant
:param packages: information about all packages (including :param packages: information about all packages (including
modularity) in a remote repo modularity) in a remote repo
""" """
@ -250,8 +263,8 @@ class CreateExtraRepo(PackagesGenerator):
module_data = module['data'] module_data = module['data']
# Skip a current module from a remote repo if we defined # Skip a current module from a remote repo if we defined
# the list modules and a current module doesn't belong to it # the list modules and a current module doesn't belong to it
if repo_info.modules and \ if variant_info.modules and \
module_data['name'] not in repo_info.modules: module_data['name'] not in variant_info.modules:
continue continue
# we should add info about a module if the local repodata # we should add info about a module if the local repodata
# doesn't have it # doesn't have it
@ -266,15 +279,16 @@ class CreateExtraRepo(PackagesGenerator):
# Empty repo_info.packages means that we will download # Empty repo_info.packages means that we will download
# all packages from repo including # all packages from repo including
# the modularity packages # the modularity packages
if not repo_info.packages: if not variant_info.packages:
break break
# skip a rpm if it doesn't belong to a processed repo # skip a rpm if it doesn't belong to a processed repo
if rpm not in packages: if rpm not in packages:
continue continue
self._download_rpm_to_local_repo( for repo_info in variant_info.repos:
package_location=packages[rpm].location_href, self._download_rpm_to_local_repo(
repo_info=repo_info, package_location=packages[rpm].location_href,
) repo_info=repo_info,
)
def create_extra_repo(self): def create_extra_repo(self):
""" """
@ -284,45 +298,34 @@ class CreateExtraRepo(PackagesGenerator):
3. Call `createrepo_c` which creates a local repo 3. Call `createrepo_c` which creates a local repo
with the right repodata with the right repodata
""" """
for repo_info in self.repos: for variant_info in self.variants:
packages = {} # type: Dict[AnyStr, cr.Package] for repo_info in variant_info.repos:
repomd_records = self._get_repomd_records( repomd_records = self._get_repomd_records(
repo_info=repo_info, repo_info=repo_info,
) )
repomd_records_dict = {} # type: Dict[str, str] packages_iterator = self.get_packages_iterator(repo_info)
self._download_repomd_records( # parse the repodata (including modules.yaml.gz)
repo_info=repo_info, modules_data = self._parse_module_repomd_record(
repomd_records=repomd_records, repo_info=repo_info,
repomd_records_dict=repomd_records_dict, repomd_records=repomd_records,
) )
packages_iterator = cr.PackageIterator( # convert the packages dict to more usable form
primary_path=repomd_records_dict['primary'], # for future checking that a rpm from the module's artifacts
filelists_path=repomd_records_dict['filelists'], # belongs to a processed repository
other_path=repomd_records_dict['other'], packages = {
warningcb=self._warning_callback, f'{package.name}-{package.epoch}:{package.version}-'
) f'{package.release}.{package.arch}':
# parse the repodata (including modules.yaml.gz) package for package in packages_iterator
modules_data = self._parse_module_repomd_record( }
repo_info=repo_info, self._download_modules(
repomd_records=repomd_records, modules_data=modules_data,
) variant_info=variant_info,
# convert the packages dict to more usable form packages=packages,
# for future checking that a rpm from the module's artifacts )
# belongs to a processed repository self._download_packages(
packages = { packages=packages,
f'{package.name}-{package.epoch}:{package.version}-' variant_info=variant_info,
f'{package.release}.{package.arch}': )
package for package in packages_iterator
}
self._download_modules(
modules_data=modules_data,
repo_info=repo_info,
packages=packages,
)
self._download_packages(
packages=packages,
repo_info=repo_info,
)
self._dump_local_modules_yaml() self._dump_local_modules_yaml()
self._create_local_extra_repo() self._create_local_extra_repo()
@ -333,7 +336,6 @@ def create_parser():
parser.add_argument( parser.add_argument(
'--bs-auth-token', '--bs-auth-token',
help='Auth token for Build System', help='Auth token for Build System',
required=True,
) )
parser.add_argument( parser.add_argument(
'--local-repo-path', '--local-repo-path',
@ -402,11 +404,16 @@ def cli_main():
packages = packages.split() packages = packages.split()
if repo.startswith('http://'): if repo.startswith('http://'):
repos_info.append( repos_info.append(
ExtraRepoInfo( ExtraVariantInfo(
path=repo,
folder=repo_folder,
name=repo_folder, name=repo_folder,
arch=repo_arch, arch=repo_arch,
repos=[
RepoInfo(
path=repo,
folder=repo_folder,
is_remote=True,
)
],
modules=modules, modules=modules,
packages=packages, packages=packages,
) )
@ -422,7 +429,7 @@ def cli_main():
) )
) )
cer = CreateExtraRepo( cer = CreateExtraRepo(
repos=repos_info, variants=repos_info,
bs_auth_token=args.bs_auth_token, bs_auth_token=args.bs_auth_token,
local_repository_path=args.local_repo_path, local_repository_path=args.local_repo_path,
clear_target_repo=args.clear_local_repo, clear_target_repo=args.clear_local_repo,

View File

@ -9,22 +9,41 @@ https://github.com/rpm-software-management/createrepo_c/blob/master/examples/pyt
import argparse import argparse
import gzip import gzip
import json import json
import logging
import lzma import lzma
import os import os
import re import re
import tempfile import tempfile
from collections import defaultdict from collections import defaultdict
from typing import AnyStr, Dict, List, Any, Iterator from itertools import tee
from pathlib import Path
from typing import (
AnyStr,
Dict,
List,
Any,
Iterator,
Optional,
Tuple,
Union,
)
import binascii import binascii
import createrepo_c as cr from urllib.parse import urljoin
import dnf.subject
import hawkey
import requests import requests
import rpm import rpm
import yaml import yaml
from createrepo_c import Package, PackageIterator from createrepo_c import (
from dataclasses import dataclass Package,
PackageIterator,
Repomd,
RepomdRecord,
)
from dataclasses import dataclass, field
from kobo.rpmlib import parse_nvra
logging.basicConfig(level=logging.INFO)
def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes): def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
@ -51,23 +70,33 @@ class RepoInfo:
# 'appstream', 'baseos', etc. # 'appstream', 'baseos', etc.
# Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
# using remote repo # using remote repo
path: AnyStr path: str
# name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc # name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc
folder: AnyStr folder: str
# name of repo. E.g. 'BaseOS', 'AppStream', etc
name: AnyStr
# architecture of repo. E.g. 'x86_64', 'i686', etc
arch: AnyStr
# Is a repo remote or local # Is a repo remote or local
is_remote: bool is_remote: bool
# Is a reference repository (usually it's a RHEL repo) # Is a reference repository (usually it's a RHEL repo)
# Layout of packages from such repository will be taken as example # Layout of packages from such repository will be taken as example
# Only layout of specific package (which don't exist # Only layout of specific package (which doesn't exist
# in a reference repository) will be taken as example # in a reference repository) will be taken as example
is_reference: bool = False is_reference: bool = False
# The packages from 'present' repo will be added to a variant.
# The packages from 'absent' repo will be removed from a variant.
repo_type: str = 'present' repo_type: str = 'present'
@dataclass
class VariantInfo:
# name of variant. E.g. 'BaseOS', 'AppStream', etc
name: AnyStr
# architecture of variant. E.g. 'x86_64', 'i686', etc
arch: AnyStr
# The packages which will be not added to a variant
excluded_packages: List[str] = field(default_factory=list)
# Repos of a variant
repos: List[RepoInfo] = field(default_factory=list)
class PackagesGenerator: class PackagesGenerator:
repo_arches = defaultdict(lambda: list(('noarch',))) repo_arches = defaultdict(lambda: list(('noarch',)))
@ -81,11 +110,12 @@ class PackagesGenerator:
def __init__( def __init__(
self, self,
repos: List[RepoInfo], variants: List[VariantInfo],
excluded_packages: List[AnyStr], excluded_packages: List[AnyStr],
included_packages: List[AnyStr], included_packages: List[AnyStr],
): ):
self.repos = repos self.variants = variants
self.pkgs = dict()
self.excluded_packages = excluded_packages self.excluded_packages = excluded_packages
self.included_packages = included_packages self.included_packages = included_packages
self.tmp_files = [] self.tmp_files = []
@ -98,6 +128,19 @@ class PackagesGenerator:
if os.path.exists(tmp_file): if os.path.exists(tmp_file):
os.remove(tmp_file) os.remove(tmp_file)
@staticmethod
def _get_full_repo_path(repo_info: RepoInfo):
result = os.path.join(
repo_info.path,
repo_info.folder
)
if repo_info.is_remote:
result = urljoin(
soksanichenko marked this conversation as resolved Outdated

same

same
repo_info.path + '/',
repo_info.folder,
)
return result
@staticmethod @staticmethod
def _warning_callback(warning_type, message): def _warning_callback(warning_type, message):
""" """
@ -123,12 +166,12 @@ class PackagesGenerator:
return file_stream.name return file_stream.name
@staticmethod @staticmethod
def _parse_repomd(repomd_file_path: AnyStr) -> cr.Repomd: def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
""" """
Parse file repomd.xml and create object Repomd Parse file repomd.xml and create object Repomd
:param repomd_file_path: path to local repomd.xml :param repomd_file_path: path to local repomd.xml
""" """
return cr.Repomd(repomd_file_path) return Repomd(repomd_file_path)
@classmethod @classmethod
def _parse_modules_file( def _parse_modules_file(
@ -139,7 +182,7 @@ class PackagesGenerator:
""" """
Parse modules.yaml.gz and returns parsed data Parse modules.yaml.gz and returns parsed data
:param modules_file_path: path to local modules.yaml.gz :param modules_file_path: path to local modules.yaml.gz
:return: List of dict for each modules in a repo :return: List of dict for each module in a repo
""" """
with open(modules_file_path, 'rb') as modules_file: with open(modules_file_path, 'rb') as modules_file:
@ -156,7 +199,7 @@ class PackagesGenerator:
def _get_repomd_records( def _get_repomd_records(
self, self,
repo_info: RepoInfo, repo_info: RepoInfo,
) -> List[cr.RepomdRecord]: ) -> List[RepomdRecord]:
""" """
Get, parse file repomd.xml and extract from it repomd records Get, parse file repomd.xml and extract from it repomd records
:param repo_info: structure which contains info about a current repo :param repo_info: structure which contains info about a current repo
@ -169,9 +212,15 @@ class PackagesGenerator:
'repomd.xml', 'repomd.xml',
) )
if repo_info.is_remote: if repo_info.is_remote:
repomd_file_path = urljoin(
urljoin(
repo_info.path + '/',
soksanichenko marked this conversation as resolved Outdated

same

same
repo_info.folder
) + '/',
'repodata/repomd.xml'
)
repomd_file_path = self.get_remote_file_content(repomd_file_path) repomd_file_path = self.get_remote_file_content(repomd_file_path)
else:
repomd_file_path = repomd_file_path
repomd_object = self._parse_repomd(repomd_file_path) repomd_object = self._parse_repomd(repomd_file_path)
if repo_info.is_remote: if repo_info.is_remote:
os.remove(repomd_file_path) os.remove(repomd_file_path)
@ -180,7 +229,7 @@ class PackagesGenerator:
def _download_repomd_records( def _download_repomd_records(
self, self,
repo_info: RepoInfo, repo_info: RepoInfo,
repomd_records: List[cr.RepomdRecord], repomd_records: List[RepomdRecord],
repomd_records_dict: Dict[str, str], repomd_records_dict: Dict[str, str],
): ):
""" """
@ -210,13 +259,12 @@ class PackagesGenerator:
def _parse_module_repomd_record( def _parse_module_repomd_record(
self, self,
repo_info: RepoInfo, repo_info: RepoInfo,
repomd_records: List[cr.RepomdRecord], repomd_records: List[RepomdRecord],
) -> List[Dict]: ) -> List[Dict]:
""" """
Download repomd records Download repomd records
:param repo_info: structure which contains info about a current repo :param repo_info: structure which contains info about a current repo
:param repomd_records: list with repomd records :param repomd_records: list with repomd records
:param repomd_records_dict: dict with paths to repodata files
""" """
for repomd_record in repomd_records: for repomd_record in repomd_records:
if repomd_record.type != 'modules': if repomd_record.type != 'modules':
@ -248,25 +296,13 @@ class PackagesGenerator:
) )
return rpm.labelCompare(version_tuple_1, version_tuple_2) return rpm.labelCompare(version_tuple_1, version_tuple_2)
def generate_packages_json( def get_packages_iterator(
self self,
) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]: repo_info: RepoInfo,
""" ) -> Union[PackageIterator, Iterator]:
Generate packages.json full_repo_path = self._get_full_repo_path(repo_info)
""" pkgs_iterator = self.pkgs.get(full_repo_path)
packages_json = defaultdict( if pkgs_iterator is None:
lambda: defaultdict(
lambda: defaultdict(
list,
)
)
)
all_packages = defaultdict(lambda: {'variants': list()})
for repo_info in sorted(
self.repos,
key=lambda i: i.repo_type,
reverse=True,
):
repomd_records = self._get_repomd_records( repomd_records = self._get_repomd_records(
soksanichenko marked this conversation as resolved
Review

same

same
repo_info=repo_info, repo_info=repo_info,
) )
@ -276,167 +312,133 @@ class PackagesGenerator:
repomd_records=repomd_records, repomd_records=repomd_records,
repomd_records_dict=repomd_records_dict, repomd_records_dict=repomd_records_dict,
) )
packages_iterator = PackageIterator( pkgs_iterator = PackageIterator(
primary_path=repomd_records_dict['primary'], primary_path=repomd_records_dict['primary'],
filelists_path=repomd_records_dict['filelists'], filelists_path=repomd_records_dict['filelists'],
other_path=repomd_records_dict['other'], other_path=repomd_records_dict['other'],
warningcb=self._warning_callback, warningcb=self._warning_callback,
) )
for package in packages_iterator: pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
if package.arch not in self.repo_arches[repo_info.arch]: return pkgs_iterator
package_arch = repo_info.arch
else:
package_arch = package.arch
package_key = f'{package.name}.{package_arch}'
if 'module' in package.release and not any(
re.search(included_package, package.name)
for included_package in self.included_packages
):
# Even a module package will be added to packages.json if
# it presents in the list of included packages
continue
if package_key not in all_packages:
all_packages[package_key]['variants'].append(
(repo_info.name, repo_info.arch)
)
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
all_packages[package_key]['type'] = repo_info.is_reference
elif repo_info.repo_type == 'absent' and (repo_info.name, repo_info.arch) in all_packages[package_key]['variants']:
all_packages[package_key]['variants'].remove((repo_info.name, repo_info.arch))
# replace an older package if it's not reference or
# a newer package is from reference repo
elif (not all_packages[package_key]['type'] or
all_packages[package_key]['type'] ==
repo_info.is_reference) and \
self.compare_pkgs_version(
package,
all_packages[package_key]['package']
) > 0:
all_packages[package_key]['variants'] = [
(repo_info.name, repo_info.arch)
]
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
elif self.compare_pkgs_version(
package,
all_packages[package_key]['package']
) == 0:
all_packages[package_key]['variants'].append(
(repo_info.name, repo_info.arch)
)
for package_dict in all_packages.values(): def get_package_arch(
for variant_name, variant_arch in package_dict['variants']: self,
package_arch = package_dict['arch'] package: Package,
package = package_dict['package'] variant_arch: str,
package_name = package.name ) -> str:
if any(re.search(excluded_package, package_name) result = variant_arch
for excluded_package in self.excluded_packages): if package.arch in self.repo_arches[variant_arch]:
continue result = package.arch
src_package_name = dnf.subject.Subject( return result
soksanichenko marked this conversation as resolved Outdated

same

same
package.rpm_sourcerpm,
).get_nevra_possibilities( def is_skipped_module_package(self, package: Package) -> bool:
forms=hawkey.FORM_NEVRA, # Even a module package will be added to packages.json if
) # it presents in the list of included packages
if len(src_package_name) > 1: return 'module' in package.release and not any(
# We should stop utility if we can't get exact name of srpm re.search(included_package, package.name)
raise ValueError( for included_package in self.included_packages
'We can\'t get exact name of srpm ' )
f'by its NEVRA "{package.rpm_sourcerpm}"'
def is_excluded_package(
self,
package: Package,
variant_arch: str,
excluded_packages: List[str],
) -> bool:
return any(
re.search(
excluded_pkg,
self.get_package_key(package, variant_arch),
) for excluded_pkg in excluded_packages
)
@staticmethod
def get_source_rpm_name(package: Package) -> str:
source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
return source_rpm_nvra['name']
def get_package_key(self, package: Package, variant_arch: str) -> str:
return (
f'{package.name}.'
f'{self.get_package_arch(package, variant_arch)}'
)
def generate_packages_json(
self
) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
"""
Generate packages.json
"""
packages = defaultdict(lambda: defaultdict(lambda: {
'variants': list(),
}))
for variant_info in self.variants:
for repo_info in variant_info.repos:
is_reference = repo_info.is_reference
for package in self.get_packages_iterator(repo_info=repo_info):
if self.is_skipped_module_package(package):
continue
if self.is_excluded_package(
package,
anfimovdm marked this conversation as resolved Outdated

those if statements can be combined

those `if` statements can be combined

It's split for more readable code.

It's split for more readable code.
variant_info.arch,
self.excluded_packages,
):
continue
if self.is_excluded_package(
package,
variant_info.arch,
variant_info.excluded_packages,
):
continue
package_key = self.get_package_key(
package,
variant_info.arch,
) )
else: source_rpm_name = self.get_source_rpm_name(package)
src_package_name = src_package_name[0].name package_info = packages[source_rpm_name][package_key]
# TODO: for x86_64 + i686 in one packages.json if 'is_reference' not in package_info:
# don't remove! package_info['variants'].append(variant_info.name)
# if package.arch in self.addon_repos[variant_arch]: package_info['is_reference'] = is_reference
# arches = self.addon_repos[variant_arch] + [variant_arch] package_info['package'] = package
# else: elif not package_info['is_reference'] or \
# arches = [variant_arch] package_info['is_reference'] == is_reference and \
# for arch in arches: self.compare_pkgs_version(
# pkgs_list = packages_json[variant_name][ package_1=package,
# arch][src_package_name] package_2=package_info['package'],
# added_pkg = f'{package_name}.{package_arch}' ) > 0:
Review

wrong indents

wrong indents
Review

Pycharm didn't agree with you)

Pycharm didn't agree with you)
Review

btw, even if the linter doesn't show any warnings about indents, it looks weird, because indents for function arguments are supposed to be like this

self.compare_pkgs_version(
    package_1=package,
    package_2=package_info['package'],
):

I prefer to use black for code formatting, here is an example of black formatting for those lines

if "is_reference" not in package_info:
    package_info["variants"].append(variant_info.name)
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    not package_info["is_reference"]
    or package_info["is_reference"] == is_reference
    and self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    > 0
):
    package_info["variants"] = [variant_info.name]
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    == 0
    and repo_info.repo_type != "absent"
):
    package_info["variants"].append(variant_info.name)


btw, even if the linter doesn't show any warnings about indents, it looks weird, because indents for function arguments are supposed to be like this ```python3 self.compare_pkgs_version( package_1=package, package_2=package_info['package'], ): ``` I prefer to use `black` for code formatting, here is an example of black formatting for those lines ```python3 if "is_reference" not in package_info: package_info["variants"].append(variant_info.name) package_info["is_reference"] = is_reference package_info["package"] = package elif ( not package_info["is_reference"] or package_info["is_reference"] == is_reference and self.compare_pkgs_version( package_1=package, package_2=package_info["package"], ) > 0 ): package_info["variants"] = [variant_info.name] package_info["is_reference"] = is_reference package_info["package"] = package elif ( self.compare_pkgs_version( package_1=package, package_2=package_info["package"], ) == 0 and repo_info.repo_type != "absent" ): package_info["variants"].append(variant_info.name) ```
# if added_pkg not in pkgs_list: package_info['variants'] = [variant_info.name]
# pkgs_list.append(added_pkg) package_info['is_reference'] = is_reference
pkgs_list = packages_json[variant_name][ package_info['package'] = package
variant_arch][src_package_name] elif self.compare_pkgs_version(
added_pkg = f'{package_name}.{package_arch}' package_1=package,
if added_pkg not in pkgs_list: package_2=package_info['package'],
pkgs_list.append(added_pkg) ) == 0 and repo_info.repo_type != 'absent':
return packages_json package_info['variants'].append(variant_info.name)
result = defaultdict(lambda: defaultdict(
lambda: defaultdict(list),
))
for variant_info in self.variants:
for source_rpm_name, packages_info in packages.items():
for package_key, package_info in packages_info.items():
variant_pkgs = result[variant_info.name][variant_info.arch]
if variant_info.name not in package_info['variants']:
continue
variant_pkgs[source_rpm_name].append(package_key)
return result
def create_parser(): def create_parser():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
'--repo-path', '-c',
action='append', '--config',
help='Path to a folder with repofolders. E.g. "/var/repos" or ' type=Path,
'"http://koji.cloudlinux.com/mirrors/rhel_mirror"', default=Path('config.yaml'),
required=True,
)
parser.add_argument(
'--repo-folder',
action='append',
help='A folder which contains folder repodata . E.g. "baseos-stream"',
required=True,
)
parser.add_argument(
'--repo-arch',
action='append',
help='What architecture packages a repository contains. E.g. "x86_64"',
required=True,
)
parser.add_argument(
'--repo-name',
action='append',
help='Name of a repository. E.g. "AppStream"',
required=True,
)
parser.add_argument(
'--is-remote',
action='append',
type=str,
help='A repository is remote or local',
choices=['yes', 'no'],
required=True,
)
parser.add_argument(
'--is-reference',
action='append',
type=str,
help='A repository is used as reference for packages layout',
choices=['yes', 'no'],
required=True,
)
parser.add_argument(
'--repo-type',
action='append',
type=str,
help='Packages from repository will be removed or added to variant',
choices=['present', 'absent'],
required=True,
)
parser.add_argument(
'--excluded-packages',
nargs='+',
type=str,
default=[],
help='A list of globally excluded packages from generated json.'
'All of list elements should be separated by space',
required=False,
)
parser.add_argument(
'--included-packages',
nargs='+',
type=str,
default=[],
help='A list of globally included packages from generated json.'
'All of list elements should be separated by space',
required=False, required=False,
help='Path to a config',
) )
parser.add_argument( parser.add_argument(
'-o',
'--json-output-path', '--json-output-path',
type=str, type=str,
help='Full path to output json file', help='Full path to output json file',
@ -446,32 +448,45 @@ def create_parser():
return parser return parser
def read_config(config_path: Path) -> Optional[Dict]:
if not config_path.exists():
logging.error('A config by path "%s" does not exist', config_path)
exit(1)
with config_path.open('r') as config_fd:
return yaml.safe_load(config_fd)
def process_config(config_data: Dict) -> Tuple[
List[VariantInfo],
List[str],
List[str],
]:
excluded_packages = config_data.get('excluded_packages', [])
included_packages = config_data.get('included_packages', [])
variants = [VariantInfo(
name=variant_name,
arch=variant_info['arch'],
excluded_packages=variant_info.get('excluded_packages', []),
repos=[RepoInfo(
path=variant_repo['path'],
soksanichenko marked this conversation as resolved
Review

you can unpack dicts instead of passing keyword arguments

VariantInfo(**variant_info)

you can unpack dicts instead of passing keyword arguments ```python3 VariantInfo(**variant_info) ```
Review

Yes, I know, but that variant is more readable

Yes, I know, but that variant is more readable
folder=variant_repo['folder'],
is_remote=variant_repo['remote'],
is_reference=variant_repo['reference'],
repo_type=variant_repo.get('repo_type', 'present'),
) for variant_repo in variant_info['repos']]
) for variant_name, variant_info in config_data['variants'].items()]
return variants, excluded_packages, included_packages
def cli_main(): def cli_main():
args = create_parser().parse_args() args = create_parser().parse_args()
repos = [] variants, excluded_packages, included_packages = process_config(
for repo_path, repo_folder, repo_name, \ config_data=read_config(args.config)
repo_arch, is_remote, is_reference, repo_type in zip( )
args.repo_path,
args.repo_folder,
args.repo_name,
args.repo_arch,
args.is_remote,
args.is_reference,
args.repo_type,
):
repos.append(RepoInfo(
path=repo_path,
folder=repo_folder,
name=repo_name,
arch=repo_arch,
is_remote=True if is_remote == 'yes' else False,
is_reference=True if is_reference == 'yes' else False,
repo_type=repo_type,
))
pg = PackagesGenerator( pg = PackagesGenerator(
repos=repos, variants=variants,
excluded_packages=args.excluded_packages, excluded_packages=excluded_packages,
included_packages=args.included_packages, included_packages=included_packages,
) )
result = pg.generate_packages_json() result = pg.generate_packages_json()
with open(args.json_output_path, 'w') as packages_file: with open(args.json_output_path, 'w') as packages_file:

View File

@ -1,39 +1,34 @@
from argparse import ArgumentParser from argparse import ArgumentParser
import os import os
from glob import iglob
from typing import List from typing import List
from pathlib import Path
from attr import dataclass from dataclasses import dataclass
from productmd.common import parse_nvra from productmd.common import parse_nvra
@dataclass @dataclass
class Package: class Package:
nvra: str nvra: str
path: str path: Path
def search_rpms(top_dir) -> List[Package]: def search_rpms(top_dir: Path) -> List[Package]:
""" """
Search for all *.rpm files recursively Search for all *.rpm files recursively
in given top directory in given top directory
Returns: Returns:
list: list of paths list: list of paths
""" """
rpms = [] return [Package(
for root, dirs, files in os.walk(top_dir): nvra=Path(path).stem,
path = root.split(os.sep) path=Path(path),
for file in files: ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]
if not file.endswith('.rpm'):
continue
nvra, _ = os.path.splitext(file)
rpms.append(
Package(nvra=nvra, path=os.path.join('/', *path, file))
)
return rpms
def copy_rpms(packages: List[Package], target_top_dir: str): def copy_rpms(packages: List[Package], target_top_dir: Path):
""" """
Search synced repos for rpms and prepare Search synced repos for rpms and prepare
koji-like structure for pungi koji-like structure for pungi
@ -46,24 +41,22 @@ def copy_rpms(packages: List[Package], target_top_dir: str):
""" """
for package in packages: for package in packages:
info = parse_nvra(package.nvra) info = parse_nvra(package.nvra)
target_arch_dir = target_top_dir.joinpath(info['arch'])
target_arch_dir = os.path.join(target_top_dir, info['arch']) target_file = target_arch_dir.joinpath(package.path.name)
os.makedirs(target_arch_dir, exist_ok=True) os.makedirs(target_arch_dir, exist_ok=True)
target_file = os.path.join(target_arch_dir, os.path.basename(package.path)) if not target_file.exists():
if not os.path.exists(target_file):
try: try:
os.link(package.path, target_file) os.link(package.path, target_file)
except OSError: except OSError:
# hardlink failed, try symlinking # hardlink failed, try symlinking
os.symlink(package.path, target_file) package.path.symlink_to(target_file)
def cli_main(): def cli_main():
parser = ArgumentParser() parser = ArgumentParser()
parser.add_argument('-p', '--path', required=True) parser.add_argument('-p', '--path', required=True, type=Path)
parser.add_argument('-t', '--target', required=True) parser.add_argument('-t', '--target', required=True, type=Path)
namespace = parser.parse_args() namespace = parser.parse_args()

View File

@ -203,31 +203,12 @@ class KojiMock:
packages = [] packages = []
# get all rpms in folder # get all rpms in folder
rpms = search_rpms(self._packages_dir) rpms = search_rpms(Path(self._packages_dir))
all_rpms = [package.path for package in rpms]
# get nvras for modular packages for rpm in rpms:
nvras = set() info = parse_nvra(rpm.path.stem)
for module in self._modules.values(): if 'module' in info['release']:
path = os.path.join( continue
self._modules_dir,
module.arch,
module.nvr,
)
info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
for package in info.get_rpm_artifacts():
data = parse_nvra(package)
nvras.add((data['name'], data['version'], data['release'], data['arch']))
# and remove modular packages from global list
for rpm in all_rpms[:]:
data = parse_nvra(os.path.basename(rpm[:-4]))
if (data['name'], data['version'], data['release'], data['arch']) in nvras:
all_rpms.remove(rpm)
for rpm in all_rpms:
info = parse_nvra(os.path.basename(rpm))
packages.append({ packages.append({
"build_id": RELEASE_BUILD_ID, "build_id": RELEASE_BUILD_ID,
"name": info['name'], "name": info['name'],

View File

@ -5,7 +5,7 @@ from unittest import TestCase, mock, main
import yaml import yaml
from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraRepoInfo from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo
FOLDER_WITH_TEST_DATA = os.path.join( FOLDER_WITH_TEST_DATA = os.path.join(
os.path.dirname( os.path.dirname(
@ -114,14 +114,17 @@ data:
... ...
""", Loader=yaml.BaseLoader) """, Loader=yaml.BaseLoader)
TEST_REPO_INFO = ExtraRepoInfo( TEST_REPO_INFO = RepoInfo(
path=FOLDER_WITH_TEST_DATA, path=FOLDER_WITH_TEST_DATA,
folder='test_repo', folder='test_repo',
is_remote=False,
)
TEST_VARIANT_INFO = ExtraVariantInfo(
name='TestRepo', name='TestRepo',
arch='x86_64', arch='x86_64',
is_remote=False,
packages=[], packages=[],
modules=[], modules=[],
repos=[TEST_REPO_INFO]
) )
BS_BUILD_INFO = { BS_BUILD_INFO = {
@ -161,15 +164,19 @@ class TestCreteExtraRepo(TestCase):
) )
self.assertEqual( self.assertEqual(
[ [
ExtraRepoInfo( ExtraVariantInfo(
path='https://build.cloudlinux.com/'
f'build_repos/{build_id}/fake_platform',
folder=arch,
name=f'{build_id}-fake_platform-{arch}', name=f'{build_id}-fake_platform-{arch}',
arch=arch, arch=arch,
is_remote=True,
packages=packages, packages=packages,
modules=modules, modules=modules,
repos=[
RepoInfo(
path='https://build.cloudlinux.com/'
f'build_repos/{build_id}/fake_platform',
folder=arch,
is_remote=True,
)
]
) )
], ],
repos_info, repos_info,
@ -197,7 +204,7 @@ class TestCreteExtraRepo(TestCase):
'CreateExtraRepo._create_local_extra_repo' 'CreateExtraRepo._create_local_extra_repo'
) as mock__create_local_extra_repo: ) as mock__create_local_extra_repo:
cer = CreateExtraRepo( cer = CreateExtraRepo(
repos=[TEST_REPO_INFO], variants=[TEST_VARIANT_INFO],
bs_auth_token='fake_auth_token', bs_auth_token='fake_auth_token',
local_repository_path='/path/to/local/repo', local_repository_path='/path/to/local/repo',
clear_target_repo=False, clear_target_repo=False,

View File

@ -4,7 +4,11 @@ import os
from collections import defaultdict from collections import defaultdict
from unittest import TestCase, mock, main from unittest import TestCase, mock, main
from pungi.scripts.create_packages_json import PackagesGenerator, RepoInfo from pungi.scripts.create_packages_json import (
PackagesGenerator,
RepoInfo,
VariantInfo,
)
FOLDER_WITH_TEST_DATA = os.path.join( FOLDER_WITH_TEST_DATA = os.path.join(
os.path.dirname( os.path.dirname(
@ -16,8 +20,6 @@ FOLDER_WITH_TEST_DATA = os.path.join(
test_repo_info = RepoInfo( test_repo_info = RepoInfo(
path=FOLDER_WITH_TEST_DATA, path=FOLDER_WITH_TEST_DATA,
folder='test_repo', folder='test_repo',
name='TestRepo',
arch='x86_64',
is_remote=False, is_remote=False,
is_reference=True, is_reference=True,
) )
@ -25,11 +27,19 @@ test_repo_info = RepoInfo(
test_repo_info_2 = RepoInfo( test_repo_info_2 = RepoInfo(
path=FOLDER_WITH_TEST_DATA, path=FOLDER_WITH_TEST_DATA,
folder='test_repo_2', folder='test_repo_2',
name='TestRepo2',
arch='x86_64',
is_remote=False, is_remote=False,
is_reference=True, is_reference=True,
) )
variant_info_1 = VariantInfo(
name='TestRepo',
arch='x86_64',
repos=[test_repo_info]
)
variant_info_2 = VariantInfo(
name='TestRepo2',
arch='x86_64',
repos=[test_repo_info_2]
)
class TestPackagesJson(TestCase): class TestPackagesJson(TestCase):
@ -60,9 +70,9 @@ class TestPackagesJson(TestCase):
def test_02_generate_additional_packages(self): def test_02_generate_additional_packages(self):
pg = PackagesGenerator( pg = PackagesGenerator(
repos=[ variants=[
test_repo_info, variant_info_1,
test_repo_info_2, variant_info_2,
], ],
excluded_packages=['zziplib-utils'], excluded_packages=['zziplib-utils'],
included_packages=['vim-file*'], included_packages=['vim-file*'],

View File

@ -15,10 +15,13 @@ class TestGatherRpms(TestCase):
maxDiff = None maxDiff = None
FILES_TO_CREATE = [ FILES_TO_CREATE = [
'powertools/Packages/libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm', 'powertools/Packages/libvirt-6.0.0-28.module_el'
'8.3.0+555+a55c8938.i686.rpm',
'powertools/Packages/libgit2-devel-0.26.8-2.el8.x86_64.rpm', 'powertools/Packages/libgit2-devel-0.26.8-2.el8.x86_64.rpm',
'powertools/Packages/xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm', 'powertools/Packages/xalan-j2-2.7.1-38.module_el'
'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm', '8.0.0+30+832da3a1.noarch.rpm',
'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el'
'8.0.0+30+832da3a1.noarch.rpm',
'appstream/Packages/OpenEXR-devel-2.2.0-11.el8.i686.rpm', 'appstream/Packages/OpenEXR-devel-2.2.0-11.el8.i686.rpm',
'appstream/Packages/mingw-binutils-generic-2.30-1.el8.x86_64.rpm', 'appstream/Packages/mingw-binutils-generic-2.30-1.el8.x86_64.rpm',
'appstream/Packages/somenonrpm', 'appstream/Packages/somenonrpm',
@ -30,30 +33,51 @@ class TestGatherRpms(TestCase):
os.makedirs(PATH_TO_REPOS) os.makedirs(PATH_TO_REPOS)
for filepath in self.FILES_TO_CREATE: for filepath in self.FILES_TO_CREATE:
os.makedirs(os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)), exist_ok=True) os.makedirs(
os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)),
exist_ok=True,
)
open(os.path.join(PATH_TO_REPOS, filepath), 'w').close() open(os.path.join(PATH_TO_REPOS, filepath), 'w').close()
def test_gather_rpms(self): def test_gather_rpms(self):
self.assertEqual( self.assertEqual(
[Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686', [Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
path=f'{PATH_TO_REPOS}/powertools/Packages/' path=Path(
f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
f'libvirt-6.0.0-28.module_el'
f'8.3.0+555+a55c8938.i686.rpm'
)),
Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64', Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
path=f'{PATH_TO_REPOS}/powertools/Packages/' path=Path(
f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch', f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
path=f'{PATH_TO_REPOS}/powertools/Packages/' )),
f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'), Package(nvra='xalan-j2-2.7.1-38.module_el'
Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch', '8.0.0+30+832da3a1.noarch',
path='/path/to/repos/appstream/Packages/' path=Path(
'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
f'xalan-j2-2.7.1-38.module_el'
f'8.0.0+30+832da3a1.noarch.rpm'
)),
Package(nvra='bnd-maven-plugin-3.5.0-4.module_el'
'8.0.0+30+832da3a1.noarch',
path=Path(
'/path/to/repos/appstream/Packages/'
'bnd-maven-plugin-3.5.0-4.module_el'
'8.0.0+30+832da3a1.noarch.rpm'
)),
Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686', Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
path=f'{PATH_TO_REPOS}/appstream/Packages/' path=Path(
f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'), f'{PATH_TO_REPOS}/appstream/Packages/'
f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
)),
Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64', Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
path=f'{PATH_TO_REPOS}/appstream/Packages/' path=Path(
f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm')], f'{PATH_TO_REPOS}/appstream/Packages/'
search_rpms(PATH_TO_REPOS) f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
))
],
search_rpms(Path(PATH_TO_REPOS))
) )
def test_copy_rpms(self): def test_copy_rpms(self):
@ -61,23 +85,39 @@ class TestGatherRpms(TestCase):
packages = [ packages = [
Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686', Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
path=f'{PATH_TO_REPOS}/powertools/Packages/' path=Path(
f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
f'libvirt-6.0.0-28.module_el'
f'8.3.0+555+a55c8938.i686.rpm'
)),
Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64', Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
path=f'{PATH_TO_REPOS}/powertools/Packages/' path=Path(
f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
)),
Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch', Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch',
path=f'{PATH_TO_REPOS}/powertools/Packages/' path=Path(
f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'), f'{PATH_TO_REPOS}/powertools/Packages/'
Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch', f'xalan-j2-2.7.1-38.module_el'
path='/path/to/repos/appstream/Packages/' f'8.0.0+30+832da3a1.noarch.rpm'
'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'), )),
Package(nvra='bnd-maven-plugin-3.5.0-4.module_el'
'8.0.0+30+832da3a1.noarch',
path=Path(
'/path/to/repos/appstream/Packages/'
'bnd-maven-plugin-3.5.0-4.module_el'
'8.0.0+30+832da3a1.noarch.rpm'
)),
Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686', Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
path=f'{PATH_TO_REPOS}/appstream/Packages/' path=Path(
f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'), f'{PATH_TO_REPOS}/appstream/Packages/'
f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
)),
Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64', Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
path=f'{PATH_TO_REPOS}/appstream/Packages/' path=Path(
f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm') f'{PATH_TO_REPOS}/appstream/Packages/'
f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
))
] ]
copy_rpms(packages, target_path) copy_rpms(packages, target_path)

View File

@ -322,7 +322,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
) )
figure = re.compile( figure = re.compile(
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501 r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$", # noqa: E501
re.DOTALL, re.DOTALL,
) )
self.assertRegex(str(ctx.exception), figure) self.assertRegex(str(ctx.exception), figure)
@ -399,7 +399,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms) pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)
figure = re.compile( figure = re.compile(
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501 r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$", # noqa: E501
re.DOTALL, re.DOTALL,
) )
self.assertRegex(str(ctx.exception), figure) self.assertRegex(str(ctx.exception), figure)