ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically #15
@@ -25,19 +25,23 @@ from typing import (
     Iterator,
     Optional,
     Tuple,
+    Union,
 )

 import binascii
 from urllib.parse import urljoin

-import createrepo_c as cr
-import dnf.subject
-import hawkey
 import requests
 import rpm
 import yaml
-from createrepo_c import Package, PackageIterator
-from dataclasses import dataclass
+from createrepo_c import (
+    Package,
+    PackageIterator,
+    Repomd,
+    RepomdRecord,
+)
+from dataclasses import dataclass, field
+from kobo.rpmlib import parse_nvra

 logging.basicConfig(level=logging.INFO)

@@ -66,23 +70,33 @@ class RepoInfo:
     # 'appstream', 'baseos', etc.
     # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
     # using remote repo
-    path: AnyStr
+    path: str
     # name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc
-    folder: AnyStr
-    # name of repo. E.g. 'BaseOS', 'AppStream', etc
-    name: AnyStr
-    # architecture of repo. E.g. 'x86_64', 'i686', etc
-    arch: AnyStr
+    folder: str
     # Is a repo remote or local
     is_remote: bool
     # Is a reference repository (usually it's a RHEL repo)
     # Layout of packages from such repository will be taken as example
-    # Only layout of specific package (which don't exist
+    # Only layout of specific package (which doesn't exist
     # in a reference repository) will be taken as example
     is_reference: bool = False
+    # The packages from 'present' repo will be added to a variant.
+    # The packages from 'absent' repo will be removed from a variant.
     repo_type: str = 'present'


+@dataclass
+class VariantInfo:
+    # name of variant. E.g. 'BaseOS', 'AppStream', etc
+    name: AnyStr
+    # architecture of variant. E.g. 'x86_64', 'i686', etc
+    arch: AnyStr
+    # The packages which will be not added to a variant
+    excluded_packages: List[str] = field(default_factory=list)
+    # Repos of a variant
+    repos: List[RepoInfo] = field(default_factory=list)
+
+
 class PackagesGenerator:

     repo_arches = defaultdict(lambda: list(('noarch',)))
@@ -96,12 +110,12 @@ class PackagesGenerator:

     def __init__(
             self,
-            repos: List[RepoInfo],
+            variants: List[VariantInfo],
             excluded_packages: List[AnyStr],
             included_packages: List[AnyStr],
     ):
-        self.repos = repos
-        self.pkgs_iterators = dict()
+        self.variants = variants
+        self.pkgs = dict()
         self.excluded_packages = excluded_packages
         self.included_packages = included_packages
         self.tmp_files = []
@@ -152,12 +166,12 @@ class PackagesGenerator:
         return file_stream.name

     @staticmethod
-    def _parse_repomd(repomd_file_path: AnyStr) -> cr.Repomd:
+    def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
         """
         Parse file repomd.xml and create object Repomd
         :param repomd_file_path: path to local repomd.xml
         """
-        return cr.Repomd(repomd_file_path)
+        return Repomd(repomd_file_path)

     @classmethod
     def _parse_modules_file(
@@ -185,7 +199,7 @@ class PackagesGenerator:
     def _get_repomd_records(
             self,
             repo_info: RepoInfo,
-    ) -> List[cr.RepomdRecord]:
+    ) -> List[RepomdRecord]:
         """
         Get, parse file repomd.xml and extract from it repomd records
         :param repo_info: structure which contains info about a current repo
@@ -215,7 +229,7 @@ class PackagesGenerator:
     def _download_repomd_records(
             self,
             repo_info: RepoInfo,
-            repomd_records: List[cr.RepomdRecord],
+            repomd_records: List[RepomdRecord],
             repomd_records_dict: Dict[str, str],
     ):
         """
@@ -245,13 +259,12 @@ class PackagesGenerator:
     def _parse_module_repomd_record(
             self,
             repo_info: RepoInfo,
-            repomd_records: List[cr.RepomdRecord],
+            repomd_records: List[RepomdRecord],
     ) -> List[Dict]:
         """
         Download repomd records
         :param repo_info: structure which contains info about a current repo
         :param repomd_records: list with repomd records
-        :param repomd_records_dict: dict with paths to repodata files
         """
         for repomd_record in repomd_records:
             if repomd_record.type != 'modules':
@@ -283,132 +296,136 @@ class PackagesGenerator:
         )
         return rpm.labelCompare(version_tuple_1, version_tuple_2)

+    def get_packages_iterator(
+            self,
+            repo_info: RepoInfo,
+    ) -> Union[PackageIterator, Iterator]:
+        full_repo_path = self._get_full_repo_path(repo_info)
+        if full_repo_path in self.pkgs:
+            return self.pkgs[full_repo_path]
+        else:
+            repomd_records = self._get_repomd_records(
+                repo_info=repo_info,
+            )
+            repomd_records_dict = {}  # type: Dict[str, str]
+            self._download_repomd_records(
+                repo_info=repo_info,
+                repomd_records=repomd_records,
+                repomd_records_dict=repomd_records_dict,
+            )
+            pkgs_iterator = PackageIterator(
+                primary_path=repomd_records_dict['primary'],
+                filelists_path=repomd_records_dict['filelists'],
+                other_path=repomd_records_dict['other'],
+                warningcb=self._warning_callback,
+            )
+            pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
+            return pkgs_iterator
+
+    def get_package_arch(
+            self,
+            package: Package,
+            variant_arch: str,
+    ) -> str:
+        if package.arch in self.repo_arches[variant_arch]:
+            return package.arch
+        else:
soksanichenko marked this conversation as resolved (Outdated)
anfimovdm commented:
same
+            return variant_arch
+
+    def is_skipped_module_package(self, package: Package) -> bool:
+        # Even a module package will be added to packages.json if
+        # it presents in the list of included packages
+        return 'module' in package.release and not any(
+            re.search(included_package, package.name)
+            for included_package in self.included_packages
+        )
+
+    def is_excluded_package(
+            self,
+            package: Package,
+            variant_arch: str,
+            excluded_packages: List[str],
+    ) -> bool:
+        return any(
+            re.search(
+                excluded_pkg,
+                self.get_package_key(package, variant_arch),
+            ) for excluded_pkg in excluded_packages
+        )
+
+    @staticmethod
+    def get_source_rpm_name(package: Package) -> str:
+        source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
+        return source_rpm_nvra['name']
+
+    def get_package_key(self, package: Package, variant_arch: str) -> str:
+        return (
+            f'{package.name}.'
+            f'{self.get_package_arch(package, variant_arch)}'
+        )
+
     def generate_packages_json(
             self
     ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
         """
         Generate packages.json
         """
-        packages_json = defaultdict(
-            lambda: defaultdict(
-                lambda: defaultdict(
-                    list,
-                )
-            )
-        )
-        all_packages = defaultdict(lambda: {
+        packages = defaultdict(lambda: defaultdict(lambda: {
             'variants': list(),
-            'package_info': dict(),
-        })
-        for repo_info in sorted(
-            self.repos,
-            key=lambda i: i.repo_type,
-            reverse=True,
-        ):
-            full_repo_path = self._get_full_repo_path(repo_info)
+        }))
+        for variant_info in self.variants:
+            for repo_info in variant_info.repos:
+                is_reference = repo_info.is_reference
+                for package in self.get_packages_iterator(repo_info=repo_info):
+                    if self.is_skipped_module_package(package):
+                        continue
+                    if self.is_excluded_package(
anfimovdm marked this conversation as resolved (Outdated)
anfimovdm commented:
those `if` statements can be united
soksanichenko commented:
It's split for more readable code.
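For reference, a united form of these checks might look roughly like the sketch below; it simply chains the module check and the two exclusion checks that the PR keeps as separate `if` statements, and is not part of the change itself.
```python3
# Sketch only (not applied in this PR): the three checks joined into one.
if (
    self.is_skipped_module_package(package)
    or self.is_excluded_package(
        package, variant_info.arch, self.excluded_packages,
    )
    or self.is_excluded_package(
        package, variant_info.arch, variant_info.excluded_packages,
    )
):
    continue
```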
-            if full_repo_path in self.pkgs_iterators:
-                pkgs_iterator = tee(self.pkgs_iterators[full_repo_path])
-            else:
-                repomd_records = self._get_repomd_records(
-                    repo_info=repo_info,
-                )
-                repomd_records_dict = {}  # type: Dict[str, str]
-                self._download_repomd_records(
-                    repo_info=repo_info,
-                    repomd_records=repomd_records,
-                    repomd_records_dict=repomd_records_dict,
-                )
-                pkgs_iterator = PackageIterator(
-                    primary_path=repomd_records_dict['primary'],
-                    filelists_path=repomd_records_dict['filelists'],
-                    other_path=repomd_records_dict['other'],
-                    warningcb=self._warning_callback,
-                )
-                self.pkgs_iterators[full_repo_path] = tee(pkgs_iterator)
-            for package in pkgs_iterator:
-                if package.arch not in self.repo_arches[repo_info.arch]:
-                    package_arch = repo_info.arch
-                else:
-                    package_arch = package.arch
-                package_key = f'{package.name}.{package_arch}'
-                package_variants = all_packages[package_key]['variants']
-                package_info = all_packages[package_key]['package_info']
-                if 'module' in package.release and not any(
-                        re.search(included_package, package.name)
-                        for included_package in self.included_packages
-                ):
-                    # Even a module package will be added to packages.json if
-                    # it presents in the list of included packages
-                    continue
-                if repo_info.repo_type == 'present' and not package_info:
-                    package_variants.append((repo_info.name, repo_info.arch))
-                    package_info['arch'] = package_arch
-                    package_info['package'] = package
-                    package_info['type'] = repo_info.is_reference
-                elif repo_info.repo_type == 'absent' and \
-                        (repo_info.name, repo_info.arch) in package_variants:
-                    package_variants.remove((repo_info.name, repo_info.arch))
-                # replace an older package if it's not reference or
-                # a newer package is from reference repo
-                elif (not package_info['type'] or
-                        package_info['type'] ==
-                        repo_info.is_reference) and \
-                        self.compare_pkgs_version(
-                            package,
-                            package_info['package']
-                        ) > 0 and repo_info.repo_type == 'present':
-                    all_packages[package_key]['variants'] = [
-                        (repo_info.name, repo_info.arch)
-                    ]
-                    package_info['arch'] = package_arch
-                    package_info['package'] = package
-                elif self.compare_pkgs_version(
-                            package,
-                            package_info['package']
-                ) == 0 and repo_info.repo_type == 'present':
-                    package_variants.append(
-                        (repo_info.name, repo_info.arch)
+                        package,
+                        variant_info.arch,
+                        self.excluded_packages,
+                    ):
+                        continue
+                    if self.is_excluded_package(
+                        package,
+                        variant_info.arch,
+                        variant_info.excluded_packages,
+                    ):
+                        continue
+                    package_key = self.get_package_key(
+                        package,
+                        variant_info.arch,
                     )
-        for package_dict in all_packages.values():
-            for variant_name, variant_arch in package_dict['variants']:
-                package_info = package_dict['package_info']
-                package_arch = package_info['arch']
-                package = package_info['package']
-                package_name = f'{package.name}.{package_arch}'
-                if any(re.search(excluded_package, package_name)
-                       for excluded_package in self.excluded_packages):
-                    continue
-                src_package_name = dnf.subject.Subject(
+                    source_rpm_name = self.get_source_rpm_name(package)
+                    package_info = packages[source_rpm_name][package_key]
+                    if 'is_reference' not in package_info:
+                        package_info['variants'].append(variant_info.name)
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif not package_info['is_reference'] or \
+                            package_info['is_reference'] == is_reference and \
+                            self.compare_pkgs_version(
+                                package_1=package,
+                                package_2=package_info['package'],
anfimovdm commented:
wrong indents
soksanichenko commented:
PyCharm didn't agree with you)
anfimovdm commented:
btw, even if the linter doesn't show any warnings about indents, it looks weird, because indents for function arguments are supposed to be like this:
```python3
self.compare_pkgs_version(
    package_1=package,
    package_2=package_info['package'],
):
```
I prefer to use `black` for code formatting, here is an example of black formatting for those lines:
```python3
if "is_reference" not in package_info:
    package_info["variants"].append(variant_info.name)
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    not package_info["is_reference"]
    or package_info["is_reference"] == is_reference
    and self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    > 0
):
    package_info["variants"] = [variant_info.name]
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    == 0
    and repo_info.repo_type != "absent"
):
    package_info["variants"].append(variant_info.name)
```
-                    package.rpm_sourcerpm,
-                ).get_nevra_possibilities(
-                    forms=hawkey.FORM_NEVRA,
-                )
-                if len(src_package_name) > 1:
-                    # We should stop utility if we can't get exact name of srpm
-                    raise ValueError(
-                        'We can\'t get exact name of srpm '
-                        f'by its NEVRA "{package.rpm_sourcerpm}"'
-                    )
-                else:
-                    src_package_name = src_package_name[0].name
-                # TODO: for x86_64 + i686 in one packages.json
-                # don't remove!
-                # if package.arch in self.addon_repos[variant_arch]:
-                #     arches = self.addon_repos[variant_arch] + [variant_arch]
-                # else:
-                #     arches = [variant_arch]
-                # for arch in arches:
-                #     pkgs_list = packages_json[variant_name][
-                #         arch][src_package_name]
-                #     added_pkg = f'{package_name}.{package_arch}'
-                #     if added_pkg not in pkgs_list:
-                #         pkgs_list.append(added_pkg)
-                pkgs_list = packages_json[variant_name][
-                    variant_arch][src_package_name]
-                if package_name not in pkgs_list:
-                    pkgs_list.append(package_name)
-        return packages_json
+                            ) > 0:
+                        package_info['variants'] = [variant_info.name]
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif self.compare_pkgs_version(
+                        package_1=package,
+                        package_2=package_info['package'],
+                    ) == 0 and repo_info.repo_type != 'absent':
+                        package_info['variants'].append(variant_info.name)
+        result = defaultdict(lambda: defaultdict(
+            lambda: defaultdict(list),
+        ))
+        for variant_info in self.variants:
+            for source_rpm_name, packages_info in packages.items():
+                for package_key, package_info in packages_info.items():
+                    variant_pkgs = result[variant_info.name][variant_info.arch]
+                    if variant_info.name not in package_info['variants']:
+                        continue
+                    variant_pkgs[source_rpm_name].append(package_key)
+        return result


 def create_parser():
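For orientation, the mapping returned by the reworked `generate_packages_json()` above is keyed by variant name, then variant arch, then source RPM name, with lists of `name.arch` package keys as leaves. A hypothetical fragment (the package names here are illustrative only) would look like:
```python3
# Hypothetical fragment of the returned packages.json structure;
# the actual content depends entirely on the configured repos.
{
    'AppStream': {
        'x86_64': {
            '389-ds-base': [
                '389-ds-base.x86_64',
                '389-ds-base-libs.x86_64',
            ],
        },
    },
}
```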
@@ -441,32 +458,34 @@ def read_config(config_path: Path) -> Optional[Dict]:


 def process_config(config_data: Dict) -> Tuple[
-    List[RepoInfo],
+    List[VariantInfo],
     List[str],
     List[str],
 ]:
     excluded_packages = config_data.get('excluded_packages', [])
     included_packages = config_data.get('included_packages', [])
-    repos = [RepoInfo(
-        path=variant_repo['path'],
-        folder=variant_repo['folder'],
+    variants = [VariantInfo(
         name=variant_name,
-        arch=variant_repo['arch'],
-        is_remote=variant_repo['remote'],
-        is_reference=variant_repo['reference'],
+        arch=variant_info['arch'],
+        excluded_packages=variant_info.get('excluded_packages', []),
+        repos=[RepoInfo(
soksanichenko marked this conversation as resolved
anfimovdm commented:
you can unpack dicts instead of passing keyword arguments:
```python3
VariantInfo(**variant_info)
```
soksanichenko commented:
Yes, I know, but that variant is more readable.
-        repo_type=variant_repo.get('repo_type', 'present'),
-    ) for variant_name, variant_repos in config_data['variants'].items()
-        for variant_repo in variant_repos]
-    return repos, excluded_packages, included_packages
+            path=variant_repo['path'],
+            folder=variant_repo['folder'],
+            is_remote=variant_repo['remote'],
+            is_reference=variant_repo['reference'],
+            repo_type=variant_repo.get('repo_type', 'present'),
+        ) for variant_repo in variant_info['repos']]
+    ) for variant_name, variant_info in config_data['variants'].items()]
+    return variants, excluded_packages, included_packages


 def cli_main():
     args = create_parser().parse_args()
-    repos, excluded_packages, included_packages = process_config(
+    variants, excluded_packages, included_packages = process_config(
         config_data=read_config(args.config)
     )
     pg = PackagesGenerator(
-        repos=repos,
+        variants=variants,
         excluded_packages=excluded_packages,
         included_packages=included_packages,
     )
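To tie the pieces together, the reworked `process_config()` expects the config (as returned by `read_config()`) to group repos under named variants. A minimal sketch of such a config, shown here as the equivalent Python dict with purely illustrative values, might be:
```python3
# Minimal sketch of config_data as process_config() consumes it.
# Field names come from the code above; the values are illustrative only.
config_data = {
    'excluded_packages': ['.*-debuginfo'],
    'included_packages': ['some-module-package'],
    'variants': {
        'BaseOS': {
            'arch': 'x86_64',
            'excluded_packages': [],
            'repos': [
                {
                    'path': 'http://koji.cloudlinux.com/mirrors/rhel_mirror',
                    'folder': 'baseos',
                    'remote': True,
                    'reference': True,
                    # 'present' (the default) adds packages to the variant,
                    # 'absent' removes them from it
                    'repo_type': 'present',
                },
            ],
        },
    },
}
```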
|
Loading…
Reference in New Issue
Block a user
same