ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically #15
@@ -15,6 +15,7 @@ import os
import re
import tempfile
from collections import defaultdict
from itertools import tee
from pathlib import Path
from typing import (
AnyStr,
@@ -27,6 +28,8 @@ from typing import (
)

import binascii
from urllib.parse import urljoin

import createrepo_c as cr
import dnf.subject
import hawkey
@@ -98,6 +101,7 @@ class PackagesGenerator:
included_packages: List[AnyStr],
):
self.repos = repos
self.pkgs_iterators = dict()
self.excluded_packages = excluded_packages
self.included_packages = included_packages
self.tmp_files = []
@@ -110,6 +114,19 @@ class PackagesGenerator:
if os.path.exists(tmp_file):
os.remove(tmp_file)

@staticmethod
def _get_full_repo_path(repo_info: RepoInfo):
if repo_info.is_remote:
return urljoin(
repo_info.path + '/',
repo_info.folder,
)
else:
return os.path.join(
repo_info.path,
repo_info.folder
)

@staticmethod
def _warning_callback(warning_type, message):
"""
@@ -151,7 +168,7 @@ class PackagesGenerator:
"""
Parse modules.yaml.gz and returns parsed data
:param modules_file_path: path to local modules.yaml.gz
:return: List of dict for each modules in a repo
:return: List of dict for each module in a repo
"""

with open(modules_file_path, 'rb') as modules_file:
@@ -174,16 +191,22 @@ class PackagesGenerator:
:param repo_info: structure which contains info about a current repo
:return: list with repomd records
"""
repomd_file_path = os.path.join(
repo_info.path,
repo_info.folder,
'repodata',
'repomd.xml',
)
if repo_info.is_remote:
repomd_file_path = urljoin(
urljoin(
repo_info.path + '/',
repo_info.folder
) + '/',
'repodata/repomd.xml'
)
repomd_file_path = self.get_remote_file_content(repomd_file_path)
else:
repomd_file_path = repomd_file_path
repomd_file_path = os.path.join(
repo_info.path,
repo_info.folder,
'repodata',
'repomd.xml',
)
repomd_object = self._parse_repomd(repomd_file_path)
if repo_info.is_remote:
os.remove(repomd_file_path)
@@ -273,33 +296,43 @@ class PackagesGenerator:
)
)
)
all_packages = defaultdict(lambda: {'variants': list()})
all_packages = defaultdict(lambda: {
'variants': list(),
'package_info': dict(),
})
for repo_info in sorted(
self.repos,
key=lambda i: i.repo_type,
reverse=True,
):
repomd_records = self._get_repomd_records(
repo_info=repo_info,
)
repomd_records_dict = {} # type: Dict[str, str]
self._download_repomd_records(
repo_info=repo_info,
repomd_records=repomd_records,
repomd_records_dict=repomd_records_dict,
)
packages_iterator = PackageIterator(
primary_path=repomd_records_dict['primary'],
filelists_path=repomd_records_dict['filelists'],
other_path=repomd_records_dict['other'],
warningcb=self._warning_callback,
)
for package in packages_iterator:
full_repo_path = self._get_full_repo_path(repo_info)
if full_repo_path in self.pkgs_iterators:
pkgs_iterator = tee(self.pkgs_iterators[full_repo_path])
else:
repomd_records = self._get_repomd_records(
repo_info=repo_info,
)
repomd_records_dict = {} # type: Dict[str, str]
self._download_repomd_records(
repo_info=repo_info,
repomd_records=repomd_records,
repomd_records_dict=repomd_records_dict,
)
pkgs_iterator = PackageIterator(
primary_path=repomd_records_dict['primary'],
filelists_path=repomd_records_dict['filelists'],
other_path=repomd_records_dict['other'],
warningcb=self._warning_callback,
)
self.pkgs_iterators[full_repo_path] = tee(pkgs_iterator)
for package in pkgs_iterator:
if package.arch not in self.repo_arches[repo_info.arch]:
package_arch = repo_info.arch
else:
package_arch = package.arch
package_key = f'{package.name}.{package_arch}'
package_variants = all_packages[package_key]['variants']
package_info = all_packages[package_key]['package_info']
if 'module' in package.release and not any(
re.search(included_package, package.name)
for included_package in self.included_packages
@@ -307,41 +340,41 @@ class PackagesGenerator:
# Even a module package will be added to packages.json if
# it presents in the list of included packages
continue
if package_key not in all_packages:
all_packages[package_key]['variants'].append(
(repo_info.name, repo_info.arch)
)
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
all_packages[package_key]['type'] = repo_info.is_reference
elif repo_info.repo_type == 'absent' and (repo_info.name, repo_info.arch) in all_packages[package_key]['variants']:
all_packages[package_key]['variants'].remove((repo_info.name, repo_info.arch))
if repo_info.repo_type == 'present' and not package_info:
package_variants.append((repo_info.name, repo_info.arch))
package_info['arch'] = package_arch
package_info['package'] = package
package_info['type'] = repo_info.is_reference
elif repo_info.repo_type == 'absent' and \
(repo_info.name, repo_info.arch) in package_variants:
package_variants.remove((repo_info.name, repo_info.arch))
# replace an older package if it's not reference or
# a newer package is from reference repo
elif (not all_packages[package_key]['type'] or
all_packages[package_key]['type'] ==
elif (not package_info['type'] or
package_info['type'] ==
repo_info.is_reference) and \
self.compare_pkgs_version(
package,
all_packages[package_key]['package']
) > 0:
package_info['package']
) > 0 and repo_info.repo_type == 'present':
all_packages[package_key]['variants'] = [
(repo_info.name, repo_info.arch)
]
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
package_info['arch'] = package_arch
package_info['package'] = package
elif self.compare_pkgs_version(
package,
all_packages[package_key]['package']
) == 0:
all_packages[package_key]['variants'].append(
package_info['package']
) == 0 and repo_info.repo_type == 'present':
package_variants.append(
(repo_info.name, repo_info.arch)
)

for package_dict in all_packages.values():
for variant_name, variant_arch in package_dict['variants']:
package_arch = package_dict['arch']
package = package_dict['package']
package_info = package_dict['package_info']
package_arch = package_info['arch']
package = package_info['package']
package_name = f'{package.name}.{package_arch}'
if any(re.search(excluded_package, package_name)
for excluded_package in self.excluded_packages):
@@ -373,9 +406,8 @@ class PackagesGenerator:
# pkgs_list.append(added_pkg)
pkgs_list = packages_json[variant_name][
variant_arch][src_package_name]
anfimovdm commented:
wrong indents

soksanichenko commented:
Pycharm didn't agree with you)
anfimovdm commented:
btw, even if the linter doesn't show any warnings about indents, it looks weird, because indents for function arguments are supposed to be like this:
```python3
self.compare_pkgs_version(
    package_1=package,
    package_2=package_info['package'],
):
```
I prefer to use `black` for code formatting; here is an example of black formatting for those lines:
```python3
if "is_reference" not in package_info:
    package_info["variants"].append(variant_info.name)
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    not package_info["is_reference"]
    or package_info["is_reference"] == is_reference
    and self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    > 0
):
    package_info["variants"] = [variant_info.name]
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    == 0
    and repo_info.repo_type != "absent"
):
    package_info["variants"].append(variant_info.name)
```
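As a concrete illustration (not taken from the PR), this is roughly how `black` with default settings would lay out the nested `urljoin` call from `_get_repomd_records` above; the exact result may differ slightly between black versions and configured line lengths, and the repo path/folder values are hypothetical stand-ins.

```python3
# hedged sketch: approximate black output for the nested urljoin call
from urllib.parse import urljoin

repo_path = "https://repo.example.org/almalinux/8"  # hypothetical values
repo_folder = "BaseOS/x86_64/os"

repomd_file_path = urljoin(
    urljoin(repo_path + "/", repo_folder) + "/",
    "repodata/repomd.xml",
)
print(repomd_file_path)
# https://repo.example.org/almalinux/8/BaseOS/x86_64/os/repodata/repomd.xml
```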
added_pkg = f'{package_name}.{package_arch}'
if added_pkg not in pkgs_list:
pkgs_list.append(added_pkg)
if package_name not in pkgs_list:
pkgs_list.append(package_name)
return packages_json
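To make the data shape used above easier to follow: `all_packages` is a `defaultdict` keyed by `'{name}.{arch}'`, and every value carries a list of `(variant_name, variant_arch)` tuples plus a `package_info` dict. A minimal standalone sketch; the sample key and variant (`bash.x86_64`, `AppStream`) are invented purely for illustration.

```python3
from collections import defaultdict

# same default factory as in the PR: new keys start with empty variants/package_info
all_packages = defaultdict(lambda: {
    'variants': list(),
    'package_info': dict(),
})

# hypothetical fill, mirroring what the generator records per package
entry = all_packages['bash.x86_64']
entry['variants'].append(('AppStream', 'x86_64'))
entry['package_info'].update({'arch': 'x86_64', 'type': True})

print(dict(all_packages))
# {'bash.x86_64': {'variants': [('AppStream', 'x86_64')],
#                  'package_info': {'arch': 'x86_64', 'type': True}}}
```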
@@ -1,6 +1,7 @@
from argparse import ArgumentParser

import os
from glob import iglob
from typing import List
from pathlib import Path

@@ -22,9 +23,9 @@ def search_rpms(top_dir: Path) -> List[Package]:
list: list of paths
"""
return [Package(
nvra=path.stem,
path=path,
) for path in top_dir.rglob('*.rpm')]
nvra=Path(path).stem,
path=Path(path),
) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]
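For reference, a self-contained sketch of the two search styles this hunk swaps between: `Path.rglob` before, `glob.iglob` with `recursive=True` after. The `Package` namedtuple below is a hypothetical stand-in for the project's real `Package` type, included only so the example runs on its own.

```python3
from glob import iglob
from pathlib import Path
from typing import List, NamedTuple


class Package(NamedTuple):
    # hypothetical stand-in for the real Package model used by the script
    nvra: str
    path: Path


def search_rpms_rglob(top_dir: Path) -> List[Package]:
    # old variant: pathlib performs the recursive walk itself
    return [Package(nvra=p.stem, path=p) for p in top_dir.rglob('*.rpm')]


def search_rpms_iglob(top_dir: Path) -> List[Package]:
    # new variant: glob.iglob over '**/*.rpm' with recursive=True
    return [
        Package(nvra=Path(p).stem, path=Path(p))
        for p in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)
    ]
```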

def copy_rpms(packages: List[Package], target_top_dir: Path):