ALBS-987: Automatically generate i686 and dev repositories with pungi when building a new distribution version #15
Changes to the packages.json generator (`class PackagesGenerator`):

```diff
@@ -15,6 +15,7 @@ import os
 import re
 import tempfile
 from collections import defaultdict
+from itertools import tee
 from pathlib import Path
 from typing import (
     AnyStr,
```
```diff
@@ -27,6 +28,8 @@ from typing import (
 )
 
 import binascii
+from urllib.parse import urljoin
+
 import createrepo_c as cr
 import dnf.subject
 import hawkey
```
```diff
@@ -98,6 +101,7 @@ class PackagesGenerator:
             included_packages: List[AnyStr],
     ):
         self.repos = repos
+        self.pkgs_iterators = dict()
         self.excluded_packages = excluded_packages
         self.included_packages = included_packages
         self.tmp_files = []
```
```diff
@@ -110,6 +114,19 @@ class PackagesGenerator:
             if os.path.exists(tmp_file):
                 os.remove(tmp_file)
 
+    @staticmethod
+    def _get_full_repo_path(repo_info: RepoInfo):
+        if repo_info.is_remote:
+            return urljoin(
+                repo_info.path + '/',
+                repo_info.folder,
+            )
+        else:
+            return os.path.join(
+                repo_info.path,
+                repo_info.folder
+            )
+
     @staticmethod
     def _warning_callback(warning_type, message):
         """
```
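One detail worth calling out in the new helper: `urljoin()` replaces the last path component of the base URL unless the base ends with a slash, which is why `repo_info.path` gets `'/'` appended first; the same trick is reused below when the remote `repomd.xml` URL is built. A quick illustration (the URL is made up):

```python
from urllib.parse import urljoin

# Without a trailing slash the last path component of the base is replaced:
print(urljoin('https://repo.example.com/almalinux/BaseOS', 'x86_64'))
# -> https://repo.example.com/almalinux/x86_64

# With the trailing slash appended, the folder is attached underneath,
# which is what _get_full_repo_path() relies on:
print(urljoin('https://repo.example.com/almalinux/BaseOS' + '/', 'x86_64'))
# -> https://repo.example.com/almalinux/BaseOS/x86_64
```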
```diff
@@ -151,7 +168,7 @@ class PackagesGenerator:
         """
         Parse modules.yaml.gz and returns parsed data
         :param modules_file_path: path to local modules.yaml.gz
-        :return: List of dict for each modules in a repo
+        :return: List of dict for each module in a repo
         """
 
         with open(modules_file_path, 'rb') as modules_file:
```
```diff
@@ -174,16 +191,22 @@ class PackagesGenerator:
         :param repo_info: structure which contains info about a current repo
         :return: list with repomd records
         """
-        repomd_file_path = os.path.join(
-            repo_info.path,
-            repo_info.folder,
-            'repodata',
-            'repomd.xml',
-        )
         if repo_info.is_remote:
-            repomd_file_path = self.get_remote_file_content(repomd_file_path)
+            repomd_file_path = urljoin(
+                urljoin(
+                    repo_info.path + '/',
+                    repo_info.folder
+                ) + '/',
+                'repodata/repomd.xml'
+            )
+            repomd_file_path = self.get_remote_file_content(repomd_file_path)
         else:
-            repomd_file_path = repomd_file_path
+            repomd_file_path = os.path.join(
+                repo_info.path,
+                repo_info.folder,
+                'repodata',
+                'repomd.xml',
+            )
         repomd_object = self._parse_repomd(repomd_file_path)
         if repo_info.is_remote:
             os.remove(repomd_file_path)
```
```diff
@@ -273,12 +296,19 @@ class PackagesGenerator:
                     )
                 )
             )
-        all_packages = defaultdict(lambda: {'variants': list()})
+        all_packages = defaultdict(lambda: {
+            'variants': list(),
+            'package_info': dict(),
+        })
         for repo_info in sorted(
                 self.repos,
                 key=lambda i: i.repo_type,
                 reverse=True,
         ):
+            full_repo_path = self._get_full_repo_path(repo_info)
+            if full_repo_path in self.pkgs_iterators:
+                pkgs_iterator = tee(self.pkgs_iterators[full_repo_path])
+            else:
                 repomd_records = self._get_repomd_records(
                     repo_info=repo_info,
                 )
```

soksanichenko marked this conversation as resolved.
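For context on the `sorted(..., key=lambda i: i.repo_type, reverse=True)` loop above: with the `repo_type` values used further down ('present' and 'absent'), reverse alphabetical order means 'present' repos are processed before 'absent' ones, so packages are first collected and only afterwards removed. A tiny illustration (`RepoInfo` here is a stand-in namedtuple and the repo names are made up):

```python
from collections import namedtuple

# Stand-in for the real RepoInfo structure, just enough for the ordering demo
RepoInfo = namedtuple('RepoInfo', ['name', 'repo_type'])

repos = [
    RepoInfo(name='BaseOS-absent', repo_type='absent'),
    RepoInfo(name='BaseOS', repo_type='present'),
]

# 'present' > 'absent' alphabetically, so reverse=True puts 'present' first
ordered = sorted(repos, key=lambda i: i.repo_type, reverse=True)
print([r.name for r in ordered])  # ['BaseOS', 'BaseOS-absent']
```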
```diff
@@ -288,18 +318,21 @@ class PackagesGenerator:
                     repomd_records=repomd_records,
                     repomd_records_dict=repomd_records_dict,
                 )
-            packages_iterator = PackageIterator(
-                primary_path=repomd_records_dict['primary'],
-                filelists_path=repomd_records_dict['filelists'],
-                other_path=repomd_records_dict['other'],
-                warningcb=self._warning_callback,
-            )
-            for package in packages_iterator:
+                pkgs_iterator = PackageIterator(
+                    primary_path=repomd_records_dict['primary'],
+                    filelists_path=repomd_records_dict['filelists'],
+                    other_path=repomd_records_dict['other'],
+                    warningcb=self._warning_callback,
+                )
+                self.pkgs_iterators[full_repo_path] = tee(pkgs_iterator)
+            for package in pkgs_iterator:
                 if package.arch not in self.repo_arches[repo_info.arch]:
                     package_arch = repo_info.arch
                 else:
                     package_arch = package.arch
                 package_key = f'{package.name}.{package_arch}'
+                package_variants = all_packages[package_key]['variants']
+                package_info = all_packages[package_key]['package_info']
                 if 'module' in package.release and not any(
                     re.search(included_package, package.name)
                     for included_package in self.included_packages
```

soksanichenko marked this conversation as resolved.
anfimovdm commented (outdated): same
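The point of the `pkgs_iterators` dict together with `itertools.tee` is to avoid re-parsing the same repository metadata when several `RepoInfo` entries resolve to the same repo path. A minimal sketch of that caching idea (not the PR's exact code; `parse_repo` and the path are made up stand-ins for `PackageIterator`):

```python
from itertools import tee

def parse_repo(repo_path):
    # Stand-in for PackageIterator: pretend parsing is expensive and the
    # resulting iterator can only be walked once.
    print(f'parsing {repo_path}')
    yield from ('bash-5.1', 'glibc-2.34', 'kernel-5.14')

pkgs_iterators = {}

def get_packages(repo_path):
    # First request parses the repo; later requests duplicate the cached
    # iterator with tee() instead of parsing the metadata again.
    if repo_path in pkgs_iterators:
        source = pkgs_iterators[repo_path]
    else:
        source = parse_repo(repo_path)
    pkgs_iterators[repo_path], result = tee(source)
    return result

print(list(get_packages('/repo/BaseOS')))  # prints 'parsing ...' once
print(list(get_packages('/repo/BaseOS')))  # served from the cached iterator
```

`tee()` duplicates an iterator so one copy can stay in the cache while the other is consumed.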
```diff
@@ -307,41 +340,41 @@ class PackagesGenerator:
                     # Even a module package will be added to packages.json if
                     # it presents in the list of included packages
                     continue
-                if package_key not in all_packages:
-                    all_packages[package_key]['variants'].append(
-                        (repo_info.name, repo_info.arch)
-                    )
-                    all_packages[package_key]['arch'] = package_arch
-                    all_packages[package_key]['package'] = package
-                    all_packages[package_key]['type'] = repo_info.is_reference
-                elif repo_info.repo_type == 'absent' and (repo_info.name, repo_info.arch) in all_packages[package_key]['variants']:
-                    all_packages[package_key]['variants'].remove((repo_info.name, repo_info.arch))
+                if repo_info.repo_type == 'present' and not package_info:
+                    package_variants.append((repo_info.name, repo_info.arch))
+                    package_info['arch'] = package_arch
+                    package_info['package'] = package
+                    package_info['type'] = repo_info.is_reference
+                elif repo_info.repo_type == 'absent' and \
+                        (repo_info.name, repo_info.arch) in package_variants:
+                    package_variants.remove((repo_info.name, repo_info.arch))
                 # replace an older package if it's not reference or
                 # a newer package is from reference repo
-                elif (not all_packages[package_key]['type'] or
-                        all_packages[package_key]['type'] ==
+                elif (not package_info['type'] or
+                        package_info['type'] ==
                         repo_info.is_reference) and \
                         self.compare_pkgs_version(
                             package,
-                            all_packages[package_key]['package']
-                        ) > 0:
+                            package_info['package']
+                        ) > 0 and repo_info.repo_type == 'present':
                     all_packages[package_key]['variants'] = [
                         (repo_info.name, repo_info.arch)
                     ]
-                    all_packages[package_key]['arch'] = package_arch
-                    all_packages[package_key]['package'] = package
+                    package_info['arch'] = package_arch
+                    package_info['package'] = package
                 elif self.compare_pkgs_version(
                         package,
-                        all_packages[package_key]['package']
-                ) == 0:
-                    all_packages[package_key]['variants'].append(
+                        package_info['package']
+                ) == 0 and repo_info.repo_type == 'present':
+                    package_variants.append(
                         (repo_info.name, repo_info.arch)
                     )
 
         for package_dict in all_packages.values():
             for variant_name, variant_arch in package_dict['variants']:
-                package_arch = package_dict['arch']
-                package = package_dict['package']
+                package_info = package_dict['package_info']
+                package_arch = package_info['arch']
+                package = package_info['package']
                 package_name = f'{package.name}.{package_arch}'
                 if any(re.search(excluded_package, package_name)
                        for excluded_package in self.excluded_packages):
```
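After this change each `all_packages` entry keeps the variant list separate from the package details, roughly like this (a sketch only; the variant names are invented, and `'package'` holds a createrepo_c package object in the real code):

```python
# Hypothetical shape of one all_packages[package_key] entry after the change
entry = {
    # every (variant name, arch) pair whose repo should list the package
    'variants': [('AppStream', 'x86_64'), ('AppStream-devel', 'x86_64')],
    'package_info': {
        'arch': 'x86_64',   # arch the package is filed under in packages.json
        'package': None,    # createrepo_c package object in the real code
        'type': True,       # repo_info.is_reference of the repo it came from
    },
}
```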
```diff
@@ -373,9 +406,8 @@ class PackagesGenerator:
                 # pkgs_list.append(added_pkg)
                 pkgs_list = packages_json[variant_name][
                     variant_arch][src_package_name]
```

anfimovdm commented:
wrong indents

soksanichenko commented:
Pycharm didn't agree with you)

anfimovdm commented:
btw, even if the linter doesn't show any warnings about indents, it looks weird, because indents for function arguments are supposed to be like this:
```python3
self.compare_pkgs_version(
    package_1=package,
    package_2=package_info['package'],
):
```
I prefer to use `black` for code formatting, here is an example of black formatting for those lines:
```python3
if "is_reference" not in package_info:
    package_info["variants"].append(variant_info.name)
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    not package_info["is_reference"]
    or package_info["is_reference"] == is_reference
    and self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    > 0
):
    package_info["variants"] = [variant_info.name]
    package_info["is_reference"] = is_reference
    package_info["package"] = package
elif (
    self.compare_pkgs_version(
        package_1=package,
        package_2=package_info["package"],
    )
    == 0
    and repo_info.repo_type != "absent"
):
    package_info["variants"].append(variant_info.name)
```

```diff
-                added_pkg = f'{package_name}.{package_arch}'
-                if added_pkg not in pkgs_list:
-                    pkgs_list.append(added_pkg)
+                if package_name not in pkgs_list:
+                    pkgs_list.append(package_name)
         return packages_json
 
 
```
A second script (the one defining `search_rpms` / `copy_rpms`) is changed as well:

```diff
@@ -1,6 +1,7 @@
 from argparse import ArgumentParser
 
 import os
+from glob import iglob
 from typing import List
 from pathlib import Path
 
@@ -22,9 +23,9 @@ def search_rpms(top_dir: Path) -> List[Package]:
         list: list of paths
     """
     return [Package(
-        nvra=path.stem,
-        path=path,
-    ) for path in top_dir.rglob('*.rpm')]
+        nvra=Path(path).stem,
+        path=Path(path),
+    ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]
 
 
 def copy_rpms(packages: List[Package], target_top_dir: Path):
```
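A note on the `search_rpms()` change: `Path.rglob('*.rpm')` yields `Path` objects, while `glob.iglob()` yields plain strings, which is why the comprehension now wraps each match in `Path()` before taking `.stem`. A small sketch of the same pattern (the directory is hypothetical):

```python
from glob import iglob
from pathlib import Path

top_dir = Path('/srv/build-results')  # hypothetical results directory

# iglob() returns strings, so each match is wrapped in Path() explicitly,
# mirroring what the updated search_rpms() does.
rpm_paths = [
    Path(path)
    for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)
]
print([p.stem for p in rpm_paths])
```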