ALBS-1032: Generate i686 section for all variants in packages.json #12

Merged
soksanichenko merged 1 commit from ALBS-1032 into al_master 2023-03-14 16:21:41 +00:00
1 changed file with 28 additions and 97 deletions


@@ -65,9 +65,20 @@ class RepoInfo:
# Only the layout of a specific package (one which doesn't exist
# in a reference repository) will be taken as an example
is_reference: bool = False
strict_arch: bool = False
class PackagesGenerator:
repo_arches = defaultdict(lambda: list(('noarch',)))
addon_repos = {
'x86_64': ['i686'],
'ppc64le': [],
'aarch64': [],
's390x': [],
'i686': [],
}
def __init__(
self,
repos: List[RepoInfo],
@@ -78,6 +89,9 @@ class PackagesGenerator:
self.excluded_packages = excluded_packages
self.included_packages = included_packages
self.tmp_files = []
for arch, arch_list in self.addon_repos.items():
self.repo_arches[arch].extend(arch_list)
self.repo_arches[arch].append(arch)
def __del__(self):
for tmp_file in self.tmp_files:
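For context, a minimal standalone sketch (not part of the diff) of what the new class-level tables resolve to once __init__ has run; the values are copied from the hunks above and the resulting lists follow directly from that loop:

from collections import defaultdict

# Class-level tables as added above
repo_arches = defaultdict(lambda: list(('noarch',)))
addon_repos = {
    'x86_64': ['i686'],
    'ppc64le': [],
    'aarch64': [],
    's390x': [],
    'i686': [],
}

# Loop from __init__: every repo arch serves 'noarch' (the defaultdict
# default), its addon arches (i686 for x86_64) and the arch itself.
for arch, arch_list in addon_repos.items():
    repo_arches[arch].extend(arch_list)
    repo_arches[arch].append(arch)

assert repo_arches['x86_64'] == ['noarch', 'i686', 'x86_64']
assert repo_arches['aarch64'] == ['noarch', 'aarch64']

This single table replaces the per-repository lists that previously special-cased x86_64 (and i386), so the i686 handling is now driven from one place.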
@@ -116,69 +130,6 @@ class PackagesGenerator:
"""
return cr.Repomd(repomd_file_path)
def _parse_primary_file(
self,
primary_file_path: AnyStr,
packages: Dict[AnyStr, cr.Package],
) -> None:
"""
Parse primary.xml.gz, extract package info from it and put it into
the packages dict
:param primary_file_path: path to a local primary.xml.gz
:param packages: dictionary which will be filled with info about
packages from the repository
"""
cr.xml_parse_primary(
path=primary_file_path,
pkgcb=lambda pkg: packages.update({
pkg.pkgId: pkg,
}),
do_files=False,
warningcb=self._warning_callback,
)
def _parse_filelists_file(
self,
filelists_file_path: AnyStr,
packages: Dict[AnyStr, cr.Package],
) -> None:
"""
Parse filelists.xml.gz, extract package info from it and put it into
the packages dict
:param filelists_file_path: path to a local filelists.xml.gz
:param packages: dictionary which will be filled with info about
packages from the repository
"""
cr.xml_parse_filelists(
path=filelists_file_path,
newpkgcb=lambda pkg_id, name, arch: packages.get(
pkg_id,
None,
),
warningcb=self._warning_callback,
)
def _parse_other_file(
self,
other_file_path: AnyStr,
packages: Dict[AnyStr, cr.Package],
) -> None:
"""
Parse other.xml.gz, extract package info from it and put it into
the packages dict
:param other_file_path: path to a local other.xml.gz
:param packages: dictionary which will be filled with info about
packages from the repository
"""
cr.xml_parse_other(
path=other_file_path,
newpkgcb=lambda pkg_id, name, arch: packages.get(
pkg_id,
None,
),
warningcb=self._warning_callback,
)
@classmethod
def _parse_modules_file(
cls,
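The three per-file parsers removed above are no longer needed because the generation loop below consumes packages through a single iterator (packages_iterator). A hedged sketch of what that consolidated parsing can look like, assuming createrepo_c's PackageIterator binding; the keyword arguments and file paths here are assumptions for illustration, not taken from this diff:

import createrepo_c as cr

def warning_callback(warning_type, message):
    # Stand-in for self._warning_callback
    print(f'repodata warning: {message}')
    return True

# Assumption: one iterator streams packages from primary/filelists/other
# in a single pass instead of three separate xml_parse_* calls.
packages_iterator = cr.PackageIterator(
    primary_path='repodata/primary.xml.gz',
    filelists_path='repodata/filelists.xml.gz',
    other_path='repodata/other.xml.gz',
    warningcb=warning_callback,
)
for package in packages_iterator:
    print(package.name, package.arch)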
@@ -312,15 +263,6 @@ class PackagesGenerator:
)
all_packages = defaultdict(lambda: {'variants': list()})
for repo_info in self.repos:
repo_arches = [
repo_info.arch,
'noarch',
]
if repo_info.arch == 'x86_64':
repo_arches.extend([
'i686',
'i386',
])
repomd_records = self._get_repomd_records(
repo_info=repo_info,
)
@@ -337,7 +279,7 @@ class PackagesGenerator:
warningcb=self._warning_callback,
)
for package in packages_iterator:
if package.arch not in repo_arches:
if package.arch not in self.repo_arches[repo_info.arch]:
package_arch = repo_info.arch
else:
package_arch = package.arch
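The check above now consults the shared repo_arches table instead of a list rebuilt for every repository. A small hypothetical helper (not in the PR) spelling out the effective mapping, assuming the table is populated as in the earlier sketch:

def effective_arch(package_arch, repo_arch, repo_arches):
    # A package whose arch is not served by this repository is
    # accounted under the repository's own arch.
    if package_arch not in repo_arches[repo_arch]:
        return repo_arch
    return package_arch

# With repo_arches['x86_64'] == ['noarch', 'i686', 'x86_64']:
#   effective_arch('i686', 'x86_64', repo_arches)   -> 'i686'
#   effective_arch('noarch', 'x86_64', repo_arches) -> 'noarch'
#   effective_arch('i386', 'x86_64', repo_arches)   -> 'x86_64'  (i386 is no longer listed)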
@@ -351,9 +293,9 @@ class PackagesGenerator:
continue
if package_key not in all_packages:
all_packages[package_key]['variants'].append(
repo_info.name
(repo_info.name, repo_info.arch)
)
all_packages[package_key]['arch'] = repo_info.arch
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
all_packages[package_key]['type'] = repo_info.is_reference
# replace an older package if it's not reference or
@@ -365,35 +307,24 @@ class PackagesGenerator:
package,
all_packages[package_key]['package']
) > 0:
all_packages[package_key]['variants'] = [repo_info.name]
all_packages[package_key]['arch'] = repo_info.arch
all_packages[package_key]['variants'] = [
(repo_info.name, repo_info.arch)
]
all_packages[package_key]['arch'] = package_arch
all_packages[package_key]['package'] = package
elif self.compare_pkgs_version(
package,
all_packages[package_key]['package']
) == 0:
all_packages[package_key]['variants'].append(
repo_info.name
(repo_info.name, repo_info.arch)
)
for package_dict in all_packages.values():
repo_arches = [
package_dict['arch'],
'noarch',
]
if package_dict['arch'] == 'x86_64':
repo_arches.extend([
'i686',
'i386',
])
for variant in package_dict['variants']:
repo_arch = package_dict['arch']
for variant_name, variant_arch in package_dict['variants']:
package_arch = package_dict['arch']
package = package_dict['package']
package_name = package.name
if package.arch not in repo_arches:
package_arch = package_dict['arch']
else:
package_arch = package.arch
if any(re.search(excluded_package, package_name)
for excluded_package in self.excluded_packages):
continue
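With the changes above, every all_packages entry now records its variants as (name, arch) pairs and keeps the arch that was actually resolved for the package, which is what the packages.json indexing in the next hunk relies on. An illustrative entry with hypothetical variant names and a placeholder for the cr.Package object:

# Shape of one all_packages entry after the collection pass (hypothetical values)
example_entry = {
    'variants': [('BaseOS', 'x86_64'), ('AppStream', 'x86_64')],
    'arch': 'i686',    # package_arch resolved in the loop above
    'package': None,   # the chosen cr.Package (newest per compare_pkgs_version)
    'type': False,     # repo_info.is_reference
}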
@@ -410,11 +341,11 @@ class PackagesGenerator:
)
else:
src_package_name = src_package_name[0].name
pkgs_list = packages_json[variant][
repo_arch][src_package_name]
pkgs_list = packages_json[variant_name][
variant_arch][src_package_name]
added_pkg = f'{package_name}.{package_arch}'
if added_pkg not in pkgs_list:
pkgs_list.append(added_pkg)
return packages_json
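Taken together, the lookups above fix the final layout of packages.json: variant name, then variant (repository) arch, then source package name, with 'name.arch' strings in the leaf lists. A sketch of the resulting structure with hypothetical variant and package names; the leaf entries keep the package's own arch, so i686 builds picked up through repo_arches['x86_64'] appear alongside the x86_64 ones:

# Resulting packages.json layout (hypothetical names)
packages_json = {
    'BaseOS': {                          # variant_name
        'x86_64': {                      # variant_arch, i.e. repo_info.arch
            'glibc': [                   # src_package_name
                'glibc.x86_64',
                'glibc.i686',            # kept because 'i686' is in repo_arches['x86_64']
                'glibc-devel.i686',
            ],
        },
    },
}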