ALBS-97: Build AlmaLinux PPC64le repos and ISOs with pungi

- Scripts `create_packages_json` & `gather_modules` can process lzma-compressed (xz) YAML files as well as gzip-compressed ones (see the detection sketch below)
- Script `create_packages_json` can use repodata that contains packages whose arch differs from the arch passed to the script: each package arch is compared with the repo arch and mapped to it when it does not belong to that repo (see the arch-mapping sketch below)
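
Both scripts now decide how to decompress by sniffing the payload's first two bytes instead of trusting the file extension. A minimal sketch of that check, mirroring the `is_gzip_file`/`is_xz_file` helpers added to `gather_modules` below:

```python
import binascii


def is_gzip_file(first_two_bytes: bytes) -> bool:
    # gzip streams start with the magic bytes 1f 8b
    return binascii.hexlify(first_two_bytes) == b'1f8b'


def is_xz_file(first_two_bytes: bytes) -> bool:
    # xz (lzma) streams start with the magic bytes fd 37
    return binascii.hexlify(first_two_bytes) == b'fd37'


# b'\x1f\x8b' opens a gzip stream, b'\xfd7zXZ\x00' opens an xz stream
assert is_gzip_file(b'\x1f\x8b') and is_xz_file(b'\xfd\x37')
```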

@BS-NOBUILD
@BS-TARGET-CL8

Change-Id: Ia9a3bacfa4344f0cf33b9f416649fd4a5f8d3c37
soksanichenko 2021-12-28 12:53:46 +02:00
parent 58a16e5688
commit b9d86b90e1
2 changed files with 63 additions and 6 deletions


@ -9,6 +9,7 @@ https://github.com/rpm-software-management/createrepo_c/blob/master/examples/pyt
 import argparse
 import gzip
 import json
+import lzma
 import os
 import re
 import tempfile
@ -24,6 +25,7 @@ import yaml
 from createrepo_c import Package
 from dataclasses import dataclass
+from .gather_modules import is_gzip_file, is_xz_file


 @dataclass
 class RepoInfo:
@ -153,8 +155,9 @@ class PackagesGenerator:
             warningcb=self._warning_callback,
         )

-    @staticmethod
+    @classmethod
     def _parse_modules_file(
+            cls,
             modules_file_path: AnyStr,
     ) -> List[Dict]:
@ -163,10 +166,15 @@ class PackagesGenerator:
         :param modules_file_path: path to local modules.yaml.gz
         :return: List of dict for an each modules in a repo
         """
         with open(modules_file_path, 'rb') as modules_file:
-            uncompressed_data = gzip.decompress(modules_file.read())
+            data = modules_file.read()
+            if is_gzip_file(data[:2]):
+                data = gzip.decompress(data)
+            elif is_xz_file(data[:2]):
+                data = lzma.decompress(data)
         return yaml.load_all(
-            uncompressed_data,
+            data,
             Loader=yaml.BaseLoader,
         )
@ -273,6 +281,15 @@ class PackagesGenerator:
         )
         all_packages = defaultdict(lambda: {'variants': list()})
         for repo_info in self.repos:
+            repo_arches = [
+                repo_info.arch,
+                'noarch',
+            ]
+            if repo_info.arch == 'x86_64':
+                repo_arches.extend([
+                    'i686',
+                    'i386',
+                ])
             packages = {}  # type: Dict[AnyStr, cr.Package]
             repomd_records = self._get_repomd_records(
                 repo_info=repo_info,
@ -283,7 +300,11 @@ class PackagesGenerator:
                 packages=packages,
             )
             for package in packages.values():
-                package_key = f'{package.name}.{package.arch}'
+                if package.arch not in repo_arches:
+                    package_arch = repo_info.arch
+                else:
+                    package_arch = package.arch
+                package_key = f'{package.name}.{package_arch}'
                 if 'module' in package.release and not any(
                         re.search(included_package, package.name)
                         for included_package in self.included_packages
@ -319,10 +340,22 @@ class PackagesGenerator:
             )
         for package_dict in all_packages.values():
+            repo_arches = [
+                package_dict['arch'],
+                'noarch',
+            ]
+            if package_dict['arch'] == 'x86_64':
+                repo_arches.extend([
+                    'i686',
+                    'i386',
+                ])
             for variant in package_dict['variants']:
                 repo_arch = package_dict['arch']
                 package = package_dict['package']
                 package_name = package.name
+                if package.arch not in repo_arches:
+                    package_arch = package_dict['arch']
+                else:
+                    package_arch = package.arch
                 if any(re.search(excluded_package, package_name)
                        for excluded_package in self.excluded_packages):
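
The arch handling from the second bullet of the commit message, as a standalone sketch (the helper names `allowed_arches` and `package_key` are illustrative, not part of the patch): every repo may carry `noarch` packages, an x86_64 repo may additionally carry `i686`/`i386` multilib packages, and any package whose arch falls outside that set is keyed under the repo's own arch.

```python
from typing import List


def allowed_arches(repo_arch: str) -> List[str]:
    # Illustrative helper: every repo may contain noarch packages;
    # x86_64 repos additionally ship 32-bit multilib packages.
    arches = [repo_arch, 'noarch']
    if repo_arch == 'x86_64':
        arches.extend(['i686', 'i386'])
    return arches


def package_key(name: str, package_arch: str, repo_arch: str) -> str:
    # Illustrative helper: packages whose arch is not valid for the repo
    # are grouped under the repo arch itself, mirroring the hunks above.
    arch = package_arch if package_arch in allowed_arches(repo_arch) else repo_arch
    return f'{name}.{arch}'
```

For example, `package_key('glibc', 'i686', 'x86_64')` stays `'glibc.i686'` and a `noarch` package keeps its `.noarch` key in any repo; only arches outside the allowed set collapse to the repo arch.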


@ -1,4 +1,6 @@
+import binascii
 import gzip
+import lzma
 import os
 from argparse import ArgumentParser, FileType
 from io import BytesIO
@ -10,6 +12,24 @@ import createrepo_c as cr
 from typing.io import BinaryIO


+def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
+    return binascii.hexlify(first_two_bytes) == initial_bytes
+
+
+def is_gzip_file(first_two_bytes):
+    return _is_compressed_file(
+        first_two_bytes=first_two_bytes,
+        initial_bytes=b'1f8b',
+    )
+
+
+def is_xz_file(first_two_bytes):
+    return _is_compressed_file(
+        first_two_bytes=first_two_bytes,
+        initial_bytes=b'fd37',
+    )
+
+
 def grep_list_of_modules_yaml_gz(repo_path: AnyStr) -> List[BytesIO]:
     """
     Find all of valid *modules.yaml.gz in repos
@ -47,7 +67,11 @@ def collect_modules(modules_paths: List[BinaryIO], target_dir: str):
     os.makedirs(module_defaults_path, exist_ok=True)

     for module_file in modules_paths:
-        data = gzip.decompress(module_file.read())
+        data = module_file.read()
+        if is_gzip_file(data[:2]):
+            data = gzip.decompress(data)
+        elif is_xz_file(data[:2]):
+            data = lzma.decompress(data)
         documents = yaml.load_all(data, Loader=yaml.BaseLoader)
         for doc in documents:
             if doc['document'] == 'modulemd-defaults':
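
To tie the pieces together, a minimal sketch of what `collect_modules` now does with each payload: sniff the compression, decompress, then split the YAML stream into modulemd and modulemd-defaults documents. The import path and the `split_modulemd_documents` helper are illustrative; the real function additionally writes each document out under the target directory.

```python
import gzip
import lzma

import yaml

# Illustrative import path; inside the package the helpers are imported
# relatively, as `from .gather_modules import is_gzip_file, is_xz_file`.
from gather_modules import is_gzip_file, is_xz_file


def split_modulemd_documents(raw: bytes):
    # Decompress only when the magic bytes say the payload is compressed.
    if is_gzip_file(raw[:2]):
        raw = gzip.decompress(raw)
    elif is_xz_file(raw[:2]):
        raw = lzma.decompress(raw)
    modules, defaults = [], []
    for doc in yaml.load_all(raw, Loader=yaml.BaseLoader):
        if doc['document'] == 'modulemd-defaults':
            defaults.append(doc)
        else:
            modules.append(doc)
    return modules, defaults
```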