2021-01-15 08:53:01 +00:00
|
|
|
# coding=utf-8
|
|
|
|
"""
|
|
|
|
The tool allows generating package.json. This file is used by pungi
|
|
|
|
as the parameter `gather_prepopulate`
|
|
|
|
Sample of using repodata files taken from
|
|
|
|
https://github.com/rpm-software-management/createrepo_c/blob/master/examples/python/repodata_parsing.py
|
|
|
|
"""
|
|
|
|
|
|
|
|
import argparse
|
2021-02-04 23:20:14 +00:00
|
|
|
import gzip
|
2021-01-15 08:53:01 +00:00
|
|
|
import json
|
2021-12-28 10:53:46 +00:00
|
|
|
import lzma
|
2021-01-15 08:53:01 +00:00
|
|
|
import os
|
2021-02-04 23:20:14 +00:00
|
|
|
import re
|
2021-01-15 08:53:01 +00:00
|
|
|
import tempfile
|
|
|
|
from collections import defaultdict
|
2022-10-18 19:53:50 +00:00
|
|
|
from typing import AnyStr, Dict, List, Optional, Any, Iterator
|
2021-01-15 08:53:01 +00:00
|
|
|
|
2022-04-29 18:39:51 +00:00
|
|
|
import binascii
|
2021-01-15 08:53:01 +00:00
|
|
|
import createrepo_c as cr
|
|
|
|
import dnf.subject
|
|
|
|
import hawkey
|
|
|
|
import requests
|
2021-06-04 09:36:03 +00:00
|
|
|
import rpm
|
2021-02-04 23:20:14 +00:00
|
|
|
import yaml
|
2022-10-18 19:53:50 +00:00
|
|
|
from createrepo_c import Package, PackageIterator
|
2021-01-15 08:53:01 +00:00
|
|
|
from dataclasses import dataclass
|
|
|
|
|
|
|
|
|
2022-04-29 18:39:51 +00:00
|
|
|
def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
|
|
|
|
return binascii.hexlify(first_two_bytes) == initial_bytes
|
|
|
|
|
|
|
|
|
|
|
|
def is_gzip_file(first_two_bytes):
    """
    Tell whether the passed bytes carry the gzip magic number (1f 8b)

    :param first_two_bytes: the first two raw bytes of a file
    :return: True if the file looks gzip-compressed
    """
    return binascii.hexlify(first_two_bytes) == b'1f8b'
|
|
|
|
|
|
|
|
|
|
|
|
def is_xz_file(first_two_bytes):
    """
    Tell whether the passed bytes carry the xz magic number (fd 37)

    :param first_two_bytes: the first two raw bytes of a file
    :return: True if the file looks xz-compressed
    """
    return binascii.hexlify(first_two_bytes) == b'fd37'
|
2022-04-29 18:25:59 +00:00
|
|
|
|
2022-05-01 00:32:01 +00:00
|
|
|
|
2021-01-15 08:53:01 +00:00
|
|
|
@dataclass
class RepoInfo:
    """
    Description of a single repository to be processed.

    One instance is built per `--repo-*` CLI option group; see `cli_main`.
    """

    # Path to a directory with repo directories. E.g. '/var/repos' contains
    # 'appstream', 'baseos', etc.
    # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
    # using a remote repo
    path: AnyStr
    # Name of the folder containing a repodata folder.
    # E.g. 'baseos', 'appstream', etc.
    folder: AnyStr
    # Name of the repo. E.g. 'BaseOS', 'AppStream', etc.
    name: AnyStr
    # Architecture of the repo. E.g. 'x86_64', 'i686', etc.
    arch: AnyStr
    # Whether the repo is remote (fetched over HTTP) or local
    is_remote: bool
    # Whether this is a reference repository (usually a RHEL repo).
    # The layout of packages from such a repository is taken as the example;
    # only the layout of packages that do NOT exist in a reference
    # repository is taken from non-reference repos.
    is_reference: bool = False
|
2021-01-15 08:53:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
class PackagesGenerator:
    """
    Collects package metadata from the repodata of several repositories and
    merges it into the nested structure expected by pungi's
    `gather_prepopulate` option (packages.json).
    """

    def __init__(
            self,
            repos: List[RepoInfo],
            excluded_packages: List[AnyStr],
            included_packages: List[AnyStr],
    ):
        """
        :param repos: list of repositories to be processed
        :param excluded_packages: regexes of package names that are globally
               excluded from the generated json
        :param included_packages: regexes of package names that are added to
               the generated json even if they are modular packages
        """
        self.repos = repos
        self.excluded_packages = excluded_packages
        self.included_packages = included_packages
        # Paths of downloaded repodata files; removed on object destruction
        self.tmp_files = []  # type: List[AnyStr]

    def __del__(self):
        # Best-effort cleanup of the temp files downloaded from remote repos
        for tmp_file in self.tmp_files:
            if os.path.exists(tmp_file):
                os.remove(tmp_file)

    @staticmethod
    def _warning_callback(warning_type, message):
        """
        Warning callback for createrepo_c parsing functions

        Returning True tells createrepo_c to continue parsing.
        """
        print(f'Warning message: "{message}"; warning type: "{warning_type}"')
        return True

    @staticmethod
    def get_remote_file_content(file_url: AnyStr) -> AnyStr:
        """
        Get content from a remote file and write it to a temp file

        :param file_url: url of a remote file
        :return: path to a temp file
        :raises requests.HTTPError: if the download fails
        """
        file_request = requests.get(
            url=file_url,
        )
        file_request.raise_for_status()
        # delete=False: the caller is responsible for removing the file
        # (either directly or via self.tmp_files)
        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
            file_stream.write(file_request.content)
            return file_stream.name

    @staticmethod
    def _parse_repomd(repomd_file_path: AnyStr) -> cr.Repomd:
        """
        Parse file repomd.xml and create object Repomd

        :param repomd_file_path: path to local repomd.xml
        """
        return cr.Repomd(repomd_file_path)

    def _parse_primary_file(
            self,
            primary_file_path: AnyStr,
            packages: Dict[AnyStr, cr.Package],
    ) -> None:
        """
        Parse primary.xml.gz, take from it info about packages and put it to
        dict packages

        :param primary_file_path: path to local primary.xml.gz
        :param packages: dictionary which will contain info about packages
               from repository, keyed by package id
        """
        cr.xml_parse_primary(
            path=primary_file_path,
            pkgcb=lambda pkg: packages.update({
                pkg.pkgId: pkg,
            }),
            do_files=False,
            warningcb=self._warning_callback,
        )

    def _parse_filelists_file(
            self,
            filelists_file_path: AnyStr,
            packages: Dict[AnyStr, cr.Package],
    ) -> None:
        """
        Parse filelists.xml.gz, take from it info about packages and put it to
        dict packages

        :param filelists_file_path: path to local filelists.xml.gz
        :param packages: dictionary which will contain info about packages
               from repository; entries are looked up by package id and
               enriched in place
        """
        cr.xml_parse_filelists(
            path=filelists_file_path,
            # Return the already-known package (or None to skip unknown ids)
            newpkgcb=lambda pkg_id, name, arch: packages.get(
                pkg_id,
                None,
            ),
            warningcb=self._warning_callback,
        )

    def _parse_other_file(
            self,
            other_file_path: AnyStr,
            packages: Dict[AnyStr, cr.Package],
    ) -> None:
        """
        Parse other.xml.gz, take from it info about packages and put it to
        dict packages

        :param other_file_path: path to local other.xml.gz
        :param packages: dictionary which will contain info about packages
               from repository; entries are looked up by package id and
               enriched in place
        """
        cr.xml_parse_other(
            path=other_file_path,
            newpkgcb=lambda pkg_id, name, arch: packages.get(
                pkg_id,
                None,
            ),
            warningcb=self._warning_callback,
        )

    @classmethod
    def _parse_modules_file(
            cls,
            modules_file_path: AnyStr,
    ) -> Iterator[Any]:
        """
        Parse modules.yaml.gz and returns parsed data

        :param modules_file_path: path to local modules.yaml.gz
        :return: iterator of dicts, one per module document in the repo
        """
        with open(modules_file_path, 'rb') as modules_file:
            data = modules_file.read()
            # modules.yaml may be shipped gzip- or xz-compressed (or plain)
            if is_gzip_file(data[:2]):
                data = gzip.decompress(data)
            elif is_xz_file(data[:2]):
                data = lzma.decompress(data)
            return yaml.load_all(
                data,
                Loader=yaml.BaseLoader,
            )

    def _get_repomd_records(
            self,
            repo_info: RepoInfo,
    ) -> List[cr.RepomdRecord]:
        """
        Get, parse file repomd.xml and extract from it repomd records

        :param repo_info: structure which contains info about a current repo
        :return: list with repomd records
        """
        repomd_file_path = os.path.join(
            repo_info.path,
            repo_info.folder,
            'repodata',
            'repomd.xml',
        )
        if repo_info.is_remote:
            repomd_file_path = self.get_remote_file_content(repomd_file_path)
        repomd_object = self._parse_repomd(repomd_file_path)
        if repo_info.is_remote:
            # repomd.xml is not kept in self.tmp_files; remove it right away
            os.remove(repomd_file_path)
        return repomd_object.records

    def _download_repomd_records(
            self,
            repo_info: RepoInfo,
            repomd_records: List[cr.RepomdRecord],
            repomd_records_dict: Dict[str, str],
    ):
        """
        Download repomd records of types 'primary', 'filelists' and 'other'

        :param repo_info: structure which contains info about a current repo
        :param repomd_records: list with repomd records
        :param repomd_records_dict: dict (filled in place) mapping a record
               type to the local path of its repodata file
        """
        for repomd_record in repomd_records:
            if repomd_record.type not in (
                    'primary',
                    'filelists',
                    'other',
            ):
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
                # Remember the temp file so __del__ can clean it up
                self.tmp_files.append(repomd_record_file_path)
            repomd_records_dict[repomd_record.type] = repomd_record_file_path

    def _parse_module_repomd_record(
            self,
            repo_info: RepoInfo,
            repomd_records: List[cr.RepomdRecord],
    ) -> List[Dict]:
        """
        Find the 'modules' repomd record, download it if needed and parse it

        :param repo_info: structure which contains info about a current repo
        :param repomd_records: list with repomd records
        :return: list of parsed module documents; empty list if the repo
                 has no 'modules' record
        """
        for repomd_record in repomd_records:
            if repomd_record.type != 'modules':
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
                self.tmp_files.append(repomd_record_file_path)
            return list(self._parse_modules_file(
                repomd_record_file_path,
            ))
        # No 'modules' record: previously fell through returning None,
        # which contradicted the annotated return type
        return []

    @staticmethod
    def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
        """
        Compare two packages by (epoch, version, release)

        :return: positive if package_1 is newer, 0 if equal, negative if older
        """
        version_tuple_1 = (
            package_1.epoch,
            package_1.version,
            package_1.release,
        )
        version_tuple_2 = (
            package_2.epoch,
            package_2.version,
            package_2.release,
        )
        return rpm.labelCompare(version_tuple_1, version_tuple_2)

    def generate_packages_json(
            self
    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
        """
        Generate packages.json

        :return: nested mapping variant -> arch -> source package name ->
                 list of 'name.arch' binary package strings
        """
        packages_json = defaultdict(
            lambda: defaultdict(
                lambda: defaultdict(
                    list,
                )
            )
        )
        # package_key ('name.arch') -> info about the winning package
        all_packages = defaultdict(lambda: {'variants': list()})
        for repo_info in self.repos:
            # Arches whose packages a repo of this arch may legitimately carry
            repo_arches = [
                repo_info.arch,
                'noarch',
            ]
            if repo_info.arch == 'x86_64':
                repo_arches.extend([
                    'i686',
                    'i386',
                ])
            repomd_records = self._get_repomd_records(
                repo_info=repo_info,
            )
            repomd_records_dict = {}  # type: Dict[str, str]
            self._download_repomd_records(repo_info=repo_info,
                                          repomd_records=repomd_records,
                                          repomd_records_dict=repomd_records_dict)
            packages_iterator = PackageIterator(
                primary_path=repomd_records_dict['primary'],
                filelists_path=repomd_records_dict['filelists'],
                other_path=repomd_records_dict['other'],
                warningcb=self._warning_callback,
            )
            for package in packages_iterator:
                # Foreign-arch packages are filed under the repo's arch
                if package.arch not in repo_arches:
                    package_arch = repo_info.arch
                else:
                    package_arch = package.arch
                package_key = f'{package.name}.{package_arch}'
                if 'module' in package.release and not any(
                        re.search(included_package, package.name)
                        for included_package in self.included_packages
                ):
                    # Even a module package will be added to packages.json if
                    # it presents in the list of included packages
                    continue
                if package_key not in all_packages:
                    all_packages[package_key]['variants'].append(
                        repo_info.name
                    )
                    all_packages[package_key]['arch'] = repo_info.arch
                    all_packages[package_key]['package'] = package
                    all_packages[package_key]['type'] = repo_info.is_reference
                # replace an older package if it's not reference or
                # a newer package is from reference repo
                elif (not all_packages[package_key]['type'] or
                        all_packages[package_key]['type'] ==
                        repo_info.is_reference) and \
                        self.compare_pkgs_version(
                            package,
                            all_packages[package_key]['package']
                        ) > 0:
                    all_packages[package_key]['variants'] = [repo_info.name]
                    all_packages[package_key]['arch'] = repo_info.arch
                    all_packages[package_key]['package'] = package
                elif self.compare_pkgs_version(
                        package,
                        all_packages[package_key]['package']
                ) == 0:
                    # Same version in another repo: the package belongs to
                    # both variants
                    all_packages[package_key]['variants'].append(
                        repo_info.name
                    )

        for package_dict in all_packages.values():
            repo_arches = [
                package_dict['arch'],
                'noarch',
            ]
            if package_dict['arch'] == 'x86_64':
                repo_arches.extend([
                    'i686',
                    'i386',
                ])
            for variant in package_dict['variants']:
                repo_arch = package_dict['arch']
                package = package_dict['package']
                package_name = package.name
                if package.arch not in repo_arches:
                    package_arch = package_dict['arch']
                else:
                    package_arch = package.arch
                if any(re.search(excluded_package, package_name)
                       for excluded_package in self.excluded_packages):
                    continue
                # Derive the source package name from the srpm NEVRA
                src_package_name = dnf.subject.Subject(
                    package.rpm_sourcerpm,
                ).get_nevra_possibilities(
                    forms=hawkey.FORM_NEVRA,
                )
                if len(src_package_name) > 1:
                    # We should stop utility if we can't get exact name of srpm
                    raise ValueError(
                        'We can\'t get exact name of srpm '
                        f'by its NEVRA "{package.rpm_sourcerpm}"'
                    )
                else:
                    # NOTE(review): an empty possibilities list would raise
                    # IndexError here — presumably rpm_sourcerpm is always
                    # a valid NEVRA; confirm
                    src_package_name = src_package_name[0].name
                pkgs_list = packages_json[variant][
                    repo_arch][src_package_name]
                added_pkg = f'{package_name}.{package_arch}'
                if added_pkg not in pkgs_list:
                    pkgs_list.append(added_pkg)
        return packages_json
|
|
|
|
|
|
|
|
|
|
|
|
def create_parser():
    """
    Build the CLI argument parser for the packages.json generator.

    The repository-describing options ('--repo-*', '--is-*') are repeatable;
    their values are matched up positionally by the caller.
    """
    parser = argparse.ArgumentParser()
    # Repeatable free-form options, one value per repository
    repeated_options = (
        ('--repo-path',
         'Path to a folder with repofolders. E.g. "/var/repos" or '
         '"http://koji.cloudlinux.com/mirrors/rhel_mirror"'),
        ('--repo-folder',
         'A folder which contains folder repodata . E.g. "baseos-stream"'),
        ('--repo-arch',
         'What architecture packages a repository contains. E.g. "x86_64"'),
        ('--repo-name',
         'Name of a repository. E.g. "AppStream"'),
    )
    for option_name, option_help in repeated_options:
        parser.add_argument(
            option_name,
            action='append',
            help=option_help,
            required=True,
        )
    # Repeatable yes/no flags, one value per repository
    yes_no_options = (
        ('--is-remote', 'A repository is remote or local'),
        ('--is-reference',
         'A repository is used as reference for packages layout'),
    )
    for option_name, option_help in yes_no_options:
        parser.add_argument(
            option_name,
            action='append',
            type=str,
            help=option_help,
            choices=['yes', 'no'],
            required=True,
        )
    # Optional space-separated package name lists
    package_list_options = (
        ('--excluded-packages',
         'A list of globally excluded packages from generated json.'
         'All of list elements should be separated by space'),
        ('--included-packages',
         'A list of globally included packages from generated json.'
         'All of list elements should be separated by space'),
    )
    for option_name, option_help in package_list_options:
        parser.add_argument(
            option_name,
            nargs='+',
            type=str,
            default=[],
            help=option_help,
            required=False,
        )
    parser.add_argument(
        '--json-output-path',
        type=str,
        help='Full path to output json file',
        required=True,
    )
    return parser
|
|
|
|
|
|
|
|
|
|
|
|
def cli_main():
    """
    Entry point: parse CLI arguments, build repo descriptions, generate
    packages.json and dump it to the requested output path.
    """
    args = create_parser().parse_args()
    # The repeated --repo-* / --is-* options are matched up positionally
    repo_properties = zip(
        args.repo_path,
        args.repo_folder,
        args.repo_name,
        args.repo_arch,
        args.is_remote,
        args.is_reference,
    )
    repos = [
        RepoInfo(
            path=repo_path,
            folder=repo_folder,
            name=repo_name,
            arch=repo_arch,
            is_remote=is_remote == 'yes',
            is_reference=is_reference == 'yes',
        )
        for repo_path, repo_folder, repo_name,
        repo_arch, is_remote, is_reference in repo_properties
    ]
    pg = PackagesGenerator(
        repos=repos,
        excluded_packages=args.excluded_packages,
        included_packages=args.included_packages,
    )
    result = pg.generate_packages_json()
    with open(args.json_output_path, 'w') as packages_file:
        json.dump(
            result,
            packages_file,
            indent=4,
            sort_keys=True,
        )
|
|
|
|
|
|
|
|
|
|
|
# Script entry point: only run when executed directly, not when imported
if __name__ == '__main__':
    cli_main()
|