# (file info: 423 lines, 14 KiB, Python)
# coding=utf-8
|
|
|
|
import argparse
|
|
import os
|
|
import subprocess
|
|
import tempfile
|
|
from shutil import rmtree
|
|
from typing import AnyStr, List, Dict, Optional
|
|
|
|
import createrepo_c as cr
|
|
import requests
|
|
import yaml
|
|
from dataclasses import dataclass, field
|
|
|
|
from .create_packages_json import PackagesGenerator, RepoInfo
|
|
|
|
|
|
@dataclass
class ExtraRepoInfo(RepoInfo):
    """
    Info about a repository from which packages/modules are copied.

    Extends ``RepoInfo`` with optional name filters and a remote flag.
    """

    # Names of modules to copy from the repo; an empty list means
    # "take every module" (see `_download_modules`).
    modules: List[AnyStr] = field(default_factory=list)
    # Names of packages to copy from the repo; an empty list means
    # "take every package" (see `_download_packages`).
    packages: List[AnyStr] = field(default_factory=list)
    # True when the repo is reached over HTTP (e.g. a Build System repo).
    is_remote: bool = True
|
|
|
|
|
|
class CreateExtraRepo(PackagesGenerator):
|
|
|
|
def __init__(
|
|
self,
|
|
repos: List[ExtraRepoInfo],
|
|
bs_auth_token: AnyStr,
|
|
local_repository_path: AnyStr,
|
|
clear_target_repo: bool = True,
|
|
):
|
|
self.repos = [] # type: List[ExtraRepoInfo]
|
|
super().__init__(repos, [], [])
|
|
self.auth_headers = {
|
|
'Authorization': f'Bearer {bs_auth_token}',
|
|
}
|
|
# modules data of modules.yaml.gz from an existing local repo
|
|
self.local_modules_data = []
|
|
self.local_repository_path = local_repository_path
|
|
# path to modules.yaml, which generated by the class
|
|
self.default_modules_yaml_path = os.path.join(
|
|
local_repository_path,
|
|
'modules.yaml',
|
|
)
|
|
if clear_target_repo:
|
|
if os.path.exists(self.local_repository_path):
|
|
rmtree(self.local_repository_path)
|
|
os.makedirs(self.local_repository_path, exist_ok=True)
|
|
else:
|
|
self._read_local_modules_yaml()
|
|
|
|
def _read_local_modules_yaml(self):
|
|
"""
|
|
Read modules data from an existin local repo
|
|
"""
|
|
repomd_file_path = os.path.join(
|
|
self.local_repository_path,
|
|
'repodata',
|
|
'repomd.xml',
|
|
)
|
|
repomd_object = self._parse_repomd(repomd_file_path)
|
|
for repomd_record in repomd_object.records:
|
|
if repomd_record.type != 'modules':
|
|
continue
|
|
modules_yaml_path = os.path.join(
|
|
self.local_repository_path,
|
|
repomd_record.location_href,
|
|
)
|
|
self.local_modules_data = list(self._parse_modules_file(
|
|
modules_yaml_path,
|
|
))
|
|
break
|
|
|
|
def _dump_local_modules_yaml(self):
|
|
"""
|
|
Dump merged modules data to an local repo
|
|
"""
|
|
if self.local_modules_data:
|
|
with open(self.default_modules_yaml_path, 'w') as yaml_file:
|
|
yaml.dump_all(
|
|
self.local_modules_data,
|
|
yaml_file,
|
|
)
|
|
|
|
@staticmethod
|
|
def get_repo_info_from_bs_repo(
|
|
auth_token: AnyStr,
|
|
build_id: AnyStr,
|
|
arch: AnyStr,
|
|
packages: Optional[List[AnyStr]] = None,
|
|
modules: Optional[List[AnyStr]] = None,
|
|
) -> List[ExtraRepoInfo]:
|
|
"""
|
|
Get info about a BS repo and save it to
|
|
an object of class ExtraRepoInfo
|
|
:param auth_token: Auth token to Build System
|
|
:param build_id: ID of a build from BS
|
|
:param arch: an architecture of repo which will be used
|
|
:param packages: list of names of packages which will be put to an
|
|
local repo from a BS repo
|
|
:param modules: list of names of modules which will be put to an
|
|
local repo from a BS repo
|
|
:return: list of ExtraRepoInfo with info about the BS repos
|
|
"""
|
|
|
|
bs_url = 'https://build.cloudlinux.com'
|
|
api_uri = 'api/v1'
|
|
bs_repo_suffix = 'build_repos'
|
|
|
|
repos_info = []
|
|
|
|
# get the full info about a BS repo
|
|
repo_request = requests.get(
|
|
url=os.path.join(
|
|
bs_url,
|
|
api_uri,
|
|
'builds',
|
|
build_id,
|
|
),
|
|
headers={
|
|
'Authorization': f'Bearer {auth_token}',
|
|
},
|
|
)
|
|
repo_request.raise_for_status()
|
|
result = repo_request.json()
|
|
for build_platform in result['build_platforms']:
|
|
platform_name = build_platform['name']
|
|
for architecture in build_platform['architectures']:
|
|
# skip repo with unsuitable architecture
|
|
if architecture != arch:
|
|
continue
|
|
repo_info = ExtraRepoInfo(
|
|
path=os.path.join(
|
|
bs_url,
|
|
bs_repo_suffix,
|
|
build_id,
|
|
platform_name,
|
|
),
|
|
folder=architecture,
|
|
name=f'{build_id}-{platform_name}-{architecture}',
|
|
arch=architecture,
|
|
is_remote=True,
|
|
packages=packages,
|
|
modules=modules,
|
|
)
|
|
repos_info.append(repo_info)
|
|
return repos_info
|
|
|
|
def _create_local_extra_repo(self):
|
|
"""
|
|
Call `createrepo_c <path_to_repo>` for creating a local repo
|
|
"""
|
|
subprocess.call(
|
|
f'createrepo_c {self.local_repository_path}',
|
|
shell=True,
|
|
)
|
|
# remove an unnecessary temporary modules.yaml
|
|
if os.path.exists(self.default_modules_yaml_path):
|
|
os.remove(self.default_modules_yaml_path)
|
|
|
|
def get_remote_file_content(
|
|
self,
|
|
file_url: AnyStr,
|
|
) -> AnyStr:
|
|
"""
|
|
Get content from a remote file and write it to a temp file
|
|
:param file_url: url of a remote file
|
|
:return: path to a temp file
|
|
"""
|
|
|
|
file_request = requests.get(
|
|
url=file_url,
|
|
# for the case when we get a file from BS
|
|
headers=self.auth_headers,
|
|
)
|
|
file_request.raise_for_status()
|
|
with tempfile.NamedTemporaryFile(delete=False) as file_stream:
|
|
file_stream.write(file_request.content)
|
|
return file_stream.name
|
|
|
|
def _download_rpm_to_local_repo(
|
|
self,
|
|
package_location: AnyStr,
|
|
repo_info: ExtraRepoInfo,
|
|
) -> None:
|
|
"""
|
|
Download a rpm package from a remote repo and save it to a local repo
|
|
:param package_location: relative uri of a package in a remote repo
|
|
:param repo_info: info about a remote repo which contains a specific
|
|
rpm package
|
|
"""
|
|
rpm_package_remote_path = os.path.join(
|
|
repo_info.path,
|
|
repo_info.folder,
|
|
package_location,
|
|
)
|
|
rpm_package_local_path = os.path.join(
|
|
self.local_repository_path,
|
|
os.path.basename(package_location),
|
|
)
|
|
rpm_request = requests.get(
|
|
url=rpm_package_remote_path,
|
|
headers=self.auth_headers,
|
|
)
|
|
rpm_request.raise_for_status()
|
|
with open(rpm_package_local_path, 'wb') as rpm_file:
|
|
rpm_file.write(rpm_request.content)
|
|
|
|
def _download_packages(
|
|
self,
|
|
packages: Dict[AnyStr, cr.Package],
|
|
repo_info: ExtraRepoInfo
|
|
):
|
|
"""
|
|
Download all defined packages from a remote repo
|
|
:param packages: information about all of packages (including
|
|
modularity) in a remote repo
|
|
:param repo_info: information about a remote repo
|
|
"""
|
|
for package in packages.values():
|
|
package_name = package.name
|
|
# Skip a current package from a remote repo if we defined
|
|
# the list packages and a current package doesn't belong to it
|
|
if repo_info.packages and \
|
|
package_name not in repo_info.packages:
|
|
continue
|
|
self._download_rpm_to_local_repo(
|
|
package_location=package.location_href,
|
|
repo_info=repo_info,
|
|
)
|
|
|
|
def _download_modules(
|
|
self,
|
|
modules_data: List[Dict],
|
|
repo_info: ExtraRepoInfo,
|
|
packages: Dict[AnyStr, cr.Package]
|
|
):
|
|
"""
|
|
Download all defined modularity packages and their data from
|
|
a remote repo
|
|
:param modules_data: information about all modules in a remote repo
|
|
:param repo_info: information about a remote repo
|
|
:param packages: information about all packages (including
|
|
modularity) in a remote repo
|
|
"""
|
|
for module in modules_data:
|
|
module_data = module['data']
|
|
# Skip a current module from a remote repo if we defined
|
|
# the list modules and a current module doesn't belong to it
|
|
if repo_info.modules and \
|
|
module_data['name'] not in repo_info.modules:
|
|
continue
|
|
# we should add info about a module if the local repodata
|
|
# doesn't have it
|
|
if module not in self.local_modules_data:
|
|
self.local_modules_data.append(module)
|
|
# just skip a module's record if it doesn't have rpm artifact
|
|
if module['document'] != 'modulemd' or \
|
|
'artifacts' not in module_data or \
|
|
'rpms' not in module_data['artifacts']:
|
|
continue
|
|
for rpm in module['data']['artifacts']['rpms']:
|
|
# Empty repo_info.packages means that we will download
|
|
# all packages from repo including
|
|
# the modularity packages
|
|
if not repo_info.packages:
|
|
break
|
|
# skip a rpm if it doesn't belong to a processed repo
|
|
if rpm not in packages:
|
|
continue
|
|
self._download_rpm_to_local_repo(
|
|
package_location=packages[rpm].location_href,
|
|
repo_info=repo_info,
|
|
)
|
|
|
|
    def create_extra_repo(self):
        """
        1. Get from the remote repos the specific (or all) packages/modules
        2. Save them to a local repo
        3. Save info about the modules to a local repo
        4. Call `createrepo_c` which creates a local repo
           with the right repodata
        """
        for repo_info in self.repos:
            packages = {}  # type: Dict[AnyStr, cr.Package]
            repomd_records = self._get_repomd_records(
                repo_info=repo_info,
            )
            # parse the repodata (including modules.yaml.gz)
            modules_data = self._parse_module_repomd_record(
                repo_info=repo_info,
                repomd_records=repomd_records,
            )
            # NOTE(review): `packages` is still empty here, so the dict
            # comprehension below iterates over nothing and both
            # `_download_modules` and `_download_packages` receive an
            # empty mapping. It looks like a step that fills `packages`
            # from the repo's primary metadata is missing — confirm
            # against PackagesGenerator's API.
            # convert the packages dict to more usable form
            # for future checking that a rpm from the module's artifacts
            # belongs to a processed repository
            packages = {
                f'{package.name}-{package.epoch}:{package.version}-'
                f'{package.release}.{package.arch}':
                package for package in packages.values()
            }
            self._download_modules(
                modules_data=modules_data,
                repo_info=repo_info,
                packages=packages,
            )
            self._download_packages(
                packages=packages,
                repo_info=repo_info,
            )

        # write collected modules metadata, then build the repodata
        self._dump_local_modules_yaml()
        self._create_local_extra_repo()
|
|
|
|
|
|
def create_parser():
    """Build the command-line argument parser for this script."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--bs-auth-token',
                        required=True,
                        help='Auth token for Build System')
    parser.add_argument('--local-repo-path',
                        required=True,
                        help='Path to a local repo. E.g. /var/repo/test_repo')
    parser.add_argument('--clear-local-repo',
                        action='store_true',
                        default=False,
                        help='Clear a local repo before creating a new')
    # the following options are repeatable and are consumed in lockstep
    parser.add_argument('--repo',
                        action='append',
                        required=True,
                        help='Path to a folder with repofolders or build id. '
                             'E.g. "http://koji.cloudlinux.com/mirrors/'
                             'rhel_mirror" or "601809b3c2f5b0e458b14cd3"')
    parser.add_argument('--repo-folder',
                        action='append',
                        required=True,
                        help='A folder which contains folder repodata . '
                             'E.g. "baseos-stream"')
    parser.add_argument('--repo-arch',
                        action='append',
                        required=True,
                        help='What architecture packages a repository '
                             'contains. E.g. "x86_64"')
    parser.add_argument('--packages',
                        action='append',
                        type=str,
                        default=[],
                        required=True,
                        help='A list of packages names which we want to '
                             'download to local extra repo. We will download '
                             'all of packages if param is empty')
    parser.add_argument('--modules',
                        action='append',
                        type=str,
                        default=[],
                        required=True,
                        help='A list of modules names which we want to '
                             'download to local extra repo. We will download '
                             'all of modules if param is empty')
    return parser
|
|
|
|
|
|
def cli_main():
    """
    Parse the CLI arguments and build the local extra repository.
    """
    args = create_parser().parse_args()
    repos_info = []
    # the repeatable options are consumed in lockstep, one tuple per repo
    for repo, repo_folder, repo_arch, packages, modules in zip(
            args.repo,
            args.repo_folder,
            args.repo_arch,
            args.packages,
            args.modules,
    ):
        # each CLI value holds whitespace-separated names
        modules = modules.split()
        packages = packages.split()
        # A repo given as a URL is used directly; anything else is treated
        # as a Build System build id.
        # Fix: also accept https:// URLs — previously they fell through to
        # the build-id branch and produced a broken BS API request.
        if repo.startswith(('http://', 'https://')):
            repos_info.append(
                ExtraRepoInfo(
                    path=repo,
                    folder=repo_folder,
                    name=repo_folder,
                    arch=repo_arch,
                    modules=modules,
                    packages=packages,
                )
            )
        else:
            repos_info.extend(
                CreateExtraRepo.get_repo_info_from_bs_repo(
                    auth_token=args.bs_auth_token,
                    build_id=repo,
                    arch=repo_arch,
                    modules=modules,
                    packages=packages,
                )
            )
    cer = CreateExtraRepo(
        repos=repos_info,
        bs_auth_token=args.bs_auth_token,
        local_repository_path=args.local_repo_path,
        clear_target_repo=args.clear_local_repo,
    )
    cer.create_extra_repo()
|
|
|
|
|
|
# Entry point when the module is executed as a script.
if __name__ == '__main__':
    cli_main()
|