ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically
- Script `create_extra_repo` is fixed
- Unittests are fixed
parent 004fc4382f
commit 943fd8e77d
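In short, the commit replaces the flat `ExtraRepoInfo` dataclass with an `ExtraVariantInfo` that keeps the per-variant filters (`packages`, `modules`) and wraps one or more `RepoInfo` entries. A minimal sketch of how the new objects nest; the field names come from the diff below, while the concrete values are invented for illustration:

from pungi.scripts.create_extra_repo import ExtraVariantInfo, RepoInfo

# Illustrative values only; an empty packages/modules list means
# "take everything from the repo" (see the comments in the diff below).
repo = RepoInfo(
    path='https://build.example.com/build_repos/1234/almalinux-8',
    folder='i686',
    is_remote=True,
)
variant = ExtraVariantInfo(
    name='1234-almalinux-8-i686',
    arch='i686',
    packages=[],
    modules=[],
    repos=[repo],
)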
@@ -5,35 +5,43 @@ import os
import subprocess
import tempfile
from shutil import rmtree
from typing import AnyStr, List, Dict, Optional
from typing import (
    AnyStr,
    List,
    Dict,
    Optional,
)

import createrepo_c as cr
import requests
import yaml
from dataclasses import dataclass, field

from .create_packages_json import PackagesGenerator, RepoInfo
from .create_packages_json import (
    PackagesGenerator,
    RepoInfo,
    VariantInfo,
)


@dataclass
class ExtraRepoInfo(RepoInfo):
class ExtraVariantInfo(VariantInfo):

    modules: List[AnyStr] = field(default_factory=list)
    packages: List[AnyStr] = field(default_factory=list)
    is_remote: bool = True


class CreateExtraRepo(PackagesGenerator):

    def __init__(
            self,
            repos: List[ExtraRepoInfo],
            variants: List[ExtraVariantInfo],
            bs_auth_token: AnyStr,
            local_repository_path: AnyStr,
            clear_target_repo: bool = True,
    ):
        self.repos = []  # type: List[ExtraRepoInfo]
        super().__init__(repos, [], [])
        self.variants = []  # type: List[ExtraVariantInfo]
        super().__init__(variants, [], [])
        self.auth_headers = {
            'Authorization': f'Bearer {bs_auth_token}',
        }
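A note on the rewritten constructor: the two empty lists passed to `super().__init__(variants, [], [])` appear to correspond to `PackagesGenerator`'s `excluded_packages` and `included_packages` arguments (compare the updated test_create_packages_json test at the bottom of this commit), so the call is roughly equivalent to the hedged sketch below:

# Rough equivalent of `super().__init__(variants, [], [])`, assuming the
# keyword argument names shown in the updated unit tests.
PackagesGenerator(
    variants=variants,
    excluded_packages=[],
    included_packages=[],
)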
@@ -92,7 +100,7 @@ class CreateExtraRepo(PackagesGenerator):
            arch: AnyStr,
            packages: Optional[List[AnyStr]] = None,
            modules: Optional[List[AnyStr]] = None,
    ) -> List[ExtraRepoInfo]:
    ) -> List[ExtraVariantInfo]:
        """
        Get info about a BS repo and save it to
        an object of class ExtraRepoInfo
@@ -110,7 +118,7 @@ class CreateExtraRepo(PackagesGenerator):
        api_uri = 'api/v1'
        bs_repo_suffix = 'build_repos'

        repos_info = []
        variants_info = []

        # get the full info about a BS repo
        repo_request = requests.get(
@@ -132,22 +140,26 @@ class CreateExtraRepo(PackagesGenerator):
                # skip repo with unsuitable architecture
                if architecture != arch:
                    continue
                repo_info = ExtraRepoInfo(
                    path=os.path.join(
                        bs_url,
                        bs_repo_suffix,
                        build_id,
                        platform_name,
                    ),
                    folder=architecture,
                variant_info = ExtraVariantInfo(
                    name=f'{build_id}-{platform_name}-{architecture}',
                    arch=architecture,
                    is_remote=True,
                    packages=packages,
                    modules=modules,
                    repos=[
                        RepoInfo(
                            path=os.path.join(
                                bs_url,
                                bs_repo_suffix,
                                build_id,
                                platform_name,
                            ),
                            folder=architecture,
                            is_remote=True,
                        )
                    ]
                )
                repos_info.append(repo_info)
        return repos_info
                variants_info.append(variant_info)
        return variants_info

    def _create_local_extra_repo(self):
        """
@@ -184,7 +196,7 @@ class CreateExtraRepo(PackagesGenerator):
    def _download_rpm_to_local_repo(
            self,
            package_location: AnyStr,
            repo_info: ExtraRepoInfo,
            repo_info: RepoInfo,
    ) -> None:
        """
        Download a rpm package from a remote repo and save it to a local repo
@@ -212,37 +224,38 @@ class CreateExtraRepo(PackagesGenerator):
    def _download_packages(
            self,
            packages: Dict[AnyStr, cr.Package],
            repo_info: ExtraRepoInfo
            variant_info: ExtraVariantInfo
    ):
        """
        Download all defined packages from a remote repo
        :param packages: information about all packages (including
                         modularity) in a remote repo
        :param repo_info: information about a remote repo
        :param variant_info: information about a remote variant
        """
        for package in packages.values():
            package_name = package.name
            # Skip a current package from a remote repo if we defined
            # the list packages and a current package doesn't belong to it
            if repo_info.packages and \
                    package_name not in repo_info.packages:
            if variant_info.packages and \
                    package_name not in variant_info.packages:
                continue
            self._download_rpm_to_local_repo(
                package_location=package.location_href,
                repo_info=repo_info,
            )
            for repo_info in variant_info.repos:
                self._download_rpm_to_local_repo(
                    package_location=package.location_href,
                    repo_info=repo_info,
                )

    def _download_modules(
            self,
            modules_data: List[Dict],
            repo_info: ExtraRepoInfo,
            variant_info: ExtraVariantInfo,
            packages: Dict[AnyStr, cr.Package]
    ):
        """
        Download all defined modularity packages and their data from
        a remote repo
        :param modules_data: information about all modules in a remote repo
        :param repo_info: information about a remote repo
        :param variant_info: information about a remote variant
        :param packages: information about all packages (including
                         modularity) in a remote repo
        """
@@ -250,8 +263,8 @@ class CreateExtraRepo(PackagesGenerator):
            module_data = module['data']
            # Skip a current module from a remote repo if we defined
            # the list modules and a current module doesn't belong to it
            if repo_info.modules and \
                    module_data['name'] not in repo_info.modules:
            if variant_info.modules and \
                    module_data['name'] not in variant_info.modules:
                continue
            # we should add info about a module if the local repodata
            # doesn't have it
@@ -266,15 +279,16 @@ class CreateExtraRepo(PackagesGenerator):
                # Empty repo_info.packages means that we will download
                # all packages from repo including
                # the modularity packages
                if not repo_info.packages:
                if not variant_info.packages:
                    break
                # skip a rpm if it doesn't belong to a processed repo
                if rpm not in packages:
                    continue
                self._download_rpm_to_local_repo(
                    package_location=packages[rpm].location_href,
                    repo_info=repo_info,
                )
                for repo_info in variant_info.repos:
                    self._download_rpm_to_local_repo(
                        package_location=packages[rpm].location_href,
                        repo_info=repo_info,
                    )

    def create_extra_repo(self):
        """
@@ -284,45 +298,34 @@ class CreateExtraRepo(PackagesGenerator):
        3. Call `createrepo_c` which creates a local repo
        with the right repodata
        """
        for repo_info in self.repos:
            packages = {}  # type: Dict[AnyStr, cr.Package]
            repomd_records = self._get_repomd_records(
                repo_info=repo_info,
            )
            repomd_records_dict = {}  # type: Dict[str, str]
            self._download_repomd_records(
                repo_info=repo_info,
                repomd_records=repomd_records,
                repomd_records_dict=repomd_records_dict,
            )
            packages_iterator = cr.PackageIterator(
                primary_path=repomd_records_dict['primary'],
                filelists_path=repomd_records_dict['filelists'],
                other_path=repomd_records_dict['other'],
                warningcb=self._warning_callback,
            )
            # parse the repodata (including modules.yaml.gz)
            modules_data = self._parse_module_repomd_record(
                repo_info=repo_info,
                repomd_records=repomd_records,
            )
            # convert the packages dict to more usable form
            # for future checking that a rpm from the module's artifacts
            # belongs to a processed repository
            packages = {
                f'{package.name}-{package.epoch}:{package.version}-'
                f'{package.release}.{package.arch}':
                    package for package in packages_iterator
            }
            self._download_modules(
                modules_data=modules_data,
                repo_info=repo_info,
                packages=packages,
            )
            self._download_packages(
                packages=packages,
                repo_info=repo_info,
            )
        for variant_info in self.variants:
            for repo_info in variant_info.repos:
                repomd_records = self._get_repomd_records(
                    repo_info=repo_info,
                )
                packages_iterator = self.get_packages_iterator(repo_info)
                # parse the repodata (including modules.yaml.gz)
                modules_data = self._parse_module_repomd_record(
                    repo_info=repo_info,
                    repomd_records=repomd_records,
                )
                # convert the packages dict to more usable form
                # for future checking that a rpm from the module's artifacts
                # belongs to a processed repository
                packages = {
                    f'{package.name}-{package.epoch}:{package.version}-'
                    f'{package.release}.{package.arch}':
                        package for package in packages_iterator
                }
                self._download_modules(
                    modules_data=modules_data,
                    variant_info=variant_info,
                    packages=packages,
                )
                self._download_packages(
                    packages=packages,
                    variant_info=variant_info,
                )

        self._dump_local_modules_yaml()
        self._create_local_extra_repo()
@@ -333,7 +336,6 @@ def create_parser():
    parser.add_argument(
        '--bs-auth-token',
        help='Auth token for Build System',
        required=True,
    )
    parser.add_argument(
        '--local-repo-path',
@@ -402,11 +404,16 @@ def cli_main():
        packages = packages.split()
        if repo.startswith('http://'):
            repos_info.append(
                ExtraRepoInfo(
                    path=repo,
                    folder=repo_folder,
                ExtraVariantInfo(
                    name=repo_folder,
                    arch=repo_arch,
                    repos=[
                        RepoInfo(
                            path=repo,
                            folder=repo_folder,
                            is_remote=True,
                        )
                    ],
                    modules=modules,
                    packages=packages,
                )
@@ -422,7 +429,7 @@ def cli_main():
                )
            )
    cer = CreateExtraRepo(
        repos=repos_info,
        variants=repos_info,
        bs_auth_token=args.bs_auth_token,
        local_repository_path=args.local_repo_path,
        clear_target_repo=args.clear_local_repo,
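Putting the pieces together, `cli_main()` now builds `ExtraVariantInfo` objects and hands them to `CreateExtraRepo` as `variants`. A condensed, hypothetical equivalent of that flow; the token, paths and repo URL are invented:

from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo

# One remote repo wrapped in one variant, as cli_main() does per --repo argument.
variants = [
    ExtraVariantInfo(
        name='extra_repo',
        arch='x86_64',
        packages=[],
        modules=[],
        repos=[
            RepoInfo(
                path='http://repo.example.com/extra',
                folder='x86_64',
                is_remote=True,
            )
        ],
    )
]

cer = CreateExtraRepo(
    variants=variants,
    bs_auth_token='fake-token',               # --bs-auth-token
    local_repository_path='/tmp/extra_repo',  # --local-repo-path
    clear_target_repo=True,                   # from args.clear_local_repo
)
# Downloads the selected packages and modules, then runs createrepo_c
# over the local repository.
cer.create_extra_repo()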
@@ -5,7 +5,7 @@ from unittest import TestCase, mock, main

import yaml

from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraRepoInfo
from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo

FOLDER_WITH_TEST_DATA = os.path.join(
    os.path.dirname(
@@ -114,14 +114,17 @@ data:
...
""", Loader=yaml.BaseLoader)

TEST_REPO_INFO = ExtraRepoInfo(
TEST_REPO_INFO = RepoInfo(
    path=FOLDER_WITH_TEST_DATA,
    folder='test_repo',
    is_remote=False,
)
TEST_VARIANT_INFO = ExtraVariantInfo(
    name='TestRepo',
    arch='x86_64',
    is_remote=False,
    packages=[],
    modules=[],
    repos=[TEST_REPO_INFO]
)

BS_BUILD_INFO = {
@@ -161,15 +164,19 @@ class TestCreteExtraRepo(TestCase):
        )
        self.assertEqual(
            [
                ExtraRepoInfo(
                    path='https://build.cloudlinux.com/'
                         f'build_repos/{build_id}/fake_platform',
                    folder=arch,
                ExtraVariantInfo(
                    name=f'{build_id}-fake_platform-{arch}',
                    arch=arch,
                    is_remote=True,
                    packages=packages,
                    modules=modules,
                    repos=[
                        RepoInfo(
                            path='https://build.cloudlinux.com/'
                                 f'build_repos/{build_id}/fake_platform',
                            folder=arch,
                            is_remote=True,
                        )
                    ]
                )
            ],
            repos_info,
@@ -197,7 +204,7 @@ class TestCreteExtraRepo(TestCase):
                'CreateExtraRepo._create_local_extra_repo'
        ) as mock__create_local_extra_repo:
            cer = CreateExtraRepo(
                repos=[TEST_REPO_INFO],
                variants=[TEST_VARIANT_INFO],
                bs_auth_token='fake_auth_token',
                local_repository_path='/path/to/local/repo',
                clear_target_repo=False,
@@ -4,7 +4,11 @@ import os
from collections import defaultdict
from unittest import TestCase, mock, main

from pungi.scripts.create_packages_json import PackagesGenerator, RepoInfo
from pungi.scripts.create_packages_json import (
    PackagesGenerator,
    RepoInfo,
    VariantInfo,
)

FOLDER_WITH_TEST_DATA = os.path.join(
    os.path.dirname(
@@ -16,8 +20,6 @@ FOLDER_WITH_TEST_DATA = os.path.join(
test_repo_info = RepoInfo(
    path=FOLDER_WITH_TEST_DATA,
    folder='test_repo',
    name='TestRepo',
    arch='x86_64',
    is_remote=False,
    is_reference=True,
)
@@ -25,11 +27,19 @@ test_repo_info = RepoInfo(
test_repo_info_2 = RepoInfo(
    path=FOLDER_WITH_TEST_DATA,
    folder='test_repo_2',
    name='TestRepo2',
    arch='x86_64',
    is_remote=False,
    is_reference=True,
)
variant_info_1 = VariantInfo(
    name='TestRepo',
    arch='x86_64',
    repos=[test_repo_info]
)
variant_info_2 = VariantInfo(
    name='TestRepo2',
    arch='x86_64',
    repos=[test_repo_info_2]
)


class TestPackagesJson(TestCase):
@@ -60,9 +70,9 @@ class TestPackagesJson(TestCase):

    def test_02_generate_additional_packages(self):
        pg = PackagesGenerator(
            repos=[
                test_repo_info,
                test_repo_info_2,
            variants=[
                variant_info_1,
                variant_info_2,
            ],
            excluded_packages=['zziplib-utils'],
            included_packages=['vim-file*'],
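The same variant wrapping applies on the packages.json side that these dataclasses come from: `VariantInfo` now carries `name`/`arch` and a list of `RepoInfo` objects, while the package filters stay on the generator. A minimal construction following the updated test above (the repo path is a placeholder):

from pungi.scripts.create_packages_json import PackagesGenerator, RepoInfo, VariantInfo

variant = VariantInfo(
    name='TestRepo',
    arch='x86_64',
    repos=[
        RepoInfo(
            path='/path/to/test_repo',  # placeholder path
            folder='test_repo',
            is_remote=False,
        )
    ],
)
pg = PackagesGenerator(
    variants=[variant],
    excluded_packages=['zziplib-utils'],
    included_packages=['vim-file*'],
)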