LU-2133: Prepare CI for iso builds of CLOSS 8

@BS-TARGET-CL8
@BS-NOBUILD

- Added a script that collects packages/modules from remote repos
  (including BS repos) and merges them into one local repo with the
  right repodata (including modules.yaml.gz)
- The `create_packages_json` script can now use regexps in the list of excluded packages
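
A minimal sketch of the intended flow (the auth token is a placeholder; the
build id, local repo path and package/module names are examples taken from
the CLI help text and the test fixture):

    from pungi.scripts.create_extra_repo import CreateExtraRepo

    repos = CreateExtraRepo.get_repo_info_from_bs_repo(
        auth_token='<bs-token>',
        build_id='601809b3c2f5b0e458b14cd3',
        arch='x86_64',
        packages=['perl-App-cpanminus'],
        modules=['perl-App-cpanminus'],
    )
    cer = CreateExtraRepo(
        repos=repos,
        bs_auth_token='<bs-token>',
        local_repository_path='/var/repo/test_repo',
        clear_target_repo=True,
    )
    cer.create_extra_repo()

The same flow is exposed via the new pungi-create-extra-repo entry point;
its --repo, --repo-folder, --repo-arch, --packages and --modules options are
repeatable and are zipped together, one set per repository, with --packages
and --modules taking space-separated names.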

Change-Id: I1365b712460959db6bb451d1199d640bff6ffe5e
soksanichenko 2021-02-05 01:20:14 +02:00
parent 3b5501b4bf
commit 5434d24027
10 changed files with 724 additions and 12 deletions


@ -126,6 +126,7 @@ rm %{buildroot}%{_bindir}/%{name}-fedmsg-notification
%{_bindir}/%{name}-gather-rpms
%{_bindir}/%{name}-gather-modules
%{_bindir}/%{name}-generate-packages-json
%{_bindir}/%{name}-create-extra-repo
%{_bindir}/comps_filter
%{_bindir}/%{name}-make-ostree
%{_mandir}/man1/pungi.1.gz


@ -0,0 +1,422 @@
# coding=utf-8
import argparse
import os
import subprocess
import tempfile
from shutil import rmtree
from typing import AnyStr, List, Dict, Optional
import createrepo_c as cr
import requests
import yaml
from dataclasses import dataclass, field
from .create_packages_json import PackagesGenerator, RepoInfo
@dataclass
class ExtraRepoInfo(RepoInfo):
modules: List[AnyStr] = field(default_factory=list)
packages: List[AnyStr] = field(default_factory=list)
is_remote: bool = True
class CreateExtraRepo(PackagesGenerator):
def __init__(
self,
repos: List[ExtraRepoInfo],
bs_auth_token: AnyStr,
local_repository_path: AnyStr,
clear_target_repo: bool = True,
):
self.repos = [] # type: List[ExtraRepoInfo]
super().__init__(repos, [])
self.auth_headers = {
'Authorization': f'Bearer {bs_auth_token}',
}
# modules data of modules.yaml.gz from an existing local repo
self.local_modules_data = []
self.local_repository_path = local_repository_path
# path to modules.yaml, which is generated by this class
self.default_modules_yaml_path = os.path.join(
local_repository_path,
'modules.yaml',
)
if clear_target_repo:
if os.path.exists(self.local_repository_path):
rmtree(self.local_repository_path)
os.makedirs(self.local_repository_path, exist_ok=True)
else:
self._read_local_modules_yaml()
def _read_local_modules_yaml(self):
"""
Read modules data from an existing local repo
"""
repomd_file_path = os.path.join(
self.local_repository_path,
'repodata',
'repomd.xml',
)
repomd_object = self._parse_repomd(repomd_file_path)
for repomd_record in repomd_object.records:
if repomd_record.type != 'modules':
continue
modules_yaml_path = os.path.join(
self.local_repository_path,
repomd_record.location_href,
)
self.local_modules_data = list(self._parse_modules_file(
modules_yaml_path,
))
break
def _dump_local_modules_yaml(self):
"""
Dump merged modules data to a local repo
"""
with open(self.default_modules_yaml_path, 'w') as yaml_file:
yaml.dump_all(
self.local_modules_data,
yaml_file,
)
@staticmethod
def get_repo_info_from_bs_repo(
auth_token: AnyStr,
build_id: AnyStr,
arch: AnyStr,
packages: Optional[List[AnyStr]] = None,
modules: Optional[List[AnyStr]] = None,
) -> List[ExtraRepoInfo]:
"""
Get info about a BS repo and save it to
ExtraRepoInfo objects
:param auth_token: auth token for the Build System
:param build_id: ID of a build in the BS
:param arch: architecture of the repo which will be used
:param packages: names of the packages which will be put into the
local repo from a BS repo
:param modules: names of the modules which will be put into the
local repo from a BS repo
:return: list of ExtraRepoInfo objects with info about the BS repos
"""
bs_url = 'https://build.cloudlinux.com'
api_uri = 'api/v1'
bs_repo_suffix = 'build_repos'
repos_info = []
# get the full info about a BS repo
repo_request = requests.get(
url=os.path.join(
bs_url,
api_uri,
'builds',
build_id,
),
headers={
'Authorization': f'Bearer {auth_token}',
},
)
repo_request.raise_for_status()
result = repo_request.json()
for build_platform in result['build_platforms']:
platform_name = build_platform['name']
for architecture in build_platform['architectures']:
# skip repo with unsuitable architecture
if architecture != arch:
continue
repo_info = ExtraRepoInfo(
path=os.path.join(
bs_url,
bs_repo_suffix,
build_id,
platform_name,
),
folder=architecture,
name=f'{build_id}-{platform_name}-{architecture}',
arch=architecture,
is_remote=True,
packages=packages,
modules=modules,
)
repos_info.append(repo_info)
return repos_info
def _create_local_extra_repo(self):
"""
Call `createrepo_c <path_to_repo>` to create a local repo
"""
subprocess.call(
f'createrepo_c {self.local_repository_path}',
shell=True,
)
# remove an unnecessary temporary modules.yaml
if os.path.exists(self.default_modules_yaml_path):
os.remove(self.default_modules_yaml_path)
def _get_remote_file_content(
self,
file_url: AnyStr,
) -> AnyStr:
"""
Get content from a remote file and write it to a temp file
:param file_url: url of a remote file
:return: path to a temp file
"""
file_request = requests.get(
url=file_url,
# for the case when we get a file from BS
headers=self.auth_headers,
)
file_request.raise_for_status()
with tempfile.NamedTemporaryFile(delete=False) as file_stream:
file_stream.write(file_request.content)
return file_stream.name
def _download_rpm_to_local_repo(
self,
package_location: AnyStr,
repo_info: ExtraRepoInfo,
) -> None:
"""
Download an RPM package from a remote repo and save it to a local repo
:param package_location: relative URI of a package in a remote repo
:param repo_info: info about the remote repo which contains the specific
RPM package
"""
rpm_package_remote_path = os.path.join(
repo_info.path,
repo_info.folder,
package_location,
)
rpm_package_local_path = os.path.join(
self.local_repository_path,
os.path.basename(package_location),
)
rpm_request = requests.get(
url=rpm_package_remote_path,
headers=self.auth_headers,
)
rpm_request.raise_for_status()
with open(rpm_package_local_path, 'wb') as rpm_file:
rpm_file.write(rpm_request.content)
def _download_packages(
self,
packages: Dict[AnyStr, cr.Package],
repo_info: ExtraRepoInfo
):
"""
Download all defined packages from a remote repo
:param packages: information about all packages (including
modular ones) in a remote repo
:param repo_info: information about a remote repo
"""
for package in packages.values():
package_name = package.name
# Skip the current package if a package list was defined
# and the current package is not in it
if repo_info.packages and \
package_name not in repo_info.packages:
continue
self._download_rpm_to_local_repo(
package_location=package.location_href,
repo_info=repo_info,
)
def _download_modules(
self,
modules_data: List[Dict],
repo_info: ExtraRepoInfo,
packages: Dict[AnyStr, cr.Package]
):
"""
Download all defined modular packages and their data from
a remote repo
:param modules_data: information about all modules in a remote repo
:param repo_info: information about a remote repo
:param packages: information about all packages (including
modular ones) in a remote repo
"""
for module in modules_data:
module_data = module['data']
# Skip the current module if a module list was defined
# and the current module is not in it
if repo_info.modules and \
module_data['name'] not in repo_info.modules:
continue
# we should add info about a module if the local repodata
# doesn't have it
if module not in self.local_modules_data:
self.local_modules_data.append(module)
# just skip a module's record if it doesn't have RPM artifacts
if module['document'] != 'modulemd' or \
'artifacts' not in module_data or \
'rpms' not in module_data['artifacts']:
continue
for rpm in module['data']['artifacts']['rpms']:
# Empty repo_info.packages means that we download
# all packages from the repo, including
# the modular ones
if not repo_info.packages:
break
# skip an RPM if it doesn't belong to the processed repo
if rpm not in packages:
continue
self._download_rpm_to_local_repo(
package_location=packages[rpm].location_href,
repo_info=repo_info,
)
def create_extra_repo(self):
"""
1. Get the specific (or all) packages/modules from the remote repos
2. Save them to a local repo
3. Save info about the modules to the local repo
4. Call `createrepo_c`, which creates the local repo
with the right repodata
"""
for repo_info in self.repos:
packages = {} # type: Dict[AnyStr, cr.Package]
repomd_records = self._get_repomd_records(
repo_info=repo_info,
)
# parse the repodata (including modules.yaml.gz)
modules_data = self._parse_repomd_records(
repo_info=repo_info,
repomd_records=repomd_records,
packages=packages,
)
# convert the packages dict to a more usable form
# for later checking that an RPM from a module's artifacts
# belongs to the processed repository
packages = {
f'{package.name}-{package.epoch}:{package.version}-'
f'{package.release}.{package.arch}':
package for package in packages.values()
}
self._download_modules(
modules_data=modules_data,
repo_info=repo_info,
packages=packages,
)
self._download_packages(
packages=packages,
repo_info=repo_info,
)
self._dump_local_modules_yaml()
self._create_local_extra_repo()
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--bs-auth-token',
help='Auth token for Build System',
required=True,
)
parser.add_argument(
'--local-repo-path',
help='Path to a local repo. E.g. /var/repo/test_repo',
required=True,
)
parser.add_argument(
'--clear-local-repo',
help='Clear the local repo before creating a new one',
action='store_true',
default=False,
)
parser.add_argument(
'--repo',
action='append',
help='Path to a folder with repo folders or a build ID. E.g. '
'"http://koji.cloudlinux.com/mirrors/rhel_mirror" or '
'"601809b3c2f5b0e458b14cd3"',
required=True,
)
parser.add_argument(
'--repo-folder',
action='append',
help='A folder which contains the repodata folder. E.g. "baseos-stream"',
required=True,
)
parser.add_argument(
'--repo-arch',
action='append',
help='Architecture of the packages in a repository. E.g. "x86_64"',
required=True,
)
parser.add_argument(
'--packages',
action='append',
type=str,
default=[],
help='A list of package names which we want to download to the local '
'extra repo. All packages are downloaded if the param is empty',
required=True,
)
parser.add_argument(
'--modules',
action='append',
type=str,
default=[],
help='A list of module names which we want to download to the local '
'extra repo. All modules are downloaded if the param is empty',
required=True,
)
return parser
def cli_main():
args = create_parser().parse_args()
repos_info = []
for repo, repo_folder, repo_arch, packages, modules in zip(
args.repo,
args.repo_folder,
args.repo_arch,
args.packages,
args.modules,
):
modules = modules.split()
packages = packages.split()
if repo.startswith('http://'):
repos_info.append(
ExtraRepoInfo(
path=repo,
folder=repo_folder,
name=repo_folder,
arch=repo_arch,
modules=modules,
packages=packages,
)
)
else:
repos_info.extend(
CreateExtraRepo.get_repo_info_from_bs_repo(
auth_token=args.bs_auth_token,
build_id=repo,
arch=repo_arch,
modules=modules,
packages=packages,
)
)
cer = CreateExtraRepo(
repos=repos_info,
bs_auth_token=args.bs_auth_token,
local_repository_path=args.local_repo_path,
clear_target_repo=args.clear_local_repo,
)
cer.create_extra_repo()
if __name__ == '__main__':
cli_main()


@ -7,16 +7,19 @@ https://github.com/rpm-software-management/createrepo_c/blob/master/examples/pyt
"""
import argparse
import gzip
import json
import os
import re
import tempfile
from collections import defaultdict
from typing import AnyStr, Dict, List
from typing import AnyStr, Dict, List, Optional
import createrepo_c as cr
import dnf.subject
import hawkey
import requests
import yaml
from dataclasses import dataclass
@ -141,6 +144,23 @@ class PackagesGenerator:
warningcb=self._warning_callback,
)
@staticmethod
def _parse_modules_file(
modules_file_path: AnyStr,
) -> List[Dict]:
"""
Parse modules.yaml.gz and return the parsed data
:param modules_file_path: path to a local modules.yaml.gz
:return: list of dicts, one per module in a repo
"""
with open(modules_file_path, 'rb') as modules_file:
uncompressed_data = gzip.decompress(modules_file.read())
return yaml.load_all(
uncompressed_data,
Loader=yaml.BaseLoader,
)
def _get_repomd_records(
self,
repo_info: RepoInfo,
@ -170,19 +190,23 @@ class PackagesGenerator:
repo_info: RepoInfo,
repomd_records: List[cr.RepomdRecord],
packages: Dict[AnyStr, cr.Package],
) -> None:
) -> Optional[List[Dict]]:
"""
Parse repomd records and extract package info from the repodata files
:param repo_info: structure which contains info about the current repo
:param repomd_records: list of repomd records
:param packages: dictionary which will contain info about packages
from the repository
:return: list of dicts, one per module in a repo, if it contains
module info, otherwise an empty list
"""
modules_data = []
for repomd_record in repomd_records:
if repomd_record.type not in (
'primary',
'filelists',
'other',
'modules',
):
continue
repomd_record_file_path = os.path.join(
@ -194,16 +218,22 @@ class PackagesGenerator:
repomd_record_file_path = self._get_remote_file_content(
repomd_record_file_path,
)
parse_file_method = getattr(
self,
f'_parse_{repomd_record.type}_file'
)
parse_file_method(
repomd_record_file_path,
packages,
)
if repomd_record.type == 'modules':
modules_data = self._parse_modules_file(
repomd_record_file_path,
)
else:
parse_file_method = getattr(
self,
f'_parse_{repomd_record.type}_file'
)
parse_file_method(
repomd_record_file_path,
packages,
)
if repo_info.is_remote:
os.remove(repomd_record_file_path)
return list(modules_data)
def generate_packages_json(
self
@ -219,7 +249,7 @@ class PackagesGenerator:
)
)
for repo_info in self.repos:
packages = {}
packages = {} # type: Dict[AnyStr, cr.Package]
repomd_records = self._get_repomd_records(
repo_info=repo_info,
)
@ -233,7 +263,8 @@ class PackagesGenerator:
package_arch = package.arch
if 'module' in package.release:
continue
if package_name in self.excluded_packages:
if any(re.search(excluded_package, package_name)
for excluded_package in self.excluded_packages):
continue
src_package_name = dnf.subject.Subject(
package.rpm_sourcerpm,

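The exclusion check above treats each entry of excluded_packages as a regular
expression matched with re.search(); a minimal illustration (the patterns and
package names below are hypothetical):

    import re

    # hypothetical exclusion patterns and package names
    excluded_packages = [r'^kernel-debug', r'.*-devel$']
    for package_name in ['kernel-debug-core', 'openssl-devel', 'bash']:
        excluded = any(
            re.search(excluded_package, package_name)
            for excluded_package in excluded_packages
        )
        print(package_name, 'excluded' if excluded else 'kept')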

@ -50,6 +50,7 @@ setup(
"pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
"pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
"pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main", # noqa: E501
"pungi-create-extra-repo = pungi.scripts.create_extra_repo:cli_main"
]
},
scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],


@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
<revision>1612479076</revision>
<data type="primary">
<checksum type="sha256">08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb</checksum>
<open-checksum type="sha256">2a15e618f049a883d360ccbf3e764b30640255f47dc526c633b1722fe23cbcbc</open-checksum>
<location href="repodata/08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb-primary.xml.gz"/>
<timestamp>1612479075</timestamp>
<size>1240</size>
<open-size>3888</open-size>
</data>
<data type="filelists">
<checksum type="sha256">e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f</checksum>
<open-checksum type="sha256">b1782bc4207a5b7c3e64115d5a1d001802e8d363f022ea165df7cdab6f14651c</open-checksum>
<location href="repodata/e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f-filelists.xml.gz"/>
<timestamp>1612479075</timestamp>
<size>439</size>
<open-size>1295</open-size>
</data>
<data type="other">
<checksum type="sha256">92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1</checksum>
<open-checksum type="sha256">3b847919691ad32279b13463de6c08f1f8b32f51e87b7d8d7e95a3ec2f46ef51</open-checksum>
<location href="repodata/92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1-other.xml.gz"/>
<timestamp>1612479075</timestamp>
<size>630</size>
<open-size>1911</open-size>
</data>
<data type="modules">
<checksum type="sha256">e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57</checksum>
<open-checksum type="sha256">d59fee86c18018cc18bb7325aa74aa0abf923c64d29a4ec45e08dcd01a0c3966</open-checksum>
<location href="repodata/e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57-modules.yaml.gz"/>
<timestamp>1612479075</timestamp>
<size>920</size>
<open-size>3308</open-size>
</data>
</repomd>


@ -0,0 +1,221 @@
# coding=utf-8
import os
from unittest import TestCase, mock, main
import yaml
from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraRepoInfo
FOLDER_WITH_TEST_DATA = os.path.join(
os.path.dirname(
os.path.abspath(__file__)
),
'data/test_create_extra_repo/',
)
TEST_MODULE_INFO = yaml.load("""
---
document: modulemd
version: 2
data:
name: perl-App-cpanminus
stream: 1.7044
version: 8030020210126085450
context: 3a33b840
arch: x86_64
summary: Get, unpack, build and install CPAN modules
description: >
This is a CPAN client that requires zero configuration, and stands alone but it's
maintainable and extensible with plug-ins and friendly to shell scripting.
license:
module:
- MIT
content:
- (GPL+ or Artistic) and GPLv2+
- ASL 2.0
- GPL+ or Artistic
dependencies:
- buildrequires:
perl: [5.30]
platform: [el8.3.0]
requires:
perl: [5.30]
perl-YAML: []
platform: [el8]
references:
community: https://metacpan.org/release/App-cpanminus
profiles:
common:
description: App-cpanminus distribution
rpms:
- perl-App-cpanminus
api:
rpms:
- perl-App-cpanminus
filter:
rpms:
- perl-CPAN-DistnameInfo-dummy
- perl-Test-Deep
buildopts:
rpms:
macros: >
%_without_perl_CPAN_Meta_Check_enables_extra_test 1
components:
rpms:
perl-App-cpanminus:
rationale: The API.
ref: perl-App-cpanminus-1.7044-5.module+el8.2.0+4278+abcfa81a.src.rpm
buildorder: 1
arches: [i686, x86_64]
perl-CPAN-DistnameInfo:
rationale: Run-time dependency.
ref: stream-0.12-rhel-8.3.0
arches: [i686, x86_64]
perl-CPAN-Meta-Check:
rationale: Run-time dependency.
ref: perl-CPAN-Meta-Check-0.014-6.module+el8.2.0+4278+abcfa81a.src.rpm
buildorder: 1
arches: [i686, x86_64]
perl-File-pushd:
rationale: Run-time dependency.
ref: perl-File-pushd-1.014-6.module+el8.2.0+4278+abcfa81a.src.rpm
arches: [i686, x86_64]
perl-Module-CPANfile:
rationale: Run-time dependency.
ref: perl-Module-CPANfile-1.1002-7.module+el8.2.0+4278+abcfa81a.src.rpm
arches: [i686, x86_64]
perl-Parse-PMFile:
rationale: Run-time dependency.
ref: perl-Parse-PMFile-0.41-7.module+el8.2.0+4278+abcfa81a.src.rpm
arches: [i686, x86_64]
perl-String-ShellQuote:
rationale: Run-time dependency.
ref: perl-String-ShellQuote-1.04-24.module+el8.2.0+4278+abcfa81a.src.rpm
arches: [i686, x86_64]
perl-Test-Deep:
rationale: Build-time dependency.
ref: stream-1.127-rhel-8.3.0
arches: [i686, x86_64]
artifacts:
rpms:
- perl-App-cpanminus-0:1.7044-5.module_el8.3.0+2027+c8990d1d.noarch
- perl-App-cpanminus-0:1.7044-5.module_el8.3.0+2027+c8990d1d.src
- perl-CPAN-Meta-Check-0:0.014-6.module_el8.3.0+2027+c8990d1d.noarch
- perl-CPAN-Meta-Check-0:0.014-6.module_el8.3.0+2027+c8990d1d.src
- perl-File-pushd-0:1.014-6.module_el8.3.0+2027+c8990d1d.noarch
- perl-File-pushd-0:1.014-6.module_el8.3.0+2027+c8990d1d.src
- perl-Module-CPANfile-0:1.1002-7.module_el8.3.0+2027+c8990d1d.noarch
- perl-Module-CPANfile-0:1.1002-7.module_el8.3.0+2027+c8990d1d.src
- perl-Parse-PMFile-0:0.41-7.module_el8.3.0+2027+c8990d1d.noarch
- perl-Parse-PMFile-0:0.41-7.module_el8.3.0+2027+c8990d1d.src
- perl-String-ShellQuote-0:1.04-24.module_el8.3.0+2027+c8990d1d.noarch
- perl-String-ShellQuote-0:1.04-24.module_el8.3.0+2027+c8990d1d.src
...
""", Loader=yaml.BaseLoader)
TEST_REPO_INFO = ExtraRepoInfo(
path=FOLDER_WITH_TEST_DATA,
folder='test_repo',
name='TestRepo',
arch='x86_64',
is_remote=False,
packages=[],
modules=[],
)
BS_BUILD_INFO = {
'build_platforms': [
{
'architectures': ['non_fake_arch', 'fake_arch'],
'name': 'fake_platform'
}
]
}
class TestCreateExtraRepo(TestCase):
maxDiff = None
def test_01_get_repo_info_from_bs_repo(self):
auth_token = 'fake_auth_token'
build_id = 'fake_build_id'
arch = 'fake_arch'
packages = ['fake_package1', 'fake_package2']
modules = ['fake_module1', 'fake_module2']
request_object = mock.Mock()
request_object.raise_for_status = lambda: True
request_object.json = lambda: BS_BUILD_INFO
with mock.patch(
'pungi.scripts.create_extra_repo.requests.get',
return_value=request_object,
) as mock_request_get:
repos_info = CreateExtraRepo.get_repo_info_from_bs_repo(
auth_token=auth_token,
build_id=build_id,
arch=arch,
packages=packages,
modules=modules,
)
self.assertEqual(
[
ExtraRepoInfo(
path='https://build.cloudlinux.com/'
f'build_repos/{build_id}/fake_platform',
folder=arch,
name=f'{build_id}-fake_platform-{arch}',
arch=arch,
is_remote=True,
packages=packages,
modules=modules,
)
],
repos_info,
)
mock_request_get.assert_called_once_with(
url=f'https://build.cloudlinux.com/api/v1/builds/{build_id}',
headers={
'Authorization': f'Bearer {auth_token}',
}
)
def test_02_create_extra_repo(self):
with mock.patch(
'pungi.scripts.create_extra_repo.'
'CreateExtraRepo._read_local_modules_yaml',
return_value=[],
) as mock__read_local_modules_yaml, mock.patch(
'pungi.scripts.create_extra_repo.'
'CreateExtraRepo._download_rpm_to_local_repo',
) as mock__download_rpm_to_local_repo, mock.patch(
'pungi.scripts.create_extra_repo.'
'CreateExtraRepo._dump_local_modules_yaml'
) as mock__dump_local_modules_yaml, mock.patch(
'pungi.scripts.create_extra_repo.'
'CreateExtraRepo._create_local_extra_repo'
) as mock__create_local_extra_repo:
cer = CreateExtraRepo(
repos=[TEST_REPO_INFO],
bs_auth_token='fake_auth_token',
local_repository_path='/path/to/local/repo',
clear_target_repo=False,
)
mock__read_local_modules_yaml.assert_called_once_with()
cer.create_extra_repo()
mock__download_rpm_to_local_repo.assert_called_once_with(
package_location='perl-App-cpanminus-1.7044-5.'
'module_el8.3.0+2027+c8990d1d.noarch.rpm',
repo_info=TEST_REPO_INFO,
)
mock__dump_local_modules_yaml.assert_called_once_with()
mock__create_local_extra_repo.assert_called_once_with()
self.assertEqual(
[TEST_MODULE_INFO],
cer.local_modules_data,
)
if __name__ == '__main__':
main()