Update code to pass around a list of pkgsets

This opens up a path to having multiple package sets in the compose. The
pkgset phase now creates a list of them (although at this time there is
always a single item in that list).

Any consumer of the package set objects is updated to handle a list.
Generally this just means an extra loop.
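
A minimal sketch of the new shape and the extra consumer loop (FakePkg and the
paths and names below are made up for illustration; the real package sets are
kobo file caches mapping file path to a package object, keyed by arch):

    class FakePkg(object):
        def __init__(self, name, file_path):
            self.name = name
            self.file_path = file_path

    # Each package set maps arch -> {file path: package object}.
    global_pkgset = {"x86_64": {"/tmp/bash.rpm": FakePkg("bash", "/tmp/bash.rpm")}}

    # The pkgset phase now hands out a list with a single item (for now).
    package_sets = [global_pkgset]

    # Consumers gain one extra outer loop over that list.
    def find_rpm_paths(package_sets, arch, wanted_name):
        paths = []
        for pkgset in package_sets:
            for pkgset_file in pkgset[arch]:
                pkg = pkgset[arch][pkgset_file]
                if pkg.name == wanted_name:
                    paths.append(pkg.file_path)
        return paths

    print(find_rpm_paths(package_sets, "x86_64", "bash"))  # ['/tmp/bash.rpm']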

JIRA: COMPOSE-3620
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
Lubomír Sedlář 2019-07-25 10:15:14 +02:00
parent a99bf8c828
commit 90393c4b49
13 changed files with 213 additions and 170 deletions

View File

@@ -71,10 +71,11 @@ def copy_extra_files(compose, cfg, arch, variant, package_sets, checksum_type=No
         rpms = []
         pattern = scm_dict["repo"] % var_dict
         pkg_name, pkg_arch = split_name_arch(pattern)
-        for pkgset_file in package_sets[arch]:
-            pkg_obj = package_sets[arch][pkgset_file]
-            if pkg_is_rpm(pkg_obj) and _pkg_matches(pkg_obj, pkg_name, pkg_arch):
-                rpms.append(pkg_obj.file_path)
+        for package_set in package_sets:
+            for pkgset_file in package_set[arch]:
+                pkg_obj = package_set[arch][pkgset_file]
+                if pkg_is_rpm(pkg_obj) and _pkg_matches(pkg_obj, pkg_name, pkg_arch):
+                    rpms.append(pkg_obj.file_path)
         if not rpms:
             raise RuntimeError('No package matching %s in the package set.' % pattern)
         scm_dict["repo"] = rpms

View File

@@ -653,17 +653,13 @@ def get_system_release_packages(compose, arch, variant, package_sets):
     packages = set()
     filter_packages = set()
 
-    if not package_sets or not package_sets.get(arch, None):
-        return packages, filter_packages
-
-    package_set = package_sets[arch]
-
     system_release_packages = set()
-    for i in package_set:
-        pkg = package_set[i]
-
-        if pkg.is_system_release:
-            system_release_packages.add(pkg)
+    for pkgset in package_sets or []:
+        for i in pkgset.get(arch, []):
+            pkg = pkgset[arch][i]
+            if pkg.is_system_release:
+                system_release_packages.add(pkg)
 
     if not system_release_packages:
         return packages, filter_packages

View File

@@ -53,9 +53,16 @@ def get_package_path(filename, hashed_directory=False):
     return filename
 
 
+def _find_by_path(pkg_sets, arch, path):
+    """Find object in a list of package sets by path."""
+    for pkg_set in pkg_sets:
+        if path in pkg_set[arch]:
+            return pkg_set[arch][path]
+    raise RuntimeError("Path %r not found in any package set." % path)
+
+
 def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={}):
     # srpm_map instance is shared between link_files() runs
-    pkg_set = pkg_sets[arch]
 
     msg = "Linking packages (arch: %s, variant: %s)" % (arch, variant)
     compose.log_info("[BEGIN] %s" % msg)
@@ -77,7 +84,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         pool.queue_put((pkg["path"], dst))
 
         # update rpm manifest
-        pkg_obj = pkg_set[pkg["path"]]
+        pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
         nevra = pkg_obj.nevra
         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="source")
@@ -96,7 +103,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         pool.queue_put((pkg["path"], dst))
 
         # update rpm manifest
-        pkg_obj = pkg_set[pkg["path"]]
+        pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
         nevra = pkg_obj.nevra
         src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="binary", srpm_nevra=src_nevra)
@@ -113,7 +120,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         pool.queue_put((pkg["path"], dst))
 
         # update rpm manifest
-        pkg_obj = pkg_set[pkg["path"]]
+        pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
         nevra = pkg_obj.nevra
         src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="debug", srpm_nevra=src_nevra)

View File

@@ -54,16 +54,18 @@ def raise_on_invalid_sigkeys(arch, variant, package_sets, result):
     Raises RuntimeError if some package in compose is signed with an invalid
     sigkey.
     """
-    invalid_sigkey_rpms = []
+    invalid_sigkey_rpms = {}
     for package in result["rpm"]:
         name = parse_nvra(package["path"])["name"]
-        for forbidden_package in package_sets["global"].invalid_sigkey_rpms:
-            if name == forbidden_package["name"]:
-                invalid_sigkey_rpms.append(forbidden_package)
+        for pkgset in package_sets:
+            for forbidden_package in pkgset["global"].invalid_sigkey_rpms:
+                if name == forbidden_package["name"]:
+                    invalid_sigkey_rpms.setdefault(
+                        pkgset["global"].sigkey_ordering, []
+                    ).append(forbidden_package)
 
     if invalid_sigkey_rpms:
-        package_sets["global"].raise_invalid_sigkeys_exception(
-            invalid_sigkey_rpms)
+        package_sets["global"].raise_invalid_sigkeys_exception(invalid_sigkey_rpms)
 
 
 def _format_packages(pkgs):
@@ -137,12 +139,13 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
         # already on the whitelist.
         package_names = set(p.rsplit('-', 2)[0] for p in package_whitelist)
         for i in get_valid_arches(arch, multilib=multilib, add_noarch=True, add_src=True):
-            for rpm_obj in package_sets[arch].rpms_by_arch.get(i, []):
-                if rpm_obj.name in package_names:
-                    # We already have a package with this name in the whitelist, skip it.
-                    continue
-                package_whitelist.add(
-                    '{0.name}-{1}:{0.version}-{0.release}'.format(rpm_obj, rpm_obj.epoch or 0))
+            for pkgset in package_sets:
+                for rpm_obj in pkgset[arch].rpms_by_arch.get(i, []):
+                    if rpm_obj.name in package_names:
+                        # We already have a package with this name in the whitelist, skip it.
+                        continue
+                    package_whitelist.add(
+                        '{0.name}-{1}:{0.version}-{0.release}'.format(rpm_obj, rpm_obj.epoch or 0))
 
     pungi_wrapper.write_kickstart(
         ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str,

View File

@@ -91,9 +91,10 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
         """
         if arch not in self.package_maps:
             pkg_map = {}
-            for pkg_arch in self.package_sets[arch].rpms_by_arch:
-                for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
-                    pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
+            for pkgset in self.package_sets:
+                for pkg_arch in pkgset[arch].rpms_by_arch:
+                    for pkg in pkgset[arch].rpms_by_arch[pkg_arch]:
+                        pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
             self.package_maps[arch] = pkg_map
 
         return self.package_maps[arch]
@@ -117,9 +118,10 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
        indexed by package architecture and then by package name. There can be
        more than one debuginfo package with the same name.
        """
-        for pkg_arch in self.package_sets[self.arch].rpms_by_arch:
-            for pkg in self.package_sets[self.arch].rpms_by_arch[pkg_arch]:
-                self.debuginfo[pkg.arch][pkg.name].add(pkg)
+        for pkgset in self.package_sets:
+            for pkg_arch in pkgset[self.arch].rpms_by_arch:
+                for pkg in pkgset[self.arch].rpms_by_arch[pkg_arch]:
+                    self.debuginfo[pkg.arch][pkg.name].add(pkg)
 
     def _get_debuginfo(self, name, arch):
         if not self.debuginfo:
@@ -131,12 +133,13 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
        of the pattern.
        """
        expanded = set()
-        for pkg_arch in self.package_sets[self.arch].rpms_by_arch:
-            for pkg in self.package_sets[self.arch].rpms_by_arch[pkg_arch]:
-                for pattern in patterns:
-                    if fnmatch(pkg.name, pattern):
-                        expanded.add(pkg)
-                        break
+        for pkgset in self.package_sets:
+            for pkg_arch in pkgset[self.arch].rpms_by_arch:
+                for pkg in pkgset[self.arch].rpms_by_arch[pkg_arch]:
+                    for pattern in patterns:
+                        if fnmatch(pkg.name, pattern):
+                            expanded.add(pkg)
+                            break
         return expanded
 
     def prepare_modular_packages(self):
@@ -154,16 +157,17 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
             # Replace %s with * for fnmatch.
             install_match = install % "*"
             self.langpacks[name] = set()
-            for pkg_arch in self.package_sets[arch].rpms_by_arch:
-                for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
-                    if not fnmatch(pkg.name, install_match):
-                        # Does not match the pattern, ignore...
-                        continue
-                    if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
-                        continue
-                    if pkg_is_debug(pkg):
-                        continue
-                    self.langpacks[name].add(pkg.name)
+            for pkgset in self.package_sets:
+                for pkg_arch in pkgset[arch].rpms_by_arch:
+                    for pkg in pkgset[arch].rpms_by_arch[pkg_arch]:
+                        if not fnmatch(pkg.name, install_match):
+                            # Does not match the pattern, ignore...
+                            continue
+                        if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
+                            continue
+                        if pkg_is_debug(pkg):
+                            continue
+                        self.langpacks[name].add(pkg.name)
 
     def __call__(
         self,

View File

@@ -40,7 +40,6 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
     def worker(self, log, arch, variant, pkgs, groups, filter_packages,
                multilib_whitelist, multilib_blacklist, package_sets,
                path_prefix=None, fulltree_excludes=None, prepopulate=None):
-        pkgset = package_sets[arch]
         result = {
             "rpm": [],
             "srpm": [],
@@ -60,8 +59,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
             compatible_arches[i] = pungi.arch.get_compatible_arches(i)
 
         log.write('\nGathering rpms\n')
-        for i in pkgset:
-            pkg = pkgset[i]
+        for pkg in iterate_packages(package_sets, arch):
             if not pkg_is_rpm(pkg):
                 continue
             for gathered_pkg, pkg_arch in packages:
@@ -82,8 +80,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
                           % (pkg, gathered_pkg, pkg_arch, pkg.sourcerpm))
 
         log.write('\nGathering source rpms\n')
-        for i in pkgset:
-            pkg = pkgset[i]
+        for pkg in iterate_packages(package_sets, arch):
             if not pkg_is_srpm(pkg):
                 continue
             if pkg.file_name in seen_srpms:
@@ -94,8 +91,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
                 log.write('Adding %s\n' % pkg)
 
         log.write('\nGathering debuginfo packages\n')
-        for i in pkgset:
-            pkg = pkgset[i]
+        for pkg in iterate_packages(package_sets, arch):
             if not pkg_is_debug(pkg):
                 continue
             if pkg.sourcerpm not in seen_srpms:
@@ -162,3 +158,9 @@ def expand_groups(compose, arch, variant, groups, set_pkg_arch=True):
             raise ex
 
     return packages
+
+
+def iterate_packages(package_sets, arch):
+    for pkgset in package_sets:
+        for pkg in pkgset[arch]:
+            yield pkgset[arch][pkg]
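
A small standalone sketch of how the new iterate_packages helper behaves; the
toy package sets below (plain strings as package objects) stand in for the
real kobo-backed ones:

    package_sets = [
        {"x86_64": {"/p/foo-1.0.rpm": "foo-pkg-object"}},
        {"x86_64": {"/p/bar-1.0.rpm": "bar-pkg-object"}},
    ]

    def iterate_packages(package_sets, arch):
        for pkgset in package_sets:
            for pkg in pkgset[arch]:
                yield pkgset[arch][pkg]

    print(list(iterate_packages(package_sets, "x86_64")))
    # ['foo-pkg-object', 'bar-pkg-object'] -- order follows the list of package sets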

View File

@@ -28,4 +28,5 @@ class PkgsetPhase(PhaseBase):
         PkgsetSourceContainer.register_module(sources)
         container = PkgsetSourceContainer()
         SourceClass = container[pkgset_source]
-        self.package_sets, self.path_prefix = SourceClass(self.compose)()
+        package_sets, self.path_prefix = SourceClass(self.compose)()
+        self.package_sets = [package_sets]

View File

@@ -85,7 +85,7 @@ class PackageSetBase(kobo.log.LoggingBase):
         super(PackageSetBase, self).__init__(logger=logger)
         self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
         self.old_file_cache = None
-        self.sigkey_ordering = sigkey_ordering or [None]
+        self.sigkey_ordering = tuple(sigkey_ordering or [None])
         self.arches = arches
         self.rpms_by_arch = {}
         self.srpms_by_name = {}
@@ -124,10 +124,16 @@ class PackageSetBase(kobo.log.LoggingBase):
         def nvr_formatter(package_info):
             # joins NVR parts of the package with '-' character.
             return '-'.join((package_info['name'], package_info['version'], package_info['release']))
-        raise RuntimeError(
-            "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
-                self.sigkey_ordering,
-                '\n'.join(sorted(set([nvr_formatter(rpminfo) for rpminfo in rpminfos])))))
+
+        def get_error(sigkeys, infos):
+            return "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
+                sigkeys,
+                '\n'.join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
+            )
+
+        if not isinstance(rpminfos, dict):
+            rpminfos = {self.sigkey_ordering: rpminfos}
+
+        raise RuntimeError("\n".join(get_error(k, v) for k, v in rpminfos.items()))
 
     def read_packages(self, rpms, srpms):
         srpm_pool = ReaderPool(self, self._logger)
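
A rough standalone sketch of the rpminfos shape the reworked error path
accepts; the sigkey value and package dict here are made up:

    def nvr_formatter(package_info):
        return '-'.join((package_info['name'], package_info['version'], package_info['release']))

    def get_error(sigkeys, infos):
        return "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
            sigkeys,
            '\n'.join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
        )

    # One entry per sigkey ordering that had packages without a valid signature.
    rpminfos = {
        ("deadbeef", None): [{"name": "bash", "version": "5.0", "release": "1.fc30"}],
    }
    print("\n".join(get_error(k, v) for k, v in rpminfos.items()))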

View File

@@ -259,14 +259,15 @@ def explode_anaconda(compose, arch, variant, package_sets):
     # if scm is "rpm" and repo contains a package name, find the package(s) in package set
     if scm_dict["scm"] == "rpm" and not (scm_dict["repo"].startswith("/") or "://" in scm_dict["repo"]):
         rpms = []
-        for pkgset_file in package_sets[arch]:
-            pkg_obj = package_sets[arch][pkgset_file]
-            if not pkg_is_rpm(pkg_obj):
-                continue
-            pkg_name, pkg_arch = split_name_arch(scm_dict["repo"])
-            if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
-                compose.log_critical("%s %s %s" % (pkg_obj.name, pkg_name, pkg_arch))
-                rpms.append(pkg_obj.file_path)
+        for pkgset in package_sets:
+            for pkgset_file in pkgset[arch]:
+                pkg_obj = pkgset[arch][pkgset_file]
+                if not pkg_is_rpm(pkg_obj):
+                    continue
+                pkg_name, pkg_arch = split_name_arch(scm_dict["repo"])
+                if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
+                    compose.log_critical("%s %s %s" % (pkg_obj.name, pkg_name, pkg_arch))
+                    rpms.append(pkg_obj.file_path)
         scm_dict["repo"] = rpms
 
     if not rpms:

View File

@@ -123,11 +123,15 @@ class TestCopyFiles(helpers.PungiTestCase):
         src_po.configure_mock(name='extra-data-1.1-1.fc24.src.rpm',
                               file_path='/src/location',
                               arch='src')
-        package_sets = {
-            'x86_64': {server_po.name: server_po,
-                       client_po.name: client_po,
-                       src_po.name: src_po}
-        }
+        package_sets = [
+            {
+                "x86_64": {
+                    server_po.name: server_po,
+                    client_po.name: client_po,
+                    src_po.name: src_po,
+                },
+            },
+        ]
 
         get_file_from_scm.side_effect = self.fake_get_file
@@ -153,7 +157,7 @@ class TestCopyFiles(helpers.PungiTestCase):
     def test_copy_from_non_existing_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm):
         compose = helpers.DummyCompose(self.topdir, {})
         cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'bad-%(variant_uid_lower)s*'}
-        package_sets = {'x86_64': {}}
+        package_sets = [{"x86_64": {}}]
 
         with self.assertRaises(RuntimeError) as ctx:
             extra_files.copy_extra_files(

View File

@@ -44,7 +44,7 @@ class TestMethodHybrid(helpers.PungiTestCase):
         )
         CW.return_value.get_langpacks.return_value = {"glibc": "glibc-langpack-%s"}
         eg.return_value = ["foo", "bar"]
-        package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": [pkg]})}
+        package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": [pkg]})}]
 
         arch = "x86_64"
         variant = compose.variants["Server"]
@@ -103,50 +103,52 @@ class TestMethodHybrid(helpers.PungiTestCase):
         compose = helpers.DummyCompose(self.topdir, {})
         CW.return_value.get_langpacks.return_value = {"foo": "foo-%s"}
         m = hybrid.GatherMethodHybrid(compose)
-        m.package_sets = {
-            "x86_64": mock.Mock(
-                rpms_by_arch={
-                    "x86_64": [
-                        MockPkg(
-                            name="foo",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                        MockPkg(
-                            name="foo-en",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                        MockPkg(
-                            name="foo-devel",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                        MockPkg(
-                            name="foo-debuginfo",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                    ]
-                }
-            )
-        }
+        m.package_sets = [
+            {
+                "x86_64": mock.Mock(
+                    rpms_by_arch={
+                        "x86_64": [
+                            MockPkg(
+                                name="foo",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                            MockPkg(
+                                name="foo-en",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                            MockPkg(
+                                name="foo-devel",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                            MockPkg(
+                                name="foo-debuginfo",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                        ]
+                    }
+                )
+            }
+        ]
 
         m.prepare_langpacks("x86_64", compose.variants["Server"])
 
         self.assertEqual(m.langpacks, {"foo": set(["foo-en"])})
@@ -155,41 +157,43 @@ class TestMethodHybrid(helpers.PungiTestCase):
         compose = helpers.DummyCompose(self.topdir, {})
         m = hybrid.GatherMethodHybrid(compose)
         m.arch = "x86_64"
-        m.package_sets = {
-            "x86_64": mock.Mock(
-                rpms_by_arch={
-                    "x86_64": [
-                        MockPkg(
-                            name="foo",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                        MockPkg(
-                            name="foo-en",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                        MockPkg(
-                            name="bar",
-                            version="1",
-                            release="2",
-                            arch="x86_64",
-                            epoch=0,
-                            sourcerpm=None,
-                            file_path=None,
-                        ),
-                    ]
-                }
-            )
-        }
+        m.package_sets = [
+            {
+                "x86_64": mock.Mock(
+                    rpms_by_arch={
+                        "x86_64": [
+                            MockPkg(
+                                name="foo",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                            MockPkg(
+                                name="foo-en",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                            MockPkg(
+                                name="bar",
+                                version="1",
+                                release="2",
+                                arch="x86_64",
+                                epoch=0,
+                                sourcerpm=None,
+                                file_path=None,
+                            ),
+                        ]
+                    }
+                )
+            }
+        ]
 
         expanded = m.expand_list(["foo*"])
 
         self.assertItemsEqual([p.name for p in expanded], ["foo", "foo-en"])
@@ -357,7 +361,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
         }
         po.return_value = ([("p-1-1", "x86_64", frozenset())], ["m1"])
         self.phase.packages = {"p-1-1.x86_64": mock.Mock()}
-        self.phase.package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}
+        self.phase.package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}]
 
         res = self.phase.run_solver(
             self.compose.variants["Server"],
@@ -524,7 +528,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             "pkg-1.0-1.x86_64": mock.Mock(),
             "pkg-en-1.0-1.noarch": mock.Mock(),
         }
-        self.phase.package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}
+        self.phase.package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}]
 
         res = self.phase.run_solver(
             self.compose.variants["Server"],

View File

@@ -297,7 +297,8 @@ class TestGetSystemRelease(unittest.TestCase):
     def test_no_system_release_package(self):
         pkgset = MockPackageSet(MockPkg('/build/bash-1.0.0-1.x86_64.rpm'))
         packages, filter_packages = gather.get_system_release_packages(
-            self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
+            self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
+        )
         self.assertItemsEqual(packages, [])
         self.assertItemsEqual(filter_packages, [])
@@ -307,7 +308,8 @@ class TestGetSystemRelease(unittest.TestCase):
             MockPkg('/build/dummy-1.0.0-1.x86_64.rpm', is_system_release=True),
         )
         packages, filter_packages = gather.get_system_release_packages(
-            self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
+            self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
+        )
         self.assertItemsEqual(packages, [('dummy', None)])
         self.assertItemsEqual(filter_packages, [])
@@ -318,7 +320,8 @@ class TestGetSystemRelease(unittest.TestCase):
             MockPkg('/build/system-release-server-1.0.0-1.x86_64.rpm', is_system_release=True),
         )
         packages, filter_packages = gather.get_system_release_packages(
-            self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
+            self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
+        )
         self.assertItemsEqual(packages, [('system-release-server', None)])
         self.assertItemsEqual(filter_packages, [('system-release', None)])
@@ -329,7 +332,8 @@ class TestGetSystemRelease(unittest.TestCase):
             MockPkg('/build/system-release-bar-1.0.0-1.x86_64.rpm', is_system_release=True),
         )
         packages, filter_packages = gather.get_system_release_packages(
-            self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
+            self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
+        )
 
         # In this case a random package is picked, so let's check that both
         # list contain one package and that they are different.
@@ -344,7 +348,8 @@ class TestGetSystemRelease(unittest.TestCase):
             MockPkg('/build/system-release-client-1.0.0-1.x86_64.rpm', is_system_release=True),
         )
         packages, filter_packages = gather.get_system_release_packages(
-            self.compose, 'x86_64', self.addon, {'x86_64': pkgset})
+            self.compose, "x86_64", self.addon, [{"x86_64": pkgset}]
+        )
         self.assertItemsEqual(packages, [('system-release-server', None)])
         self.assertItemsEqual(filter_packages,
@@ -490,7 +495,11 @@ class TestGetVariantPackages(helpers.PungiTestCase):
         )
         packages, groups, filter_packages = gather.get_variant_packages(
-            compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
+            compose,
+            "x86_64",
+            compose.variants["Server"], "comps",
+            package_sets=[{"x86_64": pkgset}],
+        )
         self.assertItemsEqual(packages, [('system-release-server', None)])
         self.assertItemsEqual(groups, [])
         self.assertItemsEqual(filter_packages, [('system-release', None)])
@@ -509,7 +518,12 @@ class TestGetVariantPackages(helpers.PungiTestCase):
         )
         packages, groups, filter_packages = gather.get_variant_packages(
-            compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
+            compose,
+            "x86_64",
+            compose.variants["Server"],
+            "comps",
+            package_sets=[{"x86_64": pkgset}],
+        )
         self.assertItemsEqual(packages, [])
         self.assertItemsEqual(groups, [])
         self.assertItemsEqual(filter_packages, [])

View File

@@ -78,7 +78,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         super(TestPopulateGlobalPkgset, self).setUp()
         self.compose = helpers.DummyCompose(self.topdir, {
             'pkgset_koji_tag': 'f25',
-            'sigkeys': mock.Mock(),
+            'sigkeys': ["foo", "bar"],
         })
         self.koji_wrapper = mock.Mock()
         self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle')
@@ -125,7 +125,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
     def test_populate_with_multiple_koji_tags(self, KojiPackageSet, pickle_dumps):
         self.compose = helpers.DummyCompose(self.topdir, {
             'pkgset_koji_tag': ['f25', 'f25-extra'],
-            'sigkeys': mock.Mock(),
+            'sigkeys': ["foo", "bar"],
         })
 
         pickle_dumps.return_value = b'DATA'
@@ -175,7 +175,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         self.compose = helpers.DummyCompose(self.topdir, {
             'gather_method': 'nodeps',
             'pkgset_koji_tag': 'f25',
-            'sigkeys': mock.Mock(),
+            'sigkeys': ["foo", "bar"],
             'additional_packages': [
                 ('.*', {'*': ['pkg', 'foo.x86_64']}),
             ]