Update code to pass around a list of pkgsets

This opens up a path to having multiple package sets in the compose. The
pkgset phase now creates a list of them (although at this time there is
always a single item in that list).

Any consumer of the package set objects is updated to handle a list.
Generally this means an extra loop.

JIRA: COMPOSE-3620
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
Lubomír Sedlář 2019-07-25 10:15:14 +02:00
parent a99bf8c828
commit 90393c4b49
13 changed files with 213 additions and 170 deletions
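As a standalone illustration of the pattern described in the commit message (not part of the diff; the paths, values and print() calls are placeholders):

# Before: a single mapping from arch to {file path: package object}.
package_sets = {"x86_64": {"/tmp/foo-1.0-1.x86_64.rpm": "foo package object"}}
for path in package_sets["x86_64"]:
    print(package_sets["x86_64"][path])

# After: a list of such mappings; every consumer gains one outer loop.
package_sets = [{"x86_64": {"/tmp/foo-1.0-1.x86_64.rpm": "foo package object"}}]
for package_set in package_sets:
    for path in package_set["x86_64"]:
        print(package_set["x86_64"][path])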

View File

@ -71,10 +71,11 @@ def copy_extra_files(compose, cfg, arch, variant, package_sets, checksum_type=No
rpms = []
pattern = scm_dict["repo"] % var_dict
pkg_name, pkg_arch = split_name_arch(pattern)
for pkgset_file in package_sets[arch]:
pkg_obj = package_sets[arch][pkgset_file]
if pkg_is_rpm(pkg_obj) and _pkg_matches(pkg_obj, pkg_name, pkg_arch):
rpms.append(pkg_obj.file_path)
for package_set in package_sets:
for pkgset_file in package_set[arch]:
pkg_obj = package_set[arch][pkgset_file]
if pkg_is_rpm(pkg_obj) and _pkg_matches(pkg_obj, pkg_name, pkg_arch):
rpms.append(pkg_obj.file_path)
if not rpms:
raise RuntimeError('No package matching %s in the package set.' % pattern)
scm_dict["repo"] = rpms
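For reference, the shape this loop now expects is a list of per-arch mappings; a sketch with placeholder keys and values (the tests further down build the same structure with mock objects):

# Shape only; keys and values are placeholders, not real compose data.
package_sets = [
    {
        "x86_64": {
            "/pkgs/bash-5.0-1.x86_64.rpm": "bash binary package object",
            "/pkgs/bash-5.0-1.src.rpm": "bash source package object",
        },
    },
]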

View File

@ -653,17 +653,13 @@ def get_system_release_packages(compose, arch, variant, package_sets):
packages = set()
filter_packages = set()
if not package_sets or not package_sets.get(arch, None):
return packages, filter_packages
package_set = package_sets[arch]
system_release_packages = set()
for i in package_set:
pkg = package_set[i]
if pkg.is_system_release:
system_release_packages.add(pkg)
for pkgset in package_sets or []:
for i in pkgset.get(arch, []):
pkg = pkgset[arch][i]
if pkg.is_system_release:
system_release_packages.add(pkg)
if not system_release_packages:
return packages, filter_packages
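The pkgset.get(arch, []) idiom above works because iterating an empty list yields nothing, while iterating the per-arch mapping yields its keys. A hypothetical helper capturing the same defensive loop (not part of the commit; plain dicts stand in for real package sets):

def iter_arch_packages(package_sets, arch):
    # Hypothetical helper mirroring the loops above; not part of the commit.
    for pkgset in package_sets or []:        # tolerates None as well as an empty list
        for key in pkgset.get(arch, []):     # a set without this arch contributes nothing
            yield pkgset[arch][key]

print(list(iter_arch_packages(None, "x86_64")))                          # []
print(list(iter_arch_packages([{"x86_64": {"a": "pkg-a"}}], "x86_64")))  # ['pkg-a']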

View File

@ -53,9 +53,16 @@ def get_package_path(filename, hashed_directory=False):
return filename
def _find_by_path(pkg_sets, arch, path):
"""Find object in an list of package sets by path."""
for pkg_set in pkg_sets:
if path in pkg_set[arch]:
return pkg_set[arch][path]
raise RuntimeError("Path %r not found in any package set." % path)
def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={}):
# srpm_map instance is shared between link_files() runs
pkg_set = pkg_sets[arch]
msg = "Linking packages (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
@ -77,7 +84,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
pool.queue_put((pkg["path"], dst))
# update rpm manifest
pkg_obj = pkg_set[pkg["path"]]
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
nevra = pkg_obj.nevra
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="source")
@ -96,7 +103,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
pool.queue_put((pkg["path"], dst))
# update rpm manifest
pkg_obj = pkg_set[pkg["path"]]
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
nevra = pkg_obj.nevra
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="binary", srpm_nevra=src_nevra)
@ -113,7 +120,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
pool.queue_put((pkg["path"], dst))
# update rpm manifest
pkg_obj = pkg_set[pkg["path"]]
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
nevra = pkg_obj.nevra
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="debug", srpm_nevra=src_nevra)
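The new _find_by_path() helper replaces the direct pkg_set[pkg["path"]] lookups. A standalone sketch of the same lookup with plain dicts and placeholder paths and values:

pkg_sets = [
    {"x86_64": {"/pkgs/bash-5.0-1.x86_64.rpm": "bash object"}},
    {"x86_64": {"/pkgs/vim-8.1-1.x86_64.rpm": "vim object"}},
]

def find_by_path(pkg_sets, arch, path):
    # Same logic as _find_by_path above: first match wins, a missing path is fatal.
    for pkg_set in pkg_sets:
        if path in pkg_set[arch]:
            return pkg_set[arch][path]
    raise RuntimeError("Path %r not found in any package set." % path)

print(find_by_path(pkg_sets, "x86_64", "/pkgs/vim-8.1-1.x86_64.rpm"))   # vim object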

View File

@ -54,16 +54,18 @@ def raise_on_invalid_sigkeys(arch, variant, package_sets, result):
Raises RuntimeError if some package in compose is signed with an invalid
sigkey.
"""
invalid_sigkey_rpms = []
invalid_sigkey_rpms = {}
for package in result["rpm"]:
name = parse_nvra(package["path"])["name"]
for forbidden_package in package_sets["global"].invalid_sigkey_rpms:
if name == forbidden_package["name"]:
invalid_sigkey_rpms.append(forbidden_package)
for pkgset in package_sets:
for forbidden_package in pkgset["global"].invalid_sigkey_rpms:
if name == forbidden_package["name"]:
invalid_sigkey_rpms.setdefault(
pkgset["global"].sigkey_ordering, []
).append(forbidden_package)
if invalid_sigkey_rpms:
package_sets["global"].raise_invalid_sigkeys_exception(
invalid_sigkey_rpms)
package_sets["global"].raise_invalid_sigkeys_exception(invalid_sigkey_rpms)
def _format_packages(pkgs):
@ -137,12 +139,13 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
# already on the whitelist.
package_names = set(p.rsplit('-', 2)[0] for p in package_whitelist)
for i in get_valid_arches(arch, multilib=multilib, add_noarch=True, add_src=True):
for rpm_obj in package_sets[arch].rpms_by_arch.get(i, []):
if rpm_obj.name in package_names:
# We already have a package with this name in the whitelist, skip it.
continue
package_whitelist.add(
'{0.name}-{1}:{0.version}-{0.release}'.format(rpm_obj, rpm_obj.epoch or 0))
for pkgset in package_sets:
for rpm_obj in pkgset[arch].rpms_by_arch.get(i, []):
if rpm_obj.name in package_names:
# We already have a package with this name in the whitelist, skip it.
continue
package_whitelist.add(
'{0.name}-{1}:{0.version}-{0.release}'.format(rpm_obj, rpm_obj.epoch or 0))
pungi_wrapper.write_kickstart(
ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str,
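The raise_on_invalid_sigkeys hunk above now groups offending packages by the sigkey ordering of the package set that rejected them. A minimal sketch of that idiom with made-up sigkeys and package names; it also shows why sigkey_ordering has to be hashable, which the pkgsets.py change below handles by converting it to a tuple:

invalid_sigkey_rpms = {}
offenders = [(("key1", None), "foo"), (("key1", None), "bar"), (("key2",), "baz")]
for sigkeys, pkg in offenders:
    # setdefault() needs a hashable key, hence the tuple(sigkey_ordering) change below.
    invalid_sigkey_rpms.setdefault(sigkeys, []).append(pkg)
print(invalid_sigkey_rpms)   # {('key1', None): ['foo', 'bar'], ('key2',): ['baz']}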

View File

@ -91,9 +91,10 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
"""
if arch not in self.package_maps:
pkg_map = {}
for pkg_arch in self.package_sets[arch].rpms_by_arch:
for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
for pkgset in self.package_sets:
for pkg_arch in pkgset[arch].rpms_by_arch:
for pkg in pkgset[arch].rpms_by_arch[pkg_arch]:
pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
self.package_maps[arch] = pkg_map
return self.package_maps[arch]
@ -117,9 +118,10 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
indexed by package architecture and then by package name. There can be
more than one debuginfo package with the same name.
"""
for pkg_arch in self.package_sets[self.arch].rpms_by_arch:
for pkg in self.package_sets[self.arch].rpms_by_arch[pkg_arch]:
self.debuginfo[pkg.arch][pkg.name].add(pkg)
for pkgset in self.package_sets:
for pkg_arch in pkgset[self.arch].rpms_by_arch:
for pkg in pkgset[self.arch].rpms_by_arch[pkg_arch]:
self.debuginfo[pkg.arch][pkg.name].add(pkg)
def _get_debuginfo(self, name, arch):
if not self.debuginfo:
@ -131,12 +133,13 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
of the pattern.
"""
expanded = set()
for pkg_arch in self.package_sets[self.arch].rpms_by_arch:
for pkg in self.package_sets[self.arch].rpms_by_arch[pkg_arch]:
for pattern in patterns:
if fnmatch(pkg.name, pattern):
expanded.add(pkg)
break
for pkgset in self.package_sets:
for pkg_arch in pkgset[self.arch].rpms_by_arch:
for pkg in pkgset[self.arch].rpms_by_arch[pkg_arch]:
for pattern in patterns:
if fnmatch(pkg.name, pattern):
expanded.add(pkg)
break
return expanded
def prepare_modular_packages(self):
@ -154,16 +157,17 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
# Replace %s with * for fnmatch.
install_match = install % "*"
self.langpacks[name] = set()
for pkg_arch in self.package_sets[arch].rpms_by_arch:
for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
if not fnmatch(pkg.name, install_match):
# Does not match the pattern, ignore...
continue
if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
continue
if pkg_is_debug(pkg):
continue
self.langpacks[name].add(pkg.name)
for pkgset in self.package_sets:
for pkg_arch in pkgset[arch].rpms_by_arch:
for pkg in pkgset[arch].rpms_by_arch[pkg_arch]:
if not fnmatch(pkg.name, install_match):
# Does not match the pattern, ignore...
continue
if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
continue
if pkg_is_debug(pkg):
continue
self.langpacks[name].add(pkg.name)
def __call__(
self,
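The hybrid-method hunks above all follow the same merge-then-look-up idea. A self-contained sketch, where Pkg, FakeSet and the NEVRA format are stand-ins for the real package objects and the _fmt_nevra helper:

from collections import namedtuple

Pkg = namedtuple("Pkg", "name epoch version release")
FakeSet = namedtuple("FakeSet", "rpms_by_arch")

def build_pkg_map(package_sets, arch):
    # Merge every set's per-arch index into one NEVRA -> package lookup;
    # in this sketch a later set silently wins on key collisions.
    pkg_map = {}
    for pkgset in package_sets:
        for pkg_arch, pkgs in pkgset[arch].rpms_by_arch.items():
            for pkg in pkgs:
                nevra = "%s-%s:%s-%s.%s" % (
                    pkg.name, pkg.epoch or 0, pkg.version, pkg.release, pkg_arch
                )
                pkg_map[nevra] = pkg
    return pkg_map

sets = [{"x86_64": FakeSet({"x86_64": [Pkg("foo", 0, "1", "2")]})}]
print(sorted(build_pkg_map(sets, "x86_64")))   # ['foo-0:1-2.x86_64']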

View File

@ -40,7 +40,6 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
def worker(self, log, arch, variant, pkgs, groups, filter_packages,
multilib_whitelist, multilib_blacklist, package_sets,
path_prefix=None, fulltree_excludes=None, prepopulate=None):
pkgset = package_sets[arch]
result = {
"rpm": [],
"srpm": [],
@ -60,8 +59,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
compatible_arches[i] = pungi.arch.get_compatible_arches(i)
log.write('\nGathering rpms\n')
for i in pkgset:
pkg = pkgset[i]
for pkg in iterate_packages(package_sets, arch):
if not pkg_is_rpm(pkg):
continue
for gathered_pkg, pkg_arch in packages:
@ -82,8 +80,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
% (pkg, gathered_pkg, pkg_arch, pkg.sourcerpm))
log.write('\nGathering source rpms\n')
for i in pkgset:
pkg = pkgset[i]
for pkg in iterate_packages(package_sets, arch):
if not pkg_is_srpm(pkg):
continue
if pkg.file_name in seen_srpms:
@ -94,8 +91,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
log.write('Adding %s\n' % pkg)
log.write('\nGathering debuginfo packages\n')
for i in pkgset:
pkg = pkgset[i]
for pkg in iterate_packages(package_sets, arch):
if not pkg_is_debug(pkg):
continue
if pkg.sourcerpm not in seen_srpms:
@ -162,3 +158,9 @@ def expand_groups(compose, arch, variant, groups, set_pkg_arch=True):
raise ex
return packages
def iterate_packages(package_sets, arch):
for pkgset in package_sets:
for pkg in pkgset[arch]:
yield pkgset[arch][pkg]
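A minimal usage sketch of the new iterate_packages() generator (repeated here so it runs on its own), with plain dicts standing in for real package sets:

def iterate_packages(package_sets, arch):
    for pkgset in package_sets:
        for key in pkgset[arch]:
            yield pkgset[arch][key]

package_sets = [{"x86_64": {"a.rpm": "pkg-a"}}, {"x86_64": {"b.rpm": "pkg-b"}}]
print(list(iterate_packages(package_sets, "x86_64")))   # ['pkg-a', 'pkg-b']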

View File

@ -28,4 +28,5 @@ class PkgsetPhase(PhaseBase):
PkgsetSourceContainer.register_module(sources)
container = PkgsetSourceContainer()
SourceClass = container[pkgset_source]
self.package_sets, self.path_prefix = SourceClass(self.compose)()
package_sets, self.path_prefix = SourceClass(self.compose)()
self.package_sets = [package_sets]
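Conceptually the phase change amounts to wrapping the single source result in a list; a sketch with a fake source class and placeholder return values:

class FakePkgsetSource(object):
    # Stand-in for the real source class obtained from PkgsetSourceContainer.
    def __call__(self):
        return {"global": "global package set object"}, "/work/prefix/"

package_sets, path_prefix = FakePkgsetSource()()
package_sets = [package_sets]           # the phase now always exposes a list
print(len(package_sets), path_prefix)   # 1 /work/prefix/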

View File

@ -85,7 +85,7 @@ class PackageSetBase(kobo.log.LoggingBase):
super(PackageSetBase, self).__init__(logger=logger)
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
self.old_file_cache = None
self.sigkey_ordering = sigkey_ordering or [None]
self.sigkey_ordering = tuple(sigkey_ordering or [None])
self.arches = arches
self.rpms_by_arch = {}
self.srpms_by_name = {}
@ -124,10 +124,16 @@ class PackageSetBase(kobo.log.LoggingBase):
def nvr_formatter(package_info):
# joins NVR parts of the package with '-' character.
return '-'.join((package_info['name'], package_info['version'], package_info['release']))
raise RuntimeError(
"RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
self.sigkey_ordering,
'\n'.join(sorted(set([nvr_formatter(rpminfo) for rpminfo in rpminfos])))))
def get_error(sigkeys, infos):
return "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
sigkeys,
'\n'.join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
)
if not isinstance(rpminfos, dict):
rpminfos = {self.sigkey_ordering: rpminfos}
raise RuntimeError("\n".join(get_error(k, v) for k, v in rpminfos.items()))
def read_packages(self, rpms, srpms):
srpm_pool = ReaderPool(self, self._logger)
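A sketch of the reworked error reporting above, with made-up rpminfo dicts and a placeholder sigkey; a plain list keeps the old behaviour by being wrapped under the set's own sigkey ordering, while a dict produces one message per sigkey group:

def nvr(rpminfo):
    # Joins NVR parts of a package with '-', like nvr_formatter above.
    return "-".join((rpminfo["name"], rpminfo["version"], rpminfo["release"]))

def build_errors(rpminfos, own_sigkeys=("deadbeef",)):
    # A plain list is wrapped under the set's own sigkeys, mirroring the old code path.
    if not isinstance(rpminfos, dict):
        rpminfos = {own_sigkeys: rpminfos}
    return "\n".join(
        "RPM(s) not found for sigs: %s. Unsigned packages:\n%s"
        % (sigkeys, "\n".join(sorted(set(nvr(i) for i in infos))))
        for sigkeys, infos in rpminfos.items()
    )

print(build_errors([{"name": "foo", "version": "1", "release": "2"}]))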

View File

@ -259,14 +259,15 @@ def explode_anaconda(compose, arch, variant, package_sets):
# if scm is "rpm" and repo contains a package name, find the package(s) in package set
if scm_dict["scm"] == "rpm" and not (scm_dict["repo"].startswith("/") or "://" in scm_dict["repo"]):
rpms = []
for pkgset_file in package_sets[arch]:
pkg_obj = package_sets[arch][pkgset_file]
if not pkg_is_rpm(pkg_obj):
continue
pkg_name, pkg_arch = split_name_arch(scm_dict["repo"])
if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
compose.log_critical("%s %s %s" % (pkg_obj.name, pkg_name, pkg_arch))
rpms.append(pkg_obj.file_path)
for pkgset in package_sets:
for pkgset_file in pkgset[arch]:
pkg_obj = pkgset[arch][pkgset_file]
if not pkg_is_rpm(pkg_obj):
continue
pkg_name, pkg_arch = split_name_arch(scm_dict["repo"])
if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
compose.log_critical("%s %s %s" % (pkg_obj.name, pkg_name, pkg_arch))
rpms.append(pkg_obj.file_path)
scm_dict["repo"] = rpms
if not rpms:
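This mirrors the lookup already changed in copy_extra_files above. A hypothetical shared helper (not part of this commit) could factor the pattern out; the sketch below uses fnmatch-style name matching, a namedtuple stand-in for package objects, and omits the pkg_is_rpm() filter for brevity:

import fnmatch
from collections import namedtuple

FakePkg = namedtuple("FakePkg", "name arch file_path")

def find_rpms_by_pattern(package_sets, arch, name_pattern, pkg_arch=None):
    # Collect file paths of packages whose name matches the pattern (and arch, if given).
    paths = []
    for pkgset in package_sets:
        for key in pkgset[arch]:
            pkg_obj = pkgset[arch][key]
            if fnmatch.fnmatch(pkg_obj.name, name_pattern) and (
                pkg_arch is None or pkg_arch == pkg_obj.arch
            ):
                paths.append(pkg_obj.file_path)
    return paths

sets = [{"x86_64": {"k": FakePkg("anaconda", "x86_64", "/pkgs/anaconda.rpm")}}]
print(find_rpms_by_pattern(sets, "x86_64", "anaconda*"))   # ['/pkgs/anaconda.rpm']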

View File

@ -123,11 +123,15 @@ class TestCopyFiles(helpers.PungiTestCase):
src_po.configure_mock(name='extra-data-1.1-1.fc24.src.rpm',
file_path='/src/location',
arch='src')
package_sets = {
'x86_64': {server_po.name: server_po,
client_po.name: client_po,
src_po.name: src_po}
}
package_sets = [
{
"x86_64": {
server_po.name: server_po,
client_po.name: client_po,
src_po.name: src_po,
},
},
]
get_file_from_scm.side_effect = self.fake_get_file
@ -153,7 +157,7 @@ class TestCopyFiles(helpers.PungiTestCase):
def test_copy_from_non_existing_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm):
compose = helpers.DummyCompose(self.topdir, {})
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'bad-%(variant_uid_lower)s*'}
package_sets = {'x86_64': {}}
package_sets = [{"x86_64": {}}]
with self.assertRaises(RuntimeError) as ctx:
extra_files.copy_extra_files(

View File

@ -44,7 +44,7 @@ class TestMethodHybrid(helpers.PungiTestCase):
)
CW.return_value.get_langpacks.return_value = {"glibc": "glibc-langpack-%s"}
eg.return_value = ["foo", "bar"]
package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": [pkg]})}
package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": [pkg]})}]
arch = "x86_64"
variant = compose.variants["Server"]
@ -103,50 +103,52 @@ class TestMethodHybrid(helpers.PungiTestCase):
compose = helpers.DummyCompose(self.topdir, {})
CW.return_value.get_langpacks.return_value = {"foo": "foo-%s"}
m = hybrid.GatherMethodHybrid(compose)
m.package_sets = {
"x86_64": mock.Mock(
rpms_by_arch={
"x86_64": [
MockPkg(
name="foo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-en",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-devel",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-debuginfo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
]
}
)
}
m.package_sets = [
{
"x86_64": mock.Mock(
rpms_by_arch={
"x86_64": [
MockPkg(
name="foo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-en",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-devel",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-debuginfo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
]
}
)
}
]
m.prepare_langpacks("x86_64", compose.variants["Server"])
self.assertEqual(m.langpacks, {"foo": set(["foo-en"])})
@ -155,41 +157,43 @@ class TestMethodHybrid(helpers.PungiTestCase):
compose = helpers.DummyCompose(self.topdir, {})
m = hybrid.GatherMethodHybrid(compose)
m.arch = "x86_64"
m.package_sets = {
"x86_64": mock.Mock(
rpms_by_arch={
"x86_64": [
MockPkg(
name="foo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-en",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="bar",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
]
}
)
}
m.package_sets = [
{
"x86_64": mock.Mock(
rpms_by_arch={
"x86_64": [
MockPkg(
name="foo",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="foo-en",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
MockPkg(
name="bar",
version="1",
release="2",
arch="x86_64",
epoch=0,
sourcerpm=None,
file_path=None,
),
]
}
)
}
]
expanded = m.expand_list(["foo*"])
self.assertItemsEqual([p.name for p in expanded], ["foo", "foo-en"])
@ -357,7 +361,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
}
po.return_value = ([("p-1-1", "x86_64", frozenset())], ["m1"])
self.phase.packages = {"p-1-1.x86_64": mock.Mock()}
self.phase.package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}
self.phase.package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}]
res = self.phase.run_solver(
self.compose.variants["Server"],
@ -524,7 +528,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
"pkg-1.0-1.x86_64": mock.Mock(),
"pkg-en-1.0-1.noarch": mock.Mock(),
}
self.phase.package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}
self.phase.package_sets = [{"x86_64": mock.Mock(rpms_by_arch={"x86_64": []})}]
res = self.phase.run_solver(
self.compose.variants["Server"],

View File

@ -297,7 +297,8 @@ class TestGetSystemRelease(unittest.TestCase):
def test_no_system_release_package(self):
pkgset = MockPackageSet(MockPkg('/build/bash-1.0.0-1.x86_64.rpm'))
packages, filter_packages = gather.get_system_release_packages(
self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
)
self.assertItemsEqual(packages, [])
self.assertItemsEqual(filter_packages, [])
@ -307,7 +308,8 @@ class TestGetSystemRelease(unittest.TestCase):
MockPkg('/build/dummy-1.0.0-1.x86_64.rpm', is_system_release=True),
)
packages, filter_packages = gather.get_system_release_packages(
self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
)
self.assertItemsEqual(packages, [('dummy', None)])
self.assertItemsEqual(filter_packages, [])
@ -318,7 +320,8 @@ class TestGetSystemRelease(unittest.TestCase):
MockPkg('/build/system-release-server-1.0.0-1.x86_64.rpm', is_system_release=True),
)
packages, filter_packages = gather.get_system_release_packages(
self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
)
self.assertItemsEqual(packages, [('system-release-server', None)])
self.assertItemsEqual(filter_packages, [('system-release', None)])
@ -329,7 +332,8 @@ class TestGetSystemRelease(unittest.TestCase):
MockPkg('/build/system-release-bar-1.0.0-1.x86_64.rpm', is_system_release=True),
)
packages, filter_packages = gather.get_system_release_packages(
self.compose, 'x86_64', self.variant, {'x86_64': pkgset})
self.compose, "x86_64", self.variant, [{"x86_64": pkgset}]
)
# In this case a random package is picked, so let's check that both
# lists contain one package and that they are different.
@ -344,7 +348,8 @@ class TestGetSystemRelease(unittest.TestCase):
MockPkg('/build/system-release-client-1.0.0-1.x86_64.rpm', is_system_release=True),
)
packages, filter_packages = gather.get_system_release_packages(
self.compose, 'x86_64', self.addon, {'x86_64': pkgset})
self.compose, "x86_64", self.addon, [{"x86_64": pkgset}]
)
self.assertItemsEqual(packages, [('system-release-server', None)])
self.assertItemsEqual(filter_packages,
@ -490,7 +495,11 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
compose,
"x86_64",
compose.variants["Server"], "comps",
package_sets=[{"x86_64": pkgset}],
)
self.assertItemsEqual(packages, [('system-release-server', None)])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [('system-release', None)])
@ -509,7 +518,12 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
compose,
"x86_64",
compose.variants["Server"],
"comps",
package_sets=[{"x86_64": pkgset}],
)
self.assertItemsEqual(packages, [])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [])

View File

@ -78,7 +78,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
super(TestPopulateGlobalPkgset, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {
'pkgset_koji_tag': 'f25',
'sigkeys': mock.Mock(),
'sigkeys': ["foo", "bar"],
})
self.koji_wrapper = mock.Mock()
self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle')
@ -125,7 +125,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
def test_populate_with_multiple_koji_tags(self, KojiPackageSet, pickle_dumps):
self.compose = helpers.DummyCompose(self.topdir, {
'pkgset_koji_tag': ['f25', 'f25-extra'],
'sigkeys': mock.Mock(),
'sigkeys': ["foo", "bar"],
})
pickle_dumps.return_value = b'DATA'
@ -175,7 +175,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
self.compose = helpers.DummyCompose(self.topdir, {
'gather_method': 'nodeps',
'pkgset_koji_tag': 'f25',
'sigkeys': mock.Mock(),
'sigkeys': ["foo", "bar"],
'additional_packages': [
('.*', {'*': ['pkg', 'foo.x86_64']}),
]