diff --git a/doc/configuration.rst b/doc/configuration.rst
index fb2d9edd..077a1179 100644
--- a/doc/configuration.rst
+++ b/doc/configuration.rst
@@ -588,12 +588,14 @@ Options
-------
**gather_method** [mandatory]
- (*str*|*dict*) -- Options are ``deps`` and ``nodeps``. Specifies whether
- package dependencies should be pulled in as well. Either a single value or
- a dictionary mapping variant UID and source type to a value. Make sure only
- one regex matches each variant, as there is no guarantee which value will
- be used if there are multiple matching ones. All used sources must have a
- configured method.
+ (*str*|*dict*) -- Options are ``deps``, ``nodeps`` and ``hybrid``.
+ Specifies whether and how package dependencies should be pulled in.
+ Possible configuration can be one value for all variants, or if configured
+ per-variant it can be a simple string ``hybrid`` or a dictionary mapping
+ source type to a value of ``deps`` or ``nodeps``. Make sure only one regex
+ matches each variant, as there is no guarantee which value will be used if
+ there are multiple matching ones. All used sources must have a configured
+ method unless hybrid solving is used.
**gather_fulltree** = False
(*bool*) -- When set to ``True`` all RPMs built from an SRPM will always be
@@ -745,6 +747,7 @@ Example
"comps": "deps",
"module": "nodeps"
}
+ "^OtherMixed$": "hybrid", # Using hybrid depsolver
}
additional_packages = [
diff --git a/pungi/checks.py b/pungi/checks.py
index a54ed8fd..017fab23 100644
--- a/pungi/checks.py
+++ b/pungi/checks.py
@@ -561,20 +561,28 @@ def make_schema():
"type": "object",
"patternProperties": {
".+": {
- "type": "object",
- "patternProperties": {
- "^module|comps|json$": {
+ "oneOf": [
+ {
"type": "string",
- "enum": ["deps", "nodeps"],
+ "enum": ["hybrid"],
+ },
+ {
+ "type": "object",
+ "patternProperties": {
+ "^module|comps|json$": {
+ "type": "string",
+ "enum": ["deps", "nodeps"],
+ }
+ },
}
- }
+ ]
}
},
"additionalProperties": False,
},
{
"type": "string",
- "enum": ["deps", "nodeps"],
+ "enum": ["deps", "nodeps", "hybrid"],
}
],
},
diff --git a/pungi/paths.py b/pungi/paths.py
index eaaae581..b92d0b49 100644
--- a/pungi/paths.py
+++ b/pungi/paths.py
@@ -153,20 +153,31 @@ class WorkPaths(object):
makedirs(path)
return path
- def comps_repo(self, arch=None, variant=None, create_dir=True):
- """
- Examples:
- work/x86_64/comps-repo
- work/global/comps-repo
- """
+ def _repo(self, type, arch=None, variant=None, create_dir=True):
arch = arch or "global"
- path = os.path.join(self.topdir(arch, create_dir=create_dir), "comps_repo")
+ path = os.path.join(self.topdir(arch, create_dir=create_dir), "%s_repo" % type)
if variant:
- path += '_' + variant.uid
+ path += "_" + variant.uid
if create_dir:
makedirs(path)
return path
+ def comps_repo(self, arch=None, variant=None, create_dir=True):
+ """
+ Examples:
+ work/x86_64/comps_repo_Server
+ work/global/comps_repo
+ """
+ return self._repo("comps", arch, variant, create_dir=create_dir)
+
+ def module_repo(self, arch=None, variant=None, create_dir=True):
+ """
+ Examples:
+ work/x86_64/module_repo_Server
+ work/global/module_repo
+ """
+ return self._repo("module", arch, variant, create_dir=create_dir)
+
def arch_repo(self, arch=None, create_dir=True):
"""
Examples:
diff --git a/pungi/phases/gather/__init__.py b/pungi/phases/gather/__init__.py
index c30db2a1..f845e286 100644
--- a/pungi/phases/gather/__init__.py
+++ b/pungi/phases/gather/__init__.py
@@ -162,31 +162,64 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
prepopulate = get_prepopulate_packages(compose, arch, variant)
fulltree_excludes = fulltree_excludes or set()
- for source_name in ('module', 'comps', 'json'):
+ if methods == "hybrid":
+ # This variant is using a hybrid solver. Gather all inputs and run the
+ # method once.
- packages, groups, filter_packages = get_variant_packages(compose, arch, variant,
- source_name, package_sets)
- if not packages and not groups:
- # No inputs, nothing to do really.
- continue
+ packages = []
+ groups = []
+ filter_packages = []
- try:
- method_name = global_method_name or methods[source_name]
- except KeyError:
- raise RuntimeError("Variant %s has no configured gather_method for source %s"
- % (variant.uid, source_name))
+ # Run the module source. This is needed to set up module metadata for
+ # the variant, but we don't really care about the returned packages.
+ # They will be pulled in based on the actual module.
+ get_variant_packages(compose, arch, variant, "module", package_sets)
- GatherMethod = get_gather_method(method_name)
- method = GatherMethod(compose)
- method.source_name = source_name
- compose.log_debug("Gathering source %s, method %s" % (source_name, method_name))
- pkg_map = method(arch, variant, packages, groups, filter_packages,
- multilib_whitelist, multilib_blacklist, package_sets,
- fulltree_excludes=fulltree_excludes,
- prepopulate=prepopulate if source_name == 'comps' else set())
+ # Here we do want to get list of comps groups and additional packages.
+ packages, groups, filter_packages = get_variant_packages(
+ compose, arch, variant, "comps", package_sets
+ )
- for t in ('rpm', 'srpm', 'debuginfo'):
- result[t].extend(pkg_map.get(t, []))
+ result = get_gather_method("hybrid")(compose)(
+ arch,
+ variant,
+ packages=packages,
+ groups=groups,
+ filter_packages=filter_packages,
+ multilib_whitelist=multilib_whitelist,
+ multilib_blacklist=multilib_blacklist,
+ package_sets=package_sets,
+ fulltree_excludes=fulltree_excludes,
+ prepopulate=prepopulate,
+ )
+
+ else:
+
+ for source_name in ('module', 'comps', 'json'):
+
+ packages, groups, filter_packages = get_variant_packages(compose, arch, variant,
+ source_name, package_sets)
+ if not packages and not groups:
+ # No inputs, nothing to do really.
+ continue
+
+ try:
+ method_name = global_method_name or methods[source_name]
+ except KeyError:
+ raise RuntimeError("Variant %s has no configured gather_method for source %s"
+ % (variant.uid, source_name))
+
+ GatherMethod = get_gather_method(method_name)
+ method = GatherMethod(compose)
+ method.source_name = source_name
+ compose.log_debug("Gathering source %s, method %s" % (source_name, method_name))
+ pkg_map = method(arch, variant, packages, groups, filter_packages,
+ multilib_whitelist, multilib_blacklist, package_sets,
+ fulltree_excludes=fulltree_excludes,
+ prepopulate=prepopulate if source_name == 'comps' else set())
+
+ for t in ('rpm', 'srpm', 'debuginfo'):
+ result[t].extend(pkg_map.get(t, []))
compose.log_info("[DONE ] %s" % msg)
return result
diff --git a/pungi/phases/gather/methods/method_hybrid.py b/pungi/phases/gather/methods/method_hybrid.py
new file mode 100644
index 00000000..7161f072
--- /dev/null
+++ b/pungi/phases/gather/methods/method_hybrid.py
@@ -0,0 +1,408 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+
+from collections import defaultdict
+import os
+from kobo.shortcuts import run
+import kobo.rpmlib
+
+import pungi.phases.gather.method
+from pungi import Modulemd, multilib_dnf
+from pungi.arch import get_valid_arches, tree_arch_to_yum_arch
+from pungi.phases.gather import _mk_pkg_map
+from pungi.util import (
+ get_arch_variant_data,
+ iter_module_defaults,
+ pkg_is_debug,
+ temp_dir,
+)
+from pungi.wrappers import fus
+from pungi.wrappers.createrepo import CreaterepoWrapper
+
+from .method_nodeps import expand_groups
+
+import createrepo_c as cr
+
+
+class FakePackage(object):
+ """This imitates a DNF package object and can be passed to python-multilib
+ library.
+ """
+
+ def __init__(self, pkg):
+ self.pkg = pkg
+
+ def __getattr__(self, attr):
+ return getattr(self.pkg, attr)
+
+ @property
+ def files(self):
+ return [
+ os.path.join(dirname, basename) for (_, dirname, basename) in self.pkg.files
+ ]
+
+ @property
+ def provides(self):
+ # This is supposed to match what yum package object returns. It's a
+ # nested tuple (name, flag, (epoch, version, release)). This code only
+ # fills in the name, because that's all that python-multilib is using.
+ return [(p[0].split()[0], None, (None, None, None)) for p in self.pkg.provides]
+
+
+class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
+ enabled = True
+
+ def __init__(self, *args, **kwargs):
+ super(GatherMethodHybrid, self).__init__(*args, **kwargs)
+ self.package_maps = {}
+ self.packages = {}
+
+ def _get_pkg_map(self, arch):
+ """Create a mapping from NEVRA to actual package object. This will be
+ done once for each architecture, since the package set is the same for
+ all variants.
+
+ The keys are in NEVRA format and only include the epoch if it's not
+ zero. This makes it easier to look up results from the depsolver.
+ """
+ if arch not in self.package_maps:
+ pkg_map = {}
+ for pkg_arch in self.package_sets[arch].rpms_by_arch:
+ for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
+ pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
+ self.package_maps[arch] = pkg_map
+
+ return self.package_maps[arch]
+
+ def _prepare_packages(self):
+ repo_path = self.compose.paths.work.arch_repo(arch=self.arch)
+ md = cr.Metadata()
+ md.locate_and_load_xml(repo_path)
+ for key in md.keys():
+ pkg = md.get(key)
+ if pkg.arch in self.valid_arches:
+ self.packages[_fmt_nevra(pkg, arch=pkg.arch)] = FakePackage(pkg)
+
+ def _get_package(self, nevra):
+ if not self.packages:
+ self._prepare_packages()
+ return self.packages[nevra]
+
+ def __call__(
+ self,
+ arch,
+ variant,
+ package_sets,
+ packages=[],
+ groups=[],
+ multilib_whitelist=[],
+ multilib_blacklist=[],
+ **kwargs
+ ):
+ self.arch = arch
+ self.valid_arches = get_valid_arches(arch, multilib=True)
+ self.package_sets = package_sets
+
+ self.multilib_methods = get_arch_variant_data(
+ self.compose.conf, "multilib", arch, variant
+ )
+ self.multilib = multilib_dnf.Multilib(
+ self.multilib_methods, multilib_blacklist, multilib_whitelist
+ )
+
+ platform, modular_rpms = create_module_repo(self.compose, variant, arch)
+
+ packages.update(
+ expand_groups(self.compose, arch, variant, groups, set_pkg_arch=False)
+ )
+
+ nvrs, modules = self.run_solver(variant, arch, packages, platform, modular_rpms)
+ return expand_packages(
+ self._get_pkg_map(arch),
+ variant.arch_mmds.get(arch, {}),
+ pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
+ nvrs,
+ modules,
+ )
+ # maybe check invalid sigkeys
+
+ def run_solver(self, variant, arch, packages, platform, modular_rpms):
+ repos = [self.compose.paths.work.arch_repo(arch=arch)]
+
+ modules = []
+ if variant.arch_mmds.get(arch):
+ repos.append(self.compose.paths.work.module_repo(arch, variant))
+ for mmd in variant.arch_mmds[arch].values():
+ modules.append("%s:%s" % (mmd.peek_name(), mmd.peek_stream()))
+
+ input_packages = [
+ _fmt_pkg(pkg_name, pkg_arch) for pkg_name, pkg_arch in packages
+ ]
+
+ step = 0
+
+ while True:
+ step += 1
+ cmd = fus.get_cmd(
+ tree_arch_to_yum_arch(arch),
+ repos,
+ pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
+ input_packages,
+ modules,
+ platform=platform,
+ )
+ logfile = self.compose.paths.log.log_file(
+ arch, "hybrid-depsolver-%s-iter-%d" % (variant, step)
+ )
+ run(cmd, logfile=logfile, show_cmd=True)
+ output, output_modules = fus.parse_output(logfile)
+ new_multilib = self.add_multilib(variant, arch, output, modular_rpms)
+ if not new_multilib:
+ # No new multilib packages were added, we're done.
+ break
+
+ input_packages.extend(
+ _fmt_pkg(pkg_name, pkg_arch) for pkg_name, pkg_arch in new_multilib
+ )
+
+ return output, output_modules
+
+ def add_multilib(self, variant, arch, nvrs, modular_rpms):
+ added = set()
+ if not self.multilib_methods:
+ return []
+
+ for nvr, pkg_arch in nvrs:
+ if pkg_arch != arch:
+ # Not a native package, not checking to add multilib
+ continue
+
+ nevr = kobo.rpmlib.parse_nvr(nvr)
+ nevr_copy = nevr.copy()
+ nevr_copy["arch"] = pkg_arch
+
+ if kobo.rpmlib.make_nvra(nevr_copy, force_epoch=True) in modular_rpms:
+ # Skip modular package
+ continue
+
+ if self.multilib.is_multilib(self._get_package("%s.%s" % (nvr, pkg_arch))):
+ for add_arch in self.valid_arches:
+ if add_arch == arch:
+ continue
+ if _nevra(arch=add_arch, **nevr) in self._get_pkg_map(arch):
+ added.add((nevr["name"], add_arch))
+
+ # Remove packages that are already present
+ for nvr, pkg_arch in nvrs:
+ existing = (nvr.rsplit("-", 2)[0], pkg_arch)
+ if existing in added:
+ added.remove(existing)
+
+ return sorted(added)
+
+
+def create_module_repo(compose, variant, arch):
+ """Create repository with module metadata. There are no packages otherwise."""
+ createrepo_c = compose.conf["createrepo_c"]
+ createrepo_checksum = compose.conf["createrepo_checksum"]
+ msg = "Creating repo with modular metadata for %s.%s" % (variant, arch)
+
+ if not variant.arch_mmds.get(arch):
+ compose.log_debug("[SKIP ] %s: no modules found" % msg)
+ return None, []
+
+ compose.log_debug("[BEGIN] %s" % msg)
+
+ platforms = set()
+ modular_rpms = set()
+
+ repo_path = compose.paths.work.module_repo(arch, variant)
+
+ # Add modular metadata to it
+ modules = []
+
+ for mmd in variant.arch_mmds[arch].values():
+ # Set the arch field, but no other changes are needed.
+ repo_mmd = mmd.copy()
+ repo_mmd.set_arch(tree_arch_to_yum_arch(arch))
+
+ for dep in repo_mmd.peek_dependencies():
+ streams = dep.peek_requires().get("platform")
+ if streams:
+ platforms.update(streams.dup())
+
+ # Collect all modular NEVRAs
+ artifacts = repo_mmd.get_rpm_artifacts()
+ if artifacts:
+ modular_rpms.update(artifacts.dup())
+
+ modules.append(repo_mmd)
+
+ if len(platforms) > 1:
+ raise RuntimeError("There are conflicting requests for platform.")
+
+ module_names = set([x.get_name() for x in modules])
+ defaults_dir = compose.paths.work.module_defaults_dir()
+ for mmddef in iter_module_defaults(defaults_dir):
+ if mmddef.peek_module_name() in module_names:
+ modules.append(mmddef)
+
+ # Initialize empty repo
+ repo = CreaterepoWrapper(createrepo_c=createrepo_c)
+ cmd = repo.get_createrepo_cmd(
+ repo_path, database=False, outputdir=repo_path, checksum=createrepo_checksum
+ )
+ logfile = "module_repo-%s" % variant
+ run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True)
+
+ with temp_dir() as tmp_dir:
+ modules_path = os.path.join(tmp_dir, "modules.yaml")
+ Modulemd.dump(modules, modules_path)
+
+ cmd = repo.get_modifyrepo_cmd(
+ os.path.join(repo_path, "repodata"),
+ modules_path,
+ mdtype="modules",
+ compress_type="gz",
+ )
+ log_file = compose.paths.log.log_file(
+ arch, "gather-modifyrepo-modules-%s" % variant
+ )
+ run(cmd, logfile=log_file, show_cmd=True)
+
+ compose.log_debug("[DONE ] %s" % msg)
+ return list(platforms)[0] if platforms else None, modular_rpms
+
+
+def _fmt_pkg(pkg_name, arch):
+ if arch:
+ pkg_name += ".%s" % arch
+ return pkg_name
+
+
+def _nevra(**kwargs):
+ if kwargs.get("epoch") not in (None, "", 0, "0"):
+ return "%(name)s-%(epoch)s:%(version)s-%(release)s.%(arch)s" % kwargs
+ return "%(name)s-%(version)s-%(release)s.%(arch)s" % kwargs
+
+
+def _fmt_nevra(pkg, arch):
+ return _nevra(
+ name=pkg.name,
+ epoch=pkg.epoch,
+ version=pkg.version,
+ release=pkg.release,
+ arch=arch,
+ )
+
+
+def _get_srpm_nevra(pkg):
+ nevra = kobo.rpmlib.parse_nvra(pkg.sourcerpm)
+ nevra["epoch"] = nevra["epoch"] or pkg.epoch
+ return _nevra(**nevra)
+
+
+def _make_result(paths):
+ return [{"path": path, "flags": []} for path in sorted(paths)]
+
+
+def expand_packages(nevra_to_pkg, variant_modules, lookasides, nvrs, modules):
+ """For each package add source RPM and possibly also debuginfo."""
+ # This will server as the final result. We collect sets of paths to the
+ # packages.
+ rpms = set()
+ srpms = set()
+ debuginfo = set()
+
+ # Collect list of all packages in lookaside. These will not be added to the
+ # result. Fus handles this in part: if a package is explicitly mentioned as
+ # input (which can happen with comps group expansion), it will be in the
+ # output even if it's in lookaside.
+ lookaside_packages = set()
+ for repo in lookasides:
+ md = cr.Metadata()
+ md.locate_and_load_xml(repo)
+ for key in md.keys():
+ pkg = md.get(key)
+ url = os.path.join(pkg.location_base, pkg.location_href)
+ # Strip file:// prefix
+ lookaside_packages.add(url[7:])
+
+ # Get all packages in modules and include them in rpms or debuginfo.
+ variant_mmd = {}
+ for mmd in variant_modules.values():
+ nsvc = "%s:%s:%s:%s" % (
+ mmd.peek_name(),
+ mmd.peek_stream(),
+ mmd.peek_version(),
+ mmd.peek_context(),
+ )
+ variant_mmd[nsvc] = mmd
+
+ for module in modules:
+ mmd = variant_mmd.get(module)
+ if not mmd:
+ continue
+ artifacts = mmd.get_rpm_artifacts()
+ if not artifacts:
+ continue
+ for rpm in artifacts.dup():
+ pkg = nevra_to_pkg[_nevra(**kobo.rpmlib.parse_nvra(rpm))]
+ if pkg_is_debug(pkg):
+ debuginfo.add(pkg.file_path)
+ else:
+ rpms.add(pkg.file_path)
+ # Add source package. We don't need modular packages, those are
+ # listed in modulemd.
+ try:
+ srpm_nevra = _get_srpm_nevra(pkg)
+ srpm = nevra_to_pkg[srpm_nevra]
+ if srpm.file_path not in lookaside_packages:
+ srpms.add(srpm.file_path)
+ except KeyError:
+ # Didn't find source RPM.. this should be logged
+ pass
+
+ # This is used to figure out which debuginfo packages to include. We keep
+ # track of package architectures from each SRPM.
+ srpm_arches = defaultdict(set)
+
+ for nvr, arch in nvrs:
+ pkg = nevra_to_pkg["%s.%s" % (nvr, arch)]
+ if pkg.file_path in lookaside_packages:
+ # Package is in lookaside, don't add it and ignore sources and
+ # debuginfo too.
+ continue
+ rpms.add(pkg.file_path)
+
+ try:
+ srpm_nevra = _get_srpm_nevra(pkg)
+ srpm = nevra_to_pkg[srpm_nevra]
+ srpm_arches[srpm_nevra].add(arch)
+ if srpm.file_path not in lookaside_packages:
+ srpms.add(srpm.file_path)
+ except KeyError:
+ # Didn't find source RPM.. this should be logged
+ pass
+
+ # Get all debuginfo packages from all included sources. We iterate over all
+ # available packages and if we see a debug package from included SRPM built
+ # for architecture that has at least one binary package, we include it too.
+ for pkg in nevra_to_pkg.values():
+ if pkg_is_debug(pkg) and pkg.arch in srpm_arches[_get_srpm_nevra(pkg)]:
+ if pkg.file_path not in lookaside_packages:
+ debuginfo.add(pkg.file_path)
+
+ return _mk_pkg_map(_make_result(rpms), _make_result(srpms), _make_result(debuginfo))
diff --git a/pungi/phases/gather/methods/method_nodeps.py b/pungi/phases/gather/methods/method_nodeps.py
index d1aa21ea..f3d585cf 100644
--- a/pungi/phases/gather/methods/method_nodeps.py
+++ b/pungi/phases/gather/methods/method_nodeps.py
@@ -117,7 +117,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
return result
-def expand_groups(compose, arch, variant, groups):
+def expand_groups(compose, arch, variant, groups, set_pkg_arch=True):
"""Read comps file filtered for given architecture and variant and return
all packages in given groups.
@@ -131,7 +131,9 @@ def expand_groups(compose, arch, variant, groups):
comps = CompsWrapper(comps_file)
packages = set()
+ pkg_arch = arch if set_pkg_arch else None
+
for group in groups:
- packages.update([(pkg, arch) for pkg in comps.get_packages(group)])
+ packages.update([(pkg, pkg_arch) for pkg in comps.get_packages(group)])
return packages
diff --git a/pungi/wrappers/fus.py b/pungi/wrappers/fus.py
new file mode 100644
index 00000000..8f846cde
--- /dev/null
+++ b/pungi/wrappers/fus.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+
+"""
+This is a wrapper for a hybrid depsolver that understands how module
+dependencies work. It's Funny Solver, because it does funny things.
+
+https://github.com/fedora-modularity/fus
+
+The executable basically provides one iteration of the traditional DNF based
+depsolver. It has to be run multiple times to explicitly add multilib packages,
+or source packages to include build dependencies (which is not yet supported in
+Pungi).
+"""
+
+
+def get_cmd(
+ arch,
+ repos,
+ lookasides,
+ packages,
+ modules,
+ platform=None,
+ filter_packages=None, # TODO not supported yet
+):
+ cmd = ["fus", "--verbose", "--arch", arch]
+
+ for idx, repo in enumerate(repos):
+ cmd.append("--repo=repo-%s,repo,%s" % (idx, repo))
+ for idx, repo in enumerate(lookasides):
+ cmd.append("--repo=lookaside-%s,lookaside,%s" % (idx, repo))
+
+ if platform:
+ cmd.append("--platform=%s" % platform)
+
+ for module in modules:
+ cmd.append("module(%s)" % module)
+
+ cmd.extend(packages)
+
+ return cmd
+
+
+def parse_output(output):
+ """Read output of fus from the given filepath, and return a set of tuples
+ (NVR, arch) and a set of module NSVCs.
+ """
+ packages = set()
+ modules = set()
+ with open(output) as f:
+ for line in f:
+ if " " in line or "@" not in line:
+ continue
+ nevra, _ = line.strip().rsplit("@", 1)
+ if nevra.startswith("module:"):
+ modules.add(nevra[7:].rsplit(".", 1)[0])
+ else:
+ packages.add(tuple(nevra.rsplit(".", 1)))
+ return packages, modules
diff --git a/tests/test_fus_wrapper.py b/tests/test_fus_wrapper.py
new file mode 100644
index 00000000..84e9694d
--- /dev/null
+++ b/tests/test_fus_wrapper.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import tempfile
+
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
+
+from pungi.wrappers import fus
+
+from .helpers import touch
+
+
+class TestGetCmd(unittest.TestCase):
+ def test_minimum_command(self):
+ cmd = fus.get_cmd("x86_64", [], [], [], [], [])
+ self.assertEqual(cmd, ["fus", "--verbose", "--arch", "x86_64"])
+
+ def test_full_command(self):
+ cmd = fus.get_cmd(
+ "x86_64",
+ ["/tmp/first", "/tmp/second"],
+ ["/tmp/fst", "/tmp/snd"],
+ ["pkg"],
+ ["mod:1.0"],
+ platform="f29",
+ )
+ self.assertEqual(
+ cmd,
+ [
+ "fus",
+ "--verbose",
+ "--arch",
+ "x86_64",
+ "--repo=repo-0,repo,/tmp/first",
+ "--repo=repo-1,repo,/tmp/second",
+ "--repo=lookaside-0,lookaside,/tmp/fst",
+ "--repo=lookaside-1,lookaside,/tmp/snd",
+ "--platform=f29",
+ "module(mod:1.0)",
+ "pkg",
+ ],
+ )
+
+
+class TestParseOutput(unittest.TestCase):
+ def setUp(self):
+ _, self.file = tempfile.mkstemp(prefix="test-parse-fus-out-")
+
+ def tearDown(self):
+ os.remove(self.file)
+
+ def test_skips_debug_line(self):
+ touch(self.file, "debug line\n")
+ packages, modules = fus.parse_output(self.file)
+ self.assertItemsEqual(packages, [])
+ self.assertItemsEqual(modules, [])
+
+ def test_separates_arch(self):
+ touch(self.file, "pkg-1.0-1.x86_64@repo-0\npkg-1.0-1.i686@repo-0\n")
+ packages, modules = fus.parse_output(self.file)
+ self.assertItemsEqual(
+ packages,
+ [("pkg-1.0-1", "x86_64"), ("pkg-1.0-1", "i686")],
+ )
+ self.assertItemsEqual(modules, [])
+
+ def test_returns_modules(self):
+ touch(self.file, "module:foo:1:201807131350:deadcafe.x86_64@repo-0\n")
+ packages, modules = fus.parse_output(self.file)
+ self.assertItemsEqual(packages, [])
+ self.assertItemsEqual(modules, ["foo:1:201807131350:deadcafe"])
diff --git a/tests/test_gather_method_hybrid.py b/tests/test_gather_method_hybrid.py
new file mode 100644
index 00000000..46f2574a
--- /dev/null
+++ b/tests/test_gather_method_hybrid.py
@@ -0,0 +1,584 @@
+# -*- coding: utf-8 -*-
+
+from collections import namedtuple
+import copy
+import mock
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
+
+from pungi.phases.gather.methods import method_hybrid as hybrid
+from tests import helpers
+
+
+MockPkg = namedtuple(
+ "MockPkg", ["name", "version", "release", "epoch", "sourcerpm", "file_path", "arch"]
+)
+
+
+class NamedMock(mock.Mock):
+ def __init__(self, name=None, **kwargs):
+ super(NamedMock, self).__init__(**kwargs)
+ self.name = name
+
+
+class TestMethodHybrid(helpers.PungiTestCase):
+ @mock.patch("pungi.phases.gather.get_lookaside_repos")
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.expand_groups")
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.expand_packages")
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.create_module_repo")
+ def test_call_method(self, cmr, ep, eg, glr):
+ compose = helpers.DummyCompose(self.topdir, {})
+ cmr.return_value = (mock.Mock(), mock.Mock())
+ m = hybrid.GatherMethodHybrid(compose)
+ m.run_solver = mock.Mock(return_value=(mock.Mock(), mock.Mock()))
+ pkg = MockPkg(
+ name="pkg",
+ version="1",
+ release="2",
+ arch="x86_64",
+ epoch=3,
+ sourcerpm=None,
+ file_path=None,
+ )
+ eg.return_value = ["foo", "bar"]
+ package_sets = {"x86_64": mock.Mock(rpms_by_arch={"x86_64": [pkg]})}
+ arch = "x86_64"
+ variant = compose.variants["Server"]
+
+ res = m(arch, variant, package_sets, set(["pkg"]), ["standard"])
+
+ self.assertEqual(res, ep.return_value)
+ self.assertEqual(cmr.call_args_list, [mock.call(compose, variant, arch)])
+ self.assertEqual(
+ m.run_solver.call_args_list,
+ [mock.call(variant, arch, set(["pkg", "foo", "bar"]), *cmr.return_value)],
+ )
+ self.assertEqual(
+ ep.call_args_list,
+ [
+ mock.call(
+ {"pkg-3:1-2.x86_64": pkg},
+ {},
+ glr.return_value,
+ m.run_solver.return_value[0],
+ m.run_solver.return_value[1],
+ )
+ ],
+ )
+ self.assertEqual(
+ eg.call_args_list,
+ [mock.call(compose, arch, variant, ["standard"], set_pkg_arch=False)],
+ )
+
+
+class MockModule(object):
+ def __init__(
+ self, name, platform=None, stream=None, version=None, context=None, rpms=None
+ ):
+ self.name = name
+ self.platform = platform
+ self.stream = stream
+ self.version = version
+ self.context = context
+ self.rpms = rpms or ["pkg-1.0-1.x86_64"]
+
+ def get_name(self):
+ return self.name
+
+ def peek_name(self):
+ return self.name
+
+ def peek_stream(self):
+ return self.stream
+
+ def peek_version(self):
+ return self.version
+
+ def peek_context(self):
+ return self.context
+
+ def peek_dependencies(self):
+ return [
+ mock.Mock(
+ peek_requires=mock.Mock(
+ return_value={
+ "platform": mock.Mock(
+ dup=mock.Mock(return_value=[self.platform])
+ )
+ }
+ )
+ )
+ ]
+
+ def copy(self):
+ return self
+
+ def set_arch(self, arch):
+ pass
+
+ def get_rpm_artifacts(self):
+ return mock.Mock(dup=mock.Mock(return_value=self.rpms))
+
+
+class HelperMixin(object):
+ def _repo(self, name):
+ return os.path.join(self.compose.topdir, "work/x86_64/%s" % name)
+
+
+@mock.patch("pungi.phases.gather.methods.method_hybrid.Modulemd")
+@mock.patch("pungi.phases.gather.methods.method_hybrid.run")
+class TestCreateModuleRepo(HelperMixin, helpers.PungiTestCase):
+ def setUp(self):
+ super(TestCreateModuleRepo, self).setUp()
+ self.compose = helpers.DummyCompose(self.topdir, {})
+ self.variant = self.compose.variants["Server"]
+
+ def test_no_modules(self, run, Modulemd):
+ plat, pkgs = hybrid.create_module_repo(self.compose, self.variant, "x86_64")
+
+ self.assertIsNone(plat)
+ self.assertItemsEqual(pkgs, [])
+ self.assertEqual(run.call_args_list, [])
+ self.assertEqual(Modulemd.mock_calls, [])
+
+ def test_more_than_one_platform(self, run, Modulemd):
+ self.variant.arch_mmds["x86_64"] = {
+ "mod:1": MockModule("mod", platform="f29"),
+ "mod:2": MockModule("mod", platform="f30"),
+ }
+
+ with self.assertRaises(RuntimeError) as ctx:
+ hybrid.create_module_repo(self.compose, self.variant, "x86_64")
+
+ self.assertIn("conflicting requests for platform", str(ctx.exception))
+ self.assertEqual(run.call_args_list, [])
+ self.assertEqual(Modulemd.mock_calls, [])
+
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.iter_module_defaults")
+ def test_creating_repo_with_module_and_default(self, imd, run, Modulemd):
+ mod = MockModule("mod", platform="f29")
+ self.variant.arch_mmds["x86_64"] = {"mod:1": mod}
+ default = mock.Mock(peek_module_name=mock.Mock(return_value="mod"))
+ imd.return_value = [default]
+
+ plat, pkgs = hybrid.create_module_repo(self.compose, self.variant, "x86_64")
+
+ self.assertEqual(plat, "f29")
+ self.assertItemsEqual(pkgs, ["pkg-1.0-1.x86_64"])
+
+ self.assertEqual(
+ Modulemd.mock_calls, [mock.call.dump([mod, default], mock.ANY)]
+ )
+ create, modify = run.call_args_list
+ self.assertEqual(
+ create[0][0][:2], ["createrepo_c", self._repo("module_repo_Server")]
+ )
+ self.assertEqual(
+ modify[0][0][:4],
+ [
+ "modifyrepo_c",
+ Modulemd.mock_calls[0][1][1],
+ self._repo("module_repo_Server/repodata"),
+ "--mdtype=modules",
+ ],
+ )
+
+
+class ModifiedMagicMock(mock.MagicMock):
+ """Like MagicMock, but remembers original values or mutable arguments."""
+
+ def _mock_call(_mock_self, *args, **kwargs):
+ return super(ModifiedMagicMock, _mock_self)._mock_call(
+ *copy.deepcopy(args), **copy.deepcopy(kwargs)
+ )
+
+
+@mock.patch("pungi.wrappers.fus.parse_output")
+@mock.patch("pungi.wrappers.fus.get_cmd", new_callable=ModifiedMagicMock)
+@mock.patch("pungi.phases.gather.methods.method_hybrid.run")
+class TestRunSolver(HelperMixin, helpers.PungiTestCase):
+ def setUp(self):
+ super(TestRunSolver, self).setUp()
+ self.compose = helpers.DummyCompose(self.topdir, {})
+ self.phase = hybrid.GatherMethodHybrid(self.compose)
+ self.phase.multilib_methods = []
+ self.logfile1 = os.path.join(
+ self.compose.topdir, "logs/x86_64/hybrid-depsolver-Server-iter-1.x86_64.log"
+ )
+ self.logfile2 = os.path.join(
+ self.compose.topdir, "logs/x86_64/hybrid-depsolver-Server-iter-2.x86_64.log"
+ )
+
+ def test_with_modules(self, run, gc, po):
+ self.compose.has_comps = None
+ self.compose.variants["Server"].arch_mmds["x86_64"] = {
+ "mod:master": mock.Mock(
+ peek_name=mock.Mock(return_value="mod"),
+ peek_stream=mock.Mock(return_value="master"),
+ )
+ }
+ po.return_value = (mock.Mock(), mock.Mock())
+
+ res = self.phase.run_solver(
+ self.compose.variants["Server"],
+ "x86_64",
+ [],
+ platform="pl",
+ modular_rpms=[],
+ )
+
+ self.assertEqual(res, po.return_value)
+ self.assertEqual(po.call_args_list, [mock.call(self.logfile1)])
+ self.assertEqual(
+ run.call_args_list,
+ [mock.call(gc.return_value, logfile=self.logfile1, show_cmd=True)],
+ )
+ self.assertEqual(
+ gc.call_args_list,
+ [
+ mock.call(
+ "x86_64",
+ [self._repo("repo"), self._repo("module_repo_Server")],
+ [],
+ [],
+ ["mod:master"],
+ platform="pl",
+ )
+ ],
+ )
+
+ def test_with_comps(self, run, gc, po):
+ po.return_value = (mock.Mock(), mock.Mock())
+ res = self.phase.run_solver(
+ self.compose.variants["Server"],
+ "x86_64",
+ [("pkg", None)],
+ platform=None,
+ modular_rpms=[],
+ )
+
+ self.assertEqual(res, po.return_value)
+ self.assertEqual(po.call_args_list, [mock.call(self.logfile1)])
+ self.assertEqual(
+ run.call_args_list,
+ [mock.call(gc.return_value, logfile=self.logfile1, show_cmd=True)],
+ )
+ self.assertEqual(
+ gc.call_args_list,
+ [mock.call("x86_64", [self._repo("repo")], [], ["pkg"], [], platform=None)],
+ )
+
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
+ def test_multilib_devel(self, cr, run, gc, po):
+ self.phase.arch = "x86_64"
+ self.phase.multilib_methods = ["devel"]
+ self.phase.multilib = mock.Mock()
+ self.phase.multilib.is_multilib.side_effect = (
+ lambda pkg: pkg.name == "pkg-devel"
+ )
+ self.phase.valid_arches = ["x86_64", "i686", "noarch"]
+ cr.Metadata.return_value.keys.return_value = []
+ self.phase.package_maps = {
+ "x86_64": {
+ "pkg-devel-1.0-1.x86_64": NamedMock(name="pkg-devel"),
+ "pkg-devel-1.0-1.i686": NamedMock(name="pkg-devel"),
+ "foo-1.0-1.x86_64": NamedMock(name="foo"),
+ }
+ }
+ self.phase.packages = self.phase.package_maps["x86_64"]
+ final = [
+ ("pkg-devel-1.0-1", "x86_64"),
+ ("foo-1.0-1", "x86_64"),
+ ("pkg-devel-1.0-1", "i686"),
+ ]
+ po.side_effect = [
+ [[("pkg-devel-1.0-1", "x86_64"), ("foo-1.0-1", "x86_64")], set()],
+ [final, set()],
+ ]
+
+ res = self.phase.run_solver(
+ self.compose.variants["Server"],
+ "x86_64",
+ [("pkg-devel", None), ("foo", None)],
+ platform=None,
+ modular_rpms=[],
+ )
+
+ self.assertEqual(res, (final, set()))
+ self.assertEqual(
+ po.call_args_list, [mock.call(self.logfile1), mock.call(self.logfile2)]
+ )
+ self.assertEqual(
+ run.call_args_list,
+ [
+ mock.call(gc.return_value, logfile=self.logfile1, show_cmd=True),
+ mock.call(gc.return_value, logfile=self.logfile2, show_cmd=True),
+ ],
+ )
+ self.assertEqual(
+ gc.call_args_list,
+ [
+ mock.call(
+ "x86_64",
+ [self._repo("repo")],
+ [],
+ ["pkg-devel", "foo"],
+ [],
+ platform=None,
+ ),
+ mock.call(
+ "x86_64",
+ [self._repo("repo")],
+ [],
+ ["pkg-devel", "foo", "pkg-devel.i686"],
+ [],
+ platform=None,
+ ),
+ ],
+ )
+
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
+ def test_multilib_runtime(self, cr, run, gc, po):
+ packages = {
+ "abc": NamedMock(
+ name="foo",
+ epoch=None,
+ version="1.0",
+ release="1",
+ arch="x86_64",
+ provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+ ),
+ "def": NamedMock(
+ name="foo",
+ epoch=None,
+ version="1.0",
+ release="1",
+ arch="i686",
+ provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+ ),
+ "ghi": NamedMock(
+ name="pkg-devel",
+ epoch=None,
+ version="1.0",
+ release="1",
+ arch="x86_64",
+ provides=[],
+ ),
+ }
+ cr.Metadata.return_value.keys.return_value = packages.keys()
+ cr.Metadata.return_value.get.side_effect = lambda key: packages[key]
+
+ self.phase.multilib_methods = ["runtime"]
+ self.phase.multilib = mock.Mock()
+ self.phase.multilib.is_multilib.side_effect = lambda pkg: pkg.name == "foo"
+ self.phase.valid_arches = ["x86_64", "i686", "noarch"]
+ self.phase.arch = "x86_64"
+ self.phase.package_maps = {
+ "x86_64": {
+ "pkg-devel-1.0-1.x86_64": mock.Mock(),
+ "pkg-devel-1.0-1.i686": mock.Mock(),
+ "foo-1.0-1.x86_64": mock.Mock(),
+ "foo-1.0-1.i686": mock.Mock(),
+ }
+ }
+ final = [
+ ("pkg-devel-1.0-1", "x86_64"),
+ ("foo-1.0-1", "x86_64"),
+ ("foo-1.0-1", "i686"),
+ ]
+ po.side_effect = [
+ ([("pkg-devel-1.0-1", "x86_64"), ("foo-1.0-1", "x86_64")], set()),
+ (final, set()),
+ ]
+
+ res = self.phase.run_solver(
+ self.compose.variants["Server"],
+ "x86_64",
+ [("pkg-devel", None), ("foo", None)],
+ platform=None,
+ modular_rpms=[],
+ )
+
+ self.assertEqual(res, (final, set()))
+ self.assertEqual(
+ po.call_args_list, [mock.call(self.logfile1), mock.call(self.logfile2)]
+ )
+ self.assertEqual(
+ run.call_args_list,
+ [
+ mock.call(gc.return_value, logfile=self.logfile1, show_cmd=True),
+ mock.call(gc.return_value, logfile=self.logfile2, show_cmd=True),
+ ],
+ )
+ self.assertEqual(
+ gc.call_args_list,
+ [
+ mock.call(
+ "x86_64",
+ [self._repo("repo")],
+ [],
+ ["pkg-devel", "foo"],
+ [],
+ platform=None,
+ ),
+ mock.call(
+ "x86_64",
+ [self._repo("repo")],
+ [],
+ ["pkg-devel", "foo", "foo.i686"],
+ [],
+ platform=None,
+ ),
+ ],
+ )
+
+
+class TestExpandPackages(helpers.PungiTestCase):
+ def _mk_packages(self, src=None, debug_arch=None):
+ pkg = MockPkg(
+ name="pkg",
+ version="1",
+ release="2",
+ arch="x86_64",
+ epoch=3,
+ sourcerpm="pkg-1-2.src",
+ file_path="/tmp/pkg.rpm",
+ )
+ nevra_to_pkg = {"pkg-3:1-2.x86_64": pkg}
+ if src or debug_arch:
+ nevra_to_pkg["pkg-3:1-2.src"] = pkg._replace(
+ arch="src", file_path="/tmp/spkg.rpm"
+ )
+ if debug_arch:
+ nevra_to_pkg["pkg-debuginfo-3:1-2.%s" % debug_arch] = pkg._replace(
+ name="pkg-debuginfo", arch=debug_arch, file_path="/tmp/d1.rpm"
+ )
+ return nevra_to_pkg
+
+ def test_single_package(self):
+ nevra_to_pkg = self._mk_packages()
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {}, [], [("pkg-3:1-2", "x86_64")], []
+ )
+
+ self.assertEqual(
+ res,
+ {
+ "rpm": [{"path": "/tmp/pkg.rpm", "flags": []}],
+ "srpm": [],
+ "debuginfo": [],
+ },
+ )
+
+ def test_include_src_and_debuginfo(self):
+ nevra_to_pkg = self._mk_packages(debug_arch="x86_64")
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {}, [], [("pkg-3:1-2", "x86_64")], []
+ )
+
+ self.assertEqual(
+ res,
+ {
+ "rpm": [{"path": "/tmp/pkg.rpm", "flags": []}],
+ "srpm": [{"path": "/tmp/spkg.rpm", "flags": []}],
+ "debuginfo": [{"path": "/tmp/d1.rpm", "flags": []}],
+ },
+ )
+
+ def test_skip_debuginfo_for_different_arch(self):
+ nevra_to_pkg = self._mk_packages(debug_arch="i686")
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {}, [], [("pkg-3:1-2", "x86_64")], []
+ )
+
+ self.assertEqual(
+ res,
+ {
+ "rpm": [{"path": "/tmp/pkg.rpm", "flags": []}],
+ "srpm": [{"path": "/tmp/spkg.rpm", "flags": []}],
+ "debuginfo": [],
+ },
+ )
+
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
+ def test_skip_lookaside_source_and_debuginfo(self, cr):
+ nevra_to_pkg = self._mk_packages(debug_arch="x86_64")
+ lookasides = [mock.Mock()]
+ repo = {
+ "abc": NamedMock(
+ name="pkg",
+ arch="src",
+ location_base="file:///tmp/",
+ location_href="spkg.rpm",
+ ),
+ "def": NamedMock(
+ name="pkg-debuginfo",
+ arch="x86_64",
+ location_base="file:///tmp/",
+ location_href="d1.rpm",
+ ),
+ }
+ cr.Metadata.return_value.keys.return_value = repo.keys()
+ cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {}, lookasides, [("pkg-3:1-2", "x86_64")], []
+ )
+
+ self.assertEqual(
+ res,
+ {
+ "rpm": [{"path": "/tmp/pkg.rpm", "flags": []}],
+ "srpm": [],
+ "debuginfo": [],
+ },
+ )
+
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
+ def test_skip_lookaside_packages(self, cr):
+ nevra_to_pkg = self._mk_packages(debug_arch="x86_64")
+ lookasides = [mock.Mock()]
+ repo = {
+ "abc": NamedMock(
+ name="pkg",
+ arch="x86_64",
+ location_base="file:///tmp/",
+ location_href="pkg.rpm",
+ )
+ }
+ cr.Metadata.return_value.keys.return_value = repo.keys()
+ cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {}, lookasides, [("pkg-3:1-2", "x86_64")], []
+ )
+
+ self.assertEqual(res, {"rpm": [], "srpm": [], "debuginfo": []})
+
+ def test_expand_module_packages(self):
+ nevra_to_pkg = self._mk_packages(src=True)
+ mod = MockModule(
+ "foo",
+ stream="1.0",
+ version="201807131350",
+ context="deadcafe",
+ rpms=["pkg-3:1-2.x86_64"],
+ )
+
+ res = hybrid.expand_packages(
+ nevra_to_pkg, {"foo-1.0": mod}, [], [], ["foo:1.0:201807131350:deadcafe"]
+ )
+
+ self.assertEqual(
+ res,
+ {
+ "rpm": [{"flags": [], "path": "/tmp/pkg.rpm"}],
+ "srpm": [{"flags": [], "path": "/tmp/spkg.rpm"}],
+ "debuginfo": [],
+ },
+ )
diff --git a/tests/test_gather_phase.py b/tests/test_gather_phase.py
index b85af8c3..e58a2408 100644
--- a/tests/test_gather_phase.py
+++ b/tests/test_gather_phase.py
@@ -711,6 +711,31 @@ class TestGatherPackages(helpers.PungiTestCase):
self.assertEqual(get_gather_method.call_args_list,
[mock.call('nodeps'), mock.call('deps'), mock.call('deps')])
+ @mock.patch("pungi.phases.gather.get_variant_packages")
+ @mock.patch("pungi.phases.gather.get_gather_method")
+ def test_hybrid_method(self, get_gather_method, get_variant_packages):
+ packages, groups, filters = mock.Mock(), mock.Mock(), mock.Mock()
+ get_variant_packages.side_effect = (
+ lambda c, v, a, s, p: (packages, groups, filters)
+ if s == "comps"
+ else (None, None, None)
+ )
+ compose = helpers.DummyCompose(self.topdir, {"gather_method": "hybrid"})
+ variant = compose.variants["Server"]
+ pkg_set = mock.Mock()
+        gather.gather_packages(compose, "x86_64", variant, pkg_set)
+ self.assertItemsEqual(
+ get_variant_packages.call_args_list,
+ [
+ mock.call(compose, "x86_64", variant, "module", pkg_set),
+ mock.call(compose, "x86_64", variant, "comps", pkg_set)
+ ],
+ )
+ self.assertEqual(get_gather_method.call_args_list, [mock.call("hybrid")])
+ method_kwargs = get_gather_method.return_value.return_value.call_args_list[0][1]
+ self.assertEqual(method_kwargs["packages"], packages)
+ self.assertEqual(method_kwargs["groups"], groups)
+
class TestWritePrepopulate(helpers.PungiTestCase):
def test_without_config(self):