New upstream release 4.1.23

Lubomír Sedlář 2018-04-04 09:34:34 +02:00
parent 8fa473b7a1
commit 49e14caa9b
15 changed files with 90 additions and 1953 deletions

@@ -1,754 +0,0 @@
From 659eb0215a7a53628533c195cb6dc6e461d8be27 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Tue, 7 Nov 2017 14:16:37 +0100
Subject: [PATCH 01/12] Support multiple sources in one variant
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
With this patch the gather_source option is no longer used. Instead, all
sources are always consulted. Each source that returns at least some input
packages is processed with its configured method, and the resulting
package lists from all sources are merged.
The method used for gathering can be configured for each variant and
gather source separately.
Additional packages are only added to the comps source.
Each gathering step is logged separately. All the logs are preserved for
later inspection.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
doc/configuration.rst | 36 +++++---
doc/gathering.rst | 6 +-
pungi/checks.py | 39 ++++----
pungi/paths.py | 21 +++--
pungi/phases/gather/__init__.py | 127 ++++++++++++++++++---------
pungi/phases/gather/methods/method_deps.py | 15 ++--
pungi/phases/gather/methods/method_nodeps.py | 5 +-
tests/data/dummy-pungi.conf | 8 +-
tests/helpers.py | 1 -
tests/test_config.py | 25 ------
tests/test_gather_phase.py | 51 +++++++----
tests/test_pkgset_source_koji.py | 1 -
12 files changed, 199 insertions(+), 136 deletions(-)
diff --git a/doc/configuration.rst b/doc/configuration.rst
index a1408317..dbe5ba08 100644
--- a/doc/configuration.rst
+++ b/doc/configuration.rst
@@ -35,7 +35,6 @@ Minimal Config Example
createrepo_checksum = "sha256"
# GATHER
- gather_source = "comps"
gather_method = "deps"
greedy_method = "build"
check_deps = False
@@ -546,16 +545,13 @@ Gather Settings
Options
-------
-**gather_source** [mandatory]
- (*str*) -- from where to read initial package list; expected values:
- ``comps``, ``none``, ``module``
-
- When ``comps`` is selected, you have to specify ``comps_file`` option. When
- ``module`` is selected, you have to :ref:`configure PDC API url <pdc-settings>`.
-
**gather_method** [mandatory]
- (*str*) -- Options are ``deps`` and ``nodeps``. Specifies whether package
- dependencies should be pulled in as well.
+ (*str*|*dict*) -- Options are ``deps`` and ``nodeps``. Specifies whether
+ package dependencies should be pulled in as well. Either a single value or
+ a dictionary mapping variant UID and source type to a value. Make sure only
+ one regex matches each variant, as there is no guarantee which value will
+ be used if there are multiple matching ones. All used sources must have a
+ configured method.
**gather_fulltree** = False
(*bool*) -- When set to ``True`` all RPMs built from an SRPM will always be
@@ -671,9 +667,9 @@ Options
such cases are still reported as warnings in the log.
**gather_source_mapping**
- (*str*) -- Only use when ``gather_source = "json"``. The value should be a
- path to JSON file with following mapping: ``{variant: {arch: {rpm_name:
- [rpm_arch|None]}}}``.
+ (*str*) -- JSON mapping with initial packages for the compose. The value
+ should be a path to JSON file with following mapping: ``{variant: {arch:
+ {rpm_name: [rpm_arch|None]}}}``.
**gather_profiler** = False
(*bool*) -- When set to ``True`` the gather tool will produce additional
@@ -685,12 +681,24 @@ Example
-------
::
- gather_source = "comps"
gather_method = "deps"
greedy_method = "build"
check_deps = False
hashed_directories = True
+ gather_method = {
+ "^Everything$": {
+ "comps": "deps" # traditional content defined by comps groups
+ },
+ "^Modular$": {
+ "module": "nodeps" # Modules do not need dependencies
+ },
+ "^Mixed$": { # Mixed content in one variant
+ "comps": "deps",
+ "module": "nodeps"
+ }
+ }
+
additional_packages = [
# bz#123456
('^(Workstation|Server)$', {
diff --git a/doc/gathering.rst b/doc/gathering.rst
index 5111f617..08b348b7 100644
--- a/doc/gathering.rst
+++ b/doc/gathering.rst
@@ -8,9 +8,9 @@ a subset of the content targeted at a particular use case.
There are different types of variants. The type affects how packages are
gathered into the variant.
-The inputs for gathering are defined by the ``gather_source`` option. It
-provides a list of package names, comps groups names and a list of packages
-that should be filtered out.
+The inputs for gathering are defined by various gather sources. Packages from
+all sources are collected to create a big list of package names, comps groups
+names and a list of packages that should be filtered out.
.. note::
The inputs for both explicit package list and comps file are interpreted as
diff --git a/pungi/checks.py b/pungi/checks.py
index faf18a24..9b317ab6 100644
--- a/pungi/checks.py
+++ b/pungi/checks.py
@@ -554,12 +554,30 @@ def make_schema():
},
"gather_method": {
- "type": "string",
- "enum": ["deps", "nodeps"],
+ "oneOf": [
+ {
+ "type": "object",
+ "patternProperties": {
+ ".+": {
+ "type": "object",
+ "patternProperties": {
+ "^module|comps|json$": {
+ "type": "string",
+ "enum": ["deps", "nodeps"],
+ }
+ }
+ }
+ },
+ "additionalProperties": False,
+ },
+ {
+ "type": "string",
+ "enum": ["deps", "nodeps"],
+ }
+ ],
},
"gather_source": {
- "type": "string",
- "enum": ["module", "json", "comps", "none"],
+ "deprecated": "remove it",
},
"gather_fulltree": {
"type": "boolean",
@@ -706,7 +724,7 @@ def make_schema():
"type": "string",
"enum": ["lorax", "buildinstall"],
},
- "buildinstall_topdir": {"type": "string"},
+ "buildinstall_topdir": {"type": "string"},
"buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
"buildinstall_use_guestmount": {"type": "boolean", "default": True},
@@ -1067,7 +1085,7 @@ def make_schema():
"release_is_layered",
"variants_file", "sigkeys",
"runroot", "pkgset_source",
- "gather_source", "gather_method"],
+ "gather_method"],
"additionalProperties": False,
}
@@ -1118,15 +1136,6 @@ def get_num_cpus():
# encountered and its value satisfies the lambda, an error is reported for each
# missing (for requires) option in the list.
CONFIG_DEPS = {
- "gather_source": {
- "conflicts": [
- (lambda val: val != 'json', ['gather_source_mapping']),
- ],
- "requires": [
- (lambda val: val == 'json', ['gather_source_mapping']),
- (lambda val: val == 'comps', ['comps_file']),
- ]
- },
"productimg": {
"requires": (
(lambda x: bool(x), ["productimg_install_class"]),
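
The schema hunk above replaces the flat string with a "oneOf": either a
plain string, or a mapping from a variant UID regex to a per-source method.
As a rough illustration (not Pungi code; requires the jsonschema package),
a trimmed schema of the same shape accepts both forms:

    import jsonschema

    GATHER_METHOD_SCHEMA = {
        "oneOf": [
            {
                # {variant UID regex: {source name: method name}}
                "type": "object",
                "patternProperties": {
                    ".+": {
                        "type": "object",
                        "patternProperties": {
                            "^module|comps|json$": {
                                "type": "string",
                                "enum": ["deps", "nodeps"],
                            }
                        },
                    }
                },
                "additionalProperties": False,
            },
            # or a single method name used everywhere
            {"type": "string", "enum": ["deps", "nodeps"]},
        ]
    }

    # Both forms pass validation.
    jsonschema.validate({"^Server$": {"comps": "deps", "module": "nodeps"}}, GATHER_METHOD_SCHEMA)
    jsonschema.validate("deps", GATHER_METHOD_SCHEMA)
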
diff --git a/pungi/paths.py b/pungi/paths.py
index 42d0e3eb..0e506795 100644
--- a/pungi/paths.py
+++ b/pungi/paths.py
@@ -107,32 +107,37 @@ class WorkPaths(object):
path = os.path.join(path, file_name)
return path
- def pungi_conf(self, arch=None, variant=None, create_dir=True):
+ def pungi_conf(self, arch=None, variant=None, create_dir=True, source_name=None):
"""
Examples:
work/x86_64/pungi/x86_64.conf
work/x86_64/pungi/Server.x86_64.conf
"""
arch = arch or "global"
- if variant is None:
- file_name = "%s.conf" % arch
- else:
- file_name = "%s.%s.conf" % (variant.uid, arch)
+ file_name = ''
+ if variant:
+ file_name += variant.uid + '.'
+ file_name += arch + '.'
+ if source_name:
+ file_name += source_name + '.'
+ file_name += 'conf'
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi")
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
- def pungi_log(self, arch=None, variant=None, create_dir=True):
+ def pungi_log(self, arch=None, variant=None, create_dir=True, source_name=None):
"""
Examples:
work/x86_64/pungi/x86_64.log
work/x86_64/pungi/Server.x86_64.log
"""
path = self.pungi_conf(arch, variant, create_dir=create_dir)
- path = path[:-5] + ".log"
- return path
+ path = path[:-5]
+ if source_name:
+ path += '.' + source_name
+ return path + ".log"
def pungi_cache_dir(self, arch, variant=None, create_dir=True):
"""
diff --git a/pungi/phases/gather/__init__.py b/pungi/phases/gather/__init__.py
index 23b9079b..bc056e25 100644
--- a/pungi/phases/gather/__init__.py
+++ b/pungi/phases/gather/__init__.py
@@ -24,7 +24,7 @@ from productmd.rpms import Rpms
from pungi.wrappers.scm import get_file_from_scm
from .link import link_files
-from pungi.util import get_arch_variant_data, get_arch_data
+from pungi.util import get_arch_variant_data, get_arch_data, get_variant_data
from pungi.phases.base import PhaseBase
from pungi.arch import split_name_arch, get_compatible_arches
@@ -68,10 +68,13 @@ class GatherPhase(PhaseBase):
except ValueError as exc:
errors = exc.message.split('\n')
- if self.compose.conf['gather_source'] == 'module':
- from pungi.phases.pkgset.sources import source_koji
- if not source_koji.WITH_MODULES:
- errors.append('Modular compose requires pdc_client and modulemd packages.')
+ # This must be imported here to avoid circular deps problems.
+ from pungi.phases.pkgset.sources import source_koji
+ if not source_koji.WITH_MODULES:
+ # Modules are not supported, check if we need them
+ for variant in self.compose.variants.values():
+ if variant.modules:
+ errors.append('Modular compose requires pdc_client and modulemd packages.')
if errors:
raise ValueError('\n'.join(errors))
@@ -126,7 +129,14 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
# multilib white/black-list is per-arch, common for all variants
multilib_whitelist = get_multilib_whitelist(compose, arch)
multilib_blacklist = get_multilib_blacklist(compose, arch)
- GatherMethod = get_gather_method(compose.conf["gather_method"])
+ methods = compose.conf["gather_method"]
+ global_method_name = methods
+ if isinstance(methods, dict):
+ try:
+ methods = get_variant_data(compose.conf, 'gather_method', variant)[-1]
+ global_method_name = None
+ except IndexError:
+ raise RuntimeError("Variant %s has no configured gather_method" % variant.uid)
msg = "Gathering packages (arch: %s, variant: %s)" % (arch, variant)
@@ -136,17 +146,43 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
compose.log_info("[BEGIN] %s" % msg)
- packages, groups, filter_packages = get_variant_packages(compose, arch, variant, package_sets)
+ result = {
+ "rpm": [],
+ "srpm": [],
+ "debuginfo": [],
+ }
+
prepopulate = get_prepopulate_packages(compose, arch, variant)
fulltree_excludes = fulltree_excludes or set()
- method = GatherMethod(compose)
- pkg_map = method(arch, variant, packages, groups, filter_packages,
- multilib_whitelist, multilib_blacklist, package_sets,
- fulltree_excludes=fulltree_excludes, prepopulate=prepopulate)
+ for source_name in ('module', 'comps', 'json'):
+
+ packages, groups, filter_packages = get_variant_packages(compose, arch, variant,
+ source_name, package_sets)
+ if not packages and not groups:
+ # No inputs, nothing to do really.
+ continue
+
+ try:
+ method_name = global_method_name or methods[source_name]
+ except KeyError:
+ raise RuntimeError("Variant %s has no configured gather_method for source %s"
+ % (variant.uid, source_name))
+
+ GatherMethod = get_gather_method(method_name)
+ method = GatherMethod(compose)
+ method.source_name = source_name
+ compose.log_debug("Gathering source %s, method %s" % (source_name, method_name))
+ pkg_map = method(arch, variant, packages, groups, filter_packages,
+ multilib_whitelist, multilib_blacklist, package_sets,
+ fulltree_excludes=fulltree_excludes,
+ prepopulate=prepopulate if source_name == 'comps' else set())
+
+ for t in ('rpm', 'srpm', 'debuginfo'):
+ result[t].extend(pkg_map.get(t, []))
compose.log_info("[DONE ] %s" % msg)
- return pkg_map
+ return result
def write_packages(compose, arch, variant, pkg_map, path_prefix):
@@ -415,7 +451,7 @@ def get_lookaside_repos(compose, arch, variant):
return get_arch_variant_data(compose.conf, "gather_lookaside_repos", arch, variant)
-def get_variant_packages(compose, arch, variant, package_sets=None):
+def get_variant_packages(compose, arch, variant, source_name, package_sets=None):
"""Find inputs for depsolving of variant.arch combination.
Returns a triple: a list of input packages, a list of input comps groups
@@ -429,17 +465,27 @@ def get_variant_packages(compose, arch, variant, package_sets=None):
When system-release packages should be filtered, the ``package_sets``
argument is required.
"""
- GatherSource = get_gather_source(compose.conf["gather_source"])
+ packages, groups, filter_packages = set(), set(), set()
+ GatherSource = get_gather_source(source_name)
source = GatherSource(compose)
- packages, groups = source(arch, variant)
- filter_packages = set()
+ p, g = source(arch, variant)
+
+ if source_name != "comps" and not p and not g:
+ # For modules and json source, if the source did not return anything,
+ # we should skip all further work. Additional packages and possibly
+ # system-release will be added to comps source.
+ return packages, groups, filter_packages
+
+ packages |= p
+ groups |= g
if variant is None:
# no variant -> no parent -> we have everything we need
# doesn't make sense to do any package filtering
return packages, groups, filter_packages
- packages |= get_additional_packages(compose, arch, variant)
+ if source_name == 'comps':
+ packages |= get_additional_packages(compose, arch, variant)
filter_packages |= get_filter_packages(compose, arch, variant)
if compose.conf['filter_system_release_packages']:
@@ -452,13 +498,13 @@ def get_variant_packages(compose, arch, variant, package_sets=None):
for var in variant.parent.get_variants(
arch=arch, types=["self", "variant", "addon", "layered-product"]):
var_packages, var_groups, _ = get_variant_packages(
- compose, arch, var, package_sets=package_sets)
+ compose, arch, var, source_name, package_sets=package_sets)
packages |= var_packages
groups |= var_groups
if variant.type in ["addon", "layered-product"]:
var_packages, var_groups, _ = get_variant_packages(
- compose, arch, variant.parent, package_sets=package_sets)
+ compose, arch, variant.parent, source_name, package_sets=package_sets)
packages |= var_packages
groups |= var_groups
@@ -517,9 +563,6 @@ def get_packages_to_gather(compose, arch=None, variant=None, include_arch=True,
"""
Returns the list of names of packages and list of names of groups which
would be included in a compose as GATHER phase result.
- This works only for "comps" or "json" gather_source. For "module"
- gather_source, this always return an empty list, because it is not clear
- what packages will end up in a compose before the gather phase is run.
:param str arch: Arch to return packages for. If not set, returns packages
for all arches.
@@ -531,30 +574,28 @@ def get_packages_to_gather(compose, arch=None, variant=None, include_arch=True,
:param include_prepopulated: When True, the prepopulated packages will
be included in a list of packages.
"""
- if compose.conf["gather_source"] == "module":
- return ([], [])
+ packages = set([])
+ groups = set([])
+ for source_name in ('module', 'comps', 'json'):
+ GatherSource = get_gather_source(source_name)
+ src = GatherSource(compose)
- arches = [arch] if arch else compose.get_arches()
+ arches = [arch] if arch else compose.get_arches()
- GatherSource = get_gather_source(compose.conf["gather_source"])
- src = GatherSource(compose)
+ for arch in arches:
+ pkgs, grps = src(arch, variant)
+ groups = groups.union(set(grps))
- packages = set([])
- groups = set([])
- for arch in arches:
- pkgs, grps = src(arch, variant)
- groups = groups.union(set(grps))
-
- additional_packages = get_additional_packages(compose, arch, None)
- for pkg_name, pkg_arch in pkgs | additional_packages:
- if not include_arch or pkg_arch is None:
- packages.add(pkg_name)
- else:
- packages.add("%s.%s" % (pkg_name, pkg_arch))
+ additional_packages = get_additional_packages(compose, arch, None)
+ for pkg_name, pkg_arch in pkgs | additional_packages:
+ if not include_arch or pkg_arch is None:
+ packages.add(pkg_name)
+ else:
+ packages.add("%s.%s" % (pkg_name, pkg_arch))
- if include_prepopulated:
- prepopulated = get_prepopulate_packages(
- compose, arch, variant, include_arch)
- packages = packages.union(prepopulated)
+ if include_prepopulated:
+ prepopulated = get_prepopulate_packages(
+ compose, arch, variant, include_arch)
+ packages = packages.union(prepopulated)
return list(packages), list(groups)
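
For reference, the per-variant, per-source method lookup that the new
gather_packages code performs can be sketched roughly as follows (the
helper name resolve_gather_method is invented for this illustration; the
real code goes through pungi.util.get_variant_data):

    import re

    def resolve_gather_method(gather_method, variant_uid, source_name):
        """Invented helper: pick the method for one (variant, source) pair."""
        if not isinstance(gather_method, dict):
            # A single global string applies to every variant and source.
            return gather_method
        for pattern, per_source in gather_method.items():
            if re.match(pattern, variant_uid):
                try:
                    return per_source[source_name]
                except KeyError:
                    raise RuntimeError(
                        "Variant %s has no configured gather_method for source %s"
                        % (variant_uid, source_name))
        raise RuntimeError("Variant %s has no configured gather_method" % variant_uid)

    conf = {"^Mixed$": {"comps": "deps", "module": "nodeps"}}
    assert resolve_gather_method(conf, "Mixed", "module") == "nodeps"
    assert resolve_gather_method("deps", "Server", "comps") == "deps"
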
diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py
index f4d0d6f8..7c9e8fb6 100644
--- a/pungi/phases/gather/methods/method_deps.py
+++ b/pungi/phases/gather/methods/method_deps.py
@@ -40,8 +40,9 @@ class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
write_pungi_config(self.compose, arch, variant, packages, groups, filter_packages,
multilib_whitelist, multilib_blacklist,
- fulltree_excludes=fulltree_excludes, prepopulate=prepopulate)
- result, missing_deps = resolve_deps(self.compose, arch, variant)
+ fulltree_excludes=fulltree_excludes, prepopulate=prepopulate,
+ source_name=self.source_name)
+ result, missing_deps = resolve_deps(self.compose, arch, variant, source_name=self.source_name)
check_deps(self.compose, arch, variant, missing_deps)
return result
@@ -61,10 +62,10 @@ def _format_packages(pkgs):
def write_pungi_config(compose, arch, variant, packages, groups, filter_packages,
multilib_whitelist, multilib_blacklist, fulltree_excludes=None,
- prepopulate=None):
+ prepopulate=None, source_name=None):
"""write pungi config (kickstart) for arch/variant"""
pungi_wrapper = PungiWrapper()
- pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
+ pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch, source_name=source_name)
msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)
if compose.DEBUG and os.path.isfile(pungi_cfg):
@@ -95,9 +96,9 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
prepopulate=prepopulate)
-def resolve_deps(compose, arch, variant):
+def resolve_deps(compose, arch, variant, source_name=None):
pungi_wrapper = PungiWrapper()
- pungi_log = compose.paths.work.pungi_log(arch, variant)
+ pungi_log = compose.paths.work.pungi_log(arch, variant, source_name=source_name)
msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)
if compose.DEBUG and os.path.exists(pungi_log):
@@ -107,7 +108,7 @@ def resolve_deps(compose, arch, variant):
return res, broken_deps
compose.log_info("[BEGIN] %s" % msg)
- pungi_conf = compose.paths.work.pungi_conf(arch, variant)
+ pungi_conf = compose.paths.work.pungi_conf(arch, variant, source_name=source_name)
multilib_methods = get_arch_variant_data(compose.conf, 'multilib', arch, variant)
diff --git a/pungi/phases/gather/methods/method_nodeps.py b/pungi/phases/gather/methods/method_nodeps.py
index 69249da2..ffc8e460 100644
--- a/pungi/phases/gather/methods/method_nodeps.py
+++ b/pungi/phases/gather/methods/method_nodeps.py
@@ -28,7 +28,10 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
enabled = True
def __call__(self, arch, variant, *args, **kwargs):
- log_file = self.compose.paths.log.log_file(arch, 'gather-nodeps-%s' % variant.uid)
+ fname = 'gather-nodeps-%s' % variant.uid
+ if self.source_name:
+ fname += '-' + self.source_name
+ log_file = self.compose.paths.log.log_file(arch, fname)
with open(log_file, 'w') as log:
return self.worker(log, arch, variant, *args, **kwargs)
diff --git a/tests/data/dummy-pungi.conf b/tests/data/dummy-pungi.conf
index faaebb5b..2d1f21f1 100644
--- a/tests/data/dummy-pungi.conf
+++ b/tests/data/dummy-pungi.conf
@@ -36,8 +36,12 @@ createrepo_checksum = "sha256"
# GATHER
-gather_source = "comps"
-gather_method = "deps"
+gather_method = {
+ "^.*$": {
+ "module": "nodeps",
+ "comps": "deps",
+ }
+}
greedy_method = "build"
check_deps = False
hashed_directories = True
diff --git a/tests/helpers.py b/tests/helpers.py
index 3f258ba9..f069635d 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -186,7 +186,6 @@ BASE_CONFIG = dict(
runroot=False,
createrepo_checksum='sha256',
gather_method='deps',
- gather_source='none',
sigkeys=[],
)
diff --git a/tests/test_config.py b/tests/test_config.py
index 5cfdcbb0..b2c1c8fb 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -213,31 +213,6 @@ class CreaterepoConfigTestCase(ConfigTestCase):
class GatherConfigTestCase(ConfigTestCase):
- def test_source_comps_requires_comps(self):
- cfg = load_config(
- pkgset_source='koji',
- pkgset_koji_tag="f25",
- gather_source='comps',
- gather_source_mapping='foo'
- )
-
- self.assertValidation(
- cfg,
- [checks.REQUIRES.format('gather_source', 'comps', 'comps_file'),
- checks.CONFLICTS.format('gather_source', 'comps', 'gather_source_mapping')])
-
- def test_source_json_requires_mapping(self):
- cfg = load_config(
- pkgset_source='koji',
- pkgset_koji_tag="f25",
- gather_source='json',
- comps_file='comps',
- )
-
- self.assertValidation(
- cfg,
- [checks.REQUIRES.format('gather_source', 'json', 'gather_source_mapping')])
-
def test_dnf_backend_is_default_on_py3(self):
cfg = load_config(
pkgset_source='koji',
diff --git a/tests/test_gather_phase.py b/tests/test_gather_phase.py
index 28598fac..1630ef33 100644
--- a/tests/test_gather_phase.py
+++ b/tests/test_gather_phase.py
@@ -460,7 +460,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
def test_no_variant(self):
compose = helpers.DummyCompose(self.topdir, {})
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', None)
+ compose, 'x86_64', None, 'comps')
self.assertItemsEqual(packages, [])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [])
@@ -473,7 +473,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
compose = helpers.DummyCompose(self.topdir, {})
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.variants['Server'])
+ compose, 'x86_64', compose.variants['Server'], 'comps')
self.assertItemsEqual(packages, ['foo'])
self.assertItemsEqual(groups, ['core'])
self.assertItemsEqual(filter_packages, [])
@@ -490,7 +490,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.variants['Server'], package_sets={'x86_64': pkgset})
+ compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
self.assertItemsEqual(packages, [('system-release-server', None)])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [('system-release', None)])
@@ -509,7 +509,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.variants['Server'], package_sets={'x86_64': pkgset})
+ compose, 'x86_64', compose.variants['Server'], 'comps', package_sets={'x86_64': pkgset})
self.assertItemsEqual(packages, [])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [])
@@ -534,7 +534,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.all_variants['Server-optional'])
+ compose, 'x86_64', compose.all_variants['Server-optional'], 'comps')
self.assertItemsEqual(packages, ['server-pkg', 'addon-pkg', 'opt-pkg'])
self.assertItemsEqual(groups, ['server-group', 'addon-group', 'opt-group'])
self.assertItemsEqual(filter_packages, [])
@@ -554,7 +554,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.all_variants['Server-optional'])
+ compose, 'x86_64', compose.all_variants['Server-optional'], 'comps')
self.assertItemsEqual(packages, [])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [])
@@ -572,7 +572,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
)
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.all_variants['Server'])
+ compose, 'x86_64', compose.all_variants['Server'], 'comps')
self.assertItemsEqual(packages, [('pkg', None), ('foo', 'x86_64')])
self.assertItemsEqual(groups, [])
self.assertItemsEqual(filter_packages, [])
@@ -591,7 +591,7 @@ class TestGetVariantPackages(helpers.PungiTestCase):
with self.assertRaises(ValueError) as ctx:
packages, groups, filter_packages = gather.get_variant_packages(
- compose, 'x86_64', compose.all_variants['Server'])
+ compose, 'x86_64', compose.all_variants['Server'], 'comps')
self.assertIn('Incompatible package arch', str(ctx.exception))
@@ -641,17 +641,19 @@ class TestGatherPackages(helpers.PungiTestCase):
pkg_set = mock.Mock()
self.assertEqual(
gather.gather_packages(compose, 'x86_64', compose.variants['Server'], pkg_set),
- get_gather_method.return_value.return_value.return_value
+ {'rpm': [], 'srpm': [], 'debuginfo': []}
)
self.assertEqual(get_gather_method.call_args_list,
- [mock.call(compose.conf['gather_method'])])
+ [mock.call(compose.conf['gather_method'])] * 3)
self.assertEqual(get_variant_packages.call_args_list,
- [mock.call(compose, 'x86_64', compose.variants['Server'], pkg_set)])
+ [mock.call(compose, 'x86_64', compose.variants['Server'], 'module', pkg_set),
+ mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
+ mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set)])
self.assertEqual(
get_gather_method.return_value.return_value.call_args_list,
[mock.call('x86_64', compose.variants['Server'], packages, groups,
filters, set(), set(), pkg_set, fulltree_excludes=set(),
- prepopulate=set())]
+ prepopulate=set())] * 3
)
@mock.patch('pungi.phases.gather.get_variant_packages')
@@ -679,19 +681,36 @@ class TestGatherPackages(helpers.PungiTestCase):
pkg_set = mock.Mock()
self.assertEqual(
gather.gather_packages(compose, 'x86_64', compose.variants['Server'], pkg_set),
- get_gather_method.return_value.return_value.return_value
+ {'rpm': [], 'srpm': [], 'debuginfo': []}
)
self.assertEqual(get_gather_method.call_args_list,
- [mock.call(compose.conf['gather_method'])])
+ [mock.call(compose.conf['gather_method'])] * 3)
self.assertEqual(get_variant_packages.call_args_list,
- [mock.call(compose, 'x86_64', compose.variants['Server'], pkg_set)])
+ [mock.call(compose, 'x86_64', compose.variants['Server'], 'module', pkg_set),
+ mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
+ mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set)])
self.assertEqual(
get_gather_method.return_value.return_value.call_args_list,
[mock.call('x86_64', compose.variants['Server'], packages, groups,
filters, set(['white']), set(['black']), pkg_set,
- fulltree_excludes=set(), prepopulate=set())]
+ fulltree_excludes=set(), prepopulate=set())] * 3
)
+ @mock.patch('pungi.phases.gather.get_variant_packages')
+ @mock.patch('pungi.phases.gather.get_gather_method')
+ def test_per_source_method(self, get_gather_method, get_variant_packages):
+ packages, groups, filters = mock.Mock(), mock.Mock(), mock.Mock()
+ get_variant_packages.return_value = (packages, groups, filters)
+ compose = helpers.DummyCompose(self.topdir, {
+ 'multilib_whitelist': {'*': ['white']},
+ 'multilib_blacklist': {'*': ['black']},
+ 'gather_method': {'^Server$': {'comps': 'deps', 'module': 'nodeps', 'json': 'deps'}},
+ })
+ pkg_set = mock.Mock()
+ gather.gather_packages(compose, 'x86_64', compose.variants['Server'], pkg_set),
+ self.assertEqual(get_gather_method.call_args_list,
+ [mock.call('nodeps'), mock.call('deps'), mock.call('deps')])
+
class TestWritePrepopulate(helpers.PungiTestCase):
def test_without_config(self):
diff --git a/tests/test_pkgset_source_koji.py b/tests/test_pkgset_source_koji.py
index 6ea17166..b1e1308d 100644
--- a/tests/test_pkgset_source_koji.py
+++ b/tests/test_pkgset_source_koji.py
@@ -204,7 +204,6 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
pickle_dumps):
self.compose = helpers.DummyCompose(self.topdir, {
'gather_method': 'nodeps',
- 'gather_source': 'none',
'pkgset_koji_tag': 'f25',
'sigkeys': mock.Mock(),
'additional_packages': [
--
2.13.6
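
Taken together, the gathering flow this patch introduces can be summed up
by the following sketch: every source is consulted, sources that return no
input are skipped, and the per-source results are merged into a single
rpm/srpm/debuginfo map (the function and its callbacks are invented for
illustration and are not part of Pungi):

    def gather_all_sources(get_inputs, run_method, sources=("module", "comps", "json")):
        result = {"rpm": [], "srpm": [], "debuginfo": []}
        for source_name in sources:
            packages, groups = get_inputs(source_name)
            if not packages and not groups:
                # This source has nothing for the variant; skip it entirely.
                continue
            pkg_map = run_method(source_name, packages, groups)
            for kind in result:
                result[kind].extend(pkg_map.get(kind, []))
        return result

    # Example: only the comps source returns input, so only it is gathered.
    merged = gather_all_sources(
        get_inputs=lambda src: ({"bash"}, {"core"}) if src == "comps" else (set(), set()),
        run_method=lambda src, pkgs, grps: {"rpm": ["bash-4.4-1.x86_64.rpm"]},
    )
    assert merged == {"rpm": ["bash-4.4-1.x86_64.rpm"], "srpm": [], "debuginfo": []}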

@@ -0,0 +1,32 @@
From 1436ea2b03a9b86a8706bd2e001550221914cb2e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 4 Apr 2018 12:23:43 +0200
Subject: [PATCH] tests: Use dummy modulesdir for DNF
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Otherwise DNF tries to ensure the directory exists, and since the default
is /etc/dnf/modules.d, this causes problems if the directory does not
exist and the user does not have permission to create it.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
tests/test_gather.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/test_gather.py b/tests/test_gather.py
index 1daf3c60..25dfc399 100644
--- a/tests/test_gather.py
+++ b/tests/test_gather.py
@@ -1810,6 +1810,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
conf = Conf(base_arch)
conf.persistdir = persistdir
conf.cachedir = self.cachedir
+ conf.modulesdir = os.path.join(persistdir, 'modules.d')
if exclude:
conf.exclude = exclude
dnf = DnfWrapper(conf)
--
2.13.6
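
The same workaround outside the test suite would look roughly like this
minimal sketch, assuming a DNF build whose configuration exposes the
modulesdir option (everything below the imports is illustrative, not code
from Pungi):

    import os
    import tempfile

    import dnf
    import dnf.conf

    # Point every stateful directory, including modulesdir, at a throwaway
    # location so nothing tries to create /etc/dnf/modules.d.
    workdir = tempfile.mkdtemp(prefix="dnf-sandbox-")
    conf = dnf.conf.Conf()
    conf.persistdir = os.path.join(workdir, "persist")
    conf.cachedir = os.path.join(workdir, "cache")
    conf.modulesdir = os.path.join(workdir, "modules.d")
    base = dnf.Base(conf)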

@@ -1,74 +0,0 @@
From de5dcc7e9ebb3c5b7201b404f302b9ac7dcab722 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Fri, 2 Mar 2018 08:33:37 +0100
Subject: [PATCH 02/12] Remove comps groups from purely modular variants
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The comps source should not return all groups when only modules are
defined. This fixes part of the problem: non-modular packages will no
longer be pulled in by default.
The second part is the comps file in the created repository. It will be
filtered so that it contains no groups (because the packages from those
groups will not be in the repo).
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/gather/sources/source_comps.py | 3 ++-
pungi/phases/init.py | 6 +++++-
tests/test_initphase.py | 1 +
3 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/pungi/phases/gather/sources/source_comps.py b/pungi/phases/gather/sources/source_comps.py
index 655c7673..447e0a47 100644
--- a/pungi/phases/gather/sources/source_comps.py
+++ b/pungi/phases/gather/sources/source_comps.py
@@ -39,7 +39,8 @@ class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
comps = CompsWrapper(self.compose.paths.work.comps(arch=arch))
- if variant is not None and (variant.groups or variant.type != 'variant'):
+ is_modular = variant and not variant.groups and variant.modules
+ if variant is not None and (variant.groups or variant.type != 'variant' or is_modular):
# Get packages for a particular variant. We want to skip the
# filtering if the variant is top-level and has no groups (to use
# all of them).
diff --git a/pungi/phases/init.py b/pungi/phases/init.py
index cbda4949..a01168a9 100644
--- a/pungi/phases/init.py
+++ b/pungi/phases/init.py
@@ -45,12 +45,16 @@ class InitPhase(PhaseBase):
# write variant comps
for variant in self.compose.get_variants():
+ is_modular = not variant.groups and variant.modules
for arch in variant.arches:
- if variant.groups or variant.type == 'optional':
+ if variant.groups or variant.type == 'optional' or is_modular:
# The variant lists only some groups, run filter. Other
# option is that it's optional variant, in which case
# we want to filter everything (unless there was
# explicit list in which case it will be used).
+ # For fully modular variant (one without groups but
+ # with modules) we also want to filter (effectively
+ # producing empty comps).
write_variant_comps(self.compose, arch, variant)
else:
# The variant does not mention any groups, copy
diff --git a/tests/test_initphase.py b/tests/test_initphase.py
index 7d8b639a..ae6c4e52 100644
--- a/tests/test_initphase.py
+++ b/tests/test_initphase.py
@@ -53,6 +53,7 @@ class TestInitPhase(PungiTestCase):
compose = DummyCompose(self.topdir, {})
compose.has_comps = True
compose.variants['Everything'].groups = []
+ compose.variants['Everything'].modules = []
phase = init.InitPhase(compose)
phase.run()
--
2.13.6
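
The rule introduced above (a variant that has modules but no comps groups
gets its comps filtered down to nothing) can be sketched as a small
predicate. The function and its arguments are invented for illustration;
the two hunks apply the same idea with slightly different type checks:

    def needs_comps_filtering(groups, modules, variant_type):
        """Invented predicate following the init.py condition above."""
        is_modular = not groups and bool(modules)
        return bool(groups) or variant_type == "optional" or is_modular

    assert needs_comps_filtering([], ["nodejs:8"], "variant") is True   # purely modular
    assert needs_comps_filtering(["core"], [], "variant") is True       # explicit group list
    assert needs_comps_filtering([], [], "variant") is False            # no groups, no modules: keep full comps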

@@ -1,35 +0,0 @@
From adcb2e23312914535dd71b15d4705c8101055836 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Tue, 6 Mar 2018 08:47:17 +0100
Subject: [PATCH 03/12] pkgset: Correctly detect single tag for variant
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
We need to check the tags for the variant, not for the whole compose. The
old check caused the merge to always be done even when there was only a
single tag. For the f29 tag in Fedora this takes about 2 hours for each
variant.
Relates: https://pagure.io/pungi/issue/860
Relates: https://bugzilla.redhat.com/show_bug.cgi?id=1551653
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/pkgset/sources/source_koji.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index 98eed625..17d66773 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -305,7 +305,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
if compose_tag in variant_tags[variant]:
# Optimization for case where we have just single compose
# tag - we do not have to merge in this case...
- if len(compose_tags) == 1:
+ if len(variant_tags[variant]) == 1:
variant.pkgset = pkgset
else:
variant.pkgset.merge(pkgset, None, list(all_arches))
--
2.13.6
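
In other words, whether the merge can be skipped depends on how many tags
feed this particular variant, not on how many tags the whole compose uses.
A tiny illustrative sketch (not Pungi code):

    def can_skip_merge(variant_tags, variant):
        return len(variant_tags[variant]) == 1

    variant_tags = {"Server": ["f29"], "Modular": ["f29", "f29-modular"]}
    assert can_skip_merge(variant_tags, "Server") is True     # single tag: reuse the pkgset directly
    assert can_skip_merge(variant_tags, "Modular") is False   # multiple tags: merging is still needed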

@@ -1,78 +0,0 @@
From fde41452c0bb030eb3467a87eaf25d7f789cba52 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Thu, 8 Mar 2018 09:07:48 +0100
Subject: [PATCH 04/12] image-build: Accept tar.xz extension for docker images
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fixes: https://pagure.io/pungi/issue/863
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/image_build.py | 43 +++++++++++++++++++++++--------------------
1 file changed, 23 insertions(+), 20 deletions(-)
diff --git a/pungi/phases/image_build.py b/pungi/phases/image_build.py
index ef4242c7..ea517b4e 100644
--- a/pungi/phases/image_build.py
+++ b/pungi/phases/image_build.py
@@ -20,22 +20,22 @@ from productmd.images import Image
# name will be ending with. The extensions are used to filter out which task
# results will be pulled into the compose.
EXTENSIONS = {
- 'docker': 'tar.gz',
- 'liveimg-squashfs': 'liveimg.squashfs',
- 'qcow': 'qcow',
- 'qcow2': 'qcow2',
- 'raw': 'raw',
- 'raw-xz': 'raw.xz',
- 'rhevm-ova': 'rhevm.ova',
- 'tar-gz': 'tar.gz',
- 'vagrant-hyperv': 'vagrant-hyperv.box',
- 'vagrant-libvirt': 'vagrant-libvirt.box',
- 'vagrant-virtualbox': 'vagrant-virtualbox.box',
- 'vagrant-vmware-fusion': 'vagrant-vmware-fusion.box',
- 'vdi': 'vdi',
- 'vmdk': 'vdmk',
- 'vpc': 'vhd',
- 'vsphere-ova': 'vsphere.ova',
+ 'docker': ['tar.gz', 'tar.xz'],
+ 'liveimg-squashfs': ['liveimg.squashfs'],
+ 'qcow': ['qcow'],
+ 'qcow2': ['qcow2'],
+ 'raw': ['raw'],
+ 'raw-xz': ['raw.xz'],
+ 'rhevm-ova': ['rhevm.ova'],
+ 'tar-gz': ['tar.gz'],
+ 'vagrant-hyperv': ['vagrant-hyperv.box'],
+ 'vagrant-libvirt': ['vagrant-libvirt.box'],
+ 'vagrant-virtualbox': ['vagrant-virtualbox.box'],
+ 'vagrant-vmware-fusion': ['vagrant-vmware-fusion.box'],
+ 'vdi': ['vdi'],
+ 'vmdk': ['vdmk'],
+ 'vpc': ['vhd'],
+ 'vsphere-ova': ['vsphere.ova'],
}
@@ -216,10 +216,13 @@ class CreateImageBuildThread(WorkerThread):
for arch, paths in paths.items():
for path in paths:
for format in cmd['image_conf']['image-build']['format']:
- suffix = EXTENSIONS[format]
- if path.endswith(suffix):
- image_infos.append({'path': path, 'suffix': suffix, 'type': format, 'arch': arch})
- break
+ for suffix in EXTENSIONS[format]:
+ if path.endswith(suffix):
+ image_infos.append({'path': path,
+ 'suffix': suffix,
+ 'type': format,
+ 'arch': arch})
+ break
# The usecase here is that you can run koji image-build with multiple --format
# It's ok to do it serialized since we're talking about max 2 images per single
--
2.13.6
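
With this change each image-build format maps to a list of acceptable
suffixes, and the first suffix matching a Koji result path wins. A short
sketch of that matching (EXTENSIONS is trimmed here and classify is an
invented helper):

    EXTENSIONS = {
        "docker": ["tar.gz", "tar.xz"],
        "qcow2": ["qcow2"],
    }

    def classify(path, formats):
        for fmt in formats:
            for suffix in EXTENSIONS[fmt]:
                if path.endswith(suffix):
                    return {"path": path, "suffix": suffix, "type": fmt}
        return None

    assert classify("Fedora-Docker-Base.x86_64.tar.xz", ["docker"])["suffix"] == "tar.xz"
    assert classify("Fedora-Cloud-Base.x86_64.qcow2", ["docker", "qcow2"])["type"] == "qcow2"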

@@ -1,225 +0,0 @@
From 63329d48c3bc1c72a7bacd654a3ce6e93f6041e7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 7 Mar 2018 12:35:33 +0100
Subject: [PATCH 05/12] Write package whitelist for each variant
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
If we have a package set for the variant (which happens if there are
modules), include a list of all NEVRAs in the pungi kickstart.
This can be used to make sure that only packages from the correct tag get
into the compose. If two packages with the same name but different
versions get into the compose, this can help get even the older version
into a particular variant.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/ks.py | 15 +++++++++
pungi/phases/gather/methods/method_deps.py | 7 +++-
pungi/wrappers/pungi.py | 51 +++++++++++-------------------
tests/helpers.py | 1 +
tests/test_gather_method_deps.py | 29 +++++++++++++++--
5 files changed, 68 insertions(+), 35 deletions(-)
diff --git a/pungi/ks.py b/pungi/ks.py
index ecb8821f..517242aa 100644
--- a/pungi/ks.py
+++ b/pungi/ks.py
@@ -127,6 +127,19 @@ class PrepopulateSection(pykickstart.sections.Section):
self.handler.prepopulate.add(line)
+class PackageWhitelistSection(pykickstart.sections.Section):
+ sectionOpen = "%package-whitelist"
+
+ def handleLine(self, line):
+ if not self.handler:
+ return
+
+ (h, s, t) = line.partition('#')
+ line = h.rstrip()
+
+ self.handler.package_whitelist.add(line)
+
+
class KickstartParser(pykickstart.parser.KickstartParser):
def setupSections(self):
pykickstart.parser.KickstartParser.setupSections(self)
@@ -134,6 +147,7 @@ class KickstartParser(pykickstart.parser.KickstartParser):
self.registerSection(MultilibBlacklistSection(self.handler))
self.registerSection(MultilibWhitelistSection(self.handler))
self.registerSection(PrepopulateSection(self.handler))
+ self.registerSection(PackageWhitelistSection(self.handler))
def get_packages(self, dnf_obj):
packages = set()
@@ -194,6 +208,7 @@ class PungiHandler(HandlerClass):
self.multilib_blacklist = set()
self.multilib_whitelist = set()
self.prepopulate = set()
+ self.package_whitelist = set()
def get_ksparser(ks_path=None):
diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py
index 7c9e8fb6..d38343f3 100644
--- a/pungi/phases/gather/methods/method_deps.py
+++ b/pungi/phases/gather/methods/method_deps.py
@@ -88,12 +88,17 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
'No packages included in %s.%s (no comps groups, no input packages, no prepopulate)'
% (variant.uid, arch))
+ package_whitelist = set()
+ if variant.pkgset:
+ for rpm_obj in variant.pkgset.rpms_by_arch.get(arch, []):
+ package_whitelist.add(rpm_obj.nevra)
+
pungi_wrapper.write_kickstart(
ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str,
exclude_packages=filter_packages_str,
lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes,
multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist,
- prepopulate=prepopulate)
+ prepopulate=prepopulate, package_whitelist=package_whitelist)
def resolve_deps(compose, arch, variant, source_name=None):
diff --git a/pungi/wrappers/pungi.py b/pungi/wrappers/pungi.py
index 501d6f57..04110f47 100644
--- a/pungi/wrappers/pungi.py
+++ b/pungi/wrappers/pungi.py
@@ -32,9 +32,22 @@ UNRESOLVED_DEPENDENCY_RE = re.compile(r"^.*Unresolvable dependency (.+) in ([^ ]
MISSING_COMPS_PACKAGE_RE = re.compile(r"^.*Could not find a match for (.+) in any configured repo")
+def _write_ks_section(f, section, lines):
+ if lines:
+ f.write("\n%%%s\n" % section)
+ for i in sorted(lines):
+ f.write("%s\n" % i)
+
+ f.write("%end\n")
+
+
class PungiWrapper(object):
- def write_kickstart(self, ks_path, repos, groups, packages, exclude_packages=None, comps_repo=None, lookaside_repos=None, fulltree_excludes=None, multilib_blacklist=None, multilib_whitelist=None, prepopulate=None):
+ def write_kickstart(self, ks_path, repos, groups, packages,
+ exclude_packages=None, comps_repo=None,
+ lookaside_repos=None, fulltree_excludes=None,
+ multilib_blacklist=None, multilib_whitelist=None,
+ prepopulate=None, package_whitelist=None):
groups = groups or []
exclude_packages = exclude_packages or {}
lookaside_repos = lookaside_repos or {}
@@ -75,37 +88,11 @@ class PungiWrapper(object):
kickstart.write("%end\n")
- # %fulltree-excludes
- if fulltree_excludes:
- kickstart.write("\n")
- kickstart.write("%fulltree-excludes\n")
- for i in sorted(fulltree_excludes):
- kickstart.write("%s\n" % i)
- kickstart.write("%end\n")
-
- # %multilib-blacklist
- if multilib_blacklist:
- kickstart.write("\n")
- kickstart.write("%multilib-blacklist\n")
- for i in sorted(multilib_blacklist):
- kickstart.write("%s\n" % i)
- kickstart.write("%end\n")
-
- # %multilib-whitelist
- if multilib_whitelist:
- kickstart.write("\n")
- kickstart.write("%multilib-whitelist\n")
- for i in sorted(multilib_whitelist):
- kickstart.write("%s\n" % i)
- kickstart.write("%end\n")
-
- # %prepopulate
- if prepopulate:
- kickstart.write("\n")
- kickstart.write("%prepopulate\n")
- for i in sorted(prepopulate):
- kickstart.write("%s\n" % i)
- kickstart.write("%end\n")
+ _write_ks_section(kickstart, "fulltree-excludes", fulltree_excludes)
+ _write_ks_section(kickstart, "multilib-blacklist", multilib_blacklist)
+ _write_ks_section(kickstart, "multilib-whitelist", multilib_whitelist)
+ _write_ks_section(kickstart, "prepopulate", prepopulate)
+ _write_ks_section(kickstart, "package-whitelist", package_whitelist)
kickstart.close()
diff --git a/tests/helpers.py b/tests/helpers.py
index f069635d..b82de42f 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -38,6 +38,7 @@ class MockVariant(mock.Mock):
self.mmds = []
self.arch_mmds = {}
self.variants = {}
+ self.pkgset = mock.Mock(rpms_by_arch={})
def __str__(self):
return self.uid
diff --git a/tests/test_gather_method_deps.py b/tests/test_gather_method_deps.py
index bd93a185..31bf82b7 100644
--- a/tests/test_gather_method_deps.py
+++ b/tests/test_gather_method_deps.py
@@ -39,7 +39,7 @@ class TestWritePungiConfig(helpers.PungiTestCase):
groups=['grp1'], prepopulate=prepopulate,
repos={'pungi-repo': self.topdir + '/work/x86_64/repo'},
exclude_packages=['pkg3', 'pkg4.x86_64'],
- fulltree_excludes=fulltree)
+ fulltree_excludes=fulltree, package_whitelist=set())
@mock.patch('pungi.phases.gather.get_lookaside_repos')
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
@@ -54,11 +54,36 @@ class TestWritePungiConfig(helpers.PungiTestCase):
multilib_whitelist=[], multilib_blacklist=[],
groups=[], prepopulate=None,
repos={'pungi-repo': self.topdir + '/work/x86_64/repo'},
- exclude_packages=[], fulltree_excludes=None)
+ exclude_packages=[], fulltree_excludes=None,
+ package_whitelist=set())
self.assertEqual(glr.call_args_list,
[mock.call(self.compose, 'x86_64', self.compose.variants['Server'])])
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
+ def test_with_whitelist(self, PungiWrapper):
+ pkgs = [('pkg1', None), ('pkg2', 'x86_64')]
+ grps = ['grp1']
+ filter = [('pkg3', None), ('pkg4', 'x86_64')]
+ self.compose.variants['Server'].pkgset.rpms_by_arch['x86_64'] = [
+ mock.Mock(nevra='pkg-1.0.0-1')
+ ]
+ white = mock.Mock()
+ black = mock.Mock()
+ prepopulate = mock.Mock()
+ fulltree = mock.Mock()
+ deps.write_pungi_config(self.compose, 'x86_64', self.compose.variants['Server'],
+ pkgs, grps, filter, white, black,
+ prepopulate=prepopulate, fulltree_excludes=fulltree)
+ self.assertWritten(PungiWrapper, packages=['pkg1', 'pkg2.x86_64'],
+ ks_path=self.topdir + '/work/x86_64/pungi/Server.x86_64.conf',
+ lookaside_repos={}, multilib_whitelist=white, multilib_blacklist=black,
+ groups=['grp1'], prepopulate=prepopulate,
+ repos={'pungi-repo': self.topdir + '/work/x86_64/repo'},
+ exclude_packages=['pkg3', 'pkg4.x86_64'],
+ fulltree_excludes=fulltree,
+ package_whitelist=set(['pkg-1.0.0-1']))
+
+ @mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
def test_without_input(self, PungiWrapper):
with self.assertRaises(RuntimeError) as ctx:
deps.write_pungi_config(self.compose, 'x86_64', self.compose.variants['Server'],
--
2.13.6
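
The refactored section writer and the new %package-whitelist section
produce kickstart output like the sketch below shows (write_ks_section
has the same shape as the _write_ks_section helper added above; the
package strings are made-up examples):

    import io

    def write_ks_section(f, section, lines):
        if lines:
            f.write("\n%%%s\n" % section)
            for item in sorted(lines):
                f.write("%s\n" % item)
            f.write("%end\n")

    buf = io.StringIO()
    write_ks_section(buf, "package-whitelist",
                     {"dummy-bash-0:4.2.37-6.x86_64", "dummy-glibc-0:2.14-5.x86_64"})
    print(buf.getvalue())
    # prints a blank separator line followed by:
    # %package-whitelist
    # dummy-bash-0:4.2.37-6.x86_64
    # dummy-glibc-0:2.14-5.x86_64
    # %end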

@@ -1,158 +0,0 @@
From 1bfea4523b803917e37f81f83519721848012674 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 7 Mar 2018 13:42:09 +0100
Subject: [PATCH 06/12] gather: Honor package whitelist
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Basically everything not on the list is excluded. This has to be applied
before we filter down to only the latest versions (otherwise we could
lose packages that are on the whitelist).
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
bin/pungi-gather | 1 +
pungi/gather_dnf.py | 29 +++++++++++++++++++-----
tests/test_gather.py | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++++
3 files changed, 88 insertions(+), 5 deletions(-)
diff --git a/bin/pungi-gather b/bin/pungi-gather
index daf80f2c..aa70977b 100755
--- a/bin/pungi-gather
+++ b/bin/pungi-gather
@@ -125,6 +125,7 @@ def main(persistdir, cachedir):
gather_opts.multilib_whitelist = ksparser.handler.multilib_whitelist
gather_opts.prepopulate = ksparser.handler.prepopulate
gather_opts.fulltree_excludes = ksparser.handler.fulltree_excludes
+ gather_opts.package_whitelist = ksparser.handler.package_whitelist
g = Gather(dnf_obj, gather_opts)
diff --git a/pungi/gather_dnf.py b/pungi/gather_dnf.py
index 1a47eea6..1023c57d 100644
--- a/pungi/gather_dnf.py
+++ b/pungi/gather_dnf.py
@@ -69,6 +69,8 @@ class GatherOptions(pungi.common.OptionsBase):
# lookaside repos; packages will be flagged accordingly
self.lookaside_repos = []
+ self.package_whitelist = set()
+
self.merge_options(**kwargs)
@@ -363,12 +365,29 @@ class Gather(GatherBase):
self.logger.debug("EXCLUDED by %s: %s", pattern, [str(p) for p in pkgs])
self.dnf._sack.add_excludes(pkgs)
+ all_queues = ['q_binary_packages', 'q_native_binary_packages',
+ 'q_multilib_binary_packages', 'q_noarch_binary_packages',
+ 'q_source_packages', 'q_native_debug_packages',
+ 'q_multilib_debug_packages']
+
+ if self.opts.package_whitelist:
+ with Profiler("Gather._apply_excludes():apply-package-whitelist'"):
+ to_keep = []
+ for pattern in self.opts.package_whitelist:
+ nvra = parse_nvra(pattern)
+ nvra.pop('src')
+ try:
+ nvra['epoch'] = int(nvra.pop('epoch'))
+ except ValueError:
+ pass
+ to_keep.extend(self._query.filter(**nvra).run())
+
+ for queue in all_queues:
+ setattr(self, queue, getattr(self, queue).filter(pkg=to_keep).latest().apply())
+
with Profiler("Gather._apply_excludes():exclude-queries"):
- self._filter_queue('q_binary_packages', exclude)
- self._filter_queue('q_native_binary_packages', exclude)
- self._filter_queue('q_multilib_binary_packages', exclude)
- self._filter_queue('q_noarch_binary_packages', exclude)
- self._filter_queue('q_source_packages', exclude)
+ for queue in all_queues:
+ self._filter_queue(queue, exclude)
@Profiler("Gather.add_initial_packages()")
def add_initial_packages(self, pattern_list):
diff --git a/tests/test_gather.py b/tests/test_gather.py
index 0b015abe..e73ae9c3 100644
--- a/tests/test_gather.py
+++ b/tests/test_gather.py
@@ -1791,8 +1791,71 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def test_bash_older(self):
pass
+ def test_whitelist_old_version(self):
+ # There are two version of dummy-bash in the package set; let's
+ # whitelist only the older one and its dependencies.
+ packages = [
+ "dummy-bash",
+ ]
+ package_whitelist = [
+ "dummy-basesystem-10.0-6.noarch",
+ "dummy-basesystem-10.0-6.src",
+ "dummy-bash-debuginfo-4.2.37-5.x86_64",
+ "dummy-bash-4.2.37-5.x86_64",
+ "dummy-bash-4.2.37-5.src",
+ "dummy-filesystem-4.2.37-6.x86_64",
+ "dummy-filesystem-4.2.37-6.src",
+ "dummy-glibc-common-2.14-5.x86_64",
+ "dummy-glibc-debuginfo-common-2.14-5.x86_64",
+ "dummy-glibc-debuginfo-2.14-5.x86_64",
+ "dummy-glibc-2.14-5.x86_64",
+ "dummy-glibc-2.14-5.src",
+ ]
+ pkg_map = self.go(packages, None, greedy="none", package_whitelist=package_whitelist)
+
+ self.assertNotIn("dummy-bash-4.2.37-5.i686.rpm", pkg_map["rpm"])
+ self.assertNotIn("dummy-bash-4.2.37-6.i686.rpm", pkg_map["rpm"])
+ self.assertNotIn("dummy-bash-4.2.37-6.x86_64.rpm", pkg_map["rpm"])
+
+ self.assertItemsEqual(pkg_map["rpm"], [
+ "dummy-basesystem-10.0-6.noarch.rpm",
+ "dummy-bash-4.2.37-5.x86_64.rpm",
+ "dummy-filesystem-4.2.37-6.x86_64.rpm",
+ "dummy-glibc-2.14-5.x86_64.rpm",
+ "dummy-glibc-common-2.14-5.x86_64.rpm",
+ ])
+ self.assertItemsEqual(pkg_map["srpm"], [
+ "dummy-basesystem-10.0-6.src.rpm",
+ "dummy-bash-4.2.37-5.src.rpm",
+ "dummy-filesystem-4.2.37-6.src.rpm",
+ "dummy-glibc-2.14-5.src.rpm",
+ ])
+ self.assertItemsEqual(pkg_map["debuginfo"], [
+ "dummy-bash-debuginfo-4.2.37-5.x86_64.rpm",
+ "dummy-glibc-debuginfo-2.14-5.x86_64.rpm",
+ "dummy-glibc-debuginfo-common-2.14-5.x86_64.rpm",
+ ])
+
def test_firefox_selfhosting_with_krb5_lookaside(self):
super(DNFDepsolvingTestCase, self).test_firefox_selfhosting_with_krb5_lookaside()
self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-debuginfo-1.10-5.x86_64", [PkgFlag.lookaside])
+
+ def test_package_whitelist(self):
+ packages = ['*']
+ whitelist = [
+ 'dummy-bash-4.2.37-6.x86_64',
+ 'dummy-bash-4.2.37-6.src',
+ ]
+
+ pkg_map = self.go(packages, None, package_whitelist=whitelist)
+
+ self.assertItemsEqual(pkg_map["rpm"], [
+ 'dummy-bash-4.2.37-6.x86_64.rpm',
+ ])
+ self.assertItemsEqual(pkg_map["srpm"], [
+ 'dummy-bash-4.2.37-6.src.rpm',
+ ])
+ self.assertItemsEqual(pkg_map["debuginfo"], [
+ ])
--
2.13.6
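
How one whitelist entry becomes a set of query keyword arguments can be
sketched as follows, using the same kobo helper as the hunk above (the
wrapper function name is invented; kobo must be installed):

    from kobo.rpmlib import parse_nvra

    def whitelist_query_kwargs(pattern):
        """Invented wrapper: turn one whitelist entry into filter() kwargs."""
        nvra = parse_nvra(pattern)
        nvra.pop("src")                      # the package query has no 'src' kwarg
        try:
            nvra["epoch"] = int(nvra.pop("epoch"))
        except ValueError:
            pass                             # no explicit epoch in the pattern
        return nvra

    kwargs = whitelist_query_kwargs("dummy-bash-4.2.37-5.x86_64")
    # kwargs now holds name/version/release/arch (plus epoch only if one was
    # given), ready to be passed as query.filter(**kwargs).run()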

@@ -1,78 +0,0 @@
From 74b0d14095733c66c54d47edaac69ef056f55332 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 7 Mar 2018 13:58:53 +0100
Subject: [PATCH 07/12] pkgset: Remove check for unique name
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
We now have a way to select even an older version of a package (since
the newer one can be left out of the whitelist), so we can include
multiple versions of the same package in the global package set.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/pkgset/pkgsets.py | 9 +--------
pungi/phases/pkgset/sources/source_koji.py | 5 ++---
2 files changed, 3 insertions(+), 11 deletions(-)
diff --git a/pungi/phases/pkgset/pkgsets.py b/pungi/phases/pkgset/pkgsets.py
index 4d902f4f..9038f1b1 100644
--- a/pungi/phases/pkgset/pkgsets.py
+++ b/pungi/phases/pkgset/pkgsets.py
@@ -134,12 +134,9 @@ class PackageSetBase(kobo.log.LoggingBase):
return self.rpms_by_arch
- def merge(self, other, primary_arch, arch_list, unique_name=False):
+ def merge(self, other, primary_arch, arch_list):
"""
Merge ``other`` package set into this instance.
-
- With ``unique_name=True`` a package will be added only if there is not
- a package with the same name already.
"""
msg = "Merging package sets for %s: %s" % (primary_arch, arch_list)
self.log_debug("[BEGIN] %s" % msg)
@@ -163,15 +160,11 @@ class PackageSetBase(kobo.log.LoggingBase):
else:
exclusivearch_list = None
for arch in arch_list:
- known_packages = set(pkg.name for pkg in self.rpms_by_arch.get(arch, []))
self.rpms_by_arch.setdefault(arch, [])
for i in other.rpms_by_arch.get(arch, []):
if i.file_path in self.file_cache:
# TODO: test if it really works
continue
- if unique_name and i.name in known_packages:
- self.log_debug('Not merging in %r' % i)
- continue
if exclusivearch_list and arch == "noarch":
if is_excluded(i, exclusivearch_list, logger=self._logger):
continue
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index 17d66773..94ee79df 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -263,7 +263,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
if not variant_tags[variant]:
variant_tags[variant].extend(force_list(compose.conf["pkgset_koji_tag"]))
- # Add global tag if supplied.
+ # Add global tag(s) if supplied.
if 'pkgset_koji_tag' in compose.conf:
if compose.conf["pkgset_koji_tag"] == "not-used":
# The magic value is used for modular composes to avoid errors
@@ -314,8 +314,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
if len(compose_tags) == 1:
global_pkgset = pkgset
else:
- global_pkgset.merge(pkgset, None, list(all_arches),
- unique_name=compose_tag in compose.conf['pkgset_koji_tag'])
+ global_pkgset.merge(pkgset, None, list(all_arches))
with open(global_pkgset_path, 'wb') as f:
data = pickle.dumps(global_pkgset)
f.write(data)
--
2.13.6

@@ -1,80 +0,0 @@
From 40c8f95b2bba62f454c7f996409e8bf1d775eee4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 7 Mar 2018 13:59:12 +0100
Subject: [PATCH 08/12] pkgset: Merge initial package set without checks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
For the first pass we don't need to filter out exclusive architectures,
and we don't need to exclude source packages without any binary
packages. We just want to merge the two package sets as fast as
possible.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/pkgset/pkgsets.py | 11 +++++++++++
pungi/phases/pkgset/sources/source_koji.py | 4 ++--
tests/test_pkgset_source_koji.py | 4 ++--
3 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/pungi/phases/pkgset/pkgsets.py b/pungi/phases/pkgset/pkgsets.py
index 9038f1b1..d53c6af9 100644
--- a/pungi/phases/pkgset/pkgsets.py
+++ b/pungi/phases/pkgset/pkgsets.py
@@ -182,6 +182,17 @@ class PackageSetBase(kobo.log.LoggingBase):
self.log_debug("[DONE ] %s" % msg)
+ def fast_merge(self, other):
+ """
+ Merge two package sets together without any filtering of packages. All
+ packages from `other` package set are taken.
+ """
+ for arch in other.rpms_by_arch.keys():
+ self.rpms_by_arch.setdefault(arch, [])
+ for i in other.rpms_by_arch.get(arch, []):
+ self.file_cache.file_cache[i.file_path] = i
+ self.rpms_by_arch[arch].append(i)
+
def save_file_list(self, file_path, remove_path_prefix=None):
with open(file_path, "w") as f:
for arch in sorted(self.rpms_by_arch):
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index 94ee79df..2ce14be6 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -308,13 +308,13 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
if len(variant_tags[variant]) == 1:
variant.pkgset = pkgset
else:
- variant.pkgset.merge(pkgset, None, list(all_arches))
+ variant.pkgset.fast_merge(pkgset)
# Optimization for case where we have just single compose
# tag - we do not have to merge in this case...
if len(compose_tags) == 1:
global_pkgset = pkgset
else:
- global_pkgset.merge(pkgset, None, list(all_arches))
+ global_pkgset.fast_merge(pkgset)
with open(global_pkgset_path, 'wb') as f:
data = pickle.dumps(global_pkgset)
f.write(data)
diff --git a/tests/test_pkgset_source_koji.py b/tests/test_pkgset_source_koji.py
index b1e1308d..53670843 100644
--- a/tests/test_pkgset_source_koji.py
+++ b/tests/test_pkgset_source_koji.py
@@ -170,8 +170,8 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
logfile=self.topdir + '/logs/global/packages_from_f25-extra.global.log')])
pkgset.assert_has_calls([mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
remove_path_prefix='/prefix')])
- # for each tag, call pkgset.merge once for each variant and once for global pkgset
- self.assertEqual(pkgset.merge.call_count, 2 * (len(self.compose.all_variants.values()) + 1))
+ # for each tag, call pkgset.fast_merge once for each variant and once for global pkgset
+ self.assertEqual(pkgset.fast_merge.call_count, 2 * (len(self.compose.all_variants.values()) + 1))
self.assertItemsEqual(pickle_dumps.call_args_list,
[mock.call(orig_pkgset)])
with open(self.pkgset_path) as f:
--
2.13.6
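
What fast_merge does can be reduced to the following standalone sketch
working on plain dictionaries (the real method operates on package set
objects and keys its file_cache by file path; names below are
illustrative):

    def fast_merge(rpms_by_arch, file_cache, other_rpms_by_arch):
        for arch, rpms in other_rpms_by_arch.items():
            rpms_by_arch.setdefault(arch, [])
            for rpm in rpms:
                file_cache[rpm] = rpm
                rpms_by_arch[arch].append(rpm)

    merged, cache = {}, {}
    fast_merge(merged, cache, {"x86_64": ["/t/bash-4.4-1.x86_64.rpm"]})
    fast_merge(merged, cache, {"noarch": ["/t/basesystem-11-5.noarch.rpm"]})
    assert sorted(merged) == ["noarch", "x86_64"]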

@@ -1,147 +0,0 @@
From 2dca0b0e3106f3c7252a5d0ad56f3943940e3fd0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Fri, 9 Mar 2018 13:45:03 +0100
Subject: [PATCH 09/12] gather: Fix package set whitelist
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
We need to include all relevant arches, not just the base one (that is,
also noarch and src). However, the list can be shortened by listing only
NEVRs, because those should be unique.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/gather_dnf.py | 9 ++++-----
pungi/phases/gather/methods/method_deps.py | 9 ++++++---
tests/test_gather.py | 23 +++++++++--------------
tests/test_gather_method_deps.py | 8 ++++----
4 files changed, 23 insertions(+), 26 deletions(-)
diff --git a/pungi/gather_dnf.py b/pungi/gather_dnf.py
index 1023c57d..a0ea6fd1 100644
--- a/pungi/gather_dnf.py
+++ b/pungi/gather_dnf.py
@@ -19,7 +19,7 @@ from itertools import count
import logging
import os
-from kobo.rpmlib import parse_nvra
+from kobo.rpmlib import parse_nvra, parse_nvr
import pungi.common
import pungi.dnf_wrapper
@@ -374,13 +374,12 @@ class Gather(GatherBase):
with Profiler("Gather._apply_excludes():apply-package-whitelist'"):
to_keep = []
for pattern in self.opts.package_whitelist:
- nvra = parse_nvra(pattern)
- nvra.pop('src')
+ nvr = parse_nvr(pattern)
try:
- nvra['epoch'] = int(nvra.pop('epoch'))
+ nvr['epoch'] = int(nvr.pop('epoch'))
except ValueError:
pass
- to_keep.extend(self._query.filter(**nvra).run())
+ to_keep.extend(self._query.filter(**nvr).run())
for queue in all_queues:
setattr(self, queue, getattr(self, queue).filter(pkg=to_keep).latest().apply())
diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py
index d38343f3..8c29cfde 100644
--- a/pungi/phases/gather/methods/method_deps.py
+++ b/pungi/phases/gather/methods/method_deps.py
@@ -22,7 +22,7 @@ from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
from pungi.util import rmtree, get_arch_variant_data
from pungi.wrappers.pungi import PungiWrapper
-from pungi.arch import tree_arch_to_yum_arch
+from pungi.arch import tree_arch_to_yum_arch, get_valid_arches
import pungi.phases.gather
import pungi.phases.gather.method
@@ -90,8 +90,11 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
package_whitelist = set()
if variant.pkgset:
- for rpm_obj in variant.pkgset.rpms_by_arch.get(arch, []):
- package_whitelist.add(rpm_obj.nevra)
+ multilib = get_arch_variant_data(compose.conf, 'multilib', arch, variant)
+ for i in get_valid_arches(arch, multilib=multilib, add_noarch=True, add_src=True):
+ for rpm_obj in variant.pkgset.rpms_by_arch.get(i, []):
+ package_whitelist.add(
+ '{0.name}-{1}:{0.version}-{0.release}'.format(rpm_obj, rpm_obj.epoch or 0))
pungi_wrapper.write_kickstart(
ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str,
diff --git a/tests/test_gather.py b/tests/test_gather.py
index e73ae9c3..fb59dc17 100644
--- a/tests/test_gather.py
+++ b/tests/test_gather.py
@@ -1798,18 +1798,14 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
"dummy-bash",
]
package_whitelist = [
- "dummy-basesystem-10.0-6.noarch",
- "dummy-basesystem-10.0-6.src",
- "dummy-bash-debuginfo-4.2.37-5.x86_64",
- "dummy-bash-4.2.37-5.x86_64",
- "dummy-bash-4.2.37-5.src",
- "dummy-filesystem-4.2.37-6.x86_64",
- "dummy-filesystem-4.2.37-6.src",
- "dummy-glibc-common-2.14-5.x86_64",
- "dummy-glibc-debuginfo-common-2.14-5.x86_64",
- "dummy-glibc-debuginfo-2.14-5.x86_64",
- "dummy-glibc-2.14-5.x86_64",
- "dummy-glibc-2.14-5.src",
+ "dummy-basesystem-10.0-6",
+ "dummy-bash-debuginfo-4.2.37-5",
+ "dummy-bash-4.2.37-5",
+ "dummy-filesystem-4.2.37-6",
+ "dummy-glibc-common-2.14-5",
+ "dummy-glibc-debuginfo-common-2.14-5",
+ "dummy-glibc-debuginfo-2.14-5",
+ "dummy-glibc-2.14-5",
]
pkg_map = self.go(packages, None, greedy="none", package_whitelist=package_whitelist)
@@ -1845,8 +1841,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def test_package_whitelist(self):
packages = ['*']
whitelist = [
- 'dummy-bash-4.2.37-6.x86_64',
- 'dummy-bash-4.2.37-6.src',
+ 'dummy-bash-4.2.37-6',
]
pkg_map = self.go(packages, None, package_whitelist=whitelist)
diff --git a/tests/test_gather_method_deps.py b/tests/test_gather_method_deps.py
index 31bf82b7..0bebb74e 100644
--- a/tests/test_gather_method_deps.py
+++ b/tests/test_gather_method_deps.py
@@ -64,9 +64,9 @@ class TestWritePungiConfig(helpers.PungiTestCase):
pkgs = [('pkg1', None), ('pkg2', 'x86_64')]
grps = ['grp1']
filter = [('pkg3', None), ('pkg4', 'x86_64')]
- self.compose.variants['Server'].pkgset.rpms_by_arch['x86_64'] = [
- mock.Mock(nevra='pkg-1.0.0-1')
- ]
+ mock_rpm = mock.Mock(version='1.0.0', release='1', epoch=0)
+ mock_rpm.name = 'pkg'
+ self.compose.variants['Server'].pkgset.rpms_by_arch['x86_64'] = [mock_rpm]
white = mock.Mock()
black = mock.Mock()
prepopulate = mock.Mock()
@@ -81,7 +81,7 @@ class TestWritePungiConfig(helpers.PungiTestCase):
repos={'pungi-repo': self.topdir + '/work/x86_64/repo'},
exclude_packages=['pkg3', 'pkg4.x86_64'],
fulltree_excludes=fulltree,
- package_whitelist=set(['pkg-1.0.0-1']))
+ package_whitelist=set(['pkg-0:1.0.0-1']))
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
def test_without_input(self, PungiWrapper):
--
2.13.6
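The whitelist entries produced by the change above are name-epoch:version-release strings with the arch dropped. A hedged sketch of that format; the helper name is made up for illustration:

# Hypothetical helper mirroring the '{name}-{epoch}:{version}-{release}'
# format used for the per-variant package whitelist.
def whitelist_entry(name, version, release, epoch=None):
    return "%s-%s:%s-%s" % (name, epoch or 0, version, release)

print(whitelist_entry("dummy-bash", "4.2.37", "5"))           # dummy-bash-0:4.2.37-5
print(whitelist_entry("dummy-glibc", "2.14", "5", epoch=1))   # dummy-glibc-1:2.14-5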

@@ -1,123 +0,0 @@
From 1e6a0c19e78be15e2398bd1a462b5c8b41168155 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Tue, 13 Mar 2018 15:54:52 +0100
Subject: [PATCH 10/12] buildinstall: Add option to disable it
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fixes: https://pagure.io/pungi/issue/854
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
doc/configuration.rst | 12 ++++++++++++
pungi/checks.py | 1 +
pungi/phases/buildinstall.py | 8 ++++++++
tests/test_buildinstall.py | 27 +++++++++++++++++++++++++++
4 files changed, 48 insertions(+)
diff --git a/doc/configuration.rst b/doc/configuration.rst
index dbe5ba08..e5f1e9e0 100644
--- a/doc/configuration.rst
+++ b/doc/configuration.rst
@@ -504,6 +504,11 @@ Options
task using HTTP and set the output directory for this task to
``buildinstall_topdir``. Once the runroot task finishes, Pungi will copy
the results of runroot tasks to the compose working directory.
+**buildinstall_skip**
+ (*list*) -- mapping that defines which variants and arches to skip during
+ buildinstall; format: ``[(variant_uid_regex, {arch|*: True})]``. This is
+ only supported for lorax.
+
Example
-------
@@ -525,6 +530,13 @@ Example
})
]
+ # Don't run buildinstall phase for Modular variant
+ buildinstall_skip = [
+ ('^Modular', {
+ '*': True
+ })
+ ]
+
.. note::
diff --git a/pungi/checks.py b/pungi/checks.py
index 9b317ab6..2ff4f697 100644
--- a/pungi/checks.py
+++ b/pungi/checks.py
@@ -727,6 +727,7 @@ def make_schema():
"buildinstall_topdir": {"type": "string"},
"buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
"buildinstall_use_guestmount": {"type": "boolean", "default": True},
+ "buildinstall_skip": _variant_arch_mapping({"type": "boolean"}),
"global_ksurl": {"type": "string"},
"global_version": {"type": "string"},
diff --git a/pungi/phases/buildinstall.py b/pungi/phases/buildinstall.py
index 69813c2a..3ec523bf 100644
--- a/pungi/phases/buildinstall.py
+++ b/pungi/phases/buildinstall.py
@@ -134,10 +134,18 @@ class BuildinstallPhase(PhaseBase):
repo_baseurl = translate_path(self.compose, repo_baseurl)
if self.buildinstall_method == "lorax":
+
buildarch = get_valid_arches(arch)[0]
for variant in self.compose.get_variants(arch=arch, types=['variant']):
if variant.is_empty:
continue
+
+ skip = get_arch_variant_data(self.compose.conf, "buildinstall_skip", arch, variant)
+ if skip == [True]:
+ self.compose.log_info(
+ 'Skipping buildinstall for %s.%s due to config option' % (variant, arch))
+ continue
+
volid = get_volid(self.compose, arch, variant=variant, disc_type=disc_type)
commands.append(
(variant,
diff --git a/tests/test_buildinstall.py b/tests/test_buildinstall.py
index 335b0d2d..aa8f527b 100644
--- a/tests/test_buildinstall.py
+++ b/tests/test_buildinstall.py
@@ -43,6 +43,33 @@ class TestBuildinstallPhase(PungiTestCase):
self.assertTrue(phase.skip())
+ @mock.patch('pungi.phases.buildinstall.ThreadPool')
+ @mock.patch('pungi.phases.buildinstall.LoraxWrapper')
+ @mock.patch('pungi.phases.buildinstall.get_volid')
+ def test_skip_option(self, get_volid, loraxCls, poolCls):
+ compose = BuildInstallCompose(self.topdir, {
+ 'bootable': True,
+ 'buildinstall_method': 'lorax',
+ 'buildinstall_skip': [
+ ('^Server$', {
+ 'amd64': True
+ }),
+ ('^Client$', {
+ '*': True,
+ }),
+ ]
+ })
+
+ get_volid.return_value = 'vol_id'
+ loraxCls.return_value.get_lorax_cmd.return_value = ['lorax', '...']
+
+ phase = BuildinstallPhase(compose)
+
+ phase.run()
+
+ pool = poolCls.return_value
+ self.assertEqual(1, len(pool.queue_put.mock_calls))
+
def test_does_not_skip_on_bootable(self):
compose = BuildInstallCompose(self.topdir, {'bootable': True})
compose.just_phases = None
--
2.13.6
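A rough sketch of how a [(variant_uid_regex, {arch or '*': True})] mapping such as buildinstall_skip can be evaluated. Pungi itself resolves it through get_arch_variant_data, so this is only an approximation of the matching rule:

import re

# Approximate matching logic: the first regex that matches the variant UID and
# lists either the exact arch or '*' causes buildinstall to be skipped.
def should_skip(mapping, variant_uid, arch):
    for regex, arches in mapping:
        if re.match(regex, variant_uid) and (arches.get(arch) or arches.get("*")):
            return True
    return False

buildinstall_skip = [("^Server$", {"amd64": True}), ("^Client$", {"*": True})]
print(should_skip(buildinstall_skip, "Client", "x86_64"))  # True ('*' matches any arch)
print(should_skip(buildinstall_skip, "Server", "x86_64"))  # False (only amd64 is skipped)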

@@ -1,124 +0,0 @@
From 4481d1145e1ea4b0bc3ac3696491a01e633f6397 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Wed, 14 Mar 2018 08:17:50 +0100
Subject: [PATCH 11/12] pkgset: Allow empty list of modules
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
An empty list of modules should indicate that the variant is modular,
but has no modular content yet. We don't want to treat such a variant
as Everything. The end result will be an empty repository.
Fixes: https://pagure.io/pungi/issue/871
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
---
pungi/phases/gather/sources/source_comps.py | 2 +-
pungi/phases/gather/sources/source_module.py | 2 +-
pungi/phases/pkgset/sources/source_koji.py | 2 +-
pungi/wrappers/variants.py | 8 ++++++--
share/variants.dtd | 2 +-
tests/helpers.py | 1 +
6 files changed, 11 insertions(+), 6 deletions(-)
diff --git a/pungi/phases/gather/sources/source_comps.py b/pungi/phases/gather/sources/source_comps.py
index 447e0a47..0b4a87d5 100644
--- a/pungi/phases/gather/sources/source_comps.py
+++ b/pungi/phases/gather/sources/source_comps.py
@@ -39,7 +39,7 @@ class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
comps = CompsWrapper(self.compose.paths.work.comps(arch=arch))
- is_modular = variant and not variant.groups and variant.modules
+ is_modular = variant and not variant.groups and variant.modules is not None
if variant is not None and (variant.groups or variant.type != 'variant' or is_modular):
# Get packages for a particular variant. We want to skip the
# filtering if the variant is top-level and has no groups (to use
diff --git a/pungi/phases/gather/sources/source_module.py b/pungi/phases/gather/sources/source_module.py
index 57740fca..ba6db442 100644
--- a/pungi/phases/gather/sources/source_module.py
+++ b/pungi/phases/gather/sources/source_module.py
@@ -41,7 +41,7 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
compatible_arches = pungi.arch.get_compatible_arches(arch, multilib=True)
- if variant is not None and variant.modules:
+ if variant is not None and variant.modules is not None:
variant.arch_mmds.setdefault(arch, {})
# Contains per-module RPMs added to variant.
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index 2ce14be6..4d97b020 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -260,7 +260,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
if pdc_modules:
with open(pdc_module_file, 'w') as f:
json.dump(pdc_modules, f)
- if not variant_tags[variant]:
+ if not variant_tags[variant] and variant.modules is None:
variant_tags[variant].extend(force_list(compose.conf["pkgset_koji_tag"]))
# Add global tag(s) if supplied.
diff --git a/pungi/wrappers/variants.py b/pungi/wrappers/variants.py
index b32b1f59..6bd00800 100755
--- a/pungi/wrappers/variants.py
+++ b/pungi/wrappers/variants.py
@@ -71,7 +71,7 @@ class VariantsXmlParser(object):
"type": str(variant_node.attrib["type"]),
"arches": [str(i) for i in variant_node.xpath("arches/arch/text()")],
"groups": [],
- "modules": [],
+ "modules": None,
"environments": [],
"buildinstallpackages": [],
"is_empty": bool(variant_node.attrib.get("is_empty", False)),
@@ -110,6 +110,7 @@ class VariantsXmlParser(object):
"glob": self._is_true(module_node.attrib.get("glob", "false"))
}
+ variant_dict["modules"] = variant_dict["modules"] or []
variant_dict["modules"].append(module)
for environments_node in variant_node.xpath("environments"):
@@ -283,7 +284,10 @@ class Variant(object):
return result
def get_modules(self, arch=None, types=None, recursive=False):
- """Return list of groups, default types is ["self"]"""
+ """Return list of modules, default types is ["self"]"""
+
+ if self.modules is None:
+ return []
types = types or ["self"]
result = copy.deepcopy(self.modules)
diff --git a/share/variants.dtd b/share/variants.dtd
index 197e4c4b..f8e4a5f3 100644
--- a/share/variants.dtd
+++ b/share/variants.dtd
@@ -27,7 +27,7 @@
uservisible (true|false) #IMPLIED
>
-<!ELEMENT modules (module)+>
+<!ELEMENT modules (module)*>
<!ELEMENT module (#PCDATA)>
<!ATTLIST module
diff --git a/tests/helpers.py b/tests/helpers.py
index b82de42f..4ed0ae3f 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -39,6 +39,7 @@ class MockVariant(mock.Mock):
self.arch_mmds = {}
self.variants = {}
self.pkgset = mock.Mock(rpms_by_arch={})
+ self.modules = None
def __str__(self):
return self.uid
--
2.13.6
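A tiny sketch of the None-versus-empty-list distinction the patch above relies on, with made-up module data:

# modules is None  -> not a modular variant at all
# modules == []    -> modular variant declared, but no content yet
def is_modular(modules):
    return modules is not None

for modules in (None, [], [{"name": "nodejs-8", "glob": False}]):
    print(repr(modules), "->", "modular" if is_modular(modules) else "not modular")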

@@ -1,59 +0,0 @@
From f3d33e74b30d5475de1f4a605858ba677596c408 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= <lsedlar@redhat.com>
Date: Fri, 16 Mar 2018 13:57:03 +0100
Subject: [PATCH 12/12] Fix modular content in non-modular variant
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Allowing an empty list of modules broke the check for variant tags,
causing Everything to no longer have an associated list of allowed
packages.
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
Relates: https://pagure.io/pungi/issue/862
---
pungi/wrappers/variants.py | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/pungi/wrappers/variants.py b/pungi/wrappers/variants.py
index 6bd00800..34ff215a 100755
--- a/pungi/wrappers/variants.py
+++ b/pungi/wrappers/variants.py
@@ -104,13 +104,13 @@ class VariantsXmlParser(object):
variant_dict["groups"].append(group)
for modulelist_node in variant_node.xpath("modules"):
+ variant_dict["modules"] = variant_dict["modules"] or []
for module_node in modulelist_node.xpath("module"):
module = {
"name": str(module_node.text),
"glob": self._is_true(module_node.attrib.get("glob", "false"))
}
- variant_dict["modules"] = variant_dict["modules"] or []
variant_dict["modules"].append(module)
for environments_node in variant_node.xpath("environments"):
@@ -205,7 +205,6 @@ class Variant(object):
modules=None):
environments = environments or []
- modules = modules or []
buildinstallpackages = buildinstallpackages or []
self.id = id
@@ -214,7 +213,9 @@ class Variant(object):
self.arches = sorted(copy.deepcopy(arches))
self.groups = sorted(copy.deepcopy(groups), key=lambda x: x["name"])
self.environments = sorted(copy.deepcopy(environments), key=lambda x: x["name"])
- self.modules = sorted(copy.deepcopy(modules), key=lambda x: x["name"])
+ self.modules = copy.deepcopy(modules)
+ if self.modules:
+ self.modules = sorted(self.modules, key=lambda x: x["name"])
self.buildinstallpackages = sorted(buildinstallpackages)
self.variants = {}
self.parent = parent
--
2.13.6
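A hedged sketch of the ordering fix above, using a simplified stand-in parser: initializing the list as soon as the <modules> element is seen, rather than per <module> child, keeps an empty <modules/> from staying None:

def parse_modules(module_names, modules_element_present):
    modules = None
    if modules_element_present:
        # Fixed ordering: initialize as soon as <modules> is seen, even if it
        # has no <module> children.
        modules = modules or []
        for name in module_names:
            modules.append({"name": name})
    return modules

print(parse_modules([], modules_element_present=True))   # []   -> modular, empty
print(parse_modules([], modules_element_present=False))  # None -> not modular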

@@ -1,30 +1,19 @@
Name: pungi
Version: 4.1.22
Release: 10%{?dist}
Version: 4.1.23
Release: 1%{?dist}
Summary: Distribution compose tool
Group: Development/Tools
License: GPLv2
URL: https://pagure.io/pungi
Source0: https://pagure.io/releases/%{name}/%{name}-%{version}.tar.bz2
Patch0: 0001-Support-multiple-sources-in-one-variant.patch
Patch1: 0002-Remove-comps-groups-from-purely-modular-variants.patch
Patch2: 0003-pkgset-Correctly-detect-single-tag-for-variant.patch
Patch3: 0004-image-build-Accept-tar.xz-extension-for-docker-image.patch
Patch4: 0005-Write-package-whitelist-for-each-variant.patch
Patch5: 0006-gather-Honor-package-whitelist.patch
Patch6: 0007-pkgset-Remove-check-for-unique-name.patch
Patch7: 0008-pkgset-Merge-initial-package-set-without-checks.patch
Patch8: 0009-gather-Fix-package-set-whitelist.patch
Patch9: 0010-buildinstall-Add-option-to-disable-it.patch
Patch10: 0011-pkgset-Allow-empty-list-of-modules.patch
Patch11: 0012-Fix-modular-content-in-non-modular-variant.patch
Patch0: 0001-tests-Use-dummy-modulesdir-for-DNF.patch
BuildRequires: python3-nose
BuildRequires: python3-mock
BuildRequires: python2-devel
BuildRequires: python3-devel
BuildRequires: python3-setuptools
BuildRequires: python3-productmd >= 1.3
BuildRequires: python3-productmd >= 1.11
BuildRequires: python3-kobo-rpmlib
BuildRequires: createrepo_c
BuildRequires: python3-lxml
@@ -42,6 +31,8 @@ BuildRequires: python3-koji
BuildRequires: python3-unittest2
BuildRequires: lorax
BuildRequires: python3-PyYAML
BuildRequires: libmodulemd
BuildRequires: gobject-introspection
#deps for doc building
BuildRequires: python3-sphinx, texlive-collection-fontsrecommended
@@ -56,7 +47,7 @@ BuildRequires: latexmk
Requires: python3-kobo >= 0.6
Requires: python3-kobo-rpmlib
Requires: python3-productmd >= 1.3
Requires: python3-productmd >= 1.11
Requires: python3-kickstart
Requires: createrepo_c
Requires: python3-lxml
@@ -73,6 +64,8 @@ Requires: python3-multilib
Requires: python3-libcomps
Requires: python3-six
Requires: python3-koji
Requires: libmodulemd
Requires: gobject-introspection
Requires: python3-%{name} = %{version}-%{release}
@@ -144,7 +137,7 @@ rm -rf %{buildroot}%{python2_sitelib}/%{name}_utils
%check
# Temporarily disabled to avoid problems with DNF trying to write to /etc...
# nosetests-3 --exe
nosetests-3 --exe
%files
%license COPYING GPL
@@ -179,6 +172,53 @@ rm -rf %{buildroot}%{python2_sitelib}/%{name}_utils
%{_bindir}/%{name}-wait-for-signed-ostree-handler
%changelog
* Wed Apr 4 2018 Lubomír Sedlář <lsedlar@redhat.com> - 4.1.23-1
- Update documentation section 'contributing' (onosek)
- Write module metadata (onosek)
- Support multilib in GatherSourceModule (jkaluza)
- ostree: Always substitute basearch (lsedlar)
- If sigkeys is specified, require at least one (puiterwijk)
- Allow setting <kojitag/> in <modules/> in variants.xml to get the modules
from this Koji tag. (jkaluza)
- Move Modulemd import to pungi/__init__.py to remove duplicated code.
(jkaluza)
- Use Modulemd.Module for 'variant.arch_mmds' instead of yaml dump (jkaluza)
- Fix modular content in non-modular variant (lsedlar)
- Remove the filtered RPMs from module metadata even in case all RPMs are
filtered out. (jkaluza)
- pkgset: Allow empty list of modules (lsedlar)
- buildinstall: Add option to disable it (lsedlar)
- Use libmodulemd instead of modulemd Python module (jkaluza)
- gather: Fix package set whitelist (lsedlar)
- pkgset: Merge initial package set without checks (lsedlar)
- pkgset: Remove check for unique name (lsedlar)
- gather: Honor package whitelist (lsedlar)
- Write package whitelist for each variant (lsedlar)
- image-build: Accept tar.xz extension for docker images (lsedlar)
- pkgset: Correctly detect single tag for variant (lsedlar)
- Remove comps groups from purely modular variants (lsedlar)
- gather: Allow filtering debuginfo packages (lsedlar)
- Move ostree phase and pipelines for running phases (onosek)
- Other repo for OstreeInstaller (onosek)
- Add modulemd metadata to repo even without components (jkaluza)
- Correct fix for volume ID substitution sorting by length (awilliam)
- Ordering processing for volume ID substitutions (onosek)
- Disable multilib for modules (jkaluza)
- scm: Stop decoding output of post-clone command (lsedlar)
- Remove useless shebang (lsedlar)
- source_koji.py: Properly handle unset pkgset_koji_tag (otaylor)
- pkgset: Only use package whitelist if enabled (lsedlar)
- Fail early if input packages are unsigned (jkaluza)
- Allow composing from tag with unsigned packages (jkaluza)
- Ostree can use pkgset repos (onosek)
- Support multiple sources in one variant (lsedlar)
- gather: Set lookaside flag correctly (lsedlar)
- gather: Try getting srpm from the same repo as rpm (lsedlar)
- Minor correction for python backward compatibility (onosek)
* Fri Mar 23 2018 Lubomír Sedlář <lsedlar@redhat.com> - 4.1.22-10.1
- Always substitute basearch in ostree
* Fri Mar 16 2018 Lubomír Sedlář <lsedlar@redhat.com> - 4.1.22-10
- Fix package whitelist for non-modular variants

@@ -1 +1 @@
SHA512 (pungi-4.1.22.tar.bz2) = ab0d54823e2a2ef89f39b53cf61998bbf6e4fb148de743ff61eb732524c37c0af05b97e41498a8677d8e8dbe63b6f10d0cbe20394cf5a00c33ff0e2b72149e64
SHA512 (pungi-4.1.23.tar.bz2) = 0521e002f36d8effbdf4e412bb4161830b4b05f50ef3ec88af9bedc046246f7aa120f97274a4588f4469576ffd4f8c2195c4b3186e97738b6497e844a7f66aef