gather: Remove module source
This source no longer returns anything useful. It was only needed to process the source modulemd and fill in the list of RPMs. Since we now get the final files from Koji, that processing is not needed anymore and the source can be dropped. This change requires a number of tweaks to the tests.

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
parent 9939d09643
commit ac15f21135
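For configuration, the practical effect is that a per-variant gather_method mapping now only lists the 'comps' and 'json' sources, mirroring the test changes further down. A minimal sketch, with the '^Server$' pattern and method names taken from the test data and meant as illustration only:

    # Sketch of a gather_method setting after this change: the 'module'
    # gather source is gone, so it is no longer listed per variant.
    # Modular RPMs come from the final Koji builds instead.
    gather_method = {
        '^Server$': {
            'comps': 'deps',
            'json': 'deps',
            # 'module': 'nodeps',  # previously listed; dropped with the source
        },
    }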
@@ -123,20 +123,6 @@ def _mk_pkg_map(rpm=None, srpm=None, debuginfo=None, iterable_class=list):
     }


-def prepare_module_metadata(compose, package_sets):
-    """Populate metadata about modules in each variant. This has to be done
-    before we try running the hybrid solver for any variant. Otherwise there
-    will be modular packages without metadata and they will be incorrectly
-    considered as bare RPMs.
-    """
-    for arch in compose.get_arches():
-        for variant in compose.get_variants(arch=arch):
-            # Run the module source. This is needed to set up module metadata
-            # for the variant, but we don't really care about the returned
-            # packages. They will be pulled in based on the actual module.
-            get_variant_packages(compose, arch, variant, "module", package_sets)
-
-
 def get_parent_pkgs(arch, variant, result_dict):
     """Find packages for parent variant (if any).
@@ -216,7 +202,7 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
     else:
-        for source_name in ('module', 'comps', 'json'):
+        for source_name in ('comps', 'json'):
             packages, groups, filter_packages = get_variant_packages(compose, arch, variant,
                                                                      source_name, package_sets)
@@ -497,8 +483,6 @@ def _trim_variants(result, compose, variant_type, remove_pkgs=None, move_to_pare
 def gather_wrapper(compose, package_sets, path_prefix):
     result = {}

-    prepare_module_metadata(compose, package_sets)
-
     _gather_variants(result, compose, 'variant', package_sets)
     _gather_variants(result, compose, 'addon', package_sets, exclude_fulltree=True)
     _gather_variants(result, compose, 'layered-product', package_sets, exclude_fulltree=True)
@@ -740,7 +724,7 @@ def get_packages_to_gather(compose, arch=None, variant=None, include_arch=True,
     """
     packages = set([])
     groups = set([])
-    for source_name in ('module', 'comps', 'json'):
+    for source_name in ('comps', 'json'):
         GatherSource = get_gather_source(source_name)
         src = GatherSource(compose)
@@ -1,201 +0,0 @@
# -*- coding: utf-8 -*-


# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


"""
Get a package list based on modulemd metadata loaded in pkgset phase.
"""


import pungi.arch
import pungi.phases.gather.source
import kobo.rpmlib
from pungi import Modulemd


class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
    enabled = True

    def __call__(self, arch, variant):
        uid = variant.uid if variant else 'no-variant'
        logfile = self.compose.paths.log.log_file(arch, 'source-module-%s' % uid)
        with open(logfile, 'w') as log:
            return self.worker(log, arch, variant)

    def worker(self, log, arch, variant):
        groups = set()
        packages = set()

        # Check if this variant contains some modules
        if variant is None or variant.modules is None:
            return packages, groups

        # Check if we even support modules in Pungi.
        if not Modulemd:
            log.write(
                "pygobject module or libmodulemd library is not installed, "
                "support for modules is disabled\n")
            return packages, groups

        compatible_arches = pungi.arch.get_compatible_arches(arch, multilib=True)
        multilib_arches = set(compatible_arches) - set(
            pungi.arch.get_compatible_arches(arch))
        exclusivearchlist = pungi.arch.get_valid_arches(
            arch, multilib=False, add_noarch=False
        )

        # Generate architecture specific modulemd metadata, so we can
        # store per-architecture artifacts there later.
        variant.arch_mmds.setdefault(arch, {})
        variant.dev_mmds.setdefault(arch, {})
        include_devel = self.compose.conf.get("include_devel_modules", {}).get(variant.uid, [])
        for mmd in variant.mmds:
            nsvc = "%s:%s:%s:%s" % (
                mmd.peek_name(),
                mmd.peek_stream(),
                mmd.peek_version(),
                mmd.peek_context(),
            )
            if nsvc not in variant.arch_mmds[arch]:
                arch_mmd = mmd.copy()
                variant.arch_mmds[arch][nsvc] = arch_mmd

            if self.compose.conf.get("include_devel_modules"):
                # Devel modules are enabled, we need to create it.
                devel_nsvc = "%s-devel:%s:%s:%s" % (
                    mmd.peek_name(),
                    mmd.peek_stream(),
                    mmd.peek_version(),
                    mmd.peek_context(),
                )
                if (
                    devel_nsvc not in variant.arch_mmds[arch]
                    and devel_nsvc not in variant.dev_mmds[arch]
                ):
                    arch_mmd = mmd.copy()
                    arch_mmd.set_name(arch_mmd.peek_name() + "-devel")
                    # Depend on the actual module
                    for dep in arch_mmd.get_dependencies():
                        dep.add_requires_single(mmd.peek_name(), mmd.peek_stream())
                    # Delete API and profiles
                    arch_mmd.set_rpm_api(Modulemd.SimpleSet())
                    arch_mmd.clear_profiles()

                    ns = "%s:%s" % (arch_mmd.peek_name(), arch_mmd.peek_stream())

                    # Store the new modulemd
                    variant.module_uid_to_koji_tag[devel_nsvc] = variant.module_uid_to_koji_tag.get(nsvc)
                    if ns in include_devel:
                        variant.arch_mmds[arch][devel_nsvc] = arch_mmd
                    else:
                        variant.dev_mmds[arch][devel_nsvc] = arch_mmd

        # Contains per-module RPMs added to variant.
        added_rpms = {}

        for mmd in variant.mmds:
            nsvc = "%s:%s:%s:%s" % (
                mmd.peek_name(),
                mmd.peek_stream(),
                mmd.peek_version(),
                mmd.peek_context(),
            )
            arch_mmd = variant.arch_mmds[arch][nsvc]

            rpms = sum([
                variant.nsvc_to_pkgset[nsvc].rpms_by_arch.get(a, [])
                for a in compatible_arches
            ], [])
            for rpm_obj in rpms:
                log.write('Examining %s for inclusion\n' % rpm_obj)
                # Skip the RPM if it is excluded on this arch or exclusive
                # for different arch.
                if pungi.arch.is_excluded(rpm_obj, exclusivearchlist):
                    log.write('Skipping %s due to incompatible arch\n' % rpm_obj)
                    continue

                if should_include(rpm_obj, arch, arch_mmd, multilib_arches):
                    # Add RPM to packages.
                    packages.add((rpm_obj, None))
                    added_rpms.setdefault(nsvc, [])
                    added_rpms[nsvc].append(str(rpm_obj.nevra))
                    log.write('Adding %s because it is in %s\n'
                              % (rpm_obj, nsvc))
                elif self.compose.conf.get("include_devel_modules"):
                    nsvc_devel = "%s-devel:%s:%s:%s" % (
                        mmd.peek_name(),
                        mmd.peek_stream(),
                        mmd.peek_version(),
                        mmd.peek_context(),
                    )
                    added_rpms.setdefault(nsvc_devel, [])
                    added_rpms[nsvc_devel].append(str(rpm_obj.nevra))
                    packages.add((rpm_obj, None))
                    log.write("Adding %s to %s module\n" % (rpm_obj, nsvc_devel))

        # GatherSource returns all the packages in variant and does not
        # care which package is in which module, but for modular metadata
        # in the resulting compose repository, we have to know which RPM
        # is part of which module.
        # We therefore iterate over all the added packages grouped by
        # particular module and use them to filter out the packages which
        # have not been added to variant from the `arch_mmd`. This package
        # list is later used in createrepo phase to generated modules.yaml.
        for nsvc, rpm_nevras in added_rpms.items():
            # If we added some RPMs from a module, that module must exist in
            # exactly one of the dicts. We need to find the metadata object for
            # it.
            arch_mmd = variant.arch_mmds[arch].get(nsvc) or variant.dev_mmds[arch].get(
                nsvc
            )

            added_artifacts = Modulemd.SimpleSet()
            for rpm_nevra in rpm_nevras:
                added_artifacts.add(rpm_nevra)
            arch_mmd.set_rpm_artifacts(added_artifacts)

        return packages, groups


def should_include(rpm_obj, arch, arch_mmd, multilib_arches):
    srpm = kobo.rpmlib.parse_nvr(rpm_obj.sourcerpm)["name"]

    buildopts = arch_mmd.get_buildopts()
    if buildopts:
        whitelist = buildopts.get_rpm_whitelist()
        if whitelist:
            # We have whitelist, no filtering against components.
            if srpm not in whitelist:
                # Package is not on the list, skip it.
                return False

    # Filter out the RPM from artifacts if its filtered in MMD.
    if rpm_obj.name in arch_mmd.get_rpm_filter().get():
        return False

    # Skip the rpm_obj if it's built for multilib arch, but multilib is not
    # enabled for this srpm in MMD.
    try:
        mmd_component = arch_mmd.get_rpm_components()[srpm]
        multilib = mmd_component.get_multilib()
        multilib = multilib.get() if multilib else set()
        if arch not in multilib and rpm_obj.arch in multilib_arches:
            return False
    except KeyError:
        # No such component, disable any multilib
        if rpm_obj.arch not in ("noarch", arch):
            return False

    return True
@@ -644,16 +644,19 @@ class TestGatherPackages(helpers.PungiTestCase):
             {'rpm': [], 'srpm': [], 'debuginfo': []}
         )
         self.assertEqual(get_gather_method.call_args_list,
-                         [mock.call(compose.conf['gather_method'])] * 3)
-        self.assertEqual(get_variant_packages.call_args_list,
-                         [mock.call(compose, 'x86_64', compose.variants['Server'], 'module', pkg_set),
-                          mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
-                          mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set)])
+                         [mock.call(compose.conf['gather_method'])] * 2)
+        self.assertEqual(
+            get_variant_packages.call_args_list,
+            [
+                mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
+                mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set),
+            ],
+        )
         self.assertEqual(
             get_gather_method.return_value.return_value.call_args_list,
             [mock.call('x86_64', compose.variants['Server'], packages, groups,
                        filters, set(), set(), pkg_set, fulltree_excludes=set(),
-                       prepopulate=set())] * 3
+                       prepopulate=set())] * 2
         )

     @mock.patch('pungi.phases.gather.get_variant_packages')
@@ -684,16 +687,19 @@ class TestGatherPackages(helpers.PungiTestCase):
             {'rpm': [], 'srpm': [], 'debuginfo': []}
         )
         self.assertEqual(get_gather_method.call_args_list,
-                         [mock.call(compose.conf['gather_method'])] * 3)
-        self.assertEqual(get_variant_packages.call_args_list,
-                         [mock.call(compose, 'x86_64', compose.variants['Server'], 'module', pkg_set),
-                          mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
-                          mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set)])
+                         [mock.call(compose.conf['gather_method'])] * 2)
+        self.assertEqual(
+            get_variant_packages.call_args_list,
+            [
+                mock.call(compose, 'x86_64', compose.variants['Server'], 'comps', pkg_set),
+                mock.call(compose, 'x86_64', compose.variants['Server'], 'json', pkg_set),
+            ],
+        )
         self.assertEqual(
             get_gather_method.return_value.return_value.call_args_list,
             [mock.call('x86_64', compose.variants['Server'], packages, groups,
                        filters, set(['white']), set(['black']), pkg_set,
-                       fulltree_excludes=set(), prepopulate=set())] * 3
+                       fulltree_excludes=set(), prepopulate=set())] * 2
         )

     @mock.patch('pungi.phases.gather.get_variant_packages')
@@ -704,12 +710,12 @@ class TestGatherPackages(helpers.PungiTestCase):
         compose = helpers.DummyCompose(self.topdir, {
             'multilib_whitelist': {'*': ['white']},
             'multilib_blacklist': {'*': ['black']},
-            'gather_method': {'^Server$': {'comps': 'deps', 'module': 'nodeps', 'json': 'deps'}},
+            'gather_method': {'^Server$': {'comps': 'deps', 'json': 'deps'}},
         })
         pkg_set = mock.Mock()
         gather.gather_packages(compose, 'x86_64', compose.variants['Server'], pkg_set),
         self.assertEqual(get_gather_method.call_args_list,
-                         [mock.call('nodeps'), mock.call('deps'), mock.call('deps')])
+                         [mock.call('deps'), mock.call('deps')])

     @mock.patch("pungi.phases.gather.get_variant_packages")
     @mock.patch("pungi.phases.gather.get_gather_method")
@@ -1,104 +0,0 @@
# -*- coding: utf-8 -*-

try:
    import unittest2 as unittest
except ImportError:
    import unittest

import mock
import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

from pungi.phases.gather.sources.source_module import GatherSourceModule
from tests import helpers
from pungi import Modulemd


@unittest.skipUnless(Modulemd is not None, 'Skipped test, no module support.')
class TestGatherSourceModule(helpers.PungiTestCase):
    def setUp(self):
        super(TestGatherSourceModule, self).setUp()

        self.compose = helpers.DummyCompose(self.topdir, {})
        self.mmd = self.compose.variants["Server"].add_fake_module(
            "testmodule:master:1:2017", rpm_nvrs=["pkg-0:1.0.0-1.x86_64", "pkg-0:1.0.0-1.i686"])

        mock_rpm = mock.Mock(version='1.0.0', release='1',
                             epoch=0, excludearch=None, exclusivearch=None,
                             sourcerpm='pkg-1.0.0-1', nevra='pkg-0:1.0.0-1.x86_64',
                             arch="x86_64")
        mock_rpm.name = 'pkg'
        self.compose.variants['Server'].nsvc_to_pkgset["testmodule:master:1:2017"].rpms_by_arch['x86_64'] = [mock_rpm]
        mock_rpm = mock.Mock(version='1.0.0', release='1',
                             epoch=0, excludearch=None, exclusivearch=None,
                             sourcerpm='pkg-1.0.0-1', nevra='pkg-0:1.0.0-1.i686',
                             arch="i686")
        mock_rpm.name = 'pkg'
        self.compose.variants['Server'].nsvc_to_pkgset["testmodule:master:1:2017"].rpms_by_arch['i686'] = [mock_rpm]

    def test_gather_module(self):
        source = GatherSourceModule(self.compose)
        packages, groups = source("x86_64", self.compose.variants["Server"])
        self.assertEqual(len(packages), 1)
        self.assertEqual(list(packages)[0][0].nevra, "pkg-0:1.0.0-1.x86_64")
        self.assertEqual(len(groups), 0)

        variant = self.compose.variants["Server"]
        arch_mmd = variant.arch_mmds["x86_64"]["testmodule:master:1:2017"]
        self.assertEqual(set(arch_mmd.get_rpm_artifacts().get()),
                         set(["pkg-0:1.0.0-1.x86_64"]))

    def test_gather_multilib(self):
        multilib = Modulemd.SimpleSet()
        multilib.add("x86_64")
        self.mmd.get_rpm_components()["pkg"].set_multilib(multilib)

        source = GatherSourceModule(self.compose)
        packages, groups = source("x86_64", self.compose.variants["Server"])
        self.assertEqual(len(packages), 2)
        self.assertEqual(set(package[0].nevra for package in packages),
                         set(["pkg-0:1.0.0-1.x86_64", "pkg-0:1.0.0-1.i686"]))
        self.assertEqual(len(groups), 0)

        variant = self.compose.variants["Server"]
        arch_mmd = variant.arch_mmds["x86_64"]["testmodule:master:1:2017"]
        self.assertEqual(set(arch_mmd.get_rpm_artifacts().get()),
                         set(["pkg-0:1.0.0-1.x86_64", "pkg-0:1.0.0-1.i686"]))

    def test_gather_filtered_module(self):
        filter_set = Modulemd.SimpleSet()
        filter_set.add("pkg")
        self.mmd.set_rpm_filter(filter_set)

        source = GatherSourceModule(self.compose)
        packages, groups = source("x86_64", self.compose.variants["Server"])
        self.assertEqual(len(packages), 0)
        self.assertEqual(len(groups), 0)

        variant = self.compose.variants["Server"]
        arch_mmd = variant.arch_mmds["x86_64"]["testmodule:master:1:2017"]
        self.assertEqual(len(arch_mmd.get_rpm_artifacts().get()), 0)

    def test_gather_filtered_module_include_devel_modules(self):
        self.compose.conf['include_devel_modules'] = {
            "Server": ["testmodule-devel:master"]}

        filter_set = Modulemd.SimpleSet()
        filter_set.add("pkg")
        self.mmd.set_rpm_filter(filter_set)

        source = GatherSourceModule(self.compose)
        packages, groups = source("x86_64", self.compose.variants["Server"])

        # Two packages, because -devel for x86_64 includes both x86_64
        # and i686.
        self.assertEqual(len(packages), 2)
        self.assertEqual(len(groups), 0)

        variant = self.compose.variants["Server"]
        arch_mmd = variant.arch_mmds["x86_64"]["testmodule:master:1:2017"]
        self.assertEqual(len(arch_mmd.get_rpm_artifacts().get()), 0)
        arch_mmd = variant.arch_mmds["x86_64"]["testmodule-devel:master:1:2017"]
        self.assertEqual(len(arch_mmd.get_rpm_artifacts().get()), 2)