diff --git a/pungi/phases/createrepo.py b/pungi/phases/createrepo.py
index 9b8984b3..22ee0df2 100644
--- a/pungi/phases/createrepo.py
+++ b/pungi/phases/createrepo.py
@@ -37,6 +37,16 @@ from ..util import find_old_compose, temp_dir, get_arch_variant_data
 
 import productmd.rpms
 
+try:
+    from pdc_client import PDCClient
+    import gi
+    gi.require_version('Modulemd', '1.0')  # noqa
+    from gi.repository import Modulemd
+    WITH_MODULES = True
+except:
+    WITH_MODULES = False
+
+
 createrepo_lock = threading.Lock()
 createrepo_dirs = set()
 
@@ -183,25 +193,29 @@ def create_variant_repo(compose, arch, variant, pkg_type):
             shutil.copy2(product_id_path, os.path.join(repo_dir, "repodata", "productid"))
 
     # call modifyrepo to inject modulemd if needed
-    if arch in variant.arch_mmds:
-        import yaml
+    if arch in variant.arch_mmds and WITH_MODULES:
         modules = []
         for mmd in variant.arch_mmds[arch].values():
             # Create copy of architecture specific mmd to filter out packages
             # which are not part of this particular repo.
-            repo_mmd = copy.deepcopy(mmd)
+            repo_mmd = Modulemd.Module.new_from_string(mmd.dumps())
+            artifacts = repo_mmd.get_rpm_artifacts()
+
             # Modules without RPMs are also valid.
-            if ("artifacts" in repo_mmd["data"] and
-                    "rpms" in repo_mmd["data"]["artifacts"]):
-                repo_mmd["data"]["artifacts"]["rpms"] = [
-                    rpm_nevra for rpm_nevra in repo_mmd["data"]["artifacts"]["rpms"]
-                    if rpm_nevra in rpm_nevras]
+            if not artifacts or artifacts.size() == 0:
+                continue
+
+            repo_artifacts = Modulemd.SimpleSet()
+            for rpm_nevra in rpm_nevras:
+                if artifacts.contains(rpm_nevra):
+                    repo_artifacts.add(rpm_nevra)
+            repo_mmd.set_rpm_artifacts(repo_artifacts)
             modules.append(repo_mmd)
 
         with temp_dir() as tmp_dir:
             modules_path = os.path.join(tmp_dir, "modules.yaml")
-            with open(modules_path, "w") as outfile:
-                outfile.write(yaml.dump_all(modules, explicit_start=True))
+            Modulemd.Module.dump_all(modules, modules_path)
+
             cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"),
                                           modules_path, mdtype="modules",
                                           compress_type="gz")
diff --git a/pungi/phases/gather/sources/source_module.py b/pungi/phases/gather/sources/source_module.py
index 4488f80c..4f663088 100644
--- a/pungi/phases/gather/sources/source_module.py
+++ b/pungi/phases/gather/sources/source_module.py
@@ -24,6 +24,16 @@ import pungi.phases.gather.source
 
 import kobo.rpmlib
 
+try:
+    from pdc_client import PDCClient
+    import gi
+    gi.require_version('Modulemd', '1.0')  # noqa
+    from gi.repository import Modulemd
+    WITH_MODULES = True
+except:
+    WITH_MODULES = False
+
+
 class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
     enabled = True
 
@@ -34,80 +44,93 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
         return self.worker(log, arch, variant)
 
     def worker(self, log, arch, variant):
-        import yaml
-
         groups = set()
         packages = set()
 
+        # Check if this variant contains some modules
+        if variant is None or variant.modules is None:
+            return packages, groups
+
+        # Check if we even support modules in Pungi.
+        if not WITH_MODULES:
+            log.write(
+                "pdc_client module, pygobject module or libmodulemd "
+                "library is not installed, support for modules is "
+                "disabled\n")
+            return packages, groups
+
         # TODO: Enable multilib here and handle "multilib" field in the
         # components part of modulemd. We currently cannot do it, because
        # it is not clear what is semantic of that modulemd section.
         compatible_arches = pungi.arch.get_compatible_arches(arch, multilib=False)
 
-        if variant is not None and variant.modules is not None:
-            variant.arch_mmds.setdefault(arch, {})
+        # Generate architecture specific modulemd metadata, so we can
+        # store per-architecture artifacts there later.
+        variant.arch_mmds.setdefault(arch, {})
+        for mmd in variant.mmds:
+            mmd_id = "%s-%s" % (mmd.get_name(), mmd.get_stream())
+            if mmd_id not in variant.arch_mmds[arch]:
+                arch_mmd = Modulemd.Module.new_from_string(mmd.dumps())
+                variant.arch_mmds[arch][mmd_id] = arch_mmd
+
+        # Contains per-module RPMs added to variant.
+        added_rpms = {}
+
+        rpms = sum([
+            variant.pkgset.rpms_by_arch.get(a, [])
+            for a in compatible_arches
+        ], [])
+        for rpm_obj in rpms:
+            log.write('Examining %s for inclusion\n' % rpm_obj)
+            # Skip the RPM if it is excluded on this arch or exclusive
+            # for different arch.
+            if pungi.arch.is_excluded(rpm_obj, compatible_arches):
+                log.write('Skipping %s due to incompatible arch\n' % rpm_obj)
+                continue
 
-            # Generate architecture specific modulemd metadata, so we can
-            # store per-architecture artifacts there later.
             for mmd in variant.mmds:
                 mmd_id = "%s-%s" % (mmd.get_name(), mmd.get_stream())
-                if mmd_id not in variant.arch_mmds[arch]:
-                    arch_mmd = yaml.safe_load(mmd.dumps())
-                    variant.arch_mmds[arch][mmd_id] = arch_mmd
-
-            # Contains per-module RPMs added to variant.
-            added_rpms = {}
-
-            rpms = sum([
-                variant.pkgset.rpms_by_arch.get(a, [])
-                for a in compatible_arches
-            ], [])
-            for rpm_obj in rpms:
-                log.write('Examining %s for inclusion\n' % rpm_obj)
-                # Skip the RPM if it is excluded on this arch or exclusive
-                # for different arch.
-                if pungi.arch.is_excluded(rpm_obj, compatible_arches):
-                    log.write('Skipping %s due to incompatible arch\n' % rpm_obj)
-                    continue
-
-                for mmd in variant.mmds:
-                    mmd_id = "%s-%s" % (mmd.get_name(), mmd.get_stream())
-                    arch_mmd = variant.arch_mmds[arch][mmd_id]
-
-                    # Skip this mmd if this RPM does not belong to it.
-                    srpm = kobo.rpmlib.parse_nvr(rpm_obj.sourcerpm)["name"]
-                    if (srpm not in mmd.get_rpm_components().keys() or
-                            rpm_obj.nevra not in mmd.get_rpm_artifacts().get()):
-                        continue
-
-                    # If the RPM is not filtered out, add it to compose,
-                    # otherwise remove it from arch_mmd artifacts section.
-                    if rpm_obj.name not in mmd.get_rpm_filter().get():
-                        packages.add((rpm_obj, None))
-                        added_rpms.setdefault(mmd_id, [])
-                        added_rpms[mmd_id].append(str(rpm_obj.nevra))
-                        log.write('Adding %s because it is in %s\n'
-                                  % (rpm_obj, mmd_id))
-                    elif rpm_obj.nevra in arch_mmd["data"]["artifacts"]["rpms"]:
-                        arch_mmd["data"]["artifacts"]["rpms"].remove(
-                            rpm_obj.nevra)
-
-            # GatherSource returns all the packages in variant and does not
-            # care which package is in which module, but for modular metadata
-            # in the resulting compose repository, we have to know which RPM
-            # is part of which module.
-            # We therefore iterate over all the added packages grouped by
-            # particular module and use them to filter out the packages which
-            # have not been added to variant from the `arch_mmd`. This package
-            # list is later used in createrepo phase to generated modules.yaml.
-            for mmd_id, rpm_nevras in added_rpms.items():
                 arch_mmd = variant.arch_mmds[arch][mmd_id]
-                # Modules without artifacts are also valid.
-                if ("artifacts" not in arch_mmd["data"] or
-                        "rpms" not in arch_mmd["data"]["artifacts"]):
+
+                # Skip this mmd if this RPM does not belong to it.
+                srpm = kobo.rpmlib.parse_nvr(rpm_obj.sourcerpm)["name"]
+                if (srpm not in mmd.get_rpm_components().keys() or
+                        rpm_obj.nevra not in mmd.get_rpm_artifacts().get()):
                     continue
-                arch_mmd["data"]["artifacts"]["rpms"] = [
-                    rpm_nevra for rpm_nevra in rpm_nevras
-                    if rpm_nevra in arch_mmd["data"]["artifacts"]["rpms"]]
+
+                # If the RPM is not filtered out, add it to compose,
+                # otherwise remove it from arch_mmd artifacts section.
+                if rpm_obj.name not in mmd.get_rpm_filter().get():
+                    packages.add((rpm_obj, None))
+                    added_rpms.setdefault(mmd_id, [])
+                    added_rpms[mmd_id].append(str(rpm_obj.nevra))
+                    log.write('Adding %s because it is in %s\n'
+                              % (rpm_obj, mmd_id))
+                else:
+                    # No need to check if the rpm_obj is in rpm artifacts,
+                    # the .remove() method does that anyway.
+                    arch_mmd.get_rpm_artifacts().remove(str(rpm_obj.nevra))
+
+        # GatherSource returns all the packages in variant and does not
+        # care which package is in which module, but for modular metadata
+        # in the resulting compose repository, we have to know which RPM
+        # is part of which module.
+        # We therefore iterate over all the added packages grouped by
+        # particular module and use them to filter out the packages which
+        # have not been added to variant from the `arch_mmd`. This package
+        # list is later used in createrepo phase to generated modules.yaml.
+        for mmd_id, rpm_nevras in added_rpms.items():
+            arch_mmd = variant.arch_mmds[arch][mmd_id]
+            artifacts = arch_mmd.get_rpm_artifacts()
+
+            # Modules without artifacts are also valid.
+            if not artifacts or artifacts.size() == 0:
+                continue
+
+            added_artifacts = Modulemd.SimpleSet()
+            for rpm_nevra in rpm_nevras:
+                if artifacts.contains(rpm_nevra):
+                    added_artifacts.add(rpm_nevra)
+            arch_mmd.set_rpm_artifacts(added_artifacts)
 
         return packages, groups
diff --git a/tests/test_createrepophase.py b/tests/test_createrepophase.py
index 2accb4cc..1bce0aa0 100644
--- a/tests/test_createrepophase.py
+++ b/tests/test_createrepophase.py
@@ -19,6 +19,15 @@ from pungi.phases.createrepo import (CreaterepoPhase,
                                      get_productids_from_scm)
 from tests.helpers import DummyCompose, PungiTestCase, copy_fixture, touch
 
+try:
+    import gi  # noqa
+    gi.require_version('Modulemd', '1.0')  # noqa
+    from gi.repository import Modulemd  # noqa
+    import pdc_client  # noqa
+    HAS_MODULE_SUPPORT = True
+except ImportError:
+    HAS_MODULE_SUPPORT = False
+
 
 class TestCreaterepoPhase(PungiTestCase):
     @mock.patch('pungi.phases.createrepo.ThreadPool')
@@ -707,6 +716,92 @@ class TestCreateVariantRepo(PungiTestCase):
         with open(list_file) as f:
             self.assertEqual(f.read(), 'Packages/b/bash-4.3.30-2.fc21.src.rpm\n')
 
+    @mock.patch('pungi.phases.createrepo.run')
+    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
+    def test_variant_repo_modules_artifacts_not_in_compose(
+            self, CreaterepoWrapperCls, run):
+        if not HAS_MODULE_SUPPORT:
+            self.skipTest("Skipped test, no module support.")
+
+        compose = DummyCompose(self.topdir, {
+            'createrepo_checksum': 'sha256',
+        })
+        compose.DEBUG = False
+        compose.has_comps = False
+
+        variant = compose.variants['Server']
+        variant.arch_mmds["x86_64"] = {}
+        variant.arch_mmds["x86_64"]["test-f27"] = variant.add_fake_module(
+            "test:f27:1:2017", rpm_nvrs=["pkg-1.0.0-1"])
+        variant.arch_mmds["x86_64"]["test-f28"] = variant.add_fake_module(
+            "test:f28:1:2017", rpm_nvrs=["pkg-2.0.0-1"])
+
+        def mocked_modifyrepo_cmd(repodir, mmd_path, **kwargs):
+            modules = Modulemd.Module.new_all_from_file(mmd_path)
+            self.assertEqual(len(modules), 2)
+            self.assertItemsEqual([m.get_stream() for m in modules],
+                                  ["f27", "f28"])
+            self.assertItemsEqual(
+                [m.get_rpm_artifacts().get() for m in modules],
+                [[], []])
+
+        repo = CreaterepoWrapperCls.return_value
+        repo.get_modifyrepo_cmd.side_effect = mocked_modifyrepo_cmd
+        copy_fixture('server-rpms.json', compose.paths.compose.metadata('rpms.json'))
+
+        repodata_dir = os.path.join(
+            compose.paths.compose.os_tree('x86_64', compose.variants['Server']),
+            'repodata')
+
+        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')
+
+        self.assertItemsEqual(
+            repo.get_modifyrepo_cmd.mock_calls,
+            [mock.call(repodata_dir, ANY, compress_type='gz', mdtype='modules')])
+
+    @mock.patch('pungi.phases.createrepo.run')
+    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
+    def test_variant_repo_modules_artifacts(
+            self, CreaterepoWrapperCls, run):
+        if not HAS_MODULE_SUPPORT:
+            self.skipTest("Skipped test, no module support.")
+
+        compose = DummyCompose(self.topdir, {
+            'createrepo_checksum': 'sha256',
+        })
+        compose.DEBUG = False
+        compose.has_comps = False
+
+        variant = compose.variants['Server']
+        variant.arch_mmds["x86_64"] = {}
+        variant.arch_mmds["x86_64"]["test-f27"] = variant.add_fake_module(
+            "test:f27:1:2017", rpm_nvrs=["bash-0:4.3.30-2.fc21.x86_64"])
+        variant.arch_mmds["x86_64"]["test-f28"] = variant.add_fake_module(
+            "test:f28:1:2017", rpm_nvrs=["pkg-2.0.0-1"])
+
+        def mocked_modifyrepo_cmd(repodir, mmd_path, **kwargs):
+            modules = Modulemd.Module.new_all_from_file(mmd_path)
+            self.assertEqual(len(modules), 2)
+            self.assertItemsEqual([m.get_stream() for m in modules],
+                                  ["f27", "f28"])
+            self.assertItemsEqual(
+                [m.get_rpm_artifacts().get() for m in modules],
+                [["bash-0:4.3.30-2.fc21.x86_64"], []])
+
+        repo = CreaterepoWrapperCls.return_value
+        repo.get_modifyrepo_cmd.side_effect = mocked_modifyrepo_cmd
+        copy_fixture('server-rpms.json', compose.paths.compose.metadata('rpms.json'))
+
+        repodata_dir = os.path.join(
+            compose.paths.compose.os_tree('x86_64', compose.variants['Server']),
+            'repodata')
+
+        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')
+
+        self.assertItemsEqual(
+            repo.get_modifyrepo_cmd.mock_calls,
+            [mock.call(repodata_dir, ANY, compress_type='gz', mdtype='modules')])
+
 
 class ANYSingleton(object):
     """An object that is equal to anything."""
diff --git a/tests/test_gather_source_module.py b/tests/test_gather_source_module.py
index 0e578494..b1cb746f 100644
--- a/tests/test_gather_source_module.py
+++ b/tests/test_gather_source_module.py
@@ -45,7 +45,7 @@ class TestGatherSourceModule(helpers.PungiTestCase):
 
         variant = self.compose.variants["Server"]
         arch_mmd = variant.arch_mmds["x86_64"]["testmodule-master"]
-        self.assertEqual(set(arch_mmd["data"]["artifacts"]["rpms"]),
+        self.assertEqual(set(arch_mmd.get_rpm_artifacts().get()),
                         set(["pkg-1.0.0-1"]))
 
     def test_gather_filtered_module(self):
@@ -60,4 +60,4 @@ class TestGatherSourceModule(helpers.PungiTestCase):
 
         variant = self.compose.variants["Server"]
         arch_mmd = variant.arch_mmds["x86_64"]["testmodule-master"]
-        self.assertEqual(len(arch_mmd["data"]["artifacts"]["rpms"]), 0)
+        self.assertEqual(len(arch_mmd.get_rpm_artifacts().get()), 0)