Port to libmodulemd v2

Fixes: https://pagure.io/pungi/issue/1225
JIRA: COMPOSE-3662
Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
This commit is contained in:
Lubomír Sedlář 2019-07-01 12:11:39 +02:00
parent 62c6d4ddcf
commit 61e3cb0ef1
14 changed files with 143 additions and 132 deletions

View File

@ -6,9 +6,9 @@ import re
try:
import gi
gi.require_version('Modulemd', '1.0') # noqa
gi.require_version('Modulemd', '2.0') # noqa
from gi.repository import Modulemd
except:
except (ImportError, ValueError):
Modulemd = None

View File

@ -36,7 +36,7 @@ from .base import PhaseBase
from ..util import (
find_old_compose,
get_arch_variant_data,
iter_module_defaults,
collect_module_defaults,
temp_dir,
)
from pungi import Modulemd
@ -196,22 +196,21 @@ def create_variant_repo(compose, arch, variant, pkg_type, modules_metadata=None)
# call modifyrepo to inject modulemd if needed
if pkg_type == "rpm" and arch in variant.arch_mmds and Modulemd is not None:
modules = []
mod_index = Modulemd.ModuleIndex()
metadata = []
for module_id, mmd in variant.arch_mmds[arch].items():
if modules_metadata:
module_rpms = mmd.peek_rpm_artifacts().dup()
module_rpms = mmd.get_rpm_artifacts()
metadata.append((module_id, module_rpms))
modules.append(mmd)
mod_index.add_module_stream(mmd)
module_names = set([x.get_name() for x in modules])
for mmddef in iter_module_defaults(compose.paths.work.module_defaults_dir()):
if mmddef.peek_module_name() in module_names:
modules.append(mmddef)
module_names = set(mod_index.get_module_names())
defaults_dir = compose.paths.work.module_defaults_dir()
collect_module_defaults(defaults_dir, module_names, mod_index)
log_file = compose.paths.log.log_file(arch, "modifyrepo-modules-%s" % variant)
add_modular_metadata(repo, repo_dir, modules, log_file)
add_modular_metadata(repo, repo_dir, mod_index, log_file)
for module_id, module_rpms in metadata:
modulemd_path = os.path.join(
@ -230,11 +229,12 @@ def create_variant_repo(compose, arch, variant, pkg_type, modules_metadata=None)
compose.log_info("[DONE ] %s" % msg)
def add_modular_metadata(repo, repo_path, mmd, log_file):
def add_modular_metadata(repo, repo_path, mod_index, log_file):
"""Add modular metadata into a repository."""
with temp_dir() as tmp_dir:
modules_path = os.path.join(tmp_dir, "modules.yaml")
Modulemd.dump(mmd, modules_path)
with open(modules_path, "w") as f:
f.write(mod_index.dump_to_string())
cmd = repo.get_modifyrepo_cmd(
os.path.join(repo_path, "repodata"),

View File

@ -29,10 +29,10 @@ import pungi.wrappers.kojiwrapper
from pungi import Modulemd
from pungi.compose import get_ordered_variant_uids
from pungi.arch import get_compatible_arches, split_name_arch, tree_arch_to_yum_arch
from pungi.arch import get_compatible_arches, split_name_arch
from pungi.phases.base import PhaseBase
from pungi.util import (get_arch_data, get_arch_variant_data, get_variant_data,
makedirs, iter_module_defaults)
makedirs, collect_module_defaults)
from pungi.phases.createrepo import add_modular_metadata
@ -377,23 +377,18 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map):
# Add modular metadata into the repo
if variant.arch_mmds:
mmds = []
mod_index = Modulemd.ModuleIndex()
for mmd in variant.arch_mmds[arch].values():
# Set the arch field, but no other changes are needed.
repo_mmd = mmd.copy()
repo_mmd.set_arch(tree_arch_to_yum_arch(arch))
mmds.append(repo_mmd)
mod_index.add_module_stream(mmd)
module_names = set([x.get_name() for x in mmds])
module_names = set(mod_index.get_module_names())
defaults_dir = compose.paths.work.module_defaults_dir()
for mmddef in iter_module_defaults(defaults_dir):
if mmddef.peek_module_name() in module_names:
mmds.append(mmddef)
collect_module_defaults(defaults_dir, module_names, mod_index)
log_file = compose.paths.log.log_file(
arch, "lookaside_repo_modules_%s" % (variant.uid)
)
add_modular_metadata(cr, repo, mmds, log_file)
add_modular_metadata(cr, repo, mod_index, log_file)
compose.log_info('[DONE ] %s', msg)

View File

@ -142,7 +142,7 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
def prepare_modular_packages(self):
for var in self.compose.all_variants.values():
for mmd in var.arch_mmds.get(self.arch, {}).values():
self.modular_packages.update(mmd.get_rpm_artifacts().dup())
self.modular_packages.update(mmd.get_rpm_artifacts())
def prepare_langpacks(self, arch, variant):
if not self.compose.has_comps:
@ -223,7 +223,7 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
modules = []
for mmd in variant.arch_mmds.get(arch, {}).values():
modules.append("%s:%s" % (mmd.peek_name(), mmd.peek_stream()))
modules.append("%s:%s" % (mmd.get_module_name(), mmd.get_stream_name()))
input_packages = []
for pkg_name, pkg_arch in packages:
@ -399,10 +399,10 @@ def get_platform(compose, variant, arch):
for var in compose.all_variants.values():
for mmd in var.arch_mmds.get(arch, {}).values():
for dep in mmd.peek_dependencies():
streams = dep.peek_requires().get("platform")
for dep in mmd.get_dependencies():
streams = dep.get_runtime_streams("platform")
if streams:
platforms.update(streams.dup())
platforms.update(streams)
if len(platforms) > 1:
raise RuntimeError("There are conflicting requests for platform.")

View File

@ -38,7 +38,7 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
compatible_arches = pungi.arch.get_compatible_arches(arch, multilib=True)
for nsvc, mmd in variant.arch_mmds[arch].items():
for nsvc, module_stream in variant.arch_mmds[arch].items():
available_rpms = sum(
(
variant.nsvc_to_pkgset[nsvc].rpms_by_arch.get(a, [])
@ -46,7 +46,7 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
),
[],
)
to_include = set(mmd.get_rpm_artifacts().get())
to_include = set(module_stream.get_rpm_artifacts())
for rpm_obj in available_rpms:
if rpm_obj.nevra in to_include:
packages.add((rpm_obj, None))

View File

@ -229,8 +229,8 @@ def validate_module_defaults(path):
"""
seen_defaults = collections.defaultdict(set)
for mmddef in iter_module_defaults(path):
seen_defaults[mmddef.peek_module_name()].add(mmddef.peek_default_stream())
for module_name, defaults in iter_module_defaults(path):
seen_defaults[module_name].add(defaults.get_default_stream())
errors = []
for module_name, defaults in seen_defaults.items():

View File

@ -20,9 +20,10 @@ from kobo.shortcuts import run, relative_path
from kobo.threads import run_in_threads
import pungi.phases.pkgset.pkgsets
from pungi import Modulemd
from pungi.arch import get_valid_arches
from pungi.wrappers.createrepo import CreaterepoWrapper
from pungi.util import is_arch_multilib, find_old_compose, iter_module_defaults
from pungi.util import is_arch_multilib, find_old_compose, collect_module_defaults
from pungi.phases.createrepo import add_modular_metadata
@ -118,12 +119,14 @@ def _create_arch_repo(worker_thread, args, task_num):
update_md_path=repo_dir_global, checksum=createrepo_checksum)
run(cmd, logfile=compose.paths.log.log_file(arch, "arch_repo"), show_cmd=True)
# Add modulemd to the repo for all modules in all variants on this architecture.
mmds = list(iter_module_defaults(compose.paths.work.module_defaults_dir()))
for variant in compose.get_variants(arch=arch):
mmds.extend(variant.arch_mmds.get(arch, {}).values())
if mmds:
if Modulemd:
mod_index = collect_module_defaults(compose.paths.work.module_defaults_dir())
for variant in compose.get_variants(arch=arch):
for module_stream in variant.arch_mmds.get(arch, {}).values():
mod_index.add_module_stream(module_stream)
add_modular_metadata(
repo, repo_dir, mmds, compose.paths.log.log_file(arch, "arch_repo_modulemd")
repo, repo_dir, mod_index, compose.paths.log.log_file(arch, "arch_repo_modulemd")
)
compose.log_info("[DONE ] %s" % msg)

View File

@ -247,7 +247,9 @@ def _add_module_to_variant(koji_wrapper, variant, build, add_to_variant_modules=
for arch in variant.arches:
try:
mmd = Modulemd.Module.new_from_file(mmds["modulemd.%s.txt" % arch])
mmd = Modulemd.ModuleStream.read_file(
mmds["modulemd.%s.txt" % arch], strict=True
)
variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
except KeyError:
# There is no modulemd for this arch. This could mean an arch was
@ -662,11 +664,9 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
# Not current tag, skip it
continue
for arch_modules in variant.arch_mmds.values():
arch_mmd = arch_modules[nsvc]
if arch_mmd:
for rpm_nevra in arch_mmd.get_rpm_artifacts().get():
nevra = parse_nvra(rpm_nevra)
modular_packages.add((nevra["name"], nevra["arch"]))
for rpm_nevra in arch_modules[nsvc].get_rpm_artifacts():
nevra = parse_nvra(rpm_nevra)
modular_packages.add((nevra["name"], nevra["arch"]))
pkgset.populate(
compose_tag,

View File

@ -927,12 +927,38 @@ def parse_koji_event(event):
def iter_module_defaults(path):
"""Given a path to a directory with yaml files, yield each module default in there.
"""Given a path to a directory with yaml files, yield each module default
in there as a pair (module_name, ModuleDefaults instance).
"""
# It is really tempting to merge all the module indexes into a single one
# and work with it. However that does not allow for detecting conflicting
# defaults. That should not happen in practice, but better safe than sorry.
# Once libmodulemd can report the error, this code can be simplified by a
# lot. It's implemented in
# https://github.com/fedora-modularity/libmodulemd/commit/3087e4a5c38a331041fec9b6b8f1a372f9ffe64d
# and released in 2.6.0
for file in glob.glob(os.path.join(path, "*.yaml")):
for mmddef in Modulemd.objects_from_file(file):
if isinstance(mmddef, Modulemd.Defaults):
yield mmddef
index = Modulemd.ModuleIndex()
index.update_from_file(file, strict=False)
for module_name in index.get_module_names():
yield module_name, index.get_module(module_name).get_defaults()
def collect_module_defaults(defaults_dir, modules_to_load=None, mod_index=None):
"""Load module defaults into index.
If `modules_to_load` is passed in, it should be a set of module names. Only
defaults for these modules will be loaded.
If `mod_index` is passed in, it will be updated and returned. If it was
not, a new ModuleIndex will be created and returned.
"""
mod_index = mod_index or Modulemd.ModuleIndex()
for module_name, defaults in iter_module_defaults(defaults_dir):
if not modules_to_load or module_name in modules_to_load:
mod_index.add_defaults(defaults)
return mod_index
def load_config(file_path, defaults={}):

View File

@ -86,36 +86,30 @@ class MockVariant(mock.Mock):
# No support for modules
return
name, stream, version, context = nsvc.split(":")
mmd = Modulemd.Module()
mmd.set_mdversion(2)
mmd.set_name(name)
mmd.set_stream(stream)
mmd.set_version(int(version))
mmd.set_context(context)
mmd.set_summary("foo")
mmd.set_description("foo")
licenses = Modulemd.SimpleSet()
licenses.add("GPL")
mmd.set_module_licenses(licenses)
module_stream = Modulemd.ModuleStreamV2.new(name, stream)
module_stream.set_version(int(version))
module_stream.set_context(context)
module_stream.set_summary("foo")
module_stream.set_description("foo")
module_stream.add_module_license("GPL")
if rpm_nvrs:
artifacts = Modulemd.SimpleSet()
for rpm_nvr in rpm_nvrs:
artifacts.add(rpm_nvr)
rpm_name = parse_nvr(rpm_nvr)["name"]
component = Modulemd.ComponentRpm()
component = Modulemd.ComponentRpm.new(rpm_nvr)
component.set_name(rpm_name)
component.set_rationale("Needed for test")
mmd.add_rpm_component(component)
if with_artifacts:
mmd.set_rpm_artifacts(artifacts)
module_stream.add_component(component)
if with_artifacts:
module_stream.add_rpm_artifact(rpm_nvr)
if self.modules is None:
self.modules = []
self.modules.append(":".join([name, stream, version]))
if mmd_arch:
self.arch_mmds.setdefault(mmd_arch, {})[mmd.dup_nsvc()] = mmd
return mmd
nsvc = module_stream.get_nsvc()
self.arch_mmds.setdefault(mmd_arch, {})[nsvc] = module_stream
return module_stream
class IterableMock(mock.Mock):

View File

@ -118,6 +118,24 @@ class TestCreaterepoPhase(PungiTestCase):
mock.call((compose, None, compose.variants['Everything'], 'srpm', phase.modules_metadata))])
def make_mocked_modifyrepo_cmd(tc, module_artifacts):
def mocked_modifyrepo_cmd(repodir, mmd_path, **kwargs):
mod_index = Modulemd.ModuleIndex.new()
mod_index.update_from_file(mmd_path, strict=True)
tc.assertEqual(len(mod_index.get_module_names()), 1)
module = mod_index.get_module("test")
module_streams = module.get_all_streams()
tc.assertEqual(len(module_streams), len(module_artifacts))
for ms in module_streams:
tc.assertIn(ms.get_stream_name(), module_artifacts)
tc.assertItemsEqual(
ms.get_rpm_artifacts(), module_artifacts[ms.get_stream_name()],
)
return mocked_modifyrepo_cmd
class TestCreateVariantRepo(PungiTestCase):
@mock.patch('pungi.phases.createrepo.run')
@ -742,17 +760,10 @@ class TestCreateVariantRepo(PungiTestCase):
variant.arch_mmds["x86_64"]["test:f28:1:2017"] = variant.add_fake_module(
"test:f28:1:2017", rpm_nvrs=["pkg-0:2.0.0-1.x86_64"])
def mocked_modifyrepo_cmd(repodir, mmd_path, **kwargs):
modules = Modulemd.Module.new_all_from_file(mmd_path)
self.assertEqual(len(modules), 2)
self.assertItemsEqual([m.get_stream() for m in modules],
["f27", "f28"])
self.assertItemsEqual(
[m.get_rpm_artifacts().get() for m in modules],
[[], []])
repo = CreaterepoWrapperCls.return_value
repo.get_modifyrepo_cmd.side_effect = mocked_modifyrepo_cmd
repo.get_modifyrepo_cmd.side_effect = make_mocked_modifyrepo_cmd(
self, {"f27": [], "f28": []}
)
copy_fixture('server-rpms.json', compose.paths.compose.metadata('rpms.json'))
repodata_dir = os.path.join(
@ -796,17 +807,12 @@ class TestCreateVariantRepo(PungiTestCase):
"test:f27:2018:cafe": "tag-2",
}
def mocked_modifyrepo_cmd(repodir, mmd_path, **kwargs):
modules = Modulemd.Module.new_all_from_file(mmd_path)
self.assertEqual(len(modules), 2)
self.assertItemsEqual([m.get_stream() for m in modules],
["f27", "f28"])
self.assertItemsEqual(
[m.get_rpm_artifacts().get() for m in modules],
[["bash-0:4.3.30-2.fc21.x86_64"], ["pkg-0:2.0.0-1.x86_64"]])
repo = CreaterepoWrapperCls.return_value
repo.get_modifyrepo_cmd.side_effect = mocked_modifyrepo_cmd
repo.get_modifyrepo_cmd.side_effect = make_mocked_modifyrepo_cmd(
self,
{"f27": ["bash-0:4.3.30-2.fc21.x86_64"], "f28": ["pkg-0:2.0.0-1.x86_64"]},
)
copy_fixture('server-rpms.json', compose.paths.compose.metadata('rpms.json'))
repodata_dir = os.path.join(

View File

@ -209,39 +209,26 @@ class MockModule(object):
def get_name(self):
return self.name
def peek_name(self):
def get_module_name(self):
return self.name
def peek_stream(self):
def get_stream_name(self):
return self.stream
def peek_version(self):
def get_version(self):
return self.version
def peek_context(self):
def get_context(self):
return self.context
def peek_dependencies(self):
return [
mock.Mock(
peek_requires=mock.Mock(
return_value={
"platform": mock.Mock(
dup=mock.Mock(return_value=[self.platform])
)
}
)
)
]
def copy(self):
return self
def set_arch(self, arch):
pass
def get_dependencies(self):
def get_runtime_streams(platform):
assert platform == "platform"
return [self.platform]
return [mock.Mock(get_runtime_streams=get_runtime_streams)]
def get_rpm_artifacts(self):
return mock.Mock(dup=mock.Mock(return_value=self.rpms))
return self.rpms
class HelperMixin(object):
@ -308,10 +295,10 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
self.compose.has_comps = False
self.compose.variants["Server"].arch_mmds["x86_64"] = {
"mod:master": mock.Mock(
peek_name=mock.Mock(return_value="mod"),
peek_stream=mock.Mock(return_value="master"),
peek_version=mock.Mock(return_value="ver"),
peek_context=mock.Mock(return_value="ctx"),
get_module_name=mock.Mock(return_value="mod"),
get_stream_name=mock.Mock(return_value="master"),
get_version=mock.Mock(return_value="ver"),
get_context=mock.Mock(return_value="ctx"),
)
}
po.return_value = ([], ["m1"])
@ -356,16 +343,16 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
self.compose.has_comps = False
self.compose.variants["Server"].arch_mmds["x86_64"] = {
"mod:master": mock.Mock(
peek_name=mock.Mock(return_value="mod"),
peek_stream=mock.Mock(return_value="master"),
peek_version=mock.Mock(return_value="ver"),
peek_context=mock.Mock(return_value="ctx"),
get_module_name=mock.Mock(return_value="mod"),
get_stream_name=mock.Mock(return_value="master"),
get_version=mock.Mock(return_value="ver"),
get_context=mock.Mock(return_value="ctx"),
),
"mod-devel:master": mock.Mock(
peek_name=mock.Mock(return_value="mod-devel"),
peek_stream=mock.Mock(return_value="master"),
peek_version=mock.Mock(return_value="ver"),
peek_context=mock.Mock(return_value="ctx"),
get_module_name=mock.Mock(return_value="mod-devel"),
get_stream_name=mock.Mock(return_value="master"),
get_version=mock.Mock(return_value="ver"),
get_context=mock.Mock(return_value="ctx"),
),
}
po.return_value = ([("p-1-1", "x86_64", frozenset())], ["m1"])

View File

@ -542,13 +542,13 @@ class TestValidateModuleDefaults(PungiTestCase):
def _write_defaults(self, defs):
for mod_name, streams in defs.items():
for stream in streams:
mmddef = Modulemd.Defaults.new()
mmddef.set_version(1)
mmddef.set_module_name(mod_name)
mod_index = Modulemd.ModuleIndex.new()
mmddef = Modulemd.DefaultsV1.new(mod_name)
mmddef.set_default_stream(stream)
mmddef.dump(
os.path.join(self.topdir, "%s-%s.yaml" % (mod_name, stream))
)
mod_index.add_defaults(mmddef)
filename = "%s-%s.yaml" % (mod_name, stream)
with open(os.path.join(self.topdir, filename), "w") as f:
f.write(mod_index.dump_to_string())
def test_valid_files(self):
self._write_defaults({"httpd": ["1"], "python": ["3.6"]})

View File

@ -675,7 +675,7 @@ class TestFilterByWhitelist(unittest.TestCase):
class MockModule(object):
def __init__(self, path):
def __init__(self, path, strict=True):
self.path = path
def __repr__(self):
@ -685,7 +685,7 @@ class MockModule(object):
return self.path == other.path
@mock.patch("pungi.Modulemd.Module.new_from_file", new=MockModule)
@mock.patch("pungi.Modulemd.ModuleStream.read_file", new=MockModule)
@unittest.skipIf(Modulemd is None, "Skipping tests, no module support")
class TestAddModuleToVariant(unittest.TestCase):