Fix module defaults and obsoletes validation

- Remove validation for module obsoletes;
  we can have multiple obsoletes for one module
- Add unit tests to cover basic scenarios for
  module defaults and obsoletes
- Add an additional check for invalid yaml files
  in defaults. Previously, an empty list of defaults would
  be returned when an invalid yaml file was present in the
  defaults directory.
- Use ModuleIndexMerger for obsoletes only (for now).

https://pagure.io/pungi/issue/1592

Signed-off-by: Marek Kulik <mkulik@redhat.com>
Marek Kulik 2022-04-27 15:31:14 +02:00 committed by lsedlar
parent 895b3982d7
commit ca185aaea8
7 changed files with 339 additions and 56 deletions
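
For illustration, a minimal sketch of why the per-module obsoletes validation goes away: libmodulemd accepts several Obsoletes documents for one module (one per obsoleted stream), and merging them is not a conflict. This assumes the libmodulemd 2.x GObject bindings are available; the module name and values mirror the new unit test below.

```python
import gi

gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd  # noqa: E402

merger = Modulemd.ModuleIndexMerger.new()
indexes = []  # keep references alive: associate_index() only stores a pointer

# Two obsoletes documents for the same module, one per obsoleted stream.
for stream, (by_name, by_stream) in [
    ("1.22.1", ("httpd-new", "3.0")),
    ("10.4", ("httpd", "11.1.22")),
]:
    index = Modulemd.ModuleIndex.new()
    obsoletes = Modulemd.Obsoletes.new(1, 10993435, "httpd", stream, "testmsg")
    obsoletes.set_obsoleted_by(by_name, by_stream)
    index.add_obsoletes(obsoletes)
    indexes.append(index)
    merger.associate_index(index, 0)

merged = merger.resolve()
# Both obsoletes survive the merge -- this is valid, not a conflict.
assert len(merged.get_module("httpd").get_obsoletes()) == 2
```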

View File

@@ -25,10 +25,9 @@ except (ImportError, ValueError):
Modulemd = None
def iter_module_defaults_or_obsoletes(path, obsoletes=False):
def iter_module_defaults(path):
"""Given a path to a directory with yaml files, yield each module default
in there as a pair (module_name, ModuleDefaults instance).
The same happens for module obsoletes if the obsoletes switch is True.
"""
# It is really tempting to merge all the module indexes into a single one
# and work with it. However that does not allow for detecting conflicting
@@ -42,10 +41,31 @@ def iter_module_defaults_or_obsoletes(path, obsoletes=False):
index = Modulemd.ModuleIndex()
index.update_from_file(file, strict=False)
for module_name in index.get_module_names():
if obsoletes:
yield module_name, index.get_module(module_name).get_obsoletes()
else:
yield module_name, index.get_module(module_name).get_defaults()
yield module_name, index.get_module(module_name).get_defaults()
def get_module_obsoletes_idx(path, mod_list):
"""Given a path to a directory with yaml files, return Index with
merged all obsoletes.
"""
merger = Modulemd.ModuleIndexMerger.new()
md_idxs = []
# associate_index does NOT copy its argument (nor does it increase the
# reference count on the object). It only stores a pointer.
for file in glob.glob(os.path.join(path, "*.yaml")):
index = Modulemd.ModuleIndex()
index.update_from_file(file, strict=False)
mod_name = index.get_module_names()[0]
if mod_name and (mod_name in mod_list or not mod_list):
md_idxs.append(index)
merger.associate_index(md_idxs[-1], 0)
merged_idx = merger.resolve()
return merged_idx
def collect_module_defaults(
@@ -78,16 +98,21 @@ def collect_module_defaults(
def collect_module_obsoletes(obsoletes_dir, modules_to_load, mod_index=None):
"""Load module obsoletes into index.
This works in a similar fashion as collect_module_defaults except the overrides_dir
feature.
This works in a similar fashion to collect_module_defaults, except it
merges indexes together instead of adding them during iteration.
Additionally, if modules_to_load is not empty, the returned Index will
include only obsoletes for those modules.
"""
mod_index = mod_index or Modulemd.ModuleIndex()
for module_name, obsoletes in iter_module_defaults_or_obsoletes(
obsoletes_dir, obsoletes=True
):
for obsolete in obsoletes:
if not modules_to_load or module_name in modules_to_load:
mod_index.add_obsoletes(obsolete)
obsoletes_index = get_module_obsoletes_idx(obsoletes_dir, modules_to_load)
return mod_index
# Merge Obsoletes with Modules Index.
if mod_index:
merger = Modulemd.ModuleIndexMerger.new()
merger.associate_index(mod_index, 0)
merger.associate_index(obsoletes_index, 0)
merged_idx = merger.resolve()
obsoletes_index = merged_idx
return obsoletes_index
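
For callers, the visible change is that collect_module_obsoletes now builds a merged index and returns it instead of mutating mod_index in place, which is why the createrepo and gather hunks below reassign the result. A rough usage sketch, assuming Modulemd is importable; the directory paths and module name are illustrative only:

```python
from pungi.module_util import (
    Modulemd,
    collect_module_defaults,
    collect_module_obsoletes,
)

mod_index = Modulemd.ModuleIndex.new()
# ... module streams for the variant would be added to mod_index here ...

# Defaults are still added document by document into the existing index.
mod_index = collect_module_defaults(
    "work/global/module_defaults", ["httpd"], mod_index
)

# Obsoletes are merged via ModuleIndexMerger, so the merged index must be
# captured -- the original mod_index object is not updated in place.
mod_index = collect_module_obsoletes(
    "work/global/module_obsoletes", ["httpd"], mod_index
)
```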

View File

@@ -267,7 +267,7 @@ def create_variant_repo(
)
obsoletes_dir = compose.paths.work.module_obsoletes_dir()
collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
# Add extra modulemd files
if variant.uid in compose.conf.get("createrepo_extra_modulemd", {}):

View File

@@ -703,7 +703,7 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
)
obsoletes_dir = compose.paths.work.module_obsoletes_dir()
collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
log_file = compose.paths.log.log_file(
arch, "lookaside_repo_modules_%s" % (variant.uid)

View File

@@ -16,6 +16,7 @@
import collections
import os
import glob
import shutil
from kobo.shortcuts import run
@@ -24,7 +25,7 @@ from kobo.threads import run_in_threads
from pungi.phases.base import PhaseBase
from pungi.phases.gather import write_prepopulate_file
from pungi.util import temp_dir
from pungi.module_util import iter_module_defaults_or_obsoletes
from pungi.module_util import iter_module_defaults
from pungi.wrappers.comps import CompsWrapper
from pungi.wrappers.createrepo import CreaterepoWrapper
from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
@@ -68,17 +69,13 @@ class InitPhase(PhaseBase):
# download module defaults
if self.compose.has_module_defaults:
write_module_defaults(self.compose)
validate_module_defaults_or_obsoletes(
validate_module_defaults(
self.compose.paths.work.module_defaults_dir(create_dir=False)
)
# download module obsoletes
if self.compose.has_module_obsoletes:
write_module_obsoletes(self.compose)
validate_module_defaults_or_obsoletes(
self.compose.paths.work.module_obsoletes_dir(create_dir=False),
obsoletes=True,
)
# write prepopulate file
write_prepopulate_file(self.compose)
@@ -244,37 +241,38 @@ def write_module_obsoletes(compose)
)
def validate_module_defaults_or_obsoletes(path, obsoletes=False):
"""Make sure there are no conflicting defaults. Each module name can only
have one default stream or module obsolete.
def validate_module_defaults(path):
"""Make sure there are no conflicting defaults and every default can be loaded.
Each module name can only have one default stream.
:param str path: directory with cloned module defaults/obsoletes
:param str path: directory with cloned module defaults
"""
seen = collections.defaultdict(set)
mmd_type = "obsoletes" if obsoletes else "defaults"
for module_name, defaults_or_obsoletes in iter_module_defaults_or_obsoletes(
path, obsoletes
):
if obsoletes:
for obsolete in defaults_or_obsoletes:
seen[obsolete.props.module_name].add(obsolete)
else:
seen[module_name].add(defaults_or_obsoletes.get_default_stream())
defaults_num = len(glob.glob(os.path.join(path, "*.yaml")))
seen_defaults = collections.defaultdict(set)
for module_name, defaults in iter_module_defaults(path):
seen_defaults[module_name].add(defaults.get_default_stream())
errors = []
for module_name, defaults_or_obsoletes in seen.items():
if len(defaults_or_obsoletes) > 1:
for module_name, defaults in seen_defaults.items():
if len(defaults) > 1:
errors.append(
"Module %s has multiple %s: %s"
% (module_name, mmd_type, ", ".join(sorted(defaults_or_obsoletes)))
"Module %s has multiple defaults: %s"
% (module_name, ", ".join(sorted(defaults)))
)
if errors:
raise RuntimeError(
"There are duplicated module %s:\n%s" % (mmd_type, "\n".join(errors))
"There are duplicated module defaults:\n%s" % "\n".join(errors)
)
# Make sure all defaults are valid, otherwise update_from_defaults_directory
# would return an empty object
if defaults_num != len(seen_defaults):
raise RuntimeError("Defaults contains not valid default file")
def validate_comps(path):
"""Check that there are whitespace issues in comps."""

View File

@@ -24,7 +24,8 @@ from tests.helpers import (
@mock.patch("pungi.phases.init.run_in_threads", new=fake_run_in_threads)
@mock.patch("pungi.phases.init.validate_comps")
@mock.patch("pungi.phases.init.validate_module_defaults_or_obsoletes")
@mock.patch("pungi.phases.init.validate_module_defaults")
@mock.patch("pungi.phases.init.write_module_obsoletes")
@mock.patch("pungi.phases.init.write_module_defaults")
@mock.patch("pungi.phases.init.write_global_comps")
@mock.patch("pungi.phases.init.write_arch_comps")
@@ -40,6 +41,7 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
write_obsoletes,
validate_defaults,
validate_comps,
):
@@ -85,6 +87,7 @@ class TestInitPhase(PungiTestCase):
],
)
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_run_with_preserve(
@@ -95,6 +98,7 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
write_obsoletes,
validate_defaults,
validate_comps,
):
@@ -142,6 +146,7 @@ class TestInitPhase(PungiTestCase):
],
)
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_run_without_comps(
@@ -152,6 +157,7 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
write_obsoletes,
validate_defaults,
validate_comps,
):
@@ -169,6 +175,7 @@ class TestInitPhase(PungiTestCase):
self.assertEqual(create_comps.mock_calls, [])
self.assertEqual(write_variant.mock_calls, [])
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_with_module_defaults(
@@ -179,6 +186,7 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
write_obsoletes,
validate_defaults,
validate_comps,
):
@@ -196,11 +204,41 @@ class TestInitPhase(PungiTestCase):
self.assertEqual(create_comps.mock_calls, [])
self.assertEqual(write_variant.mock_calls, [])
self.assertEqual(write_defaults.call_args_list, [mock.call(compose)])
self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(
validate_defaults.call_args_list,
[mock.call(compose.paths.work.module_defaults_dir())],
)
def test_with_module_obsoletes(
self,
write_prepopulate,
write_variant,
create_comps,
write_arch,
write_global,
write_defaults,
write_obsoletes,
validate_defaults,
validate_comps,
):
compose = DummyCompose(self.topdir, {})
compose.has_comps = False
compose.has_module_defaults = False
compose.has_module_obsoletes = True
phase = init.InitPhase(compose)
phase.run()
self.assertEqual(write_global.mock_calls, [])
self.assertEqual(validate_comps.call_args_list, [])
self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])
self.assertEqual(write_arch.mock_calls, [])
self.assertEqual(create_comps.mock_calls, [])
self.assertEqual(write_variant.mock_calls, [])
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(write_obsoletes.call_args_list, [mock.call(compose)])
self.assertEqual(validate_defaults.call_args_list, [])
class TestWriteArchComps(PungiTestCase):
@mock.patch("pungi.phases.init.run")
@@ -624,13 +662,13 @@ class TestValidateModuleDefaults(PungiTestCase):
def test_valid_files(self):
self._write_defaults({"httpd": ["1"], "python": ["3.6"]})
init.validate_module_defaults_or_obsoletes(self.topdir)
init.validate_module_defaults(self.topdir)
def test_duplicated_stream(self):
self._write_defaults({"httpd": ["1"], "python": ["3.6", "3.5"]})
with self.assertRaises(RuntimeError) as ctx:
init.validate_module_defaults_or_obsoletes(self.topdir)
init.validate_module_defaults(self.topdir)
self.assertIn(
"Module python has multiple defaults: 3.5, 3.6", str(ctx.exception)
@@ -640,7 +678,7 @@ class TestValidateModuleDefaults(PungiTestCase):
self._write_defaults({"httpd": ["1", "2"], "python": ["3.6", "3.5"]})
with self.assertRaises(RuntimeError) as ctx:
init.validate_module_defaults_or_obsoletes(self.topdir)
init.validate_module_defaults(self.topdir)
self.assertIn("Module httpd has multiple defaults: 1, 2", str(ctx.exception))
self.assertIn(
@@ -665,7 +703,10 @@ class TestValidateModuleDefaults(PungiTestCase):
),
)
init.validate_module_defaults_or_obsoletes(self.topdir)
with self.assertRaises(RuntimeError) as ctx:
init.validate_module_defaults(self.topdir)
self.assertIn("Defaults contains not valid default file", str(ctx.exception))
@mock.patch("pungi.phases.init.CompsWrapper")

tests/test_module_util.py (new file, 192 lines)
View File

@@ -0,0 +1,192 @@
import os
try:
import unittest2 as unittest
except ImportError:
import unittest
from parameterized import parameterized
from pungi import module_util
from pungi.module_util import Modulemd
from tests import helpers
@unittest.skipUnless(Modulemd, "Skipped test, no module support.")
class TestModuleUtil(helpers.PungiTestCase):
def _get_stream(self, mod_name, stream_name):
stream = Modulemd.ModuleStream.new(
Modulemd.ModuleStreamVersionEnum.TWO, mod_name, stream_name
)
stream.props.version = 42
stream.props.context = "deadbeef"
stream.props.arch = "x86_64"
return stream
def _write_obsoletes(self, defs):
for mod_name, stream, obsoleted_by in defs:
mod_index = Modulemd.ModuleIndex.new()
mmdobs = Modulemd.Obsoletes.new(1, 10993435, mod_name, stream, "testmsg")
mmdobs.set_obsoleted_by(obsoleted_by[0], obsoleted_by[1])
mod_index.add_obsoletes(mmdobs)
filename = "%s:%s.yaml" % (mod_name, stream)
with open(os.path.join(self.topdir, filename), "w") as f:
f.write(mod_index.dump_to_string())
def _write_defaults(self, defs):
for mod_name, streams in defs.items():
for stream in streams:
mod_index = Modulemd.ModuleIndex.new()
mmddef = Modulemd.DefaultsV1.new(mod_name)
mmddef.set_default_stream(stream)
mod_index.add_defaults(mmddef)
filename = "%s-%s.yaml" % (mod_name, stream)
with open(os.path.join(self.topdir, filename), "w") as f:
f.write(mod_index.dump_to_string())
@parameterized.expand(
[
(
"MULTIPLE",
[
("httpd", "1.22.1", ("httpd-new", "3.0")),
("httpd", "10.4", ("httpd", "11.1.22")),
],
),
(
"NORMAL",
[
("gdb", "2.8", ("gdb", "3.0")),
("nginx", "12.7", ("nginx-nightly", "13.3")),
],
),
]
)
def test_merged_module_obsoletes_idx(self, test_name, data):
self._write_obsoletes(data)
mod_index = module_util.get_module_obsoletes_idx(self.topdir, [])
if test_name == "MULTIPLE":
# Multiple obsoletes are allowed
mod = mod_index.get_module("httpd")
self.assertEqual(len(mod.get_obsoletes()), 2)
else:
mod = mod_index.get_module("gdb")
self.assertEqual(len(mod.get_obsoletes()), 1)
mod_obsolete = mod.get_obsoletes()
self.assertIsNotNone(mod_obsolete)
self.assertEqual(mod_obsolete[0].get_obsoleted_by_module_stream(), "3.0")
def test_collect_module_defaults_with_index(self):
stream = self._get_stream("httpd", "1")
mod_index = Modulemd.ModuleIndex()
mod_index.add_module_stream(stream)
defaults_data = {"httpd": ["1.44.2"], "python": ["3.6", "3.5"]}
self._write_defaults(defaults_data)
mod_index = module_util.collect_module_defaults(
self.topdir, defaults_data.keys(), mod_index
)
for module_name in defaults_data.keys():
mod = mod_index.get_module(module_name)
self.assertIsNotNone(mod)
mod_defaults = mod.get_defaults()
self.assertIsNotNone(mod_defaults)
if module_name == "httpd":
self.assertEqual(mod_defaults.get_default_stream(), "1.44.2")
else:
# A module cannot have multiple default streams
self.assertEqual(mod_defaults.get_default_stream(), None)
def test_handles_non_defaults_file_without_validation(self):
self._write_defaults({"httpd": ["1"], "python": ["3.6"]})
helpers.touch(
os.path.join(self.topdir, "boom.yaml"),
"\n".join(
[
"document: modulemd",
"version: 2",
"data:",
" summary: dummy module",
" description: dummy module",
" license:",
" module: [GPL]",
" content: [GPL]",
]
),
)
idx = module_util.collect_module_defaults(self.topdir)
self.assertEqual(len(idx.get_module_names()), 0)
@parameterized.expand([(False, ["httpd"]), (False, ["python"])])
def test_collect_module_obsoletes(self, no_index, mod_list):
if not no_index:
stream = self._get_stream(mod_list[0], "1.22.1")
mod_index = Modulemd.ModuleIndex()
mod_index.add_module_stream(stream)
else:
mod_index = None
data = [
("httpd", "1.22.1", ("httpd-new", "3.0")),
("httpd", "10.4", ("httpd", "11.1.22")),
]
self._write_obsoletes(data)
mod_index = module_util.collect_module_obsoletes(
self.topdir, mod_list, mod_index
)
# Obsoletes should not be merged without a corresponding module
# if a module list is present
if "python" in mod_list:
mod = mod_index.get_module("httpd")
self.assertIsNone(mod)
else:
mod = mod_index.get_module("httpd")
# No modules
if "httpd" not in mod_list:
self.assertIsNone(mod.get_obsoletes())
else:
self.assertIsNotNone(mod)
obsoletes_from_orig = mod.get_newest_active_obsoletes("1.22.1", None)
self.assertEqual(
obsoletes_from_orig.get_obsoleted_by_module_name(), "httpd-new"
)
def test_collect_module_obsoletes_without_modlist(self):
stream = self._get_stream("nginx", "1.22.1")
mod_index = Modulemd.ModuleIndex()
mod_index.add_module_stream(stream)
data = [
("httpd", "1.22.1", ("httpd-new", "3.0")),
("nginx", "10.4", ("nginx", "11.1.22")),
("nginx", "11.1.22", ("nginx", "66")),
]
self._write_obsoletes(data)
mod_index = module_util.collect_module_obsoletes(self.topdir, [], mod_index)
# All obsoletes are merged into main Index when filter is empty
self.assertEqual(len(mod_index.get_module_names()), 2)
mod = mod_index.get_module("httpd")
self.assertIsNotNone(mod)
self.assertEqual(len(mod.get_obsoletes()), 1)
mod = mod_index.get_module("nginx")
self.assertIsNotNone(mod)
self.assertEqual(len(mod.get_obsoletes()), 2)

View File

@@ -96,24 +96,51 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
@helpers.unittest.skipUnless(Modulemd, "Skipping tests, no module support")
@mock.patch("pungi.phases.pkgset.common.collect_module_defaults")
@mock.patch("pungi.phases.pkgset.common.collect_module_obsoletes")
@mock.patch("pungi.phases.pkgset.common.add_modular_metadata")
def test_run_with_modulemd(self, amm, cmd, mock_run):
mmd = {"x86_64": [mock.Mock()]}
def test_run_with_modulemd(self, amm, cmo, cmd, mock_run):
# Test Index for cmo
mod_index = Modulemd.ModuleIndex.new()
mmdobs = Modulemd.Obsoletes.new(
1, 10993435, "mod_name", "mod_stream", "testmsg"
)
mmdobs.set_obsoleted_by("mod_name", "mod_name_2")
mod_index.add_obsoletes(mmdobs)
cmo.return_value = mod_index
mmd = {
"x86_64": [
Modulemd.ModuleStream.new(
Modulemd.ModuleStreamVersionEnum.TWO, "mod_name", "stream_name"
)
]
}
common.MaterializedPackageSet.create(
self.compose, self.pkgset, self.prefix, mmd=mmd
)
cmd.assert_called_once_with(
os.path.join(self.topdir, "work/global/module_defaults"),
set(x.get_module_name.return_value for x in mmd["x86_64"]),
{"mod_name"},
overrides_dir=None,
)
amm.assert_called_once_with(
mock.ANY,
os.path.join(self.topdir, "work/x86_64/repo/foo"),
cmd.return_value,
cmo.assert_called_once()
cmd.assert_called_once()
amm.assert_called_once()
self.assertEqual(
amm.mock_calls[0].args[1], os.path.join(self.topdir, "work/x86_64/repo/foo")
)
self.assertIsInstance(amm.mock_calls[0].args[2], Modulemd.ModuleIndex)
self.assertIsNotNone(amm.mock_calls[0].args[2].get_module("mod_name"))
# Check if proper Index is used by add_modular_metadata
self.assertIsNotNone(
amm.mock_calls[0].args[2].get_module("mod_name").get_obsoletes()
)
self.assertEqual(
amm.mock_calls[0].args[3],
os.path.join(self.topdir, "logs/x86_64/arch_repo_modulemd.foo.x86_64.log"),
)
cmd.return_value.add_module_stream.assert_called_once_with(mmd["x86_64"][0])
class TestCreateArchRepos(helpers.PungiTestCase):