Support more specific config for devel modules
The initial implementation was very coarse: it enabled devel modules for all variants and all modules, which is not always wanted. With this patch the config file has to explicitly list the devel modules for each variant that should include them. The variant must also be configured to include the corresponding non-devel module (though that module may be in the lookaside, in which case it will not end up in the variant). Module metadata is now also included in the internal lookaside repo, so that this works when one variant is built on top of another.

JIRA: COMPOSE-3034

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
parent 32bb9aeabe
commit a73099d446
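A rough sketch of what the reworked option could look like in a compose configuration, based on the schema and gather-source changes below. The variant UID and module streams are made up for illustration; entries are matched against the devel module's "name:stream" pair (e.g. "httpd-devel:2.4"):

# Hypothetical example values: "Server" is a variant UID and each entry
# names a -devel module as "<name>-devel:<stream>".
include_devel_modules = {
    "Server": ["httpd-devel:2.4"],
}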
@@ -609,8 +609,8 @@ def make_schema():
                 "default": False,
             },
             "include_devel_modules": {
-                "type": "boolean",
-                "default": False,
+                "type": "object",
+                "default": {},
             },

             "pkgset_source": {
@@ -254,24 +254,41 @@ def create_variant_repo(compose, arch, variant, pkg_type, modules_metadata=None)
             if mmddef.peek_module_name() in module_names:
                 modules.append(mmddef)

-        with temp_dir() as tmp_dir:
-            modules_path = os.path.join(tmp_dir, "modules.yaml")
-            Modulemd.dump(modules, modules_path)
-
-            cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"),
-                                          modules_path, mdtype="modules",
-                                          compress_type="gz")
-            log_file = compose.paths.log.log_file(
-                arch, "modifyrepo-modules-%s" % variant)
-            run(cmd, logfile=log_file, show_cmd=True)
+        log_file = compose.paths.log.log_file(arch, "modifyrepo-modules-%s" % variant)
+        add_modular_metadata(repo, repo_dir, modules, log_file)

         for module_id, module_rpms in metadata:
-            modulemd_path = os.path.join(types[pkg_type][1](relative=True), find_file_in_repodata(repo_dir, 'modules'))
-            modules_metadata.prepare_module_metadata(variant, arch, module_id, modulemd_path, types[pkg_type][0], list(module_rpms))
+            modulemd_path = os.path.join(
+                types[pkg_type][1](relative=True),
+                find_file_in_repodata(repo_dir, 'modules'),
+            )
+            modules_metadata.prepare_module_metadata(
+                variant,
+                arch,
+                module_id,
+                modulemd_path,
+                types[pkg_type][0],
+                list(module_rpms),
+            )

     compose.log_info("[DONE ] %s" % msg)


+def add_modular_metadata(repo, repo_path, mmd, log_file):
+    """Add modular metadata into a repository."""
+    with temp_dir() as tmp_dir:
+        modules_path = os.path.join(tmp_dir, "modules.yaml")
+        Modulemd.dump(mmd, modules_path)
+
+        cmd = repo.get_modifyrepo_cmd(
+            os.path.join(repo_path, "repodata"),
+            modules_path,
+            mdtype="modules",
+            compress_type="gz"
+        )
+        run(cmd, logfile=log_file, show_cmd=True)
+
+
 def find_file_in_repodata(repo_path, type_):
     dom = xml.dom.minidom.parse(os.path.join(repo_path, 'repodata', 'repomd.xml'))
     for entry in dom.getElementsByTagName('data'):
@@ -28,11 +28,12 @@ from ...wrappers.createrepo import CreaterepoWrapper
 import pungi.wrappers.kojiwrapper

 from pungi import Modulemd
-from pungi.arch import get_compatible_arches, split_name_arch
+from pungi.arch import get_compatible_arches, split_name_arch, tree_arch_to_yum_arch
 from pungi.graph import SimpleAcyclicOrientedGraph
 from pungi.phases.base import PhaseBase
 from pungi.util import (get_arch_data, get_arch_variant_data, get_variant_data,
-                        makedirs)
+                        makedirs, iter_module_defaults)
+from pungi.phases.createrepo import add_modular_metadata


 def get_gather_source(name):
@@ -401,6 +402,27 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map):
     run(cmd,
         logfile=compose.paths.log.log_file(arch, "lookaside_repo_%s" % (variant.uid)),
         show_cmd=True)

+    # Add modular metadata into the repo
+    if variant.arch_mmds:
+        mmds = []
+        for mmd in variant.arch_mmds[arch].values():
+            # Set the arch field, but no other changes are needed.
+            repo_mmd = mmd.copy()
+            repo_mmd.set_arch(tree_arch_to_yum_arch(arch))
+            mmds.append(repo_mmd)
+
+        module_names = set([x.get_name() for x in mmds])
+        defaults_dir = compose.paths.work.module_defaults_dir()
+        for mmddef in iter_module_defaults(defaults_dir):
+            if mmddef.peek_module_name() in module_names:
+                mmds.append(mmddef)
+
+        log_file = compose.paths.log.log_file(
+            arch, "lookaside_repo_modules_%s" % (variant.uid)
+        )
+        add_modular_metadata(cr, repo, mmds, log_file)
+
     compose.log_info('[DONE ] %s', msg)

     return repo
@@ -13,22 +13,25 @@
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, see <https://gnu.org/licenses/>.

-from collections import defaultdict
-import os
-from kobo.shortcuts import run
-import kobo.rpmlib
-from fnmatch import fnmatch
 import gzip
+import os
+from collections import defaultdict
+from fnmatch import fnmatch
+from itertools import chain
+
+import createrepo_c as cr
+import kobo.rpmlib
+from kobo.shortcuts import run

 import pungi.phases.gather.method
 from pungi import Modulemd, multilib_dnf
 from pungi.arch import get_valid_arches, tree_arch_to_yum_arch
 from pungi.phases.gather import _mk_pkg_map
+from pungi.phases.createrepo import add_modular_metadata
 from pungi.util import (
     get_arch_variant_data,
     iter_module_defaults,
     pkg_is_debug,
     temp_dir,
 )
 from pungi.wrappers import fus
 from pungi.wrappers.comps import CompsWrapper
@@ -36,8 +39,6 @@ from pungi.wrappers.createrepo import CreaterepoWrapper

 from .method_nodeps import expand_groups

-import createrepo_c as cr
-

 class FakePackage(object):
     """This imitates a DNF package object and can be passed to python-multilib
@@ -127,6 +128,8 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
         for var in self.compose.all_variants.values():
             for mmd in var.arch_mmds.get(self.arch, {}).values():
                 self.modular_packages.update(mmd.get_rpm_artifacts().dup())
+            for mmd in var.dev_mmds.get(self.arch, {}).values():
+                self.modular_packages.update(mmd.get_rpm_artifacts().dup())

     def prepare_langpacks(self, arch, variant):
         if not self.compose.has_comps:
@@ -236,8 +239,10 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
         # useless for us anyway).
         env = os.environ.copy()
         env["G_MESSAGES_PREFIXED"] = ""
+        self.compose.log_debug("[BEGIN] Running fus")
         run(cmd, logfile=logfile, show_cmd=True, env=env)
         output, out_modules = fus.parse_output(logfile)
+        self.compose.log_debug("[DONE ] Running fus")
         new_multilib = self.add_multilib(variant, arch, output, old_multilib)
         old_multilib = new_multilib
         if new_multilib:
@@ -287,6 +292,7 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
     def add_langpacks(self, nvrs):
         if not self.langpacks:
             return set()

+        added = set()
         for nvr, pkg_arch, flags in nvrs:
             if "modular" in flags:
@@ -376,12 +382,15 @@ def create_module_repo(compose, variant, arch):
     )

     # Add modular metadata to it
+    included = set()
     modules = []

     # We need to include metadata for all variants. The packages are in the
     # set, so we need their metadata.
     for var in compose.all_variants.values():
-        for mmd in var.arch_mmds.get(arch, {}).values():
+        for mmd in chain(
+            var.arch_mmds.get(arch, {}).values(), var.dev_mmds.get(arch, {}).values()
+        ):
             # Set the arch field, but no other changes are needed.
             repo_mmd = mmd.copy()
             repo_mmd.set_arch(tree_arch_to_yum_arch(arch))
@@ -397,8 +406,9 @@ def create_module_repo(compose, variant, arch):
                 repo_mmd.peek_version(),
                 repo_mmd.peek_context(),
             )
-            if nsvc not in lookaside_modules:
+            if nsvc not in lookaside_modules and nsvc not in included:
                 modules.append(repo_mmd)
+                included.add(nsvc)

     if len(platforms) > 1:
         raise RuntimeError("There are conflicting requests for platform.")
@@ -418,20 +428,10 @@ def create_module_repo(compose, variant, arch):
     logfile = "module_repo-%s" % variant
     run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True)

-    with temp_dir() as tmp_dir:
-        modules_path = os.path.join(tmp_dir, "modules.yaml")
-        Modulemd.dump(modules, modules_path)
-
-        cmd = repo.get_modifyrepo_cmd(
-            os.path.join(repo_path, "repodata"),
-            modules_path,
-            mdtype="modules",
-            compress_type="gz",
-        )
     log_file = compose.paths.log.log_file(
         arch, "gather-modifyrepo-modules-%s" % variant
     )
-    run(cmd, logfile=log_file, show_cmd=True)
+    add_modular_metadata(repo, repo_path, modules, log_file)

     compose.log_debug("[DONE ] %s" % msg)
     return list(platforms)[0] if platforms else None
@@ -59,6 +59,8 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
         # Generate architecture specific modulemd metadata, so we can
         # store per-architecture artifacts there later.
         variant.arch_mmds.setdefault(arch, {})
+        variant.dev_mmds.setdefault(arch, {})
+        include_devel = self.compose.conf.get("include_devel_modules", {}).get(variant.uid, [])
         for mmd in variant.mmds:
             nsvc = "%s:%s:%s:%s" % (
                 mmd.peek_name(),
@@ -70,14 +72,18 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
             arch_mmd = mmd.copy()
             variant.arch_mmds[arch][nsvc] = arch_mmd

-            if self.compose.conf["include_devel_modules"]:
+            if self.compose.conf.get("include_devel_modules"):
                 # Devel modules are enabled, we need to create it.
                 devel_nsvc = "%s-devel:%s:%s:%s" % (
                     mmd.peek_name(),
                     mmd.peek_stream(),
                     mmd.peek_version(),
                     mmd.peek_context(),
                 )
-                if devel_nsvc not in variant.arch_mmds[arch]:
+                if (
+                    devel_nsvc not in variant.arch_mmds[arch]
+                    and devel_nsvc not in variant.dev_mmds[arch]
+                ):
                     arch_mmd = mmd.copy()
                     arch_mmd.set_name(arch_mmd.peek_name() + "-devel")
                     # Depend on the actual module
|
||||
# Delete API and profiles
|
||||
arch_mmd.set_rpm_api(Modulemd.SimpleSet())
|
||||
arch_mmd.clear_profiles()
|
||||
|
||||
ns = "%s:%s" % (arch_mmd.peek_name(), arch_mmd.peek_stream())
|
||||
|
||||
# Store the new modulemd
|
||||
variant.arch_mmds[arch][devel_nsvc] = arch_mmd
|
||||
variant.module_uid_to_koji_tag[devel_nsvc] = variant.module_uid_to_koji_tag.get(nsvc)
|
||||
if ns in include_devel:
|
||||
variant.arch_mmds[arch][devel_nsvc] = arch_mmd
|
||||
else:
|
||||
variant.dev_mmds[arch][devel_nsvc] = arch_mmd
|
||||
|
||||
# Contains per-module RPMs added to variant.
|
||||
added_rpms = {}
|
||||
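In plain terms, when the option is set a -devel module is still generated for every module, but it only lands in the variant's own metadata when the config lists its "name:stream"; otherwise it is kept aside in dev_mmds so depsolving can still see its packages. A simplified, hypothetical sketch of that routing (it mirrors the hunk above but is not the actual GatherSourceModule method):

# Illustration only: names mirror the diff above, the helper itself is made up.
def route_devel_module(variant, arch, devel_nsvc, arch_mmd, include_devel):
    # "name:stream" of the -devel module, e.g. "httpd-devel:2.4"
    ns = "%s:%s" % (arch_mmd.peek_name(), arch_mmd.peek_stream())
    if ns in include_devel:
        # Listed in include_devel_modules for this variant: ship it.
        variant.arch_mmds[arch][devel_nsvc] = arch_mmd
    else:
        # Not listed: keep it only for internal use (depsolving, lookaside).
        variant.dev_mmds[arch][devel_nsvc] = arch_mmd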
@@ -121,7 +133,7 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
                     added_rpms[nsvc].append(str(rpm_obj.nevra))
                     log.write('Adding %s because it is in %s\n'
                               % (rpm_obj, nsvc))
-                elif self.compose.conf["include_devel_modules"]:
+                elif self.compose.conf.get("include_devel_modules"):
                     nsvc_devel = "%s-devel:%s:%s:%s" % (
                         mmd.peek_name(),
                         mmd.peek_stream(),
@@ -141,7 +153,12 @@ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
         # have not been added to variant from the `arch_mmd`. This package
         # list is later used in createrepo phase to generated modules.yaml.
         for nsvc, rpm_nevras in added_rpms.items():
-            arch_mmd = variant.arch_mmds[arch][nsvc]
+            # If we added some RPMs from a module, that module must exist in
+            # exactly one of the dicts. We need to find the metadata object for
+            # it.
+            arch_mmd = variant.arch_mmds[arch].get(nsvc) or variant.dev_mmds[arch].get(
+                nsvc
+            )

             added_artifacts = Modulemd.SimpleSet()
             for rpm_nevra in rpm_nevras:
@@ -266,6 +266,7 @@ class Variant(object):
         self.pkgset = None
         self.mmds = []
         self.arch_mmds = {}
+        self.dev_mmds = {}
         self.module_uid_to_koji_tag = {}
         self.nsvc_to_pkgset = {}

@@ -62,6 +62,7 @@ class MockVariant(mock.Mock):
         self.parent = kwargs.get('parent', None)
         self.mmds = []
         self.arch_mmds = {}
+        self.dev_mmds = {}
         self.module_uid_to_koji_tag = {}
         self.variants = {}
         self.pkgset = mock.Mock(rpms_by_arch={})
@@ -235,7 +235,7 @@ class HelperMixin(object):
         return os.path.join(self.compose.topdir, "work/x86_64/%s" % name)


-@mock.patch("pungi.phases.gather.methods.method_hybrid.Modulemd")
+@mock.patch("pungi.phases.gather.methods.method_hybrid.add_modular_metadata")
 @mock.patch("pungi.phases.gather.methods.method_hybrid.run")
 class TestCreateModuleRepo(HelperMixin, helpers.PungiTestCase):
     def setUp(self):
@@ -250,7 +250,7 @@ class TestCreateModuleRepo(HelperMixin, helpers.PungiTestCase):
         self.assertEqual(run.call_args_list, [])
         self.assertEqual(Modulemd.mock_calls, [])

-    def test_more_than_one_platform(self, run, Modulemd):
+    def test_more_than_one_platform(self, run, add_modular_metadata):
         self.variant.arch_mmds["x86_64"] = {
             "mod:1": MockModule("mod", platform="f29"),
             "mod:2": MockModule("mod", platform="f30"),
@@ -261,10 +261,10 @@ class TestCreateModuleRepo(HelperMixin, helpers.PungiTestCase):

         self.assertIn("conflicting requests for platform", str(ctx.exception))
         self.assertEqual(run.call_args_list, [])
-        self.assertEqual(Modulemd.mock_calls, [])
+        self.assertEqual(add_modular_metadata.mock_calls, [])

     @mock.patch("pungi.phases.gather.methods.method_hybrid.iter_module_defaults")
-    def test_creating_repo_with_module_and_default(self, imd, run, Modulemd):
+    def test_creating_repo_with_module_and_default(self, imd, run, add_modular_metadata):
         mod = MockModule("mod", platform="f29")
         self.variant.arch_mmds["x86_64"] = {"mod:1": mod}
         default = mock.Mock(peek_module_name=mock.Mock(return_value="mod"))
@@ -275,21 +275,22 @@ class TestCreateModuleRepo(HelperMixin, helpers.PungiTestCase):
         self.assertEqual(plat, "f29")

         self.assertEqual(
-            Modulemd.mock_calls, [mock.call.dump([mod, default], mock.ANY)]
-        )
-        create, modify = run.call_args_list
-        self.assertEqual(
-            create[0][0][:2], ["createrepo_c", self._repo("module_repo_Server")]
-        )
-        self.assertEqual(
-            modify[0][0][:4],
+            add_modular_metadata.call_args_list,
             [
-                "modifyrepo_c",
-                Modulemd.mock_calls[0][1][1],
-                self._repo("module_repo_Server/repodata"),
-                "--mdtype=modules",
+                mock.call(
+                    mock.ANY,
+                    self._repo("module_repo_Server"),
+                    [mod, default],
+                    mock.ANY,
+                ),
             ],
         )
+        self.assertEqual(
+            # Get first positional argument of the first call, and since it's
+            # an array, take first two elements.
+            run.call_args_list[0][0][0][:2],
+            ["createrepo_c", self._repo("module_repo_Server")]
+        )


 class ModifiedMagicMock(mock.MagicMock):