2018-06-22 06:58:54 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
|
|
|
|
2018-10-19 08:27:04 +00:00
|
|
|
import gzip
|
2018-06-22 06:58:54 +00:00
|
|
|
import os
|
2018-10-19 08:27:04 +00:00
|
|
|
from collections import defaultdict
|
2018-07-17 12:29:52 +00:00
|
|
|
from fnmatch import fnmatch
|
2018-10-19 08:27:04 +00:00
|
|
|
from itertools import chain
|
|
|
|
|
|
|
|
import createrepo_c as cr
|
|
|
|
import kobo.rpmlib
|
|
|
|
from kobo.shortcuts import run
|
2018-06-22 06:58:54 +00:00
|
|
|
|
|
|
|
import pungi.phases.gather.method
|
|
|
|
from pungi import Modulemd, multilib_dnf
|
|
|
|
from pungi.arch import get_valid_arches, tree_arch_to_yum_arch
|
|
|
|
from pungi.phases.gather import _mk_pkg_map
|
2018-10-19 08:27:04 +00:00
|
|
|
from pungi.phases.createrepo import add_modular_metadata
|
2018-06-22 06:58:54 +00:00
|
|
|
from pungi.util import (
|
|
|
|
get_arch_variant_data,
|
|
|
|
iter_module_defaults,
|
|
|
|
pkg_is_debug,
|
|
|
|
)
|
|
|
|
from pungi.wrappers import fus
|
2018-07-17 12:29:52 +00:00
|
|
|
from pungi.wrappers.comps import CompsWrapper
|
2018-06-22 06:58:54 +00:00
|
|
|
from pungi.wrappers.createrepo import CreaterepoWrapper
|
|
|
|
|
|
|
|
from .method_nodeps import expand_groups
|
|
|
|
|
|
|
|
|
|
|
|
class FakePackage(object):
    """Thin adapter that imitates a DNF package object so that packages
    loaded via createrepo_c can be passed to the python-multilib library.
    """

    def __init__(self, pkg):
        self.pkg = pkg

    def __getattr__(self, attr):
        # Anything not explicitly overridden here is delegated to the
        # wrapped createrepo_c package.
        return getattr(self.pkg, attr)

    @property
    def files(self):
        # createrepo_c exposes files as (type, dirname, basename) triples;
        # python-multilib expects full file paths.
        paths = []
        for _, dirname, basename in self.pkg.files:
            paths.append(os.path.join(dirname, basename))
        return paths

    @property
    def provides(self):
        # This is supposed to match what yum package object returns: a
        # nested tuple (name, flag, (epoch, version, release)). Only the
        # name is filled in, because that's all python-multilib uses.
        result = []
        for prov in self.pkg.provides:
            name = prov[0].split()[0]
            result.append((name, None, (None, None, None)))
        return result
|
|
|
|
|
class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
    """Gather method that drives the external ``fus`` depsolver in a loop,
    feeding back multilib and langpack additions until a fixed point is
    reached, then expands the result with sources and debuginfo.
    """

    enabled = True

    def __init__(self, *args, **kwargs):
        super(GatherMethodHybrid, self).__init__(*args, **kwargs)
        # Per-arch cache of NEVRA -> package object maps (see _get_pkg_map).
        self.package_maps = {}
        # Lazy cache of NEVRA -> FakePackage for multilib candidate lookup
        # (see _prepare_packages / _get_package).
        self.packages = {}
        # Mapping from package name to set of langpack packages (stored as
        # names).
        self.langpacks = {}
        # Set of packages for which we already added langpacks.
        self.added_langpacks = set()
        # Set of NEVRAs of modular packages
        self.modular_packages = set()

    def _get_pkg_map(self, arch):
        """Create a mapping from NEVRA to actual package object. This will be
        done once for each architecture, since the package set is the same for
        all variants.

        The keys are in NEVRA format and only include the epoch if it's not
        zero. This makes it easier to query by results for the depsolver.
        """
        if arch not in self.package_maps:
            pkg_map = {}
            for pkg_arch in self.package_sets[arch].rpms_by_arch:
                for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
                    pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
            self.package_maps[arch] = pkg_map

        return self.package_maps[arch]

    def _prepare_packages(self):
        """Populate ``self.packages`` with FakePackage wrappers for every
        package in the arch repo whose arch is valid for the current tree
        arch (including multilib arches).
        """
        repo_path = self.compose.paths.work.arch_repo(arch=self.arch)
        md = cr.Metadata()
        md.locate_and_load_xml(repo_path)
        for key in md.keys():
            pkg = md.get(key)
            if pkg.arch in self.valid_arches:
                self.packages[_fmt_nevra(pkg, arch=pkg.arch)] = FakePackage(pkg)

    def _get_package(self, nevra):
        """Look up a FakePackage by NEVRA, loading the repo metadata on
        first use. Raises KeyError if the NEVRA is unknown.
        """
        if not self.packages:
            self._prepare_packages()
        return self.packages[nevra]

    def expand_list(self, patterns):
        """Given a list of globs, create a list of package names matching any
        of the pattern.
        """
        expanded = set()
        for pkg_arch in self.package_sets[self.arch].rpms_by_arch:
            for pkg in self.package_sets[self.arch].rpms_by_arch[pkg_arch]:
                for pattern in patterns:
                    if fnmatch(pkg.name, pattern):
                        expanded.add(pkg)
                        # One matching pattern is enough; move to next package.
                        break
        return expanded

    def prepare_modular_packages(self):
        """Collect NEVRAs of all modular RPM artifacts (regular and devel
        module metadata) across all variants for the current arch.
        """
        for var in self.compose.all_variants.values():
            for mmd in var.arch_mmds.get(self.arch, {}).values():
                self.modular_packages.update(mmd.get_rpm_artifacts().dup())
            for mmd in var.dev_mmds.get(self.arch, {}).values():
                self.modular_packages.update(mmd.get_rpm_artifacts().dup())

    def prepare_langpacks(self, arch, variant):
        """Build ``self.langpacks``: for each langpack pattern in comps, the
        set of package names that match it (excluding -devel/-static and
        debuginfo packages). No-op when the compose has no comps.
        """
        if not self.compose.has_comps:
            return
        comps_file = self.compose.paths.work.comps(arch, variant, create_dir=False)
        comps = CompsWrapper(comps_file)

        for name, install in comps.get_langpacks().items():
            # Replace %s with * for fnmatch.
            install_match = install % "*"
            self.langpacks[name] = set()
            for pkg_arch in self.package_sets[arch].rpms_by_arch:
                for pkg in self.package_sets[arch].rpms_by_arch[pkg_arch]:
                    if not fnmatch(pkg.name, install_match):
                        # Does not match the pattern, ignore...
                        continue
                    if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
                        continue
                    if pkg_is_debug(pkg):
                        continue
                    self.langpacks[name].add(pkg.name)

    # NOTE(review): the mutable default arguments below ([]) are a known
    # Python pitfall; callers appear to always pass real values (packages
    # must support .update(), i.e. be a set), but switching the defaults to
    # None would be safer — confirm against callers before changing.
    def __call__(
        self,
        arch,
        variant,
        package_sets,
        packages=[],
        groups=[],
        multilib_whitelist=[],
        multilib_blacklist=[],
        filter_packages=[],
        **kwargs
    ):
        """Run the hybrid depsolver for one (variant, arch) pair and return
        the package map (rpm/srpm/debuginfo) produced by expand_packages.
        Note: *packages* is mutated in place (group expansion is added).
        """
        self.arch = arch
        self.valid_arches = get_valid_arches(arch, multilib=True)
        self.package_sets = package_sets

        self.prepare_langpacks(arch, variant)
        self.prepare_modular_packages()

        self.multilib_methods = get_arch_variant_data(
            self.compose.conf, "multilib", arch, variant
        )
        self.multilib = multilib_dnf.Multilib(
            self.multilib_methods,
            set(p.name for p in self.expand_list(multilib_blacklist)),
            set(p.name for p in self.expand_list(multilib_whitelist)),
        )

        # Build the module-metadata-only repo and learn the platform stream.
        platform = create_module_repo(self.compose, variant, arch)

        packages.update(
            expand_groups(self.compose, arch, variant, groups, set_pkg_arch=False)
        )

        # Filters are received as tuples (name, arch), we should convert it to
        # strings.
        filter_packages = [_fmt_pkg(*p) for p in filter_packages]

        nvrs, out_modules = self.run_solver(
            variant, arch, packages, platform, filter_packages
        )
        # Drop module metadata that fus did not include in its output.
        filter_modules(variant, arch, out_modules)
        return expand_packages(
            self._get_pkg_map(arch),
            variant.arch_mmds.get(arch, {}),
            pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
            nvrs,
        )
        # maybe check invalid sigkeys

    def run_solver(self, variant, arch, packages, platform, filter_packages):
        """Iteratively run fus, adding multilib and langpack packages to the
        input on each pass, until no new packages are added. Returns the
        final (nvrs, modules) parsed from the fus output.
        """
        repos = [self.compose.paths.work.arch_repo(arch=arch)]

        modules = []
        if variant.arch_mmds.get(arch):
            repos.append(self.compose.paths.work.module_repo(arch, variant))
            for mmd in variant.arch_mmds[arch].values():
                modules.append("%s:%s" % (mmd.peek_name(), mmd.peek_stream()))

        input_packages = []
        for pkg_name, pkg_arch in packages:
            input_packages.extend(self._expand_wildcard(pkg_name, pkg_arch))

        step = 0

        # Multilib additions from the previous iteration; used so each
        # iteration only feeds back *new* multilib packages.
        old_multilib = set()

        while True:
            step += 1
            conf_file = self.compose.paths.work.fus_conf(arch, variant, step)
            fus.write_config(conf_file, sorted(modules), sorted(input_packages))
            cmd = fus.get_cmd(
                conf_file,
                tree_arch_to_yum_arch(arch),
                repos,
                pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
                platform=platform,
                filter_packages=filter_packages,
            )
            logfile = self.compose.paths.log.log_file(
                arch, "hybrid-depsolver-%s-iter-%d" % (variant, step)
            )
            # Adding this environement variable will tell GLib not to prefix
            # any log messages with the PID of the fus process (which is quite
            # useless for us anyway).
            env = os.environ.copy()
            env["G_MESSAGES_PREFIXED"] = ""
            self.compose.log_debug("[BEGIN] Running fus")
            run(cmd, logfile=logfile, show_cmd=True, env=env)
            output, out_modules = fus.parse_output(logfile)
            self.compose.log_debug("[DONE ] Running fus")
            new_multilib = self.add_multilib(variant, arch, output, old_multilib)
            old_multilib = new_multilib
            if new_multilib:
                input_packages.extend(
                    _fmt_pkg(pkg_name, pkg_arch)
                    for pkg_name, pkg_arch in sorted(new_multilib)
                )
                continue

            new_langpacks = self.add_langpacks(output)
            if new_langpacks:
                input_packages.extend(new_langpacks)
                continue

            # Nothing new was added, we can stop now.
            break

        return output, out_modules

    def add_multilib(self, variant, arch, nvrs, old_multilib):
        """Return the set of (name, arch) multilib counterparts that should
        be added for native, non-modular packages in *nvrs*, minus anything
        already requested in *old_multilib*.
        """
        added = set()
        if not self.multilib_methods:
            return []

        for nvr, pkg_arch, flags in nvrs:
            if "modular" in flags:
                continue

            if pkg_arch != arch:
                # Not a native package, not checking to add multilib
                continue

            nevr = kobo.rpmlib.parse_nvr(nvr)

            for add_arch in self.valid_arches:
                if add_arch == arch:
                    continue
                try:
                    multilib_candidate = self._get_package("%s.%s" % (nvr, add_arch))
                except KeyError:
                    # No such package built for this arch.
                    continue
                if self.multilib.is_multilib(multilib_candidate):
                    added.add((nevr["name"], add_arch))

        return added - old_multilib

    def add_langpacks(self, nvrs):
        """Return a sorted list of langpack package names to add for the
        non-modular packages in *nvrs* that were not processed yet.
        """
        if not self.langpacks:
            return set()

        added = set()
        for nvr, pkg_arch, flags in nvrs:
            if "modular" in flags:
                continue
            name = nvr.rsplit("-", 2)[0]
            if name in self.added_langpacks:
                # This package is already processed.
                continue
            added.update(self.langpacks.get(name, []))
            self.added_langpacks.add(name)

        return sorted(added)

    def _expand_wildcard(self, pkg_name, pkg_arch):
        """Expand a possibly globbed package name into a list of formatted
        package specifiers, skipping debuginfo, arch mismatches and modular
        packages. Names without ``*`` are passed through unchanged.
        """
        if "*" not in pkg_name:
            return [_fmt_pkg(pkg_name, pkg_arch)]

        packages = []

        for pkg in self.expand_list([pkg_name]):
            if pkg_is_debug(pkg):
                # No debuginfo
                continue

            if pkg_arch:
                if pkg_arch != pkg.arch:
                    # Arch is specified and does not match, skip the package.
                    continue
            else:
                if pkg.arch not in ("noarch", self.arch):
                    # No arch specified and package does not match
                    continue

            # Modular artifact lists always carry an explicit epoch, so
            # format with epoch 0 when the package has none.
            strict_nevra = "%s-%s:%s-%s.%s" % (
                pkg.name, pkg.epoch or "0", pkg.version, pkg.release, pkg.arch
            )
            if strict_nevra in self.modular_packages:
                # Wildcards should not match modular packages.
                continue

            packages.append(_fmt_nevra(pkg, pkg.arch))

        return packages
|
2018-06-22 06:58:54 +00:00
|
|
|
|
2018-10-05 09:02:04 +00:00
|
|
|
def get_lookaside_modules(lookasides):
    """Get list of NSVC of all modules in all lookaside repos."""
    nsvcs = set()
    for lookaside in lookasides:
        path = fus._prep_path(lookaside)
        repomd = cr.Repomd(os.path.join(path, "repodata/repomd.xml"))
        for rec in repomd.records:
            if rec.type != "modules":
                continue
            # This can't use _from_stream, since gobject-introspection
            # refuses to pass a file object.
            with gzip.GzipFile(os.path.join(path, rec.location_href), "r") as f:
                raw = f.read()
            for mmd in Modulemd.objects_from_string(raw):
                if not isinstance(mmd, Modulemd.Module):
                    continue
                nsvc = "%s:%s:%s:%s" % (
                    mmd.peek_name(),
                    mmd.peek_stream(),
                    mmd.peek_version(),
                    mmd.peek_context(),
                )
                nsvcs.add(nsvc)
    return nsvcs
|
2018-06-22 06:58:54 +00:00
|
|
|
def create_module_repo(compose, variant, arch):
    """Create repository with module metadata. There are no packages otherwise.

    Collects module metadata (and matching defaults) from all variants for
    *arch*, skipping modules already present in lookaside repos, writes them
    into a package-less repo, and returns the single requested platform
    stream (or None if no module requires a platform).

    Raises RuntimeError when modules request conflicting platform streams.
    """
    createrepo_c = compose.conf["createrepo_c"]
    createrepo_checksum = compose.conf["createrepo_checksum"]
    msg = "Creating repo with modular metadata for %s.%s" % (variant, arch)

    repo_path = compose.paths.work.module_repo(arch, variant)

    compose.log_debug("[BEGIN] %s" % msg)

    # All distinct platform streams required by the included modules.
    platforms = set()

    lookaside_modules = get_lookaside_modules(
        pungi.phases.gather.get_lookaside_repos(compose, arch, variant)
    )

    # Add modular metadata to it
    included = set()  # NSVCs already collected, to avoid duplicates
    modules = []

    # We need to include metadata for all variants. The packages are in the
    # set, so we need their metadata.
    for var in compose.all_variants.values():
        for mmd in chain(
            var.arch_mmds.get(arch, {}).values(), var.dev_mmds.get(arch, {}).values()
        ):
            # Set the arch field, but no other changes are needed.
            repo_mmd = mmd.copy()
            repo_mmd.set_arch(tree_arch_to_yum_arch(arch))

            for dep in repo_mmd.peek_dependencies():
                streams = dep.peek_requires().get("platform")
                if streams:
                    platforms.update(streams.dup())

            nsvc = "%s:%s:%s:%s" % (
                repo_mmd.peek_name(),
                repo_mmd.peek_stream(),
                repo_mmd.peek_version(),
                repo_mmd.peek_context(),
            )
            if nsvc not in lookaside_modules and nsvc not in included:
                modules.append(repo_mmd)
                included.add(nsvc)

    if len(platforms) > 1:
        raise RuntimeError("There are conflicting requests for platform.")

    # Append defaults only for modules we actually include.
    module_names = set([x.get_name() for x in modules])
    defaults_dir = compose.paths.work.module_defaults_dir()
    for mmddef in iter_module_defaults(defaults_dir):
        if mmddef.peek_module_name() in module_names:
            modules.append(mmddef)

    if modules:
        # Initialize empty repo
        repo = CreaterepoWrapper(createrepo_c=createrepo_c)
        cmd = repo.get_createrepo_cmd(
            repo_path, database=False, outputdir=repo_path, checksum=createrepo_checksum
        )
        logfile = "module_repo-%s" % variant
        run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True)

        log_file = compose.paths.log.log_file(
            arch, "gather-modifyrepo-modules-%s" % variant
        )
        add_modular_metadata(repo, repo_path, modules, log_file)

    compose.log_debug("[DONE ] %s" % msg)
    return list(platforms)[0] if platforms else None
|
|
|
|
|
|
|
def _fmt_pkg(pkg_name, arch):
|
|
|
|
if arch:
|
|
|
|
pkg_name += ".%s" % arch
|
|
|
|
return pkg_name
|
|
|
|
|
|
|
|
|
|
|
|
def _nevra(**kwargs):
|
|
|
|
if kwargs.get("epoch") not in (None, "", 0, "0"):
|
|
|
|
return "%(name)s-%(epoch)s:%(version)s-%(release)s.%(arch)s" % kwargs
|
|
|
|
return "%(name)s-%(version)s-%(release)s.%(arch)s" % kwargs
|
|
|
|
|
|
|
|
|
|
|
|
def _fmt_nevra(pkg, arch):
    """Format the NEVRA of *pkg*, using the explicitly supplied *arch*
    rather than the package's own arch field.
    """
    fields = {
        "name": pkg.name,
        "epoch": pkg.epoch,
        "version": pkg.version,
        "release": pkg.release,
        "arch": arch,
    }
    return _nevra(**fields)
|
|
|
|
|
def _get_srpm_nevra(pkg):
    """Return the NEVRA of the source RPM of *pkg*.

    The sourcerpm filename usually carries no epoch, in which case the
    binary package's epoch is used instead.
    """
    parsed = kobo.rpmlib.parse_nvra(pkg.sourcerpm)
    if not parsed["epoch"]:
        parsed["epoch"] = pkg.epoch
    return _nevra(**parsed)
|
|
|
|
|
def _make_result(paths):
|
|
|
|
return [{"path": path, "flags": []} for path in sorted(paths)]
|
|
|
|
|
|
|
|
|
2018-07-31 12:01:19 +00:00
|
|
|
def expand_packages(nevra_to_pkg, variant_modules, lookasides, nvrs):
    """For each package add source RPM and possibly also debuginfo.

    *nevra_to_pkg* maps NEVRA strings to package objects, *lookasides* is a
    list of lookaside repo paths, and *nvrs* is the fus output as (nvr, arch,
    flags) tuples. Note: *variant_modules* is not used in this function.
    Returns the rpm/srpm/debuginfo package map from _mk_pkg_map.
    """
    # This will serve as the final result. We collect sets of paths to the
    # packages.
    rpms = set()
    srpms = set()
    debuginfo = set()

    # Collect list of all packages in lookaside. These will not be added to the
    # result. Fus handles this in part: if a package is explicitly mentioned as
    # input (which can happen with comps group expansion), it will be in the
    # output even if it's in lookaside.
    lookaside_packages = set()
    for repo in lookasides:
        md = cr.Metadata()
        md.locate_and_load_xml(repo)
        for key in md.keys():
            pkg = md.get(key)
            url = os.path.join(pkg.location_base or repo, pkg.location_href)
            # Strip file:// prefix
            lookaside_packages.add(url[7:])

    # This is used to figure out which debuginfo packages to include. We keep
    # track of package architectures from each SRPM.
    srpm_arches = defaultdict(set)

    for nvr, arch, flags in nvrs:
        pkg = nevra_to_pkg["%s.%s" % (nvr, arch)]
        if pkg.file_path in lookaside_packages:
            # Package is in lookaside, don't add it and ignore sources and
            # debuginfo too.
            continue
        if pkg_is_debug(pkg):
            debuginfo.add(pkg.file_path)
        else:
            rpms.add(pkg.file_path)

        try:
            srpm_nevra = _get_srpm_nevra(pkg)
            srpm = nevra_to_pkg[srpm_nevra]
            if "modular" not in flags:
                # Only mark the arch for sources of non-modular packages. The
                # debuginfo is explicitly listed in the output, and we don't
                # want anything more.
                srpm_arches[srpm_nevra].add(arch)
            if srpm.file_path not in lookaside_packages:
                srpms.add(srpm.file_path)
        except KeyError:
            # Didn't find source RPM.. this should be logged
            pass

    # Get all debuginfo packages from all included sources. We iterate over all
    # available packages and if we see a debug package from included SRPM built
    # for architecture that has at least one binary package, we include it too.
    for pkg in nevra_to_pkg.values():
        if pkg_is_debug(pkg) and pkg.arch in srpm_arches[_get_srpm_nevra(pkg)]:
            if pkg.file_path not in lookaside_packages:
                debuginfo.add(pkg.file_path)

    return _mk_pkg_map(_make_result(rpms), _make_result(srpms), _make_result(debuginfo))
|
|
|
|
|
|
|
def filter_modules(variant, arch, nsvcs_to_keep):
    """Remove any arch-specific module metadata from the module if it's not
    listed in the list to keep. This will ultimately cause the module to not be
    included in the final repodata and module metadata.
    """
    mmds = variant.arch_mmds.get(arch, {})
    # Iterate over a snapshot of the keys since we delete while iterating.
    for nsvc in list(mmds.keys()):
        if nsvc not in nsvcs_to_keep:
            del mmds[nsvc]