# -*- coding: utf-8 -*-


# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import collections
import os
import glob
import shutil

from kobo.shortcuts import run
from kobo.threads import run_in_threads

from pungi.phases.base import PhaseBase
from pungi.phases.gather import write_prepopulate_file
from pungi.util import temp_dir
from pungi.module_util import iter_module_defaults
from pungi.wrappers.comps import CompsWrapper
from pungi.wrappers.createrepo import CreaterepoWrapper
from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm


class InitPhase(PhaseBase):
    """INIT is a mandatory phase"""

    name = "init"

    def skip(self):
        # INIT must never be skipped,
        # because it generates data for LIVEIMAGES
        return False

    def run(self):
        if self.compose.has_comps:
            # write global comps and arch comps, create comps repos
            global_comps = write_global_comps(self.compose)
            validate_comps(global_comps)
            num_workers = self.compose.conf["createrepo_num_threads"]
            run_in_threads(
                _arch_worker,
                [(self.compose, arch) for arch in self.compose.get_arches()],
                threads=num_workers,
            )

            # write variant comps
            run_in_threads(
                _variant_worker,
                [
                    (self.compose, arch, variant)
                    for variant in self.compose.get_variants()
                    for arch in variant.arches
                ],
                threads=num_workers,
            )

        # download variants.xml / product.xml?

        # download module defaults
        if self.compose.has_module_defaults:
            write_module_defaults(self.compose)
            validate_module_defaults(
                self.compose.paths.work.module_defaults_dir(create_dir=False)
            )

        # download module obsoletes
        if self.compose.has_module_obsoletes:
            write_module_obsoletes(self.compose)

        # write prepopulate file
        write_prepopulate_file(self.compose)


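# Worker callbacks for run_in_threads(): each one is invoked with an ignored
# first argument, the argument tuple queued by InitPhase.run(), and the item
# number.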
def _arch_worker(_, args, num):
    compose, arch = args
    write_arch_comps(compose, arch)
    create_comps_repo(compose, arch, None)


def _variant_worker(_, args, num):
    compose, arch, variant = args
    write_variant_comps(compose, arch, variant)
    create_comps_repo(compose, arch, variant)


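# The "comps_file" option accepts either an SCM dictionary (with "scm" and
# "file" keys) or a plain file path; relative paths are resolved against the
# compose configuration directory.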
def write_global_comps(compose):
    comps_file_global = compose.paths.work.comps(arch="global")

    scm_dict = compose.conf["comps_file"]
    if isinstance(scm_dict, dict):
        comps_name = os.path.basename(scm_dict["file"])
        if scm_dict["scm"] == "file":
            scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
    else:
        comps_name = os.path.basename(scm_dict)
        scm_dict = os.path.join(compose.config_dir, scm_dict)

    compose.log_debug("Writing global comps file: %s", comps_file_global)
    tmp_dir = compose.mkdtemp(prefix="comps_")
    get_file_from_scm(scm_dict, tmp_dir, compose=compose)
    shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global)
    shutil.rmtree(tmp_dir)

    return comps_file_global


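# comps_filter is Pungi's helper script; here it reduces the global comps file
# to the groups and packages relevant for a single arch.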
def write_arch_comps(compose, arch):
    comps_file_arch = compose.paths.work.comps(arch=arch)

    compose.log_debug("Writing comps file for arch '%s': %s", arch, comps_file_arch)
    run(
        [
            "comps_filter",
            "--arch=%s" % arch,
            "--no-cleanup",
            "--output=%s" % comps_file_arch,
            compose.paths.work.comps(arch="global"),
        ]
    )


UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file"  # noqa: E501


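# "variant_as_lookaside" in the configuration is a list of (variant UID,
# lookaside variant UID) pairs; groups from the lookaside variant are passed
# to comps_filter as --lookaside-group in write_variant_comps() below.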
def get_lookaside_groups(compose, variant):
    """Find all groups listed in the parent variant or in a configured lookaside variant."""
    groups = set()
    if variant.parent:
        groups.update(g["name"] for g in variant.parent.groups)

    for var, lookaside in compose.conf.get("variant_as_lookaside", []):
        if var == variant.uid:
            lookaside_variant = compose.all_variants[lookaside]
            groups.update(g["name"] for g in lookaside_variant.groups)
    return groups


def write_variant_comps(compose, arch, variant):
    comps_file = compose.paths.work.comps(arch=arch, variant=variant)

    compose.log_debug(
        "Writing comps file (arch: %s, variant: %s): %s", arch, variant, comps_file
    )
    cmd = [
        "comps_filter",
        "--arch=%s" % arch,
        "--keep-empty-group=conflicts",
        "--keep-empty-group=conflicts-%s" % variant.uid.lower(),
        "--variant=%s" % variant.uid,
        "--output=%s" % comps_file,
        compose.paths.work.comps(arch="global"),
    ]
    for group in get_lookaside_groups(compose, variant):
        cmd.append("--lookaside-group=%s" % group)
    run(cmd)

    comps = CompsWrapper(comps_file)
    # Filter groups if the variant has some, or it's a modular variant, or
    # is not a base variant.
    if (
        variant.groups
        or variant.modules is not None
        or variant.modular_koji_tags is not None
        or variant.type != "variant"
    ):
        unmatched = comps.filter_groups(variant.groups)
        for grp in unmatched:
            compose.log_warning(UNMATCHED_GROUP_MSG % (variant.uid, arch, grp))

    contains_all = not variant.groups and not variant.environments
    if compose.conf["comps_filter_environments"] and not contains_all:
        # We only want to filter environments if it's enabled by configuration
        # and the variant has some groups and environments defined. If there
        # are none, all packages should go in, and all environments should be
        # preserved.
        comps.filter_environments(variant.environments)
    comps.write_comps()


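# variant is None when this is called for the per-arch comps repo (see
# _arch_worker above).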
def create_comps_repo(compose, arch, variant):
    createrepo_c = compose.conf["createrepo_c"]
    createrepo_checksum = compose.conf["createrepo_checksum"]
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    comps_repo = compose.paths.work.comps_repo(arch=arch, variant=variant)
    comps_path = compose.paths.work.comps(arch=arch, variant=variant)
    msg = "Creating comps repo for arch '%s' variant '%s'" % (
        arch,
        variant.uid if variant else None,
    )

    compose.log_info("[BEGIN] %s" % msg)
    cmd = repo.get_createrepo_cmd(
        comps_repo,
        database=False,
        outputdir=comps_repo,
        groupfile=comps_path,
        checksum=createrepo_checksum,
    )
    logfile = "comps_repo-%s" % variant if variant else "comps_repo"
    run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True)
    compose.log_info("[DONE ] %s" % msg)


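# "module_defaults_dir" follows the same convention as "comps_file": either an
# SCM dictionary (with "scm" and "dir" keys) or a plain path relative to the
# compose configuration directory.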
def write_module_defaults(compose):
    scm_dict = compose.conf["module_defaults_dir"]
    if isinstance(scm_dict, dict):
        if scm_dict["scm"] == "file":
            scm_dict["dir"] = os.path.join(compose.config_dir, scm_dict["dir"])
    else:
        scm_dict = os.path.join(compose.config_dir, scm_dict)

    with temp_dir(prefix="moduledefaults_") as tmp_dir:
        get_dir_from_scm(scm_dict, tmp_dir, compose=compose)
        compose.log_debug("Writing module defaults")
        shutil.copytree(
            tmp_dir,
            compose.paths.work.module_defaults_dir(create_dir=False),
            ignore=shutil.ignore_patterns(".git"),
        )


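# Same handling as write_module_defaults(), but for the "module_obsoletes_dir"
# option.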
def write_module_obsoletes(compose):
    scm_dict = compose.conf["module_obsoletes_dir"]
    if isinstance(scm_dict, dict):
        if scm_dict["scm"] == "file":
            scm_dict["dir"] = os.path.join(compose.config_dir, scm_dict["dir"])
    else:
        scm_dict = os.path.join(compose.config_dir, scm_dict)

    with temp_dir(prefix="moduleobsoletes_") as tmp_dir:
        get_dir_from_scm(scm_dict, tmp_dir, compose=compose)
        compose.log_debug("Writing module obsoletes")
        shutil.copytree(
            tmp_dir,
            compose.paths.work.module_obsoletes_dir(create_dir=False),
            ignore=shutil.ignore_patterns(".git"),
        )


def validate_module_defaults(path):
    """Make sure there are no conflicting defaults and every default can be loaded.

    Each module name can only have one default stream.

    :param str path: directory with cloned module defaults
    """

    defaults_num = len(glob.glob(os.path.join(path, "*.yaml")))

    seen_defaults = collections.defaultdict(set)

    for module_name, defaults in iter_module_defaults(path):
        seen_defaults[module_name].add(defaults.get_default_stream())

    errors = []
    for module_name, defaults in seen_defaults.items():
        if len(defaults) > 1:
            errors.append(
                "Module %s has multiple defaults: %s"
                % (module_name, ", ".join(sorted(defaults)))
            )

    if errors:
        raise RuntimeError(
            "There are duplicated module defaults:\n%s" % "\n".join(errors)
        )

    # Make sure all defaults are valid, otherwise update_from_defaults_directory
    # would return an empty object.
    if defaults_num != len(seen_defaults):
        raise RuntimeError("Defaults contain an invalid default file")


def validate_comps(path):
    """Check that there are no whitespace issues in comps."""
    wrapper = CompsWrapper(path)
    wrapper.validate()