2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
2020-02-03 03:50:06 +00:00
|
|
|
__all__ = ("Compose",)
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
import errno
|
2019-09-18 09:48:37 +00:00
|
|
|
import logging
|
2015-02-10 13:19:34 +00:00
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import tempfile
|
|
|
|
import shutil
|
2016-04-13 11:44:17 +00:00
|
|
|
import json
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
import kobo.log
|
2015-06-02 22:32:21 +00:00
|
|
|
from productmd.composeinfo import ComposeInfo
|
2015-04-24 22:55:56 +00:00
|
|
|
from productmd.images import Images
|
2018-08-15 07:20:40 +00:00
|
|
|
from dogpile.cache import make_region
|
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2019-01-31 14:52:34 +00:00
|
|
|
from pungi.graph import SimpleAcyclicOrientedGraph
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.wrappers.variants import VariantsXmlParser
|
|
|
|
from pungi.paths import Paths
|
|
|
|
from pungi.wrappers.scm import get_file_from_scm
|
2018-11-01 09:07:38 +00:00
|
|
|
from pungi.util import (
|
2020-02-03 03:50:06 +00:00
|
|
|
makedirs,
|
|
|
|
get_arch_variant_data,
|
|
|
|
get_format_substs,
|
|
|
|
get_variant_data,
|
2018-11-01 09:07:38 +00:00
|
|
|
)
|
2015-06-06 15:52:08 +00:00
|
|
|
from pungi.metadata import compose_to_composeinfo
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2018-10-30 08:34:55 +00:00
|
|
|
try:
    # This is available since productmd >= 1.18
    # TODO: remove this once the version is distributed widely enough
    from productmd.composeinfo import SUPPORTED_MILESTONES
except ImportError:
    # Fallback list for older productmd releases without the constant.
    SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
|
2018-01-19 12:06:30 +00:00
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2020-05-22 05:40:02 +00:00
|
|
|
def get_compose_info(
    conf,
    compose_type="production",
    compose_date=None,
    compose_respin=None,
    compose_label=None,
    parent_compose_ids=None,
    respin_of=None,
):
    """
    Create an incomplete ComposeInfo used to generate the Compose ID.

    If the "cts_url" option is set in `conf`, the compose is also registered
    with the Compose Tracking Service, and the compose ID and respin returned
    by CTS replace the locally generated ones.

    :param conf: compose configuration (dict-like)
    :param compose_type: compose type, e.g. "production"
    :param compose_date: YYYYMMDD date stamp; defaults to today's date
    :param compose_respin: respin counter; defaults to 0
    :param compose_label: optional compose label
    :param parent_compose_ids: parent compose IDs, forwarded to CTS only
    :param respin_of: compose ID this compose respins, forwarded to CTS only
    """
    ci = ComposeInfo()
    ci.release.name = conf["release_name"]
    ci.release.short = conf["release_short"]
    ci.release.version = conf["release_version"]
    ci.release.is_layered = True if conf.get("base_product_name", "") else False
    ci.release.type = conf.get("release_type", "ga").lower()
    ci.release.internal = bool(conf.get("release_internal", False))
    if ci.release.is_layered:
        ci.base_product.name = conf["base_product_name"]
        ci.base_product.short = conf["base_product_short"]
        ci.base_product.version = conf["base_product_version"]
        ci.base_product.type = conf.get("base_product_type", "ga").lower()

    ci.compose.label = compose_label
    ci.compose.type = compose_type
    ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
    ci.compose.respin = compose_respin or 0

    cts_url = conf.get("cts_url", None)
    if cts_url:
        # Import requests and requests-kerberos here so it is not needed
        # if running without Compose Tracking Service.
        import requests
        from requests_kerberos import HTTPKerberosAuth

        # Requests-kerberos cannot accept custom keytab, we need to use
        # environment variable for this. But we need to change environment
        # only temporarily just for this single requests.post.
        # So at first backup the current environment and revert to it
        # after the requests.post call.
        cts_keytab = conf.get("cts_keytab", None)
        if cts_keytab:
            environ_copy = dict(os.environ)
            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab

        try:
            # Create compose in CTS and get the reserved compose ID.
            ci.compose.id = ci.create_compose_id()
            url = os.path.join(cts_url, "api/1/composes/")
            data = {
                "compose_info": json.loads(ci.dumps()),
                "parent_compose_ids": parent_compose_ids,
                "respin_of": respin_of,
            }
            rv = requests.post(url, json=data, auth=HTTPKerberosAuth())
            rv.raise_for_status()
        finally:
            if cts_keytab:
                # Restore the environment exactly as it was before the call.
                os.environ.clear()
                os.environ.update(environ_copy)

        # Update local ComposeInfo with received ComposeInfo.
        cts_ci = ComposeInfo()
        cts_ci.loads(rv.text)
        ci.compose.respin = cts_ci.compose.respin
        ci.compose.id = cts_ci.compose.id

    else:
        ci.compose.id = ci.create_compose_id()

    return ci
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
2020-05-22 05:40:02 +00:00
|
|
|
def write_compose_info(compose_dir, ci):
    """Persist ComposeInfo `ci` into the `compose_dir` layout.

    Creates the compose directory (and its work/global subdirectory) if
    needed, records the compose ID in a COMPOSE_ID file and dumps the full
    metadata as composeinfo-base.json.
    """
    makedirs(compose_dir)
    id_path = os.path.join(compose_dir, "COMPOSE_ID")
    with open(id_path, "w") as id_fh:
        id_fh.write(ci.compose.id)
    global_work_dir = os.path.join(compose_dir, "work", "global")
    makedirs(global_work_dir)
    ci.dump(os.path.join(global_work_dir, "composeinfo-base.json"))
|
2020-05-22 05:40:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_compose_dir(
    topdir,
    conf,
    compose_type="production",
    compose_date=None,
    compose_respin=None,
    compose_label=None,
    already_exists_callbacks=None,
):
    """
    Create a new unique compose directory under `topdir` and write the base
    ComposeInfo metadata into it.

    Without CTS the compose ID is generated locally; if a directory for the
    generated ID already exists, the respin counter is bumped until a free
    ID is found.

    :param already_exists_callbacks: currently unused; reserved for custom
        checks whether a compose ID was already used
    :returns: path to the newly created compose directory
    """
    already_exists_callbacks = already_exists_callbacks or []

    ci = get_compose_info(
        conf, compose_type, compose_date, compose_respin, compose_label
    )

    cts_url = conf.get("cts_url", None)
    if cts_url:
        # Create compose directory.
        # With CTS the compose ID was reserved by the service, so it is
        # already unique and no retry loop is needed.
        compose_dir = os.path.join(topdir, ci.compose.id)
        os.makedirs(compose_dir)
    else:
        while 1:
            ci.compose.id = ci.create_compose_id()

            compose_dir = os.path.join(topdir, ci.compose.id)

            exists = False
            # TODO: callbacks to determine if a composeid was already used
            # for callback in already_exists_callbacks:
            #     if callback(data):
            #         exists = True
            #         break

            # already_exists_callbacks fallback: does target compose_dir exist?
            try:
                # Relying on makedirs raising EEXIST makes the existence
                # check and the creation atomic.
                os.makedirs(compose_dir)
            except OSError as ex:
                if ex.errno == errno.EEXIST:
                    exists = True
                else:
                    raise

            if exists:
                # This ID is taken; regenerate metadata with respin + 1
                # and try again.
                ci = get_compose_info(
                    conf,
                    compose_type,
                    compose_date,
                    ci.compose.respin + 1,
                    compose_label,
                )
                continue
            break

    write_compose_info(compose_dir, ci)
    return compose_dir
|
|
|
|
|
|
|
|
|
|
|
|
class Compose(kobo.log.LoggingBase):
    """A single compose run.

    Wraps the configuration, the parsed variants, the on-disk directory
    layout (via ``Paths``), the productmd metadata (``ComposeInfo`` /
    ``Images``), per-deliverable bookkeeping and the compose STATUS file.
    """

    def __init__(
        self,
        conf,
        topdir,
        skip_phases=None,
        just_phases=None,
        old_composes=None,
        koji_event=None,
        supported=False,
        logger=None,
        notifier=None,
    ):
        """
        :param conf: loaded compose configuration (dict-like)
        :param topdir: top level directory of this compose
        :param skip_phases: names of phases that must not run
        :param just_phases: names of the only phases that should run
        :param old_composes: directories with previous composes
        :param koji_event: koji event to pin package set to; falls back to
            the "koji_event" configuration option
        :param supported: mark the compose as officially supported
        :param logger: logger used for all compose logging
        :param notifier: optional notifier that receives status changes
        """
        kobo.log.LoggingBase.__init__(self, logger)
        # TODO: check if minimal conf values are set
        self.conf = conf
        # This is a dict mapping UID to Variant objects. It only contains top
        # level variants.
        self.variants = {}
        # This is a similar mapping, but contains even nested variants.
        self.all_variants = {}
        self.topdir = os.path.abspath(topdir)
        self.skip_phases = skip_phases or []
        self.just_phases = just_phases or []
        self.old_composes = old_composes or []
        self.koji_event = koji_event or conf.get("koji_event")
        self.notifier = notifier

        # path definitions
        self.paths = Paths(self)

        # Set up logging to file
        if logger:
            kobo.log.add_file_logger(
                logger, self.paths.log.log_file("global", "pungi.log")
            )
            kobo.log.add_file_logger(
                logger, self.paths.log.log_file("global", "excluding-arch.log")
            )

            # Keep "is_excluded" records out of the main log; they are routed
            # to the dedicated excluding-arch log instead.
            class PungiLogFilter(logging.Filter):
                def filter(self, record):
                    return (
                        False
                        if record.funcName and record.funcName == "is_excluded"
                        else True
                    )

            # Accept only package-set population and "is_excluded" records.
            class ExcludingArchLogFilter(logging.Filter):
                def filter(self, record):
                    message = record.getMessage()
                    if "Populating package set for arch:" in message or (
                        record.funcName and record.funcName == "is_excluded"
                    ):
                        return True
                    else:
                        return False

            # Attach the filters to the file handlers created above; handlers
            # are matched by the basename of their target log file.
            for handler in logger.handlers:
                if isinstance(handler, logging.FileHandler):
                    log_file_name = os.path.basename(handler.stream.name)
                    if log_file_name == "pungi.global.log":
                        handler.addFilter(PungiLogFilter())
                    elif log_file_name == "excluding-arch.global.log":
                        handler.addFilter(ExcludingArchLogFilter())

        # to provide compose_id, compose_date and compose_respin
        self.ci_base = ComposeInfo()
        self.ci_base.load(
            os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
        )

        self.supported = supported
        if (
            self.compose_label
            and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES
        ):
            self.log_info(
                "Automatically setting 'supported' flag due to label: %s."
                % self.compose_label
            )
            self.supported = True

        # Image metadata accumulated while the compose runs.
        self.im = Images()
        self.im.compose.id = self.compose_id
        self.im.compose.type = self.compose_type
        self.im.compose.date = self.compose_date
        self.im.compose.respin = self.compose_respin
        self.im.metadata_path = self.paths.compose.metadata()

        # Stores list of deliverables that failed, but did not abort the
        # compose.
        # {deliverable: [(Variant.uid, arch, subvariant)]}
        self.failed_deliverables = {}
        self.attempted_deliverables = {}
        self.required_deliverables = {}

        # Optional dogpile.cache region; falls back to the no-op null
        # backend when no backend is configured.
        if self.conf.get("dogpile_cache_backend", None):
            self.cache_region = make_region().configure(
                self.conf.get("dogpile_cache_backend"),
                expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
                arguments=self.conf.get("dogpile_cache_arguments", {}),
            )
        else:
            self.cache_region = make_region().configure("dogpile.cache.null")

    # Expose the module-level helpers through the class for convenience.
    get_compose_info = staticmethod(get_compose_info)
    write_compose_info = staticmethod(write_compose_info)
    get_compose_dir = staticmethod(get_compose_dir)

    def __getitem__(self, name):
        """Return the top-level variant with UID `name`."""
        return self.variants[name]

    @property
    def compose_id(self):
        """Compose ID from the base ComposeInfo metadata."""
        return self.ci_base.compose.id

    @property
    def compose_date(self):
        """Compose date stamp (YYYYMMDD)."""
        return self.ci_base.compose.date

    @property
    def compose_respin(self):
        """Respin counter distinguishing composes of the same date."""
        return self.ci_base.compose.respin

    @property
    def compose_type(self):
        """Compose type, e.g. "production"."""
        return self.ci_base.compose.type

    @property
    def compose_type_suffix(self):
        """Short suffix derived from the compose type (see productmd)."""
        return self.ci_base.compose.type_suffix

    @property
    def compose_label(self):
        """Compose label, or None if the compose has no label."""
        return self.ci_base.compose.label

    @property
    def compose_label_major_version(self):
        """Major version parsed from the compose label."""
        return self.ci_base.compose.label_major_version

    @property
    def has_comps(self):
        """Whether a comps file is configured for this compose."""
        return bool(self.conf.get("comps_file", False))

    @property
    def has_module_defaults(self):
        """Whether a module defaults directory is configured."""
        return bool(self.conf.get("module_defaults_dir", False))

    @property
    def config_dir(self):
        """Directory the main configuration file was loaded from."""
        return os.path.dirname(self.conf._open_file or "")

    @property
    def should_create_yum_database(self):
        """Explicit configuration trumps all. Otherwise check gather backend
        and only create it for Yum.
        """
        config = self.conf.get("createrepo_database")
        if config is not None:
            return config
        return self.conf["gather_backend"] == "yum"

    def read_variants(self):
        """Fetch the variants XML file and parse it into Variant objects.

        Populates ``self.variants`` and ``self.all_variants`` and refreshes
        ``self.ci_base`` with variant data.
        """
        # TODO: move to phases/init ?
        variants_file = self.paths.work.variants_file(arch="global")

        scm_dict = self.conf["variants_file"]
        if isinstance(scm_dict, dict):
            file_name = os.path.basename(scm_dict["file"])
            if scm_dict["scm"] == "file":
                # A plain file is resolved relative to the config directory.
                scm_dict["file"] = os.path.join(
                    self.config_dir, os.path.basename(scm_dict["file"])
                )
        else:
            file_name = os.path.basename(scm_dict)
            scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))

        self.log_debug("Writing variants file: %s", variants_file)
        tmp_dir = self.mkdtemp(prefix="variants_file_")
        get_file_from_scm(scm_dict, tmp_dir, compose=self)
        shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
        shutil.rmtree(tmp_dir)

        tree_arches = self.conf.get("tree_arches", None)
        tree_variants = self.conf.get("tree_variants", None)
        with open(variants_file, "r") as file_obj:
            parser = VariantsXmlParser(
                file_obj, tree_arches, tree_variants, logger=self._logger
            )
            self.variants = parser.parse()

        self.all_variants = {}
        for variant in self.get_variants():
            self.all_variants[variant.uid] = variant

        # populate ci_base with variants - needed for layered-products (compose_id)
        # FIXME - compose_to_composeinfo is no longer needed and has been
        # removed, but I'm not entirely sure what this is needed for
        # or if it is at all
        self.ci_base = compose_to_composeinfo(self)

    def get_variants(self, types=None, arch=None):
        """Return all variants, including nested ones, optionally filtered
        by variant type and architecture.
        """
        result = []
        for i in self.variants.values():
            if (not types or i.type in types) and (not arch or arch in i.arches):
                result.append(i)
            # Nested variants are collected regardless of the parent match.
            result.extend(i.get_variants(types=types, arch=arch))
        return sorted(set(result))

    def get_arches(self):
        """Return a sorted list of all arches used by any variant."""
        result = set()
        for variant in self.get_variants():
            for arch in variant.arches:
                result.add(arch)
        return sorted(result)

    @property
    def status_file(self):
        """Path to file where the compose status will be stored."""
        if not hasattr(self, "_status_file"):
            self._status_file = os.path.join(self.topdir, "STATUS")
        return self._status_file

    def _log_failed_deliverables(self):
        # Log each failed deliverable and dump the complete bookkeeping to
        # deliverables.json for later inspection.
        for kind, data in self.failed_deliverables.items():
            for variant, arch, subvariant in data:
                self.log_info(
                    "Failed %s on variant <%s>, arch <%s>, subvariant <%s>."
                    % (kind, variant, arch, subvariant)
                )
        log = os.path.join(self.paths.log.topdir("global"), "deliverables.json")
        with open(log, "w") as f:
            json.dump(
                {
                    "required": self.required_deliverables,
                    "failed": self.failed_deliverables,
                    "attempted": self.attempted_deliverables,
                },
                f,
                indent=4,
            )

    def write_status(self, stat_msg):
        """Write compose status `stat_msg` into the STATUS file.

        :raises RuntimeError: when trying to modify a FINISHED compose
        """
        if stat_msg not in ("STARTED", "FINISHED", "DOOMED", "TERMINATED"):
            self.log_warning("Writing nonstandard compose status: %s" % stat_msg)
        old_status = self.get_status()
        if stat_msg == old_status:
            # Status did not change; nothing to write or notify.
            return
        if old_status == "FINISHED":
            msg = "Could not modify a FINISHED compose: %s" % self.topdir
            self.log_error(msg)
            raise RuntimeError(msg)

        # A compose that finished with failed deliverables is reported as
        # incomplete instead of plain FINISHED.
        if stat_msg == "FINISHED" and self.failed_deliverables:
            stat_msg = "FINISHED_INCOMPLETE"

        self._log_failed_deliverables()

        with open(self.status_file, "w") as f:
            f.write(stat_msg + "\n")

        if self.notifier:
            self.notifier.send("status-change", status=stat_msg)

    def get_status(self):
        """Return the current compose status, or None if not written yet."""
        if not os.path.isfile(self.status_file):
            return
        return open(self.status_file, "r").read().strip()

    def get_image_name(
        self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", format=None
    ):
        """Create a filename for image with given parameters.

        :raises RuntimeError: when unknown ``disc_type`` is given
        """
        default_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"
        format = format or self.conf.get("image_name_format", default_format)

        if isinstance(format, dict):
            # Per-variant configuration: pick the format matching the
            # variant, fall back to the default format.
            conf = get_variant_data(self.conf, "image_name_format", variant)
            format = conf[0] if conf else default_format

        if arch == "src":
            arch = "source"

        if disc_num:
            disc_num = int(disc_num)
        else:
            # A falsy disc number is dropped from the name entirely.
            disc_num = ""

        kwargs = {
            "arch": arch,
            "disc_type": disc_type,
            "disc_num": disc_num,
            "suffix": suffix,
        }
        if variant.type == "layered-product":
            # Layered products are named after the parent variant and use
            # the layered product's own compose ID.
            variant_uid = variant.parent.uid
            kwargs["compose_id"] = self.ci_base[variant.uid].compose_id
        else:
            variant_uid = variant.uid
        args = get_format_substs(self, variant=variant_uid, **kwargs)
        try:
            # Support both %-style and {}-style placeholders in the format.
            return (format % args).format(**args)
        except KeyError as err:
            raise RuntimeError(
                "Failed to create image name: unknown format element: %s" % err
            )

    def can_fail(self, variant, arch, deliverable):
        """Figure out if deliverable can fail on variant.arch.

        Variant can be None.
        """
        failable = get_arch_variant_data(
            self.conf, "failable_deliverables", arch, variant
        )
        return deliverable in failable

    def attempt_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about attempted deliverable."""
        variant_uid = variant.uid if variant else ""
        self.attempted_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant)
        )

    def require_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about required deliverable."""
        variant_uid = variant.uid if variant else ""
        self.required_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant)
        )

    def fail_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about failed deliverable."""
        variant_uid = variant.uid if variant else ""
        self.failed_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant)
        )

    @property
    def image_release(self):
        """Generate a value to pass to Koji as image release.

        If this compose has a label, the version from it will be used,
        otherwise we will create a string with date, compose type and respin.
        """
        if self.compose_label:
            milestone, release = self.compose_label.split("-")
            return release

        return "%s%s.%s" % (
            self.compose_date,
            self.ci_base.compose.type_suffix,
            self.compose_respin,
        )

    @property
    def image_version(self):
        """Generate a value to pass to Koji as image version.

        The value is based on release version. If compose has a label, the
        milestone from it is appended to the version (unless it is RC).
        """
        version = self.ci_base.release.version
        if self.compose_label and not self.compose_label.startswith("RC-"):
            milestone, release = self.compose_label.split("-")
            return "%s_%s" % (version, milestone)

        return version

    def mkdtemp(self, arch=None, variant=None, suffix="", prefix="tmp"):
        """
        Create and return a unique temporary directory under dir of
        <compose_topdir>/work/{global,<arch>}/tmp[-<variant>]/
        """
        path = os.path.join(self.paths.work.tmp_dir(arch=arch, variant=variant))
        return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
|
2019-01-31 14:52:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_ordered_variant_uids(compose):
    """Return variant UIDs in the order they should be processed.

    The order is computed once and cached on the compose object as
    ``_ordered_variant_uids``.
    """
    if not hasattr(compose, "_ordered_variant_uids"):
        constrained_uids = _prepare_variant_as_lookaside(compose)
        # Variants not mentioned in 'variant_as_lookaside' have no ordering
        # constraints (no dependencies within this group), so they go first.
        # A-Z sorting keeps the result reproducible.
        unconstrained_uids = sorted(
            set(compose.all_variants.keys()) - set(constrained_uids)
        )
        compose._ordered_variant_uids = unconstrained_uids + constrained_uids
    return compose._ordered_variant_uids
|
|
|
|
|
|
|
|
|
|
|
|
def _prepare_variant_as_lookaside(compose):
    """Compute a processing order for variants constrained by the
    'variant_as_lookaside' configuration option.

    The option holds <variant, its lookaside> pairs where the lookaside
    variant has to be processed before the variant depending on it. The
    pairs form an oriented acyclic graph whose pruned spanning order
    (reversed) gives the processing order.

    :raises ValueError: when the configured pairs do not form a valid graph.
    """
    pairs = compose.conf.get("variant_as_lookaside", [])
    dependency_graph = SimpleAcyclicOrientedGraph()
    for dependent_uid, lookaside_uid in pairs:
        try:
            dependency_graph.add_edge(dependent_uid, lookaside_uid)
        except ValueError as exc:
            raise ValueError(
                "There is a bad configuration in 'variant_as_lookaside': %s" % exc
            )
    return list(reversed(dependency_graph.prune_graph()))
|