# -*- coding: utf-8 -*-


# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


__all__ = (
    "Compose",
)


import errno
import os
import time
import tempfile
import shutil
import json

import kobo.log
from productmd.composeinfo import ComposeInfo
from productmd.images import Images
from dogpile.cache import make_region

from pungi.graph import SimpleAcyclicOrientedGraph
from pungi.wrappers.variants import VariantsXmlParser
from pungi.paths import Paths
from pungi.wrappers.scm import get_file_from_scm
from pungi.util import (
    makedirs, get_arch_variant_data, get_format_substs, get_variant_data
)
from pungi.metadata import compose_to_composeinfo

try:
    # This is available since productmd >= 1.18
    # TODO: remove this once the version is distributed widely enough
    from productmd.composeinfo import SUPPORTED_MILESTONES
except ImportError:
    SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]


def get_compose_dir(topdir, conf, compose_type="production", compose_date=None,
                    compose_respin=None, compose_label=None,
                    already_exists_callbacks=None):
    already_exists_callbacks = already_exists_callbacks or []

    # create an incomplete composeinfo to generate compose ID
    ci = ComposeInfo()
    ci.release.name = conf["release_name"]
    ci.release.short = conf["release_short"]
    ci.release.version = conf["release_version"]
    ci.release.is_layered = True if conf.get("base_product_name", "") else False
    ci.release.type = conf.get("release_type", "ga").lower()
    ci.release.internal = bool(conf.get("release_internal", False))
    if ci.release.is_layered:
        ci.base_product.name = conf["base_product_name"]
        ci.base_product.short = conf["base_product_short"]
        ci.base_product.version = conf["base_product_version"]
        ci.base_product.type = conf.get("base_product_type", "ga").lower()

    ci.compose.label = compose_label
    ci.compose.type = compose_type
    ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
    ci.compose.respin = compose_respin or 0

    while 1:
        ci.compose.id = ci.create_compose_id()

        compose_dir = os.path.join(topdir, ci.compose.id)

        exists = False
        # TODO: callbacks to determine if a composeid was already used
        # for callback in already_exists_callbacks:
        #     if callback(data):
        #         exists = True
        #         break

        # already_exists_callbacks fallback: does target compose_dir exist?
        try:
            os.makedirs(compose_dir)
        except OSError as ex:
            if ex.errno == errno.EEXIST:
                exists = True
            else:
                raise

        if exists:
            ci.compose.respin += 1
            continue
        break

    with open(os.path.join(compose_dir, "COMPOSE_ID"), "w") as f:
        f.write(ci.compose.id)
    work_dir = os.path.join(compose_dir, "work", "global")
    makedirs(work_dir)
    ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
    return compose_dir
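

# A minimal usage sketch (the paths and config values below are illustrative
# assumptions, not defaults):
#
#     conf = {
#         "release_name": "Fedora",
#         "release_short": "Fedora",
#         "release_version": "33",
#     }
#     compose_dir = get_compose_dir("/mnt/compose", conf, compose_type="nightly")
#
# This creates /mnt/compose/<compose id>/ (bumping the respin if the directory
# already exists), writes the COMPOSE_ID file and dumps
# work/global/composeinfo-base.json inside it.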


class Compose(kobo.log.LoggingBase):
    def __init__(self, conf, topdir, debug=False, skip_phases=None,
                 just_phases=None, old_composes=None, koji_event=None,
                 supported=False, logger=None, notifier=None):
        kobo.log.LoggingBase.__init__(self, logger)
        # TODO: check if minimal conf values are set
        self.conf = conf
        # This is a dict mapping UID to Variant objects. It only contains top
        # level variants.
        self.variants = {}
        # This is a similar mapping, but contains even nested variants.
        self.all_variants = {}
        self.topdir = os.path.abspath(topdir)
        self.skip_phases = skip_phases or []
        self.just_phases = just_phases or []
        self.old_composes = old_composes or []
        self.koji_event = koji_event or conf.get("koji_event")
        self.notifier = notifier

        # intentionally upper-case (visible in the code)
        self.DEBUG = debug

        # path definitions
        self.paths = Paths(self)

        # Set up logging to file
        if logger:
            kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "pungi.log"))

        # to provide compose_id, compose_date and compose_respin
        self.ci_base = ComposeInfo()
        self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))

        self.supported = supported
        if self.compose_label and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES:
            self.log_info("Automatically setting 'supported' flag due to label: %s." % self.compose_label)
            self.supported = True

        self.im = Images()
        if self.DEBUG:
            try:
                self.im.load(self.paths.compose.metadata("images.json"))
            except RuntimeError:
                pass
            # images.json doesn't exist
            except IOError:
                pass
            # images.json is not a valid json file, for example, it's an empty file
            except ValueError:
                pass
        self.im.compose.id = self.compose_id
        self.im.compose.type = self.compose_type
        self.im.compose.date = self.compose_date
        self.im.compose.respin = self.compose_respin
        self.im.metadata_path = self.paths.compose.metadata()

        # Stores list of deliverables that failed, but did not abort the
        # compose.
        # {deliverable: [(Variant.uid, arch, subvariant)]}
        self.failed_deliverables = {}
        self.attempted_deliverables = {}
        self.required_deliverables = {}

        if self.conf.get("dogpile_cache_backend", None):
            self.cache_region = make_region().configure(
                self.conf.get("dogpile_cache_backend"),
                expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
                arguments=self.conf.get("dogpile_cache_arguments", {})
            )
        else:
            self.cache_region = make_region().configure('dogpile.cache.null')
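
        # An illustrative cache configuration (the backend name and arguments
        # below are assumptions; any installed dogpile.cache backend works):
        #
        #     dogpile_cache_backend = "dogpile.cache.memcached"
        #     dogpile_cache_expiration_time = 3600
        #     dogpile_cache_arguments = {"url": "127.0.0.1:11211"}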

    get_compose_dir = staticmethod(get_compose_dir)

    def __getitem__(self, name):
        return self.variants[name]

    @property
    def compose_id(self):
        return self.ci_base.compose.id

    @property
    def compose_date(self):
        return self.ci_base.compose.date

    @property
    def compose_respin(self):
        return self.ci_base.compose.respin

    @property
    def compose_type(self):
        return self.ci_base.compose.type

    @property
    def compose_type_suffix(self):
        return self.ci_base.compose.type_suffix

    @property
    def compose_label(self):
        return self.ci_base.compose.label

    @property
    def compose_label_major_version(self):
        return self.ci_base.compose.label_major_version

    @property
    def has_comps(self):
        return bool(self.conf.get("comps_file", False))

    @property
    def has_module_defaults(self):
        return bool(self.conf.get("module_defaults_dir", False))

    @property
    def config_dir(self):
        return os.path.dirname(self.conf._open_file or "")

    @property
    def should_create_yum_database(self):
        """Explicit configuration trumps all. Otherwise check gather backend
        and only create it for Yum.
        """
        config = self.conf.get('createrepo_database')
        if config is not None:
            return config
        return self.conf['gather_backend'] == 'yum'

    def read_variants(self):
        # TODO: move to phases/init ?
        variants_file = self.paths.work.variants_file(arch="global")
        msg = "Writing variants file: %s" % variants_file

        if self.DEBUG and os.path.isfile(variants_file):
            self.log_warning("[SKIP ] %s" % msg)
        else:
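            # The 'variants_file' option is either a plain path (only its
            # basename is used; the file is looked up next to the main config
            # file) or an SCM dict. An illustrative example of the latter (the
            # repository URL is an assumption):
            #
            #     variants_file = {
            #         "scm": "git",
            #         "repo": "https://example.com/release-configs.git",
            #         "file": "variants.xml",
            #     }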
            scm_dict = self.conf["variants_file"]
            if isinstance(scm_dict, dict):
                file_name = os.path.basename(scm_dict["file"])
                if scm_dict["scm"] == "file":
                    scm_dict["file"] = os.path.join(self.config_dir, os.path.basename(scm_dict["file"]))
            else:
                file_name = os.path.basename(scm_dict)
                scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))

            self.log_debug(msg)
            tmp_dir = self.mkdtemp(prefix="variants_file_")
            get_file_from_scm(scm_dict, tmp_dir, logger=self._logger)
            shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
            shutil.rmtree(tmp_dir)

        tree_arches = self.conf.get("tree_arches", None)
        tree_variants = self.conf.get("tree_variants", None)
        with open(variants_file, "r") as file_obj:
            parser = VariantsXmlParser(file_obj, tree_arches, tree_variants, logger=self._logger)
            self.variants = parser.parse()

        self.all_variants = {}
        for variant in self.get_variants():
            self.all_variants[variant.uid] = variant

        # populate ci_base with variants - needed for layered-products (compose_id)
        # FIXME: compose_to_composeinfo is no longer needed and has been
        # removed, but I'm not entirely sure what this is needed for
        # or if it is at all
        self.ci_base = compose_to_composeinfo(self)

    def get_variants(self, types=None, arch=None):
        result = []
        for i in self.variants.values():
            if (not types or i.type in types) and (not arch or arch in i.arches):
                result.append(i)
            result.extend(i.get_variants(types=types, arch=arch))
        return sorted(set(result))

    def get_arches(self):
        result = set()
        for variant in self.get_variants():
            for arch in variant.arches:
                result.add(arch)
        return sorted(result)

    @property
    def status_file(self):
        """Path to file where the compose status will be stored."""
        if not hasattr(self, '_status_file'):
            self._status_file = os.path.join(self.topdir, 'STATUS')
        return self._status_file

    def _log_failed_deliverables(self):
        for kind, data in self.failed_deliverables.items():
            for variant, arch, subvariant in data:
                self.log_info('Failed %s on variant <%s>, arch <%s>, subvariant <%s>.'
                              % (kind, variant, arch, subvariant))
        log = os.path.join(self.paths.log.topdir('global'), 'deliverables.json')
        with open(log, 'w') as f:
            json.dump({'required': self.required_deliverables,
                       'failed': self.failed_deliverables,
                       'attempted': self.attempted_deliverables},
                      f, indent=4)
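
    # The dump above yields a JSON document shaped like this (the values are
    # illustrative, not from a real compose):
    #
    #     {
    #         "required": {"iso": [["Server", "x86_64", "Server"]]},
    #         "attempted": {"iso": [["Server", "x86_64", "Server"]]},
    #         "failed": {}
    #     }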

    def write_status(self, stat_msg):
        if stat_msg not in ("STARTED", "FINISHED", "DOOMED", "TERMINATED"):
            self.log_warning("Writing nonstandard compose status: %s" % stat_msg)
        old_status = self.get_status()
        if stat_msg == old_status:
            return
        if old_status == "FINISHED":
            msg = "Could not modify a FINISHED compose: %s" % self.topdir
            self.log_error(msg)
            raise RuntimeError(msg)

        if stat_msg == 'FINISHED' and self.failed_deliverables:
            stat_msg = 'FINISHED_INCOMPLETE'

        self._log_failed_deliverables()

        with open(self.status_file, "w") as f:
            f.write(stat_msg + "\n")

        if self.notifier:
            self.notifier.send('status-change', status=stat_msg)

    def get_status(self):
        if not os.path.isfile(self.status_file):
            return
        with open(self.status_file, "r") as f:
            return f.read().strip()

    def get_image_name(self, arch, variant, disc_type='dvd',
                       disc_num=1, suffix='.iso', format=None):
        """Create a filename for an image with the given parameters.

        :raises RuntimeError: when an unknown ``disc_type`` is given
        """
        default_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"
        format = format or self.conf.get('image_name_format', default_format)

        if isinstance(format, dict):
            conf = get_variant_data(self.conf, "image_name_format", variant)
            format = conf[0] if conf else default_format

        if arch == "src":
            arch = "source"

        if disc_num:
            disc_num = int(disc_num)
        else:
            disc_num = ""

        kwargs = {
            'arch': arch,
            'disc_type': disc_type,
            'disc_num': disc_num,
            'suffix': suffix
        }
        if variant.type == "layered-product":
            variant_uid = variant.parent.uid
            kwargs['compose_id'] = self.ci_base[variant.uid].compose_id
        else:
            variant_uid = variant.uid
        args = get_format_substs(self, variant=variant_uid, **kwargs)
        try:
            return (format % args).format(**args)
        except KeyError as err:
            raise RuntimeError('Failed to create image name: unknown format element: %s' % err)
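
    # An illustrative expansion of the default format above (the compose ID is
    # made up): compose_id="Fedora-33-20201027.n.0", variant="Server",
    # arch="x86_64", disc_type="dvd", disc_num=1 and suffix=".iso" give
    # "Fedora-33-20201027.n.0-Server-x86_64-dvd1.iso".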

    def can_fail(self, variant, arch, deliverable):
        """Figure out if deliverable can fail on variant.arch.

        Variant can be None.
        """
        failable = get_arch_variant_data(self.conf, 'failable_deliverables', arch, variant)
        return deliverable in failable
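
    # A sketch of the matching configuration (the variant pattern, arch and
    # deliverable names are assumptions, not a recommendation):
    #
    #     failable_deliverables = [
    #         ("^Server$", {
    #             "x86_64": ["buildinstall", "iso"],
    #         }),
    #     ]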

    def attempt_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about attempted deliverable."""
        variant_uid = variant.uid if variant else ''
        self.attempted_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant))

    def require_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about required deliverable."""
        variant_uid = variant.uid if variant else ''
        self.required_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant))

    def fail_deliverable(self, variant, arch, kind, subvariant=None):
        """Log information about failed deliverable."""
        variant_uid = variant.uid if variant else ''
        self.failed_deliverables.setdefault(kind, []).append(
            (variant_uid, arch, subvariant))

    @property
    def image_release(self):
        """Generate a value to pass to Koji as image release.

        If this compose has a label, the version from it will be used,
        otherwise we will create a string with date, compose type and respin.
        """
        if self.compose_label:
            milestone, release = self.compose_label.split('-')
            return release

        return '%s%s.%s' % (self.compose_date, self.ci_base.compose.type_suffix,
                            self.compose_respin)

    @property
    def image_version(self):
        """Generate a value to pass to Koji as image version.

        The value is based on the release version. If the compose has a label,
        the milestone from it is appended to the version (unless it is RC).
        """
        version = self.ci_base.release.version
        if self.compose_label and not self.compose_label.startswith('RC-'):
            milestone, release = self.compose_label.split('-')
            return '%s_%s' % (version, milestone)

        return version
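
    # Illustrative values (the label is made up): with compose_label
    # "Beta-1.2", image_release is "1.2" and image_version appends the
    # milestone, e.g. "33_Beta" for release version "33"; an unlabeled nightly
    # compose from 20201027 with respin 0 would get something like
    # "20201027.n.0" (date + type suffix + respin).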

    def mkdtemp(self, arch=None, variant=None, suffix="", prefix="tmp"):
        """
        Create and return a unique temporary directory under
        <compose_topdir>/work/{global,<arch>}/tmp[-<variant>]/
        """
        path = self.paths.work.tmp_dir(arch=arch, variant=variant)
        return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)


def get_ordered_variant_uids(compose):
    if not hasattr(compose, "_ordered_variant_uids"):
        ordered_variant_uids = _prepare_variant_as_lookaside(compose)
        # Some variants were not mentioned in the configuration value
        # 'variant_as_lookaside' and their run order is not crucial (that
        # means there are no dependencies inside this group). They will be
        # processed first. A-Z sorting is for reproducibility.
        unordered_variant_uids = sorted(
            set(compose.all_variants.keys()) - set(ordered_variant_uids)
        )
        setattr(
            compose,
            "_ordered_variant_uids",
            unordered_variant_uids + ordered_variant_uids
        )
    return getattr(compose, "_ordered_variant_uids")


def _prepare_variant_as_lookaside(compose):
    """
    The configuration value 'variant_as_lookaside' contains pairs <variant,
    its lookaside>. In each pair the lookaside variant has to be processed
    first. The structure can be represented as an oriented graph; pruning it
    yields the order in which to process this set of variants.
    """
    variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
    graph = SimpleAcyclicOrientedGraph()
    for variant, lookaside_variant in variant_as_lookaside:
        try:
            graph.add_edge(variant, lookaside_variant)
        except ValueError as e:
            raise ValueError("There is a bad configuration in 'variant_as_lookaside': %s" % e)

    variant_processing_order = reversed(graph.prune_graph())
    return list(variant_processing_order)
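

# An illustrative configuration (the variant names are assumptions):
#
#     variant_as_lookaside = [
#         ("Everything", "Server"),
#     ]
#
# makes the "Server" variant (the lookaside) come before "Everything" in the
# processing order; variants not mentioned in the option are processed before
# this group, in alphabetical order.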