# -*- coding: utf-8 -*-

"""
This script creates unified ISOs for a specified compose.

Unified ISOs are created per architecture and contain all variant
packages and repos.

TODO:
* jigdo
"""

from __future__ import print_function

import copy
import errno
import glob
import json
import os
import shutil
import sys
import tempfile

import productmd
import productmd.compose
import productmd.images
import productmd.treeinfo
from kobo.shortcuts import run

import pungi.linker
import pungi.wrappers.createrepo
from pungi.util import makedirs
from pungi.compose_metadata.discinfo import write_discinfo as create_discinfo
from pungi.wrappers import iso
from pungi.phases.image_checksum import make_checksums


def ti_merge(one, two):
    """Merge variants from .treeinfo object ``two`` into ``one`` (in place).

    Variants that already exist in ``one`` are left untouched.
    """
    assert one.tree.arch == two.tree.arch
    for variant in two.variants.get_variants(recursive=False):
        if variant.uid in one.variants:
            continue
        var = productmd.treeinfo.Variant(one)
        var.id = variant.id
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type
        for i in (
            "debug_packages",
            "debug_repository",
            "packages",
            "repository",
            "source_packages",
            "source_repository",
        ):
            setattr(var, i, getattr(variant, i, None))
        one.variants.add(var)
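
# For illustration (hypothetical variant names): merging the Server.x86_64
# and Workstation.x86_64 .treeinfo files produces a single TreeInfo whose
# [variants] section lists both Server and Workstation, each keeping its
# own packages/repository attributes.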

DEFAULT_CHECKSUMS = ["md5", "sha1", "sha256"]


class UnifiedISO(object):
    def __init__(self, compose_path, output_path=None, arches=None):
        self.compose_path = os.path.abspath(compose_path)
        compose_subdir = os.path.join(self.compose_path, "compose")
        if os.path.exists(compose_subdir):
            self.compose_path = compose_subdir

        self.compose = productmd.compose.Compose(compose_path)
        self.ci = self.compose.info

        self.linker = pungi.linker.Linker()

        temp_topdir = os.path.abspath(os.path.join(self.compose_path, "..", "work"))
        makedirs(temp_topdir)
        self.temp_dir = tempfile.mkdtemp(prefix="unified_isos_", dir=temp_topdir)

        self.treeinfo = {}  # {arch/src: TreeInfo}
        self.repos = {}  # {arch/src: {variant: new_path}}
        self.comps = {}  # {arch/src: {variant: old_path}}
        self.productid = {}  # {arch/src: {variant: old_path}}
        self.conf = self.read_config()
        self.images = None  # productmd.images.Images instance
        self.arches = arches

    def create(self, delete_temp=True):
        print("Creating unified ISOs for: {0}".format(self.compose_path))
        try:
            self.link_to_temp()
            self.createrepo()
            self.discinfo()
            self.createiso()
            self.update_checksums()
            self.dump_manifest()
        except RuntimeError as exc:
            if hasattr(exc, "output"):
                print(exc.output)
            raise
        finally:
            if delete_temp:
                shutil.rmtree(self.temp_dir)

    def dump_manifest(self):
        dest = os.path.join(self.compose_path, "metadata", "images.json")
        tmp_file = dest + ".tmp"
        try:
            self.get_image_manifest().dump(tmp_file)
        except Exception:
            # We failed, clean up the temporary file.
            if os.path.exists(tmp_file):
                os.remove(tmp_file)
            raise
        # Success, move the temp file to the proper location.
        os.rename(tmp_file, dest)

    def _link_tree(self, dir, variant, arch):
        """Link all files from a variant tree into the unified temp tree.

        RPMs are collected under trees/<arch>/<variant_uid>/ so that each
        variant gets its own repo; all other files keep their relative paths.
        """
        blacklist_files = [
            ".treeinfo",
            ".discinfo",
            "boot.iso",
            "media.repo",
            "extra_files.json",
        ]
        blacklist_dirs = ["repodata"]

        for root, dirs, files in os.walk(dir):
            for i in blacklist_dirs:
                if i in dirs:
                    dirs.remove(i)

            for fn in files:
                if fn in blacklist_files:
                    continue

                old_path = os.path.join(root, fn)
                if fn.endswith(".rpm"):
                    new_path = os.path.join(
                        self.temp_dir, "trees", arch, variant.uid, fn
                    )
                    self.repos.setdefault(arch, {})[variant.uid] = os.path.dirname(
                        new_path
                    )
                else:
                    old_relpath = os.path.relpath(old_path, dir)
                    new_path = os.path.join(self.temp_dir, "trees", arch, old_relpath)

                makedirs(os.path.dirname(new_path))
                try:
                    self.linker.link(old_path, new_path)
                except OSError as exc:
                    print(
                        "Failed to link %s to %s: %s"
                        % (old_path, new_path, exc.strerror),
                        file=sys.stderr,
                    )
                    raise
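
    # After linking, the unified tree under self.temp_dir looks roughly like
    # this (hypothetical variants Server and Workstation on x86_64):
    #
    #   trees/x86_64/Server/*.rpm          <- per-variant RPM dirs
    #   trees/x86_64/Workstation/*.rpm
    #   trees/x86_64/images/...            <- other files keep relative paths
    #   trees/src/..., trees/debug-x86_64/...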

    def link_to_temp(self):
        # copy files to new location; change RPM location to $variant_uid
        for variant in self.ci.get_variants(recursive=False):
            for arch in variant.arches:
                if self.arches and arch not in self.arches:
                    continue
                print("Processing: {0}.{1}".format(variant.uid, arch))
                try:
                    tree_dir = os.path.join(
                        self.compose_path, variant.paths.os_tree[arch]
                    )
                except KeyError:
                    # The path in metadata is missing: no content there
                    continue

                ti = productmd.treeinfo.TreeInfo()
                try:
                    ti.load(os.path.join(tree_dir, ".treeinfo"))
                except IOError as exc:
                    if exc.errno != errno.ENOENT:
                        raise
                    print(
                        "Tree %s.%s has no .treeinfo, skipping..."
                        % (variant.uid, arch),
                        file=sys.stderr,
                    )
                    continue

                arch_ti = self.treeinfo.get(arch)
                if arch_ti is None:
                    arch_ti = ti
                    self.treeinfo[arch] = arch_ti
                else:
                    ti_merge(arch_ti, ti)

                if arch_ti.tree.arch != arch:
                    raise RuntimeError("Treeinfo arch mismatch")

                # override paths
                arch_ti[variant.uid].repository = variant.uid
                arch_ti[variant.uid].packages = variant.uid

                comps_path = glob.glob(
                    os.path.join(
                        self.compose_path,
                        variant.paths.repository[arch],
                        "repodata",
                        "*comps*.xml",
                    )
                )
                if comps_path:
                    self.comps.setdefault(arch, {})[variant.uid] = comps_path[0]

                productid_path = os.path.join(
                    self.compose_path,
                    variant.paths.repository[arch],
                    "repodata",
                    "productid",
                )
                self.productid.setdefault(arch, {})[variant.uid] = productid_path

                self._link_tree(tree_dir, variant, arch)

                # sources
                print("Processing: {0}.{1}".format(variant.uid, "src"))
                tree_dir = os.path.join(
                    self.compose_path, variant.paths.source_tree[arch]
                )
                ti = productmd.treeinfo.TreeInfo()
                ti.load(os.path.join(tree_dir, ".treeinfo"))

                arch_ti = self.treeinfo.get("src")
                if arch_ti is None:
                    arch_ti = ti
                    self.treeinfo["src"] = arch_ti
                else:
                    ti_merge(arch_ti, ti)

                if arch_ti.tree.arch != "src":
                    raise RuntimeError("Treeinfo arch mismatch")

                # override paths
                arch_ti[variant.uid].repository = variant.uid
                arch_ti[variant.uid].packages = variant.uid
                # These should be set to None and replaced with source_*;
                # that requires productmd changes or a newer upstream version.
                # arch_ti[variant.uid].source_repository = variant.uid
                # arch_ti[variant.uid].source_packages = variant.uid

                self._link_tree(tree_dir, variant, "src")

                # Debuginfo
                print("Processing: {0}.{1} debuginfo".format(variant.uid, arch))
                tree_dir = os.path.join(
                    self.compose_path, variant.paths.debug_tree[arch]
                )

                debug_arch = "debug-%s" % arch

                # We don't have a .treeinfo for debuginfo trees. Let's just
                # copy the one from the binary tree.
                self.treeinfo.setdefault(debug_arch, copy.deepcopy(self.treeinfo[arch]))

                self._link_tree(tree_dir, variant, debug_arch)

    def createrepo(self):
        # remove old repomd.xml checksums from treeinfo
        for arch, ti in self.treeinfo.items():
            print("Removing old repomd.xml checksums from treeinfo: {0}".format(arch))
            # Copy the keys into a list so entries can be deleted while
            # iterating (deleting during direct dict iteration breaks on
            # Python 3).
            for i in list(ti.checksums.checksums.keys()):
                if "repomd.xml" in i:
                    del ti.checksums.checksums[i]

        # write new per-variant repodata
        cr = pungi.wrappers.createrepo.CreaterepoWrapper(createrepo_c=True)
        for arch in self.repos:
            ti = self.treeinfo[arch]
            for variant in self.repos[arch]:
                print("Creating repodata: {0}.{1}".format(variant, arch))
                tree_dir = os.path.join(self.temp_dir, "trees", arch)
                repo_path = self.repos[arch][variant]
                comps_path = self.comps.get(arch, {}).get(variant, None)
                cmd = cr.get_createrepo_cmd(
                    repo_path, groupfile=comps_path, update=True
                )
                run(cmd, show_cmd=True)

                productid_path = self.productid.get(arch, {}).get(variant, None)
                if productid_path:
                    print("Adding productid to repodata: {0}.{1}".format(variant, arch))
                    repo_dir = os.path.join(self.repos[arch][variant], "repodata")
                    new_path = os.path.join(repo_dir, os.path.basename(productid_path))

                    if os.path.exists(productid_path):
                        shutil.copy2(productid_path, new_path)
                        cmd = cr.get_modifyrepo_cmd(
                            repo_dir, new_path, compress_type="gz"
                        )
                        run(cmd)
                    else:
                        print(
                            "WARNING: productid not found in {0}.{1}".format(
                                variant, arch
                            )
                        )

                print(
                    "Inserting new repomd.xml checksum to treeinfo: {0}.{1}".format(
                        variant, arch
                    )
                )
                # insert new repomd.xml checksum to treeinfo
                repomd_path = os.path.join(repo_path, "repodata", "repomd.xml")
                ti.checksums.add(
                    os.path.relpath(repomd_path, tree_dir), "sha256", root_dir=tree_dir
                )

        # write treeinfo
        for arch, ti in self.treeinfo.items():
            print("Writing treeinfo: {0}".format(arch))
            ti_path = os.path.join(self.temp_dir, "trees", arch, ".treeinfo")
            makedirs(os.path.dirname(ti_path))
            ti.dump(ti_path)

    def discinfo(self):
        # write discinfo
        for arch, ti in self.treeinfo.items():
            di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")
            description = "%s %s" % (ti.release.name, ti.release.version)
            if ti.release.is_layered:
                description += " for %s %s" % (
                    ti.base_product.name,
                    ti.base_product.version,
                )
            create_discinfo(di_path, description, arch.split("-", 1)[-1])
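
    # A .discinfo written this way contains, one per line (values are
    # illustrative): a timestamp, the description ("Example 1.0"), the arch
    # ("x86_64"; the "debug-" prefix is stripped above), and the disc
    # numbers ("ALL").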

    def read_config(self):
        try:
            conf_dump = glob.glob(
                os.path.join(
                    self.compose_path, "../logs/global/config-dump*.global.log"
                )
            )[0]
        except IndexError:
            print(
                "Config dump not found, cannot adhere to previous settings. "
                "Expect weird naming and checksums.",
                file=sys.stderr,
            )
            return {}
        with open(conf_dump) as f:
            return json.load(f)
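
    # The config dump is a JSON dictionary of the original compose settings.
    # Only a few keys are consumed here (values are illustrative):
    #
    #   {
    #       "media_checksums": ["sha256"],
    #       "media_checksum_one_file": true,
    #       "media_checksum_base_filename": "%(release_short)s-%(version)s"
    #   }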

    def createiso(self):
        # create ISOs
        im = self.get_image_manifest()

        for typed_arch, ti in self.treeinfo.items():
            source_dir = os.path.join(self.temp_dir, "trees", typed_arch)
            arch = typed_arch.split("-", 1)[-1]
            debuginfo = typed_arch.startswith("debug-")

            # XXX: HARDCODED
            disc_type = "dvd"

            iso_arch = arch
            if arch == "src":
                iso_arch = "source"
            elif debuginfo:
                iso_arch = arch + "-debuginfo"

            iso_name = "%s-%s-%s.iso" % (self.ci.compose.id, iso_arch, disc_type)
            iso_dir = os.path.join(self.temp_dir, "iso", iso_arch)
            iso_path = os.path.join(iso_dir, iso_name)

            print("Creating ISO for {0}: {1}".format(arch, iso_name))

            makedirs(iso_dir)
            volid = "%s %s %s" % (ti.release.short, ti.release.version, arch)
            if debuginfo:
                volid += " debuginfo"

            # create ISO
            run(
                iso.get_mkisofs_cmd(
                    iso_path, [source_dir], volid=volid, exclude=["./lost+found"]
                ),
                universal_newlines=True,
            )

            # implant MD5
            supported = True
            run(iso.get_implantisomd5_cmd(iso_path, supported))

            # write manifest file
            run(iso.get_manifest_cmd(iso_path))

            img = productmd.images.Image(im)
            # temporary path, just a file name; to be replaced with a
            # variant-specific path
            img.path = os.path.basename(iso_path)
            img.mtime = int(os.stat(iso_path).st_mtime)
            img.size = os.path.getsize(iso_path)
            img.arch = arch

            # XXX: HARDCODED
            img.type = "dvd" if not debuginfo else "dvd-debuginfo"
            img.format = "iso"
            img.disc_number = 1
            img.disc_count = 1
            img.bootable = False
            img.unified = True

            img.implant_md5 = iso.get_implanted_md5(iso_path)
            try:
                img.volume_id = iso.get_volume_id(iso_path)
            except RuntimeError:
                pass

            if arch == "src":
                all_arches = [i for i in self.treeinfo if i != "src"]
            else:
                all_arches = [arch]

            for tree_arch in all_arches:
                if tree_arch.startswith("debug-"):
                    continue
                ti = self.treeinfo[tree_arch]
                for variant_uid in ti.variants:
                    variant = ti.variants[variant_uid]
                    # We don't want to copy the manifest.
                    img.parent = None
                    variant_img = copy.deepcopy(img)
                    variant_img.parent = im
                    variant_img.subvariant = variant.id
                    variant_img.additional_variants = [
                        var.uid
                        for var in self.ci.get_variants(recursive=False)
                        if var.uid != variant_uid
                    ]
                    paths_attr = "isos" if arch != "src" else "source_isos"
                    paths = getattr(self.ci.variants[variant.uid].paths, paths_attr)
                    path = paths.get(
                        tree_arch, os.path.join(variant.uid, tree_arch, "iso")
                    )
                    if variant_img.type == "dvd-debuginfo":
                        prefix, isodir = path.rsplit("/", 1)
                        path = os.path.join(prefix, "debug", isodir)
                    variant_img.path = os.path.join(path, os.path.basename(img.path))
                    im.add(variant.uid, tree_arch, variant_img)

                    dst = os.path.join(self.compose_path, variant_img.path)
                    print("Linking {0} -> {1}".format(iso_path, dst))
                    makedirs(os.path.dirname(dst))
                    self.linker.link(iso_path, dst)
                    self.linker.link(iso_path + ".manifest", dst + ".manifest")

    def _get_base_filename(self, variant, arch):
        substs = {
            "compose_id": self.compose.info.compose.id,
            "release_short": self.compose.info.release.short,
            "version": self.compose.info.release.version,
            "date": self.compose.info.compose.date,
            "respin": self.compose.info.compose.respin,
            "type": self.compose.info.compose.type,
            "type_suffix": self.compose.info.compose.type_suffix,
            "label": self.compose.info.compose.label,
            "label_major_version": self.compose.info.compose.label_major_version,
            "variant": variant,
            "arch": arch,
        }
        base_name = self.conf.get("media_checksum_base_filename", "")
        if base_name:
            base_name = (base_name % substs).format(**substs)
            base_name += "-"
        return base_name
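
    # Both %()s and {} substitutions are supported. A hypothetical
    # "media_checksum_base_filename" of
    # "%(release_short)s-%(version)s-{date}{type_suffix}.{respin}" would
    # expand to "Example-1.0-20161012.n.0-", where the trailing dash
    # separates the prefix from the checksum file name.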

    def update_checksums(self):
        make_checksums(
            self.compose_path,
            self.get_image_manifest(),
            self.conf.get("media_checksums", DEFAULT_CHECKSUMS),
            self.conf.get("media_checksum_one_file", False),
            self._get_base_filename,
        )

    def get_image_manifest(self):
        """Return the images metadata, creating an empty manifest if the
        compose does not have an images.json yet."""
        if not self.images:
            try:
                self.images = self.compose.images
            except RuntimeError:
                self.images = productmd.images.Images()
                self.images.compose.id = self.compose.info.compose.id
                self.images.compose.type = self.compose.info.compose.type
                self.images.compose.date = self.compose.info.compose.date
                self.images.compose.respin = self.compose.info.compose.respin
        return self.images
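

if __name__ == "__main__":
    # Illustrative entry point only (assumption: the supported CLI wrapper
    # lives in a separate pungi script). Takes the compose path as the sole
    # argument.
    UnifiedISO(sys.argv[1]).create()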