2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2015-06-06 14:32:17 +00:00
|
|
|
|
2017-03-16 03:16:47 +00:00
|
|
|
import copy
|
2015-02-10 13:19:34 +00:00
|
|
|
import os
|
|
|
|
import time
|
|
|
|
|
|
|
|
import productmd.composeinfo
|
|
|
|
import productmd.treeinfo
|
2015-03-14 17:21:27 +00:00
|
|
|
from productmd.common import get_major_version
|
2016-09-07 11:05:11 +00:00
|
|
|
from kobo.shortcuts import relative_path, compute_file_checksums
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.compose_metadata.discinfo import write_discinfo as create_discinfo
|
|
|
|
from pungi.compose_metadata.discinfo import write_media_repo as create_media_repo
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_description(compose, variant, arch):
    """Return the human-readable tree description for a variant/arch.

    The ``release_discinfo_description`` config option wins when present.
    Otherwise the text is assembled from release information (plus base
    product for layered composes).  ``%(variant_name)s`` and ``%(arch)s``
    placeholders in the resulting string are expanded before returning.
    """
    if "release_discinfo_description" in compose.conf:
        description = compose.conf["release_discinfo_description"]
    elif variant.type == "layered-product":
        # we need to make sure the layered product behaves as it was composed separately
        description = "%s %s for %s %s" % (
            variant.release_name,
            variant.release_version,
            compose.conf["release_name"],
            get_major_version(compose.conf["release_version"]),
        )
    else:
        description = "%s %s" % (
            compose.conf["release_name"],
            compose.conf["release_version"],
        )
        if compose.conf.get("base_product_name", ""):
            description += " for %s %s" % (
                compose.conf["base_product_name"],
                compose.conf["base_product_version"],
            )

    return description % {"variant_name": variant.name, "arch": arch}
|
|
|
|
|
|
|
|
|
|
|
|
def write_discinfo(compose, arch, variant):
    """Write a ``.discinfo`` file into the variant's os tree.

    Addon variants are skipped — they live inside their parent's tree and
    share its metadata.
    """
    if variant.type == "addon":
        return
    tree_root = compose.paths.compose.os_tree(arch, variant)
    description = get_description(compose, variant, arch)
    return create_discinfo(
        os.path.join(tree_root, ".discinfo"), description, arch
    )
|
|
|
|
|
|
|
|
|
|
|
|
def write_media_repo(compose, arch, variant, timestamp=None):
    """Write a ``media.repo`` file into the variant's os tree.

    Addon variants are skipped — they live inside their parent's tree and
    share its metadata.  *timestamp* is forwarded to the repo file writer.
    """
    if variant.type == "addon":
        return
    tree_root = compose.paths.compose.os_tree(arch, variant)
    description = get_description(compose, variant, arch)
    return create_media_repo(
        os.path.join(tree_root, "media.repo"), description, timestamp
    )
|
|
|
|
|
2015-06-06 14:32:17 +00:00
|
|
|
|
|
|
|
def compose_to_composeinfo(compose):
    """Build a productmd ComposeInfo object describing *compose*.

    Fills in compose identity, release/base-product data and, for every
    variant that matches ``tree_arches``, the per-arch relative paths
    (binaries, sources, debug).  Paths are made relative to the compose
    top directory.  Returns the populated ComposeInfo; nothing is written
    to disk here.
    """
    ci = productmd.composeinfo.ComposeInfo()

    # compose
    ci.compose.id = compose.compose_id
    ci.compose.type = compose.compose_type
    ci.compose.date = compose.compose_date
    ci.compose.respin = compose.compose_respin
    ci.compose.label = compose.compose_label
    ci.compose.final = compose.supported

    # product
    ci.release.name = compose.conf["release_name"]
    ci.release.version = compose.conf["release_version"]
    ci.release.short = compose.conf["release_short"]
    # The compose is layered iff a base product name is configured.
    ci.release.is_layered = True if compose.conf.get("base_product_name", "") else False
    ci.release.type = compose.conf["release_type"].lower()
    ci.release.internal = bool(compose.conf["release_internal"])

    # base product
    if ci.release.is_layered:
        ci.base_product.name = compose.conf["base_product_name"]
        ci.base_product.version = compose.conf["base_product_version"]
        ci.base_product.short = compose.conf["base_product_short"]
        ci.base_product.type = compose.conf["base_product_type"].lower()

    def dump_variant(variant, parent=None):
        # Convert one pungi variant (and its children, recursively) into a
        # productmd Variant.  Returns None when the variant has no arch in
        # the configured ``tree_arches`` subset.
        var = productmd.composeinfo.Variant(ci)

        tree_arches = compose.conf.get("tree_arches")
        if tree_arches and not (set(variant.arches) & set(tree_arches)):
            return None

        # variant details
        # remove dashes from variant ID, rely on productmd verification
        var.id = variant.id.replace("-", "")
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type
        var.arches = set(variant.arches)

        if var.type == "layered-product":
            # Layered products carry their own release identity.
            var.release.name = variant.release_name
            var.release.short = variant.release_short
            var.release.version = variant.release_version
            var.release.is_layered = True
            var.release.type = ci.release.type

        for arch in variant.arches:
            # paths: binaries
            # All paths are stored relative to the compose top directory.
            var.paths.os_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            # Optional directories (isos, images, jigdos) are only recorded
            # when they actually exist on disk.
            iso_dir = (
                compose.paths.compose.iso_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            if iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), iso_dir)
            ):
                var.paths.isos[arch] = relative_path(
                    iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            # image_dir contains an %(arch)s placeholder that is expanded here.
            image_dir = compose.paths.compose.image_dir(variant=variant) or ""
            if image_dir:
                image_dir = image_dir % {"arch": arch}
                if os.path.isdir(image_dir):
                    var.paths.images[arch] = relative_path(
                        image_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                    ).rstrip("/")
            jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            if jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), jigdo_dir)
            ):
                var.paths.jigdos[arch] = relative_path(
                    jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: sources
            var.paths.source_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            source_iso_dir = (
                compose.paths.compose.iso_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_iso_dir)
            ):
                var.paths.source_isos[arch] = relative_path(
                    source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            source_jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)
            ):
                var.paths.source_jigdos[arch] = relative_path(
                    source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: debug
            var.paths.debug_tree[arch] = relative_path(
                compose.paths.compose.debug_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_repository[arch] = relative_path(
                compose.paths.compose.debug_repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_packages[arch] = relative_path(
                compose.paths.compose.debug_packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            """
            # XXX: not supported (yet?)
            debug_iso_dir = (
                compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
            )
            if debug_iso_dir:
                var.debug_iso_dir[arch] = relative_path(
                    debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            debug_jigdo_dir = (
                compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
            )
            if debug_jigdo_dir:
                var.debug_jigdo_dir[arch] = relative_path(
                    debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            """

        # Recurse into child variants (addons, layered products, ...).
        for v in variant.get_variants(recursive=False):
            x = dump_variant(v, parent=variant)
            if x is not None:
                var.add(x)
        return var

    for variant_id in sorted(compose.variants):
        variant = compose.variants[variant_id]
        v = dump_variant(variant)
        if v is not None:
            ci.variants.add(v)
    return ci
|
|
|
|
|
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
def write_compose_info(compose):
    """Dump ``composeinfo.json`` into the compose metadata directory.

    Variant paths that do not point at an existing directory are removed
    (set to None) from a deep copy before dumping, so the on-disk metadata
    only advertises directories that were actually created.
    """
    ci = compose_to_composeinfo(compose)

    msg = "Writing composeinfo"
    compose.log_info("[BEGIN] %s" % msg)

    path = compose.paths.compose.metadata("composeinfo.json")
    topdir = compose.paths.compose.topdir()

    # Modify a deep copy only, so the in-memory composeinfo stays intact.
    sanitized = copy.deepcopy(ci)
    for variant in sanitized.variants.variants.values():
        for field in variant.paths._fields:
            arch_paths = getattr(variant.paths, field)
            for arch in arch_paths:
                # Missing or never-created directories must not be dumped.
                if not os.path.isdir(os.path.join(topdir, arch_paths[arch])):
                    arch_paths[arch] = None
    sanitized.dump(path)

    compose.log_info("[DONE ] %s" % msg)
|
|
|
|
|
|
|
|
|
2017-11-08 13:24:01 +00:00
|
|
|
def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
    """Write a ``.treeinfo`` file into the os tree of *variant*.

    Skipped for addon and empty variants.  Release/base-product data comes
    from the compose configuration (or from the variant itself for layered
    products).  Addon sub-variants present on *arch* are attached to the
    main variant.  *bi* is the buildinstall phase object — TODO confirm;
    when it reports success for the variant, stage2 and image entries from
    the buildinstall-produced ``.treeinfo`` are merged in with checksums.
    *timestamp* defaults to the current time.
    """
    if variant.type in ("addon",) or variant.is_empty:
        return

    if not timestamp:
        timestamp = int(time.time())
    else:
        timestamp = int(timestamp)

    # Tree root with a guaranteed single trailing slash.
    os_tree = (
        compose.paths.compose.os_tree(arch=arch, variant=variant).rstrip("/") + "/"
    )

    ti = productmd.treeinfo.TreeInfo()
    # load from buildinstall .treeinfo

    if variant.type == "layered-product":
        # we need to make sure the layered product behaves as it was composed separately

        # release
        # TODO: read from variants.xml
        ti.release.name = variant.release_name
        ti.release.version = variant.release_version
        ti.release.short = variant.release_short
        ti.release.is_layered = True
        ti.release.type = compose.conf["release_type"].lower()

        # base product
        ti.base_product.name = compose.conf["release_name"]
        if "." in compose.conf["release_version"]:
            # remove minor version if present
            ti.base_product.version = get_major_version(compose.conf["release_version"])
        else:
            ti.base_product.version = compose.conf["release_version"]
        ti.base_product.short = compose.conf["release_short"]
    else:
        # release
        ti.release.name = compose.conf["release_name"]
        # treeinfo_version overrides the release version when configured.
        ti.release.version = compose.conf.get(
            "treeinfo_version", compose.conf["release_version"]
        )
        ti.release.short = compose.conf["release_short"]
        ti.release.is_layered = (
            True if compose.conf.get("base_product_name", "") else False
        )
        ti.release.type = compose.conf["release_type"].lower()

        # base product
        if ti.release.is_layered:
            ti.base_product.name = compose.conf["base_product_name"]
            ti.base_product.version = compose.conf["base_product_version"]
            ti.base_product.short = compose.conf["base_product_short"]

    # tree
    ti.tree.arch = arch
    ti.tree.build_timestamp = timestamp
    # ti.platforms

    # main variant
    var = productmd.treeinfo.Variant(ti)
    if variant.type == "layered-product":
        # The layered product's tree is presented under its parent variant.
        var.id = variant.parent.id
        var.uid = variant.parent.uid
        var.name = variant.parent.name
        var.type = "variant"
    else:
        # remove dashes from variant ID, rely on productmd verification
        var.id = variant.id.replace("-", "")
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type

    # Paths are relative to the tree root; "." when they collapse to it.
    var.paths.packages = (
        relative_path(
            compose.paths.compose.packages(
                arch=arch, variant=variant, create_dir=False
            ).rstrip("/")
            + "/",
            os_tree,
        ).rstrip("/")
        or "."
    )
    var.paths.repository = (
        relative_path(
            compose.paths.compose.repository(
                arch=arch, variant=variant, create_dir=False
            ).rstrip("/")
            + "/",
            os_tree,
        ).rstrip("/")
        or "."
    )

    ti.variants.add(var)

    repomd_path = os.path.join(var.paths.repository, "repodata", "repomd.xml")
    createrepo_checksum = compose.conf["createrepo_checksum"]
    if os.path.isfile(repomd_path):
        ti.checksums.add(repomd_path, createrepo_checksum, root_dir=os_tree)

    # Attach addon sub-variants available on this arch.
    for i in variant.get_variants(types=["addon"], arch=arch):
        addon = productmd.treeinfo.Variant(ti)
        addon.id = i.id
        addon.uid = i.uid
        addon.name = i.name
        addon.type = i.type
        compose.log_debug(
            "variant '%s' inserting addon uid '%s' type '%s'"
            % (variant, addon.uid, addon.type)
        )

        # NOTE: os_tree is rebound to the addon's tree here and stays that
        # way for the checksum call below.
        os_tree = compose.paths.compose.os_tree(arch=arch, variant=i).rstrip("/") + "/"
        addon.paths.packages = (
            relative_path(
                compose.paths.compose.packages(
                    arch=arch, variant=i, create_dir=False
                ).rstrip("/")
                + "/",
                os_tree,
            ).rstrip("/")
            or "."
        )
        addon.paths.repository = (
            relative_path(
                compose.paths.compose.repository(
                    arch=arch, variant=i, create_dir=False
                ).rstrip("/")
                + "/",
                os_tree,
            ).rstrip("/")
            or "."
        )
        var.add(addon)

        repomd_path = os.path.join(addon.paths.repository, "repodata", "repomd.xml")
        if os.path.isfile(repomd_path):
            ti.checksums.add(repomd_path, createrepo_checksum, root_dir=os_tree)

    class LoraxProduct(productmd.treeinfo.Release):
        # Relaxed Release that tolerates a missing 'short' field.
        def _validate_short(self):
            # HACK: set self.short so .treeinfo produced by lorax can be read
            if not self.short:
                self.short = compose.conf["release_short"]

    class LoraxTreeInfo(productmd.treeinfo.TreeInfo):
        # TreeInfo variant using LoraxProduct for its release section.
        def __init__(self, *args, **kwargs):
            super(LoraxTreeInfo, self).__init__(*args, **kwargs)
            self.release = LoraxProduct(self)

    # images
    if variant.type == "variant" and bi.succeeded(variant, arch):
        os_tree = compose.paths.compose.os_tree(arch, variant)

        # clone all but 'general' sections from buildinstall .treeinfo

        bi_dir = compose.paths.work.buildinstall_dir(arch)
        if compose.conf.get("buildinstall_method") == "lorax":
            # The .treeinfo file produced by lorax is nested in variant
            # subdirectory. Legacy buildinstall runs once per arch, so there is
            # only one file.
            bi_dir = os.path.join(bi_dir, variant.uid)
        bi_treeinfo = os.path.join(bi_dir, ".treeinfo")

        if os.path.exists(bi_treeinfo):
            bi_ti = LoraxTreeInfo()
            bi_ti.load(bi_treeinfo)

            # stage2 - mainimage
            if bi_ti.stage2.mainimage:
                ti.stage2.mainimage = bi_ti.stage2.mainimage
                ti.checksums.add(
                    ti.stage2.mainimage, createrepo_checksum, root_dir=os_tree
                )

            # stage2 - instimage
            if bi_ti.stage2.instimage:
                ti.stage2.instimage = bi_ti.stage2.instimage
                ti.checksums.add(
                    ti.stage2.instimage, createrepo_checksum, root_dir=os_tree
                )

            # images
            for platform in bi_ti.images.images:
                ti.images.images[platform] = {}
                ti.tree.platforms.add(platform)
                for image, path in bi_ti.images.images[platform].items():
                    if not path:
                        # The .treeinfo file contains an image without a path.
                        # We can't add that.
                        continue
                    ti.images.images[platform][image] = path
                    ti.checksums.add(path, createrepo_checksum, root_dir=os_tree)

    path = os.path.join(
        compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo"
    )
    compose.log_info("Writing treeinfo: %s" % path)
    ti.dump(path)
|
extra-files: Write a metadata file enumerating extra files
Introduces a new metadata file to track arbitrary files added during the
extra-files phase. This file is placed in the root of each tree and is
called ``extra_files.json``. It is a JSON file containing a single
object, which contains a "header" key with an object describing the
metadata, and a "data" key, which is an array of objects, where each
object represents a file. Each object contains the "file", "checksums",
and "size" keys. "file" is the relative path from the tree root to the
extra file. "checksums" is an object containing one or more checksums,
where the key is the digest type and the value of that key is the hex
digest. Finally, the size is the size of the file in bytes.
For example:
{
"header": {"version": "1.0},
"data": [
{
"file": "GPL",
"checksums": {
"sha256": "8177f97513213526df2cf6184d8ff986c675afb514d4e68a404010521b880643"
},
"size": 18092
},
{
"file": "release-notes/notes.html",
"checksums": {
"sha256": "82b1ba8db522aadf101dca6404235fba179e559b95ea24ff39ee1e5d9a53bdcb"
},
"size": 1120
}
]
}
Signed-off-by: Jeremy Cline <jeremy@jcline.org>
Fixes: #295
2016-05-31 13:40:20 +00:00
|
|
|
|
|
|
|
|
2019-11-01 08:10:34 +00:00
|
|
|
def populate_extra_files_metadata(
    metadata, variant, arch, topdir, files, checksum_types, relative_root=None
):
    """Record extra files in the compose metadata and per-tree JSON file.

    :param metadata: an instance of productmd.extra_files.ExtraFiles to
        populate with the current files
    :param Variant variant: under which variant should the files be listed
    :param str arch: under which arch should the files be listed
    :param topdir: directory where files are located
    :param files: list of file paths relative to topdir
    :param checksum_types: list of checksums to compute
    :param relative_root: ancestor directory of topdir, this will be removed
        from paths written to local metadata file
    """
    for rel_name in files:
        abs_path = os.path.join(topdir, rel_name)
        file_size = os.path.getsize(abs_path)
        try:
            digests = compute_file_checksums(abs_path, checksum_types)
        except IOError as exc:
            raise RuntimeError(
                "Failed to calculate checksum for %s: %s" % (abs_path, exc)
            )

        # When a relative root is given, record the path relative to it
        # instead of relative to topdir.
        if relative_root:
            rel_name = os.path.relpath(abs_path, relative_root)
        metadata.add(variant.uid, arch, rel_name, file_size, digests)

    # Prefix to strip so that the per-tree file lists paths from tree root.
    strip_prefix = (
        (os.path.relpath(topdir, relative_root) + "/") if relative_root else ""
    )
    with open(os.path.join(topdir, "extra_files.json"), "w") as f:
        metadata.dump_for_tree(f, variant.uid, arch, strip_prefix)
|