2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Public API of this module: only the repo-creation entry point is exported
# via ``from <module> import *``.
__all__ = (
    "create_variant_repo",
)
|
|
|
|
|
|
|
|
|
|
|
|
import os
|
|
|
|
import glob
|
|
|
|
import shutil
|
|
|
|
import threading
|
2017-05-04 18:46:06 +00:00
|
|
|
import copy
|
2017-07-07 15:31:59 +00:00
|
|
|
import errno
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
from kobo.threads import ThreadPool, WorkerThread
|
|
|
|
from kobo.shortcuts import run, relative_path
|
|
|
|
|
2016-02-29 11:58:21 +00:00
|
|
|
from ..wrappers.scm import get_dir_from_scm
|
|
|
|
from ..wrappers.createrepo import CreaterepoWrapper
|
|
|
|
from .base import PhaseBase
|
2017-08-29 12:26:57 +00:00
|
|
|
from ..util import find_old_compose, temp_dir, get_arch_variant_data
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2015-07-24 22:18:03 +00:00
|
|
|
import productmd.rpms
|
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
# Global lock serializing access to ``createrepo_dirs`` across worker
# threads; see create_variant_repo, which must never let two createrepo
# processes operate on the same directory.
createrepo_lock = threading.Lock()
# Repo directories for which a createrepo run has already been started;
# guarded by ``createrepo_lock``.
createrepo_dirs = set()
|
|
|
|
|
|
|
|
|
|
|
|
class CreaterepoPhase(PhaseBase):
    """Phase that builds repodata for every variant and architecture.

    Work items are pushed onto a thread pool; each item is a
    ``(compose, arch, variant, pkg_type)`` tuple consumed by
    :class:`CreaterepoThread`.
    """

    name = "createrepo"

    def __init__(self, compose):
        PhaseBase.__init__(self, compose)
        self.pool = ThreadPool(logger=self.compose._logger)

    def validate(self):
        """Collect configuration errors from the base class and add
        phase-specific checks; raise ValueError listing all of them.
        """
        errors = []
        try:
            super(CreaterepoPhase, self).validate()
        except ValueError as exc:
            # str(exc) instead of exc.message: the .message attribute is
            # Python 2 only and raises AttributeError on Python 3.
            errors = str(exc).split('\n')

        # Deltas are computed against a previous compose, so old_composes
        # must be configured whenever createrepo_deltas is enabled.
        if not self.compose.old_composes and self.compose.conf.get('createrepo_deltas'):
            errors.append('Can not generate deltas without old compose')

        if errors:
            raise ValueError('\n'.join(errors))

    def run(self):
        # Product certificates must be fetched before any repo is created,
        # as create_variant_repo injects them into binary repos.
        get_productids_from_scm(self.compose)
        for i in range(self.compose.conf['createrepo_num_threads']):
            self.pool.add(CreaterepoThread(self.pool))

        for variant in self.compose.get_variants():
            if variant.is_empty:
                continue
            # One source repo per variant, plus binary and debuginfo repos
            # per architecture.
            self.pool.queue_put((self.compose, None, variant, "srpm"))
            for arch in variant.arches:
                self.pool.queue_put((self.compose, arch, variant, "rpm"))
                self.pool.queue_put((self.compose, arch, variant, "debuginfo"))

        self.pool.start()
|
|
|
|
|
|
|
|
|
|
|
|
def create_variant_repo(compose, arch, variant, pkg_type):
    """Run createrepo for one (variant, arch, pkg_type) combination.

    ``pkg_type`` is one of ``rpm``, ``srpm`` or ``debuginfo``; it selects
    both the manifest category to include and the output repo directory.
    Package lists come from the compose's rpms.json metadata rather than
    filesystem scanning.  Product certificates and module metadata are
    injected into binary repos via modifyrepo when configured.
    """
    # Map package type to (manifest category, repo-dir factory).  The
    # factories are lazy so unknown types fail cleanly below.
    types = {
        'rpm': ('binary',
                lambda: compose.paths.compose.repository(arch=arch, variant=variant)),
        'srpm': ('source',
                 lambda: compose.paths.compose.repository(arch='src', variant=variant)),
        'debuginfo': ('debug',
                      lambda: compose.paths.compose.debug_repository(arch=arch, variant=variant)),
    }

    if variant.is_empty or (arch is None and pkg_type != 'srpm'):
        # BUG fix: the original format string had three %s placeholders for
        # only two arguments, which raised TypeError whenever this branch ran.
        compose.log_info("[SKIP ] Creating repo (arch: %s, variant: %s)" % (arch, variant))
        return

    createrepo_c = compose.conf["createrepo_c"]
    createrepo_checksum = compose.conf["createrepo_checksum"]
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    # Existing metadata to speed up the run; source repos share the
    # 'global' arch repo.
    repo_dir_arch = compose.paths.work.arch_repo(arch='global' if pkg_type == 'srpm' else arch)

    try:
        repo_dir = types[pkg_type][1]()
    except KeyError:
        raise ValueError("Unknown package type: %s" % pkg_type)

    msg = "Creating repo (arch: %s, variant: %s): %s" % (arch, variant, repo_dir)

    # HACK: using global lock
    # This is important when addons put packages into parent variant directory.
    # There can't be multiple createrepo processes operating on the same
    # directory.
    with createrepo_lock:
        if repo_dir in createrepo_dirs:
            compose.log_warning("[SKIP ] Already in progress: %s" % msg)
            return
        createrepo_dirs.add(repo_dir)

    # In debug mode an already-populated repodata dir is treated as done.
    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info("[BEGIN] %s" % msg)

    # We only want delta RPMs for binary repos.
    with_deltas = pkg_type == 'rpm' and _has_deltas(compose, variant, arch)

    rpms = set()
    rpm_nevras = set()

    # read rpms from metadata rather than guessing it by scanning filesystem
    manifest_file = compose.paths.compose.metadata("rpms.json")
    manifest = productmd.rpms.Rpms()
    manifest.load(manifest_file)

    for rpms_arch, data in manifest.rpms.get(variant.uid, {}).items():
        if arch is not None and arch != rpms_arch:
            continue
        for srpm_data in data.values():
            for rpm_nevra, rpm_data in srpm_data.items():
                # Only packages in this repo's category (binary/source/debug).
                if types[pkg_type][0] != rpm_data['category']:
                    continue
                path = os.path.join(compose.topdir, "compose", rpm_data["path"])
                rel_path = relative_path(path, repo_dir.rstrip("/") + "/")
                rpms.add(rel_path)
                rpm_nevras.add(str(rpm_nevra))

    # Write the package list file consumed by createrepo's --pkglist.
    file_list = compose.paths.work.repo_package_list(arch, variant, pkg_type)
    with open(file_list, 'w') as f:
        for rel_path in sorted(rpms):
            f.write("%s\n" % rel_path)

    # Only find last compose when we actually want delta RPMs.
    old_package_dirs = _get_old_package_dirs(compose, repo_dir) if with_deltas else None
    if old_package_dirs:
        # If we are creating deltas, we can not reuse existing metadata, as
        # that would stop deltas from being created.
        # This seems to only affect createrepo_c though.
        repo_dir_arch = None

    comps_path = None
    if compose.has_comps and pkg_type == "rpm":
        comps_path = compose.paths.work.comps(arch=arch, variant=variant)
    cmd = repo.get_createrepo_cmd(repo_dir, update=True, database=True, skip_stat=True,
                                  pkglist=file_list, outputdir=repo_dir,
                                  workers=compose.conf["createrepo_num_workers"],
                                  groupfile=comps_path, update_md_path=repo_dir_arch,
                                  checksum=createrepo_checksum,
                                  deltas=with_deltas,
                                  oldpackagedirs=old_package_dirs,
                                  use_xz=compose.conf['createrepo_use_xz'])
    log_file = compose.paths.log.log_file(arch, "createrepo-%s.%s" % (variant, pkg_type))
    run(cmd, logfile=log_file, show_cmd=True)

    # call modifyrepo to inject productid
    product_id = compose.conf.get("product_id")
    if product_id and pkg_type == "rpm":
        # add product certificate to base (rpm) repo; skip source and debug
        product_id_path = compose.paths.work.product_id(arch, variant)
        if os.path.isfile(product_id_path):
            cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"), product_id_path, compress_type="gz")
            log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
            run(cmd, logfile=log_file, show_cmd=True)
            # productinfo is not supported by modifyrepo in any way
            # this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
            shutil.copy2(product_id_path, os.path.join(repo_dir, "repodata", "productid"))

    # call modifyrepo to inject modulemd if needed
    if arch in variant.arch_mmds:
        # Imported lazily so composes without modules don't need PyYAML.
        import yaml
        modules = []
        for mmd in variant.arch_mmds[arch].values():
            # Create copy of architecture specific mmd to filter out packages
            # which are not part of this particular repo.
            repo_mmd = copy.deepcopy(mmd)
            # Modules without RPMs are also valid.
            if ("artifacts" in repo_mmd["data"] and
                    "rpms" in repo_mmd["data"]["artifacts"]):
                repo_mmd["data"]["artifacts"]["rpms"] = [
                    rpm_nevra for rpm_nevra in repo_mmd["data"]["artifacts"]["rpms"]
                    if rpm_nevra in rpm_nevras]
            modules.append(repo_mmd)

        with temp_dir() as tmp_dir:
            modules_path = os.path.join(tmp_dir, "modules.yaml")
            with open(modules_path, "w") as outfile:
                outfile.write(yaml.dump_all(modules, explicit_start=True))
            cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"),
                                          modules_path, mdtype="modules",
                                          compress_type="gz")
            log_file = compose.paths.log.log_file(
                arch, "modifyrepo-modules-%s" % variant)
            run(cmd, logfile=log_file, show_cmd=True)

    compose.log_info("[DONE ] %s" % msg)
|
|
|
|
|
|
|
|
|
|
|
|
class CreaterepoThread(WorkerThread):
    """Pool worker that builds one repository per queued work item."""

    def process(self, item, num):
        """Handle a single queued ``(compose, arch, variant, pkg_type)``
        tuple by delegating to :func:`create_variant_repo`.
        """
        compose, arch, variant, pkg_type = item
        create_variant_repo(compose, arch, variant, pkg_type=pkg_type)
|
|
|
|
|
|
|
|
|
|
|
|
def get_productids_from_scm(compose):
    """Fetch product certificates from SCM and install one per (arch, variant).

    ``product_id`` in the configuration is a scm_dict
    ``{scm, repo, branch, dir}``; the checked-out directory is expected to
    contain files named ``$variant_uid-$arch-*.pem``.  Missing certificates
    are fatal unless ``product_id_allow_missing`` is set.

    Raises RuntimeError when a certificate is missing (and not allowed to
    be) or when multiple candidates match one variant/arch pair.
    """
    product_id = compose.conf.get("product_id")
    if not product_id:
        compose.log_info("No product certificates specified")
        return

    product_id_allow_missing = compose.conf["product_id_allow_missing"]

    msg = "Getting product certificates from SCM..."
    compose.log_info("[BEGIN] %s" % msg)

    tmp_dir = compose.mkdtemp(prefix="pungi_")
    # BUG fix: the temporary checkout used to leak on the early return and
    # re-raise below; the try/finally guarantees cleanup on every path.
    try:
        try:
            get_dir_from_scm(product_id, tmp_dir)
        except OSError as e:
            if e.errno == errno.ENOENT and product_id_allow_missing:
                compose.log_warning("No product IDs in %s" % product_id)
                return
            raise

        for arch in compose.get_arches():
            for variant in compose.get_variants(arch=arch):
                # some layered products may use base product name before variant
                pem_files = glob.glob("%s/*%s-%s-*.pem" % (tmp_dir, variant.uid, arch))
                # use for development:
                # pem_files = glob.glob("%s/*.pem" % tmp_dir)[-1:]
                if not pem_files:
                    # Use a separate name so the final "[DONE ]" log is not
                    # clobbered with this error text (was a bug: ``msg`` was
                    # reused here).
                    error = "No product certificate found (arch: %s, variant: %s)" % (arch, variant.uid)
                    if product_id_allow_missing:
                        compose.log_warning(error)
                        continue
                    else:
                        raise RuntimeError(error)
                if len(pem_files) > 1:
                    raise RuntimeError("Multiple product certificates found (arch: %s, variant: %s): %s" % (arch, variant.uid, ", ".join(sorted([os.path.basename(i) for i in pem_files]))))
                product_id_path = compose.paths.work.product_id(arch, variant)
                shutil.copy2(pem_files[0], product_id_path)
    finally:
        shutil.rmtree(tmp_dir)

    compose.log_info("[DONE ] %s" % msg)
|
2017-07-14 08:39:47 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _get_old_package_dirs(compose, repo_dir):
|
|
|
|
"""Given a compose and a path to a repo in it, try to find corresponging
|
|
|
|
repo in an older compose and return a list of paths to directories with
|
|
|
|
packages in it.
|
|
|
|
"""
|
|
|
|
if not compose.conf['createrepo_deltas']:
|
|
|
|
return None
|
|
|
|
old_compose_path = find_old_compose(
|
|
|
|
compose.old_composes,
|
|
|
|
compose.ci_base.release.short,
|
|
|
|
compose.ci_base.release.version,
|
2017-11-06 13:56:08 +00:00
|
|
|
compose.ci_base.release.type_suffix if compose.conf['old_composes_per_release_type'] else None,
|
2017-07-14 08:39:47 +00:00
|
|
|
compose.ci_base.base_product.short if compose.ci_base.release.is_layered else None,
|
2017-08-29 12:42:39 +00:00
|
|
|
compose.ci_base.base_product.version if compose.ci_base.release.is_layered else None,
|
|
|
|
allowed_statuses=['FINISHED', 'FINISHED_INCOMPLETE'],
|
2017-07-14 08:39:47 +00:00
|
|
|
)
|
|
|
|
if not old_compose_path:
|
|
|
|
compose.log_info("No suitable old compose found in: %s" % compose.old_composes)
|
|
|
|
return None
|
|
|
|
rel_dir = relative_path(repo_dir, compose.topdir.rstrip('/') + '/')
|
|
|
|
old_package_dirs = os.path.join(old_compose_path, rel_dir, 'Packages')
|
|
|
|
if compose.conf['hashed_directories']:
|
|
|
|
old_package_dirs = _find_package_dirs(old_package_dirs)
|
|
|
|
return old_package_dirs
|
|
|
|
|
|
|
|
|
|
|
|
def _find_package_dirs(base):
|
|
|
|
"""Assuming the packages are in directories hashed by first letter, find
|
|
|
|
all the buckets in given base.
|
|
|
|
"""
|
|
|
|
buckets = set()
|
|
|
|
try:
|
|
|
|
for subdir in os.listdir(base):
|
|
|
|
bucket = os.path.join(base, subdir)
|
|
|
|
if os.path.isdir(bucket):
|
|
|
|
buckets.add(bucket)
|
|
|
|
except OSError:
|
|
|
|
# The directory does not exist, so no drpms for you!
|
|
|
|
pass
|
|
|
|
return sorted(buckets)
|
2017-08-29 12:26:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _has_deltas(compose, variant, arch):
|
|
|
|
"""Check if delta RPMs are enabled for given variant and architecture."""
|
|
|
|
key = 'createrepo_deltas'
|
|
|
|
if isinstance(compose.conf.get(key), bool):
|
|
|
|
return compose.conf[key]
|
|
|
|
return any(get_arch_variant_data(compose.conf, key, arch, variant))
|