2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
import os
|
2019-07-24 12:08:34 +00:00
|
|
|
import threading
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2020-03-06 13:53:52 +00:00
|
|
|
from kobo.shortcuts import run
|
2019-06-07 11:20:19 +00:00
|
|
|
from kobo.threads import run_in_threads
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.arch import get_valid_arches
|
|
|
|
from pungi.wrappers.createrepo import CreaterepoWrapper
|
2020-03-25 09:02:25 +00:00
|
|
|
from pungi.util import (
|
|
|
|
copy_all,
|
|
|
|
is_arch_multilib,
|
|
|
|
PartialFuncWorkerThread,
|
|
|
|
PartialFuncThreadPool,
|
|
|
|
)
|
2019-10-02 07:40:18 +00:00
|
|
|
from pungi.module_util import Modulemd, collect_module_defaults
|
2019-06-26 13:09:25 +00:00
|
|
|
from pungi.phases.createrepo import add_modular_metadata
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
    """Derive one package set per compose architecture from the global set.

    For each arch the global package set is narrowed to the arches valid for
    it (always including src, plus multilib arches when configured), the
    subset's file list is written into the work directory, and the subset is
    returned in a mapping keyed by arch.
    """
    noarch_is_exclusive = compose.conf["pkgset_exclusive_arch_considers_noarch"]
    per_arch = {}
    for arch in compose.get_arches():
        compose.log_info("Populating package set for arch: %s", arch)
        valid_arches = get_valid_arches(
            arch, is_arch_multilib(compose.conf, arch), add_src=True
        )
        subset = global_pkgset.subset(
            arch, valid_arches, exclusive_noarch=noarch_is_exclusive
        )
        # Persist the file list so later phases (and createrepo) can use it.
        subset.save_file_list(
            compose.paths.work.package_list(arch=arch, pkgset=global_pkgset),
            remove_path_prefix=path_prefix,
        )
        per_arch[arch] = subset
    return per_arch
|
|
|
|
|
|
|
|
|
2019-07-26 11:19:39 +00:00
|
|
|
def get_create_global_repo_cmd(compose, path_prefix, repo_dir_global, pkgset):
    """Build the createrepo command line for the global package set repo.

    If an old compose with existing repodata for this pkgset is found, its
    repodata directory is passed via ``update_md_path`` so createrepo can
    reuse metadata instead of recomputing it from scratch.
    """
    wrapper = CreaterepoWrapper(createrepo_c=compose.conf["createrepo_c"])

    # Find an old compose suitable for repodata reuse.
    update_md_path = None
    previous_repo = compose.paths.old_compose_path(
        compose.paths.work.pkgset_repo(pkgset.name, arch="global")
    )
    if not previous_repo:
        compose.log_info("No suitable old compose found in: %s", compose.old_composes)
    elif os.path.isdir(os.path.join(previous_repo, "repodata")):
        compose.log_info("Using old repodata from: %s", previous_repo)
        update_md_path = previous_repo

    # IMPORTANT: must not use --skip-stat here -- to make sure that correctly
    # signed files are pulled in
    return wrapper.get_createrepo_cmd(
        path_prefix,
        update=True,
        database=False,
        skip_stat=False,
        pkglist=compose.paths.work.package_list(arch="global", pkgset=pkgset),
        outputdir=repo_dir_global,
        baseurl="file://%s" % path_prefix,
        workers=compose.conf["createrepo_num_workers"],
        update_md_path=update_md_path,
        checksum=compose.conf["createrepo_checksum"],
    )
|
|
|
|
|
|
|
|
|
2019-07-26 11:19:39 +00:00
|
|
|
def run_create_global_repo(compose, cmd, logfile):
    """Execute the prepared createrepo command for the global package set.

    Blocks until the command finishes; all output is captured in *logfile*.
    Intended to run in a background thread while per-arch subsets are built.
    """
    description = "Running createrepo for the global package set"
    compose.log_info("[BEGIN] %s", description)
    run(cmd, logfile=logfile, show_cmd=True)
    compose.log_info("[DONE ] %s", description)
|
2018-05-02 07:01:46 +00:00
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2019-07-26 14:06:53 +00:00
|
|
|
def create_arch_repos(compose, path_prefix, paths, pkgset, mmds):
    """Create per-arch pkgset repos in parallel, one task per compose arch.

    ``paths`` is a shared dict that each worker fills with the repo location
    for its arch; ``mmds`` optionally maps arch to modulemd streams.
    """
    tasks = []
    for arch in compose.get_arches():
        arch_mmd = mmds.get(arch) if mmds else None
        tasks.append((compose, arch, path_prefix, paths, pkgset, arch_mmd))
    run_in_threads(
        _create_arch_repo,
        tasks,
        threads=compose.conf["createrepo_num_threads"],
    )
|
|
|
|
|
|
|
|
|
|
|
|
def _create_arch_repo(worker_thread, args, task_num):
    """Create a single pkgset repo for given arch.

    Runs as a ``run_in_threads`` worker. ``args`` is a tuple of
    (compose, arch, path_prefix, paths, pkgset, mmd); the created repo
    directory is published into the shared ``paths`` mapping keyed by arch.
    If the pkgset is marked for reuse, repodata is copied from the old
    compose instead of being regenerated; on any failure there we fall back
    to running createrepo normally.
    """
    compose, arch, path_prefix, paths, pkgset, mmd = args
    repo_dir = compose.paths.work.pkgset_repo(pkgset.name, arch=arch)
    paths[arch] = repo_dir

    # Try to reuse arch repo from old compose
    reuse = getattr(pkgset, "reuse", None)
    if reuse:
        old_repo_dir = compose.paths.old_compose_path(repo_dir)
        # BUGFIX: old_compose_path() may return None when no old compose is
        # available (the global-repo code path checks for this); guard before
        # os.path.isdir() to avoid a TypeError on None.
        if old_repo_dir and os.path.isdir(old_repo_dir):
            msg = "Copying repodata for reuse: %s" % old_repo_dir
            try:
                compose.log_info("[BEGIN] %s", msg)
                copy_all(old_repo_dir, repo_dir)
                compose.log_info("[DONE ] %s", msg)
                return
            except Exception as e:
                # Best-effort reuse: log and fall through to a fresh createrepo.
                compose.log_debug(str(e))
                compose.log_info("[FAILED] %s will try to create arch repo", msg)

    createrepo_c = compose.conf["createrepo_c"]
    createrepo_checksum = compose.conf["createrepo_checksum"]
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    repo_dir_global = compose.paths.work.pkgset_repo(pkgset.name, arch="global")
    msg = "Running createrepo for arch '%s'" % arch

    compose.log_info("[BEGIN] %s", msg)
    # skip_stat=True is safe here because the global repo (update_md_path)
    # was built without it, so signatures were already validated there.
    cmd = repo.get_createrepo_cmd(
        path_prefix,
        update=True,
        database=False,
        skip_stat=True,
        pkglist=compose.paths.work.package_list(arch=arch, pkgset=pkgset),
        outputdir=repo_dir,
        baseurl="file://%s" % path_prefix,
        workers=compose.conf["createrepo_num_workers"],
        update_md_path=repo_dir_global,
        checksum=createrepo_checksum,
    )
    run(
        cmd,
        logfile=compose.paths.log.log_file(arch, "arch_repo.%s" % pkgset.name),
        show_cmd=True,
    )
    # Add modulemd to the repo for all modules in all variants on this architecture.
    if Modulemd and mmd:
        names = set(x.get_module_name() for x in mmd)
        overrides_dir = compose.conf.get("module_defaults_override_dir")
        mod_index = collect_module_defaults(
            compose.paths.work.module_defaults_dir(), names, overrides_dir=overrides_dir
        )
        for x in mmd:
            mod_index.add_module_stream(x)
        add_modular_metadata(
            repo,
            repo_dir,
            mod_index,
            compose.paths.log.log_file(arch, "arch_repo_modulemd.%s" % pkgset.name),
        )

    compose.log_info("[DONE ] %s", msg)
|
|
|
|
|
2019-07-24 12:08:34 +00:00
|
|
|
|
2019-07-26 11:19:39 +00:00
|
|
|
class MaterializedPackageSet(object):
    """A wrapper for PkgsetBase object that represents the package set created
    as repos on the filesystem.
    """

    def __init__(self, package_sets, paths):
        # package_sets: dict mapping arch (plus the "global" key) to pkgset.
        self.package_sets = package_sets
        # paths: dict mapping arch (plus "global") to the repo directory.
        self.paths = paths

    @property
    def name(self):
        # The name of the whole materialized set is the global pkgset's name.
        return self.package_sets["global"].name

    def __getitem__(self, key):
        """Direct access to actual package set for particular arch."""
        return self.package_sets[key]

    def get(self, arch, default=None):
        """Get package set for particular arch."""
        # NOTE: a falsy explicit default is replaced by [] as well, so missing
        # arches always yield something iterable.
        return self.package_sets.get(arch, default or [])

    def iter_packages(self, arch=None):
        """Yield all packages in the set, optionally filtering for some arch
        only.
        """
        if not arch:
            # No filter: walk every arch's pkgset.
            for arch in self.package_sets:
                for file_path in self.get(arch):
                    yield self.package_sets[arch][file_path]
        else:
            for file_path in self.get(arch):
                yield self.package_sets[arch][file_path]

    @classmethod
    def create(klass, compose, pkgset_global, path_prefix, mmd=None):
        """Create per-arch pkgsets and create repodata for each arch."""
        repo_dir_global = compose.paths.work.pkgset_repo(
            pkgset_global.name, arch="global"
        )
        paths = {"global": repo_dir_global}

        # Write out the global file list and file cache before any repo work.
        pkgset_global.save_file_list(
            compose.paths.work.package_list(arch="global", pkgset=pkgset_global),
            remove_path_prefix=path_prefix,
        )
        pkgset_global.save_file_cache(
            compose.paths.work.pkgset_file_cache(pkgset_global.name)
        )

        # Unless the global repo is being reused from an old compose, build it
        # in a background thread while the per-arch subsets are computed.
        if getattr(pkgset_global, "reuse", None) is None:
            cmd = get_create_global_repo_cmd(
                compose, path_prefix, repo_dir_global, pkgset_global
            )
            logfile = compose.paths.log.log_file(
                "global", "arch_repo.%s" % pkgset_global.name
            )
            t = threading.Thread(
                target=run_create_global_repo, args=(compose, cmd, logfile)
            )
            t.start()

        package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
        package_sets["global"] = pkgset_global

        # The arch repos use the global repo's metadata (update_md_path), so
        # wait for the background createrepo to finish first.
        if getattr(pkgset_global, "reuse", None) is None:
            t.join()

        create_arch_repos(compose, path_prefix, paths, pkgset_global, mmd)

        return klass(package_sets, paths)

    @classmethod
    def create_many(klass, create_partials):
        """
        Creates multiple MaterializedPackageSet in threads.

        :param list of functools.partial create_partials: List of Partial objects
            created using functools.partial(MaterializedPackageSet.create, compose,
            pkgset_global, path_prefix, mmd=mmd).
        :return: List of MaterializedPackageSet objects.
        """
        # Create two pools - small pool for small package sets and big pool for
        # big package sets. This ensure there will not be too many createrepo
        # tasks which would need lot of CPU or memory at the same time.
        big_pool = PartialFuncThreadPool()
        big_pool.add(PartialFuncWorkerThread(big_pool))
        small_pool = PartialFuncThreadPool()
        for i in range(10):
            small_pool.add(PartialFuncWorkerThread(small_pool))

        # Divide the package sets into big_pool/small_pool based on their size.
        for partial in create_partials:
            # partial.args is (compose, pkgset_global, ...): the pkgset is the
            # second positional argument of MaterializedPackageSet.create.
            pkgset = partial.args[1]
            if len(pkgset) < 500:
                small_pool.queue_put(partial)
            else:
                big_pool.queue_put(partial)

        small_pool.start()
        big_pool.start()
        try:
            # stop() waits for the queued work to drain before shutting down.
            small_pool.stop()
        except Exception:
            # If the small pool failed, abort the big pool's remaining work
            # and re-raise the original error.
            big_pool.kill()
            raise
        finally:
            big_pool.stop()

        return small_pool.results + big_pool.results
|
|
|
|
|
2019-07-26 13:56:45 +00:00
|
|
|
|
|
|
|
def get_all_arches(compose):
    """Return the union of valid arches over all compose arches, plus src."""
    all_arches = {"src"}
    for arch in compose.get_arches():
        multilib = is_arch_multilib(compose.conf, arch)
        all_arches.update(get_valid_arches(arch, multilib))
    return all_arches
|