2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
import os
|
|
|
|
import cPickle as pickle
|
|
|
|
|
|
|
|
from kobo.shortcuts import run
|
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
import pungi.phases.pkgset.pkgsets
|
|
|
|
from pungi.arch import get_valid_arches
|
2016-01-20 11:53:08 +00:00
|
|
|
from pungi.util import makedirs, is_arch_multilib
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.wrappers.pungi import PungiWrapper
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.phases.pkgset.common import create_global_repo, create_arch_repos, populate_arch_pkgsets
|
2017-02-09 12:30:07 +00:00
|
|
|
from pungi.phases.gather import get_prepopulate_packages, get_additional_packages
|
2015-03-12 21:12:38 +00:00
|
|
|
from pungi.linker import LinkerThread, LinkerPool
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
import pungi.phases.pkgset.source
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
2015-03-12 21:12:38 +00:00
|
|
|
class PkgsetSourceRepos(pungi.phases.pkgset.source.PkgsetSourceBase):
    """Package set source that obtains packages from plain yum repos.

    Calling an instance performs the download/link work and yields the
    resulting package sets together with the path prefix they live under.
    """

    enabled = True

    def __call__(self):
        # Delegate everything to the module-level helper; it already
        # returns the (package_sets, path_prefix) tuple callers expect.
        return get_pkgset_from_repos(self.compose)
|
|
|
|
|
|
|
|
|
|
|
|
def get_pkgset_from_repos(compose):
    """Populate package sets by downloading packages from yum repos.

    For every compose arch, a pungi-gather run downloads packages from the
    repos configured in ``pkgset_repos`` (plus a local comps repo when the
    compose has comps).  The downloaded RPMs are linked into a shared
    "global" download directory, deduplicated by file name across arches,
    and used to create the global and per-arch package sets and repos.

    :param compose: the compose object being built
    :returns: tuple ``(package_sets, path_prefix)`` where ``package_sets``
        maps arch names (and ``"global"``) to package set objects and
        ``path_prefix`` is the directory all package paths are under
    """
    # TODO: noarch hack - secondary arches, use x86_64 noarch where possible
    flist = []

    link_type = compose.conf["link_type"]
    pool = LinkerPool(link_type, logger=compose._logger)
    # Ten linker threads is an arbitrary but serviceable degree of
    # parallelism; the loop index itself is unused.
    for _ in range(10):
        pool.add(LinkerThread(pool))

    # File names already queued for linking; the same package can show up
    # in the download area of several arches and must be linked only once.
    seen_packages = set()

    # All arches share one download directory; compute and create it once
    # up front instead of redoing the loop-invariant work per arch.
    path_prefix = os.path.join(compose.paths.work.topdir(arch="global"), "download") + "/"
    makedirs(path_prefix)

    for arch in compose.get_arches():
        # write a pungi config for remote repos and a local comps repo
        repos = {}
        for num, repo in enumerate(compose.conf["pkgset_repos"][arch]):
            repo_path = repo
            if "://" not in repo_path:
                # Bare paths are resolved relative to the compose config dir.
                repo_path = os.path.join(compose.config_dir, repo)
            repos["repo-%s" % num] = repo_path

        comps_repo = None
        if compose.has_comps:
            repos["comps"] = compose.paths.work.comps_repo(arch=arch)
            comps_repo = "comps"
        write_pungi_config(compose, arch, None, repos=repos, comps_repo=comps_repo)

        pungi = PungiWrapper()
        pungi_conf = compose.paths.work.pungi_conf(arch=arch)
        pungi_log = compose.paths.log.log_file(arch, "pkgset_source")
        pungi_dir = compose.paths.work.pungi_download_dir(arch)
        cmd = pungi.get_pungi_cmd(
            pungi_conf, destdir=pungi_dir, name="FOO", selfhosting=True,
            fulltree=True, multilib_methods=["all"], nodownload=False,
            full_archlist=True, arch=arch,
            cache_dir=compose.paths.work.pungi_cache_dir(arch=arch))
        cmd.append("--force")

        # TODO: runroot
        run(cmd, logfile=pungi_log, show_cmd=True, stdout=False)

        for root, _dirs, files in os.walk(pungi_dir):
            for fn in files:
                if not fn.endswith(".rpm"):
                    continue
                if fn in seen_packages:
                    # Already queued from another arch's download area.
                    continue
                seen_packages.add(fn)
                src = os.path.join(root, fn)
                dst = os.path.join(path_prefix, os.path.basename(src))
                flist.append(dst)
                pool.queue_put((src, dst))

    msg = "Linking downloaded pkgset packages"
    compose.log_info("[BEGIN] %s" % msg)
    pool.start()
    pool.stop()
    compose.log_info("[DONE ] %s" % msg)

    flist = sorted(set(flist))
    pkgset_global = populate_global_pkgset(compose, flist, path_prefix)
    # get_extra_packages(compose, pkgset_global)
    package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)

    create_global_repo(compose, path_prefix)
    for arch in compose.get_arches():
        # TODO: threads? runroot?
        create_arch_repos(compose, arch, path_prefix)

    package_sets["global"] = pkgset_global
    return package_sets, path_prefix
|
|
|
|
|
|
|
|
|
|
|
|
def populate_global_pkgset(compose, file_list, path_prefix):
    """Create the global package set from a list of RPM file paths.

    The set of valid arches is collected from all compose arches (plus
    ``src``) so that every downloaded package can be matched.  In DEBUG
    mode an existing pickled package set from a previous run is reused;
    otherwise the set is populated from ``file_list`` and pickled for
    later reuse.

    :param compose: the compose object being built
    :param file_list: list of RPM file paths to populate the set from
    :param path_prefix: prefix stripped from paths in the written
        package-list file
    :returns: the populated global package set
    """
    all_arches = set(["src"])
    for arch in compose.get_arches():
        is_multilib = is_arch_multilib(compose.conf, arch)
        arches = get_valid_arches(arch, is_multilib)
        all_arches.update(arches)

    msg = "Populating the global package set from a file list"
    global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "packages.pickle")
    if compose.DEBUG and os.path.isfile(global_pkgset_path):
        # Reuse the pickled package set left behind by a previous run.
        compose.log_warning("[SKIP ] %s" % msg)
        # Pickle data is binary: open in "rb" (text mode corrupts it on
        # platforms with newline translation) and close deterministically.
        with open(global_pkgset_path, "rb") as f:
            pkgset = pickle.load(f)
    else:
        compose.log_info(msg)
        pkgset = pungi.phases.pkgset.pkgsets.FilelistPackageSet(
            compose.conf["sigkeys"], logger=compose._logger, arches=all_arches)
        pkgset.populate(file_list)
        # Write in binary mode for the same reason as the load above;
        # pickle.dump streams straight to the file without an
        # intermediate dumps() buffer.
        with open(global_pkgset_path, "wb") as f:
            pickle.dump(pkgset, f)

    # write global package list
    pkgset.save_file_list(compose.paths.work.package_list(arch="global"), remove_path_prefix=path_prefix)
    return pkgset
|
|
|
|
|
|
|
|
|
|
|
|
def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, package_set=None):
    """Write a pungi config (kickstart) for the given arch/variant.

    The package and group lists come from the gather source configured in
    the compose (``gather_source``); additional packages and prepopulate
    packages from the compose config are merged in.

    :param compose: the compose object this config belongs to
    :param arch: arch the config is generated for
    :param variant: variant the config is generated for (may be None)
    :param repos: mapping of repo name -> repo path/URL written to the config
    :param comps_repo: name of the comps repo within ``repos``.
        NOTE(review): this argument is currently ignored --
        ``comps_repo=None`` is hard-coded in the ``write_kickstart`` call
        at the bottom; confirm whether that is intentional.
    :param package_set: unused here; presumably kept for interface
        compatibility with other config writers -- TODO confirm
    """
    pungi_wrapper = PungiWrapper()
    pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
    msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

    # In DEBUG mode a config left behind by a previous run is reused as-is.
    if compose.DEBUG and os.path.isfile(pungi_cfg):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info(msg)

    # Resolve the gather source class by name from the compose config.
    # TODO move to a function
    gather_source = "GatherSource%s" % compose.conf["gather_source"]
    from pungi.phases.gather.source import GatherSourceContainer
    import pungi.phases.gather.sources
    GatherSourceContainer.register_module(pungi.phases.gather.sources)
    container = GatherSourceContainer()
    SourceClass = container[gather_source]
    src = SourceClass(compose)

    packages = []
    # The gather source yields (package name, arch) pairs plus comps groups.
    pkgs, grps = src(arch, variant)
    additional_packages = get_additional_packages(compose, arch, None)
    for pkg_name, pkg_arch in pkgs | additional_packages:
        if pkg_arch is None:
            packages.append(pkg_name)
        else:
            # Arch-qualified package spec: "name.arch".
            packages.append("%s.%s" % (pkg_name, pkg_arch))

    # include *all* packages providing system-release
    if "system-release" not in packages:
        packages.append("system-release")

    prepopulate = get_prepopulate_packages(compose, arch, None)
    pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)
|