diff --git a/bin/pungi-gather b/bin/pungi-gather new file mode 100755 index 00000000..6cbf1a57 --- /dev/null +++ b/bin/pungi-gather @@ -0,0 +1,128 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import os +import sys +import argparse + +import pungi.ks +from pungi.dnf_wrapper import DnfWrapper, Conf +from pungi.gather_dnf import Gather, GatherOptions + + +def get_parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--arch", + required=True, + ) + parser.add_argument( + "--config", + metavar="PATH", + required=True, + help="path to kickstart config file", + ) + + group = parser.add_argument_group("Repository options") + group.add_argument( + "--lookaside", + action="append", + metavar="[REPOID]", + help="lookaside repositories", + ) + + group = parser.add_argument_group("Gather options") + group.add_argument( + "--nodeps", + action="store_true", + help="disable resolving dependencies", + ) + group.add_argument( + "--selfhosting", + action="store_true", + help="build a self-hosting tree by following build dependencies (optional)", + ) + group.add_argument( + "--fulltree", + action="store_true", + help="build a tree that includes all packages built from corresponding source rpms (optional)", + ) + group.add_argument( + "--greedy", + metavar="METHOD", + # TODO: read choices from library + choices=["none", "all", "build"], + ) + group.add_argument( + "--multilib", + metavar="[METHOD]", + action="append", + ) + return parser + + +def main(): + parser = get_parser() + ns = parser.parse_args() + + dnf_conf = Conf(ns.arch) + dnf_obj = DnfWrapper(dnf_conf) + + ksparser = pungi.ks.get_ksparser(ns.config) + + # read repos from ks + for ks_repo in ksparser.handler.repo.repoList: + dnf_obj.add_repo(ks_repo.name, ks_repo.baseurl) + + dnf_obj.fill_sack(load_system_repo=False, load_available_repos=True) + dnf_obj.read_comps() + + gather_opts = GatherOptions() + + if ns.greedy: + gather_opts.greedy_method = ns.greedy + + if ns.multilib: + gather_opts.multilib_methods = ns.multilib + + if ns.lookaside: + gather_opts.lookaside_repos = ns.lookaside + + gather_opts.multilib_blacklist = ksparser.handler.multilib_blacklist + gather_opts.multilib_whitelist = ksparser.handler.multilib_whitelist + gather_opts.prepopulate = ksparser.handler.prepopulate + gather_opts.fulltree_excludes = ksparser.handler.fulltree_excludes + + g = Gather(dnf_obj, gather_opts) + + packages, conditional_packages = ksparser.get_packages(dnf_obj) + excluded = ksparser.get_excluded_packages(dnf_obj) + + for i in excluded: + packages.add("-%s" % i) + + g.gather(packages, conditional_packages) + + print_rpms(g) + + +def _get_flags(gather_obj, pkg): + flags = gather_obj.result_package_flags.get(pkg, []) + flags = "(%s)" % ",".join(sorted(flags)) + return flags + + +def print_rpms(gather_obj): + for pkg in sorted(gather_obj.result_binary_packages): + print "RPM%s: %s-%s-%s.%s" % (_get_flags(gather_obj, pkg), pkg.name, pkg.version, pkg.release, pkg.arch) + + for pkg in sorted(gather_obj.result_debug_packages): + print "DEBUGINFO%s: %s-%s-%s.%s" % (_get_flags(gather_obj, pkg), pkg.name, pkg.version, pkg.release, pkg.arch) + + for pkg in sorted(gather_obj.result_source_packages): + print "SRPM%s: %s-%s-%s.%s" % (_get_flags(gather_obj, pkg), pkg.name, pkg.version, pkg.release, pkg.arch) + + +if __name__ == "__main__": + main() diff --git a/pungi/dnf_wrapper.py b/pungi/dnf_wrapper.py new file mode 100644 index 00000000..20ff70ae --- /dev/null +++ b/pungi/dnf_wrapper.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- + + +# This 
program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Library General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + + +# TODO: remove all DNF hacks, possibly this whole file + + +from distutils.version import LooseVersion +import os +import shutil +import tempfile + +import dnf +import dnf.arch +import dnf.conf +import dnf.repo +import dnf.sack + +import pungi.arch + + +class Conf(dnf.conf.Conf): + # This is only modified to get our custom Substitutions class in. + def __init__(self, arch, *args, **kwargs): + super(Conf, self).__init__(*args, **kwargs) + self.substitutions = Substitutions(arch) + + +class Substitutions(dict): + # DNF version of Substitutions detects host arch. We don't want that. + def __init__(self, arch): + super(Substitutions, self).__init__() + self['arch'] = arch + self['basearch'] = dnf.arch.basearch(arch) + + +class DnfWrapper(dnf.Base): + def __init__(self, *args, **kwargs): + super(DnfWrapper, self).__init__(*args, **kwargs) + self.arch_wrapper = ArchWrapper(self.conf.substitutions["arch"]) + self.comps_wrapper = CompsWrapper(self) + # use a custom cachedir, delete it after use + self.conf.cachedir = tempfile.mkdtemp(prefix="pungi_dnf_") + + def __del__(self): + if self.conf.cachedir.startswith("/tmp/"): + shutil.rmtree(self.conf.cachedir) + + def add_repo(self, repoid, baseurl=None, mirrorlist=None, ignoregroups=False, lookaside=False): + if "://" not in baseurl: + baseurl = "file://%s" % os.path.abspath(baseurl) + if LooseVersion(dnf.__version__) < LooseVersion("2.0.0"): + repo = dnf.repo.Repo(repoid, self.conf.cachedir) + else: + repo = dnf.repo.Repo(repoid, self.conf) + repo.baseurl = baseurl + repo.mirrorlist = mirrorlist + repo.ignoregroups = ignoregroups + self.repos.add(repo) + repo.priority = 10 if lookaside else 20 + + +class CompsWrapper(object): + def __init__(self, dnf_obj): + self.dnf = dnf_obj + + def __getitem__(self, name): + return self.groups[name] + + @property + def comps(self): + return self.dnf.comps + + @property + def groups(self): + result = {} + for i in self.comps.groups: + result[i.id] = i + return result + + def get_packages_from_group(self, group_id, include_default=True, include_optional=True, include_conditional=True): + packages = [] + conditional = [] + + group = self.groups[group_id] + + # add mandatory packages + packages.extend([i.name for i in group.mandatory_packages]) + + # add default packages + if include_default: + packages.extend([i.name for i in group.default_packages]) + + # add optional packages + if include_optional: + packages.extend([i.name for i in group.optional_packages]) + + for package in group.conditional_packages: + conditional.append({"name": package.requires, "install": package.name}) + + return packages, conditional + + def get_comps_packages(self, groups, exclude_groups): + packages = set() + conditional = [] + + if isinstance(groups, list): + groups = dict([(i, 1) for i in groups]) + + for group_id, group_include in sorted(groups.items()): + if group_id in 
exclude_groups: + continue + + include_default = group_include in (1, 2) + include_optional = group_include in (2, ) + include_conditional = True + pkgs, cond = self.get_packages_from_group(group_id, include_default, include_optional, include_conditional) + packages.update(pkgs) + for i in cond: + if i not in conditional: + conditional.append(i) + return list(packages), conditional + + +class ArchWrapper(object): + def __init__(self, arch): + self.base_arch = dnf.arch.basearch(arch) + self.all_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=True, add_noarch=True) + self.native_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=False, add_noarch=True) + self.multilib_arches = pungi.arch.get_valid_multilib_arches(self.base_arch) + self.source_arches = ["src", "nosrc"] diff --git a/pungi/gather_dnf.py b/pungi/gather_dnf.py new file mode 100644 index 00000000..eb1107bd --- /dev/null +++ b/pungi/gather_dnf.py @@ -0,0 +1,728 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Library General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +# TODO: logging +# TODO: move print functions from pungi-gather to this module, also write results to a file + + +import logging + +import hawkey +from kobo.rpmlib import parse_nvra + +import pungi.dnf_wrapper +import pungi.multilib_dnf + + +class GatherOptions(object): + def __init__(self, **kwargs): + super(GatherOptions, self).__init__() + + # include all unused sub-packages of already included RPMs + self.fulltree = False + + # include langpacks + self.langpacks = [] # format: [{"package": "langpack-pattern-%s"}] + + # resolve dependencies + self.resolve_deps = True + + # pull build dependencies + self.selfhosting = False + + # none, all, build + # TODO: validate values + self.greedy_method = "none" + + # multilib options + self.multilib_methods = [] + self.multilib_blacklist = [] + self.multilib_whitelist = [] + + # prepopulate + self.prepopulate = [] + + # lookaside repos; packages will be flagged accordingly + self.lookaside_repos = [] + + for key, value in kwargs.items(): + if not hasattr(self, key): + raise ValueError("Invalid gather option: %s" % key) + setattr(self, key, value) + + +def filter_debug_packages(q, arch=None): + result = q.filter(arch__neq=["src", "nosrc"]) + if arch: + arches = pungi.dnf_wrapper.ArchWrapper(arch).all_arches + result = result.filter(arch=arches) + result = result.filter(name__glob=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_native_debug_packages(q, arch): + result = q.filter(arch__neq=["src", "nosrc"]) + arches = pungi.dnf_wrapper.ArchWrapper(arch).native_arches + result = result.filter(arch=arches) + result = result.filter(name__glob=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_multilib_debug_packages(q, arch): + result = q.filter(arch__neq=["src", "nosrc"]) + arches = pungi.dnf_wrapper.ArchWrapper(arch).multilib_arches + result = 
result.filter(arch=arches) + result = result.filter(name__glob=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_source_packages(q): + result = q.filter(arch=["src", "nosrc"]) + return result + + +def filter_binary_packages(q, arch=None): + result = q.filter(arch__neq=["src", "nosrc"]) + if arch: + arches = pungi.dnf_wrapper.ArchWrapper(arch).all_arches + result = result.filter(arch=arches) + result = result.filter(latest_per_arch=True) + result = result.filter(name__glob__not=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_native_binary_packages(q, arch): + result = q.filter(arch__neq=["src", "nosrc"]) + arches = pungi.dnf_wrapper.ArchWrapper(arch).native_arches + result = result.filter(arch=arches) + result = result.filter(latest_per_arch=True) + result = result.filter(name__glob__not=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_multilib_binary_packages(q, arch): + result = q.filter(arch__neq=["src", "nosrc"]) + arches = pungi.dnf_wrapper.ArchWrapper(arch).multilib_arches + result = result.filter(arch=arches) + result = result.filter(latest_per_arch=True) + result = result.filter(name__glob__not=["*-debuginfo", "*-debuginfo-*"]) + return result + + +def filter_binary_noarch_packages(q): + result = q.filter(arch="noarch") + result = result.filter(latest_per_arch=True) + result = result.filter(name__glob__not=["*-debuginfo", "*-debuginfo-*"]) + return result + + +class GatherBase(object): + def __init__(self, dnf_obj): + self.dnf = dnf_obj + self.q_binary_packages = filter_binary_packages(self._query, arch=self.dnf.basearch).apply() + self.q_native_binary_packages = filter_native_binary_packages(self._query, arch=self.dnf.basearch).apply() + self.q_multilib_binary_packages = filter_multilib_binary_packages(self._query, arch=self.dnf.basearch).apply() + self.q_noarch_binary_packages = filter_binary_packages(self._query).apply() + self.q_debug_packages = filter_debug_packages(self._query, arch=self.dnf.basearch).apply() + self.q_native_debug_packages = filter_native_debug_packages(self._query, arch=self.dnf.basearch).apply() + self.q_multilib_debug_packages = filter_multilib_debug_packages(self._query, arch=self.dnf.basearch).apply() + self.q_source_packages = filter_source_packages(self._query).apply() + + @property + def _query(self): + return self.dnf._sack.query() + + def is_noarch_package(self, pkg): + return pkg.arch == "noarch" + + def is_native_package(self, pkg): + if pkg.arch in ["src", "nosrc"]: + return False + if pkg.arch == "noarch": + return True + if pkg.arch in self.dnf.arch_wrapper.native_arches: + return True + return False + + def is_multilib_package(self, pkg): + if pkg.arch in ["src", "nosrc"]: + return False + if pkg.arch == "noarch": + return False + if pkg.arch in self.dnf.arch_wrapper.multilib_arches: + return True + return False + + +class Gather(GatherBase): + def __init__(self, dnf_obj, gather_options): + super(Gather, self).__init__(dnf_obj) + self.opts = gather_options + self._multilib = pungi.multilib_dnf.Multilib(self.dnf._sack, gather_options.multilib_methods, blacklist=self.opts.multilib_blacklist, whitelist=self.opts.multilib_whitelist) + + # already processed packages + self.finished_add_binary_package_deps = {} # {pkg: [deps]} + self.finished_add_debug_package_deps = {} # {pkg: [deps]} + self.finished_add_source_package_deps = {} # {pkg: [deps]} + + self.finished_get_package_deps_reqs = {} + + self.finished_add_conditional_packages = {} # {pkg: [pkgs]} + self.finished_add_source_packages = {} # {pkg: 
src-pkg|None} + self.finished_add_debug_packages = {} # {pkg: [debug-pkgs]} + self.finished_add_fulltree_packages = {} # {pkg: [pkgs]} + self.finished_add_langpack_packages = {} # {pkg: [pkgs]} + self.finished_add_multilib_packages = {} # {pkg: pkg|None} + + # result + self.result_binary_packages = set() + self.result_debug_packages = set() + self.result_source_packages = set() + self.result_package_flags = {} + + self.provides_cache = {} + for i in self.q_binary_packages: + for prov in i.provides: + self.provides_cache.setdefault(str(prov), set()).add(i) + + def _set_flag(self, pkg, *flags): + self.result_package_flags.setdefault(pkg, set()).update(flags) + + def _has_flag(self, pkg, flag): + return flag in self.result_package_flags.get(pkg, set()) + + def _get_best_package(self, package_list, pkg=None, req=None): + if not package_list: + return [] + + if self.opts.greedy_method == "all": + return list(package_list) + + all_pkgs = list(package_list) + native_pkgs = self.q_native_binary_packages.filter(pkg=all_pkgs).apply() + multilib_pkgs = self.q_multilib_binary_packages.filter(pkg=all_pkgs).filter(arch__neq="noarch").apply() + + result = set() + + # try seen native packages first + seen_pkgs = set(native_pkgs) & self.result_binary_packages + if seen_pkgs: + result = seen_pkgs + + # then try seen multilib packages + if not result: + seen_pkgs = set(multilib_pkgs) & self.result_binary_packages + if seen_pkgs: + result = seen_pkgs + + if not result: + result = set(native_pkgs) + + if not result: + result = set(multilib_pkgs) + + if not result: + return [] + + # return package with shortest name, alphabetically ordered + result = list(result) + result.sort(lambda x, y: cmp(x.name, y.name)) + result.sort(lambda x, y: cmp(len(x.name), len(y.name))) + + # best arch + arches = self.dnf.arch_wrapper.all_arches + result.sort(lambda x, y: cmp(arches.index(x.arch), arches.index(y.arch))) + match = result[0] + + if self.opts.greedy_method == "build" and req: + if self.is_native_package(match): + return [i for i in native_pkgs if i.sourcerpm == match.sourcerpm] +# return list(native_pkgs.filter(sourcerpm=match.sourcerpm, provides=req)) + else: + return [i for i in multilib_pkgs if i.sourcerpm == match.sourcerpm] +# return list(multilib_pkgs.filter(sourcerpm=match.sourcerpm, provides=req)) + return [match] + + def _add_packages(self, packages, pulled_by=None): + added = set() + for i in packages: + assert i is not None + if i not in self.result_binary_packages: + added.add(i) + pb = "" + if pulled_by: + pb = " (pulled by %s)" % pulled_by + print "Added package %s%s" % (i, pb) + self.result_binary_packages.add(i) + # lookaside + if i.repoid in self.opts.lookaside_repos: + self._set_flag(i, "lookaside") + + for pkg in added: + if pkg is None: + continue + for prov in pkg.provides: + self.finished_get_package_deps_reqs.setdefault(str(prov), set()).add(pkg) + + self.result_binary_packages.update(added) + + def _get_package_deps(self, pkg): + """ + Return all direct (1st level) deps for a package. 
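+        Each requirement is first satisfied from providers already pulled into
+        the result (cached per-provide in finished_get_package_deps_reqs by
+        _add_packages); only requirements with no cached provider fall back to
+        a provides query over all binary packages, narrowed down with
+        _get_best_package().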
+ """ + assert pkg is not None + result = set() + + for req in pkg.requires: + deps = self.finished_get_package_deps_reqs.setdefault(str(req), set()) + if deps: + result.update(deps) + continue + + # TODO: need query also debuginfo + + deps = self.q_binary_packages.filter(provides=req).apply() + deps = self._get_best_package(deps, req=req) + result.update(deps) + + return result + + def add_initial_packages(self, pattern_list): + added = set() + + excludes = [] + includes = [] + for pattern in pattern_list: + if pattern.startswith("-"): + excludes.append(pattern[1:]) + else: + includes.append(pattern) + + exclude = set() + for pattern in excludes: + # TODO: debug, source + if pattern.endswith(".+"): + pkgs = self.q_multilib_binary_packages.filter_autoglob(name=pattern[:-2]) + else: + pkgs = self.q_binary_packages.filter_autoglob(name=pattern) + + exclude.update(pkgs) + print "EXCLUDED: %s" % list(pkgs) + self.dnf._sack.add_excludes(pkgs) + + # HACK + self.q_binary_packages = self.q_binary_packages.filter(pkg=[i for i in self.q_binary_packages if i not in exclude]).apply() + self.q_native_binary_packages = self.q_native_binary_packages.filter(pkg=[i for i in self.q_native_binary_packages if i not in exclude]).apply() + self.q_multilib_binary_packages = self.q_multilib_binary_packages.filter(pkg=[i for i in self.q_multilib_binary_packages if i not in exclude]).apply() + self.q_noarch_binary_packages = self.q_noarch_binary_packages.filter(pkg=[i for i in self.q_noarch_binary_packages if i not in exclude]).apply() + + for pattern in includes: + if pattern == "system-release" and self.opts.greedy_method == "all": + pkgs = self.q_binary_packages.filter(provides=hawkey.Reldep(self.dnf.sack, "system-release")).apply() + else: + if pattern.endswith(".+"): + pkgs = self.q_multilib_binary_packages.filter_autoglob(name=pattern[:-2]).apply() + else: + pkgs = self.q_binary_packages.filter_autoglob(name=pattern).apply() + + pkgs = self._get_best_package(pkgs) + if pkgs: + added.update(pkgs) + else: + print "Doesn't match: %s" % pattern + + for pkg in added: + self._set_flag(pkg, "input") + + if self.opts.greedy_method == "build": + for pkg in added.copy(): + prov = hawkey.Reldep(self.dnf._sack, pkg.name) + if pkg in self.q_native_binary_packages: + greedy_build_packages = self.q_native_binary_packages.filter(sourcerpm=pkg.sourcerpm, provides=prov) + else: + greedy_build_packages = self.q_multilib_binary_packages.filter(sourcerpm=pkg.sourcerpm, provides=prov) + for i in greedy_build_packages: + self._set_flag(i, "input", "greedy:build") + added.add(i) + + return added + + def add_prepopulate_packages(self): + added = set() + + for name_arch in self.opts.prepopulate: + name, arch = name_arch.rsplit(".", 1) + pkgs = self.q_binary_packages.filter_autoglob(name=name, arch=arch) + pkgs = self._get_best_package(pkgs) + if pkgs: + added.update(pkgs) + else: + print "Prepopulate: Doesn't match: %s" % name_arch + + for pkg in added: + self._set_flag(pkg, "prepopulate") + + return added + + def add_binary_package_deps(self): + added = set() + + if not self.opts.resolve_deps: + return added + + for pkg in self.result_binary_packages.copy(): + assert pkg is not None + + try: + deps = self.finished_add_binary_package_deps[pkg] + except KeyError: + deps = self._get_package_deps(pkg) + for i in deps: + if i not in self.result_binary_packages: + self._add_packages([i], pulled_by=pkg) + added.add(i) + + return added + + def add_conditional_packages(self): + """ + For each binary package add their conditional dependencies 
as specified in comps. + Return newly added packages. + """ + added = set() + + if not self.opts.resolve_deps: + return added + + for pkg in self.result_binary_packages.copy(): + assert pkg is not None + + try: + deps = self.finished_add_conditional_packages[pkg] + except KeyError: + deps = set() + for cond in self.conditional_packages: + if cond["name"] != pkg.name: + continue + pkgs = self.q_binary_packages.filter(name=cond["install"]).apply() + pkgs = self._get_best_package(pkgs) # TODO: multilib? + deps.update(pkgs) + + for i in deps: + if i not in self.result_binary_packages: + self._add_packages([i], pulled_by=pkg) + self._set_flag(pkg, "conditional") + added.add(i) + + return added + + def add_source_package_deps(self): + added = set() + + if not self.opts.selfhosting: + return added + + for pkg in self.result_source_packages: + assert pkg is not None + + try: + deps = self.finished_add_source_package_deps[pkg] + except KeyError: + deps = self._get_package_deps(pkg) + for i in deps: + if i not in self.result_binary_packages: + self._add_packages([i], pulled_by=pkg) + added.add(i) + self._set_flag(pkg, "self-hosting") + + return added + + def add_source_packages(self): + """ + For each binary package add it's source package. + Return newly added source packages. + """ + added = set() + + for pkg in self.result_binary_packages: + assert pkg is not None + + try: + source_pkg = self.finished_add_source_packages[pkg] + except KeyError: + source_pkg = None + if pkg.sourcerpm: + nvra = parse_nvra(pkg.sourcerpm) + source_pkgs = self.q_source_packages.filter(name=nvra["name"], version=nvra["version"], release=nvra["release"]).apply() + if source_pkgs: + source_pkg = list(source_pkgs)[0] + + self.finished_add_source_packages[pkg] = source_pkg + if source_pkg: + lookaside = self._has_flag(pkg, "lookaside") + if lookaside: + self._set_flag(source_pkg, "lookaside") + if source_pkg not in self.result_source_packages: + added.add(source_pkg) + self.result_source_packages.add(source_pkg) + + return added + + def add_debug_packages(self): + """ + For each binary package add debuginfo packages built from the same source. + Return newly added debug packages. + """ + added = set() + + for pkg in self.result_binary_packages: + assert pkg is not None + + if self.is_noarch_package(pkg): + self.finished_add_debug_packages[pkg] = [] + continue + + try: + debug_pkgs = self.finished_add_debug_packages[pkg] + except KeyError: + debug_pkgs = [] + if pkg.sourcerpm: + if self.is_native_package(pkg): + debug_pkgs = list(self.q_native_debug_packages.filter(sourcerpm=pkg.sourcerpm)) + else: + debug_pkgs = list(self.q_multilib_debug_packages.filter(sourcerpm=pkg.sourcerpm)) + + lookaside = self._has_flag(pkg, "lookaside") + for i in debug_pkgs: + if lookaside: + self._set_flag(i, "lookaside") + if i not in self.result_debug_packages: + added.add(i) + + self.finished_add_debug_packages[pkg] = debug_pkgs + self.result_debug_packages.update(debug_pkgs) + + return added + + def add_fulltree_packages(self): + """ + For each binary package add all binary packages built from the same source. + Return newly added binary packages. 
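+        Native sub-packages are preferred: multilib sub-packages are pulled
+        only when no native sub-package exists, or when the result already
+        contains multilib (and no native) packages built from the same source
+        RPM. Noarch sub-packages are always pulled in addition.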
+ """ + added = set() + + if not self.opts.fulltree: + return added + + for pkg in sorted(self.result_binary_packages): + assert pkg is not None + + try: + fulltree_pkgs = self.finished_add_fulltree_packages[pkg] + except KeyError: + q_native_fulltree_pkgs = self.q_native_binary_packages.filter(sourcerpm=pkg.sourcerpm, arch__neq="noarch").apply() + q_multilib_fulltree_pkgs = self.q_multilib_binary_packages.filter(sourcerpm=pkg.sourcerpm, arch__neq="noarch").apply() + q_noarch_fulltree_pkgs = self.q_native_binary_packages.filter(sourcerpm=pkg.sourcerpm, arch="noarch").apply() + + native_fulltree_pkgs = set(q_native_fulltree_pkgs) + multilib_fulltree_pkgs = set(q_multilib_fulltree_pkgs) + noarch_fulltree_pkgs = set(q_noarch_fulltree_pkgs) + + if not native_fulltree_pkgs: + # no existing native pkgs -> pull multilib + pull_native = False + elif native_fulltree_pkgs & self.result_binary_packages: + # native pkgs in result -> pull native + pull_native = True + elif multilib_fulltree_pkgs & self.result_binary_packages: + # multilib pkgs in result -> pull multilib + pull_native = False + else: + # fallback / default + pull_native = True + + if pull_native: + fulltree_pkgs = list(native_fulltree_pkgs) + else: + fulltree_pkgs = list(multilib_fulltree_pkgs) + + # always pull all noarch subpackages + fulltree_pkgs += list(noarch_fulltree_pkgs) + + for i in fulltree_pkgs: + if i not in self.result_binary_packages: + self._add_packages([i]) + self._set_flag(i, "fulltree") + added.add(i) + + self.finished_add_fulltree_packages[pkg] = fulltree_pkgs + + return added + + def add_langpack_packages(self, langpack_patterns): + """ + For each binary package add all matching langpack packages. + Return newly added binary packages. + + langpack_patterns: [{"name": , "install": }] + """ + added = set() + + if not self.opts.langpacks: + return added + + exceptions = ["man-pages-overrides"] + + for pkg in sorted(self.result_binary_packages): + assert pkg is not None + + try: + langpack_pkgs = self.finished_add_langpack_packages[pkg] + except KeyError: + patterns = [i["install"] for i in langpack_patterns if i["name"] == pkg.name] + patterns = [i.replace("%s", "*") for i in patterns] + langpack_pkgs = self.q_binary_packages.filter(name__glob=patterns).apply() + langpack_pkgs = langpack_pkgs.filter(name__glob__not=["*-devel", "*-static"]) + langpack_pkgs = langpack_pkgs.filter(name__neq=exceptions) + + pkgs_by_name = {} + for i in langpack_pkgs: + pkgs_by_name.setdefault(i.name, set()).add(i) + + langpack_pkgs = set() + for name in sorted(pkgs_by_name): + pkgs = pkgs_by_name[name] + i = self._get_best_package(pkgs) + if i: + # TODO: greedy + i = i[0] + langpack_pkgs.add(i) + self._set_flag(i, "langpack") + if i not in self.result_binary_packages: + self._add_packages([i], pulled_by=pkg) + added.add(pkg) + self.finished_add_langpack_packages[pkg] = langpack_pkgs + + return added + + def add_multilib_packages(self): + added = set() + + if not self.opts.multilib_methods or self.opts.multilib_methods == ["none"]: + return added + + for pkg in sorted(self.result_binary_packages): + try: + self.finished_add_multilib_packages[pkg] + except KeyError: + + if pkg.arch in ("noarch", "src", "nosrc"): + self.finished_add_multilib_packages[pkg] = None + continue + + if pkg.arch in self.dnf.arch_wrapper.multilib_arches: + self.finished_add_multilib_packages[pkg] = None + continue + + pkgs = self.q_multilib_binary_packages.filter(name=pkg.name, version=pkg.version, release=pkg.release, arch__neq="noarch").apply() + pkgs = 
self._get_best_package(pkgs) + multilib_pkgs = [] + for i in pkgs: + is_multilib = self._multilib.is_multilib(i) + if is_multilib: + multilib_pkgs.append(i) + added.add(i) + self._set_flag(i, "multilib") + self._add_packages([i]) + self.finished_add_multilib_packages[pkg] = i + # TODO: ^^^ may get multiple results; i686, i586, etc. + + return added + + def gather(self, pattern_list, conditional_packages=None): + self.conditional_packages = conditional_packages or [] + + print "INITIAL PACKAGES" + added = self.add_initial_packages(pattern_list) + self._add_packages(added) + + print "PREPOPULATE" + added = self.add_prepopulate_packages() + self._add_packages(added) + + pass_num = 0 + added = False + while 1: + if pass_num > 0 and not added: + break + pass_num += 1 + print 80 * "-" + # self.logger.info("Pass #%s" % pass_num) + print "PASS %s" % pass_num + + print "DEPS" + added = self.add_conditional_packages() + print "ADDED: %s" % bool(added) + if added: + continue + + # resolve deps + print "DEPS" + added = self.add_binary_package_deps() + print "ADDED: %s" % bool(added) + if added: + continue + + added = self.add_source_package_deps() + print "ADDED: %s" % bool(added) + if added: + continue + + print "SOURCE PACKAGES" + added = self.add_source_packages() + print "ADDED: %s" % bool(added) + if added: + continue + + print "DEBUG PACKAGES" + added = self.add_debug_packages() + print "ADDED: %s" % bool(added) + if added: + continue + # TODO: debug deps + + print "FULLTREE" + added = self.add_fulltree_packages() + print "ADDED: %s" % bool(added) + if added: + continue + + print "LANGPACKS" + added = self.add_langpack_packages(self.opts.langpacks) + print "ADDED: %s" % bool(added) + if added: + continue + + print "MULTILIB" + added = self.add_multilib_packages() + print "ADDED: %s" % bool(added) + if added: + continue + + # nothing added -> break depsolving cycle + break diff --git a/pungi/ks.py b/pungi/ks.py index f58f234c..31a2a0a6 100644 --- a/pungi/ks.py +++ b/pungi/ks.py @@ -134,8 +134,57 @@ class KickstartParser(pykickstart.parser.KickstartParser): self.registerSection(MultilibWhitelistSection(self.handler)) self.registerSection(PrepopulateSection(self.handler)) + def get_packages(self, dnf_obj): + packages = set() + conditional_packages = [] + + packages.update(self.handler.packages.packageList) + + for ks_group in self.handler.packages.groupList: + group_id = ks_group.name + include_default = False + include_optional = False + + if ks_group.include == 1: + include_default = True + + if ks_group.include == 2: + include_default = True + include_optional = True + + group_packages, group_conditional_packages = dnf_obj.comps_wrapper.get_packages_from_group(group_id, include_default=include_default, include_optional=include_optional, include_conditional=True) + packages.update(group_packages) + for i in group_conditional_packages: + if i not in conditional_packages: + conditional_packages.append(i) + + return packages, conditional_packages + + def get_excluded_packages(self, dnf_obj): + excluded = set() + excluded.update(self.handler.packages.excludedList) + + for ks_group in self.handler.packages.excludedGroupList: + group_id = ks_group.name + include_default = False + include_optional = False + + if ks_group.include == 1: + include_default = True + + if ks_group.include == 2: + include_default = True + include_optional = True + + group_packages, group_conditional_packages = dnf_obj.comps_wrapper.get_packages_from_group(group_id, include_default=include_default, 
include_optional=include_optional, include_conditional=False) + excluded.update(group_packages) + + return excluded + HandlerClass = pykickstart.version.returnClassForVersion() + + class PungiHandler(HandlerClass): def __init__(self, *args, **kwargs): HandlerClass.__init__(self, *args, **kwargs) diff --git a/pungi/multilib_dnf.py b/pungi/multilib_dnf.py new file mode 100644 index 00000000..427bdd3b --- /dev/null +++ b/pungi/multilib_dnf.py @@ -0,0 +1,132 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; version 2 of the License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Library General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +import re +import fnmatch + + +DEVEL_BLACKLIST = [ + "dmraid-devel", + "ghc-*", + "java-*-gcj-devel", + "java-*-icedtea-devel", + "java-*-openjdk-devel", + "kdeutils-devel", + "kernel-devel", + "mkinitrd-devel", + "php-devel", +] + + +DEVEL_WHITELIST = [ + "glibc-static", + "libstdc++-static", +] + + +RUNTIME_BLACKLIST = [ + "gcc", + "kernel", + "tomcat-native", +] + + +RUNTIME_WHITELIST = [ + "glibc-static", + "libflashsupport", + "libgnat", + "lmms-vst", + "nspluginwrapper", + "perl-libs", + "redhat-lsb", + "valgrind", + "wine", + "wine-arts", + "yaboot", +] + + +class Multilib(object): + def __init__(self, sack, methods, blacklist=None, whitelist=None): + self.sack = sack + self.methods = [] + self.blacklist = blacklist or [] + self.whitelist = whitelist or [] + self.use_default_blacklists = True # use *_BLACKLIST and *_WHITELIST lists + + for i in methods: + name = "method_%s" % i + func = getattr(self, name) + self.methods.append(func) + + def _match_one(self, pkg, pattern): + return fnmatch.fnmatch(pkg.name, pattern) + + def _match_any(self, pkg, pattern_list): + for i in pattern_list: + if self._match_one(pkg, i): + return True + return False + + def method_none(self, pkg): + return False + + def method_all(self, pkg): + return True + + def method_devel(self, pkg): + if self.use_default_blacklists: + if self._match_any(pkg, DEVEL_BLACKLIST): + return False + if self._match_any(pkg, DEVEL_WHITELIST): + return True + if pkg.name.endswith("-devel"): + return True + if pkg.name.endswith("-static"): + return True + for prov in pkg.provides: + # TODO: split reldep to name/flag/value + prov = str(prov).split(" ")[0] + if "-devel" in prov: + return True + if "-static" in prov: + return True + return False + + def method_runtime(self, pkg): + if self.use_default_blacklists: + if self._match_any(pkg, RUNTIME_BLACKLIST): + return False + if self._match_any(pkg, RUNTIME_WHITELIST): + return True + so = re.compile(r"^.*\.so\.\d+.*$") + for prov in pkg.provides: + prov = str(prov) + if so.match(prov): + return True + return False + + def is_multilib(self, pkg): + if self._match_any(pkg, self.blacklist): + return False + if self._match_any(pkg, self.whitelist): + return True + for i in self.methods: + if i(pkg): + return True + return False
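
Example usage (illustrative only, not part of the patch; the kickstart file
name, repo id and package versions below are hypothetical): the new
pungi-gather script reads repos and package/group lists from a kickstart file
and is driven by the options defined above, e.g.

    pungi-gather --arch=x86_64 --config=fedora.ks \
        --greedy=build --multilib=devel --multilib=runtime \
        --fulltree --selfhosting --lookaside=lookaside-repo

print_rpms() then writes one line per gathered package, with the flags
collected in result_package_flags (input, prepopulate, conditional, fulltree,
langpack, multilib, self-hosting, greedy:build, lookaside) in parentheses:

    RPM(input): bash-4.3.42-1.fc23.x86_64
    DEBUGINFO(): bash-debuginfo-4.3.42-1.fc23.x86_64
    SRPM(): bash-4.3.42-1.fc23.src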