Remove pungi/gather.py and associated code

This commit completely drops support for Yum as a depsolving/repoclosure backend.

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
(cherry picked from commit f5702e4c9d0d5d9d31421d3d47200581e41f02bf)
parent 4454619be6
commit 2e9baeaf51
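For orientation, a minimal sketch of the relevant compose configuration after this change (illustrative only; since "dnf" is now both the default and the only accepted value, neither line is strictly required):

# hypothetical compose configuration snippet
gather_backend = "dnf"        # "yum" is no longer a valid choice
repoclosure_backend = "dnf"   # same backend list as the gather option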
@@ -703,7 +703,6 @@ def make_schema():
             ),
             "repoclosure_backend": {
                 "type": "string",
-                # Gather and repoclosure both have the same backends: yum + dnf
                 "default": _get_default_gather_backend(),
                 "enum": _get_gather_backends(),
             },
@@ -1620,10 +1619,8 @@ def update_schema(schema, update_dict):
 
 
 def _get_gather_backends():
-    if six.PY2:
-        return ["yum", "dnf"]
     return ["dnf"]
 
 
 def _get_default_gather_backend():
-    return "yum" if six.PY2 else "dnf"
+    return "dnf"
@@ -466,13 +466,10 @@ class Compose(kobo.log.LoggingBase):
 
     @property
     def should_create_yum_database(self):
-        """Explicit configuration trumps all. Otherwise check gather backend
-        and only create it for Yum.
+        """Explicit configuration trumps all. Yum is no longer supported, so
+        default to False.
         """
-        config = self.conf.get("createrepo_database")
-        if config is not None:
-            return config
-        return self.conf["gather_backend"] == "yum"
+        return self.conf.get("createrepo_database", False)
 
     def read_variants(self):
         # TODO: move to phases/init ?
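A practical consequence of the hunk above, sketched as a hypothetical configuration line: the createrepo database is now created only when explicitly requested, because the old fallback of enabling it whenever the gather backend was Yum is gone.

# hypothetical compose configuration snippet: opt back in explicitly if needed
createrepo_database = True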
@@ -1,78 +0,0 @@
-# -*- coding: utf-8 -*-
-
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; version 2 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, see <https://gnu.org/licenses/>.
-
-
-import os
-import sys
-import time
-
-from ConfigParser import SafeConfigParser
-
-from .arch_utils import getBaseArch
-
-# In development, `here` will point to the bin/ directory with scripts.
-here = sys.path[0]
-MULTILIBCONF = (
-    os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
-    if here != "/usr/bin"
-    else "/usr/share/pungi/multilib"
-)
-
-
-class Config(SafeConfigParser):
-    def __init__(self, pungirc=None):
-        SafeConfigParser.__init__(self)
-
-        self.add_section("pungi")
-        self.add_section("lorax")
-
-        self.set("pungi", "osdir", "os")
-        self.set("pungi", "sourcedir", "source")
-        self.set("pungi", "debugdir", "debug")
-        self.set("pungi", "isodir", "iso")
-        self.set("pungi", "multilibconf", MULTILIBCONF)
-        self.set(
-            "pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
-        )
-        self.set("pungi", "relnotedirre", "")
-        self.set(
-            "pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
-        )
-        self.set("pungi", "product_path", "Packages")
-        self.set("pungi", "cachedir", "/var/cache/pungi")
-        self.set("pungi", "compress_type", "xz")
-        self.set("pungi", "arch", getBaseArch())
-        self.set("pungi", "family", "Fedora")
-        self.set("pungi", "iso_basename", "Fedora")
-        self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
-        self.set("pungi", "variant", "")
-        self.set("pungi", "destdir", os.getcwd())
-        self.set("pungi", "workdirbase", "/work")
-        self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
-        self.set("pungi", "debuginfo", "True")
-        self.set("pungi", "alldeps", "True")
-        self.set("pungi", "isfinal", "False")
-        self.set("pungi", "nohash", "False")
-        self.set("pungi", "full_archlist", "False")
-        self.set("pungi", "multilib", "")
-        self.set("pungi", "lookaside_repos", "")
-        self.set("pungi", "resolve_deps", "True")
-        self.set("pungi", "no_dvd", "False")
-        self.set("pungi", "nomacboot", "False")
-        self.set("pungi", "rootfs_size", "False")
-
-        # if missing, self.read() is a noop, else change 'defaults'
-        if pungirc:
-            self.read(os.path.expanduser(pungirc))
 1585  pungi/gather.py
File diff suppressed because it is too large
@@ -1080,7 +1080,7 @@ class Gather(GatherBase):
             if ex.errno == errno.EEXIST:
                 self.logger.warning("Downloaded package exists in %s", target)
             else:
-                self.logger.error("Unable to link %s from the yum cache.", pkg.name)
+                self.logger.error("Unable to link %s from the dnf cache.", pkg.name)
                 raise
 
     def log_count(self, msg, method, *args):
@@ -15,7 +15,6 @@
 
 
 import os
-import shutil
 
 from kobo.shortcuts import run
 from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
@@ -220,9 +219,7 @@ def resolve_deps(compose, arch, variant, source_name=None):
     yum_arch = tree_arch_to_yum_arch(arch)
     tmp_dir = compose.paths.work.tmp_dir(arch, variant)
     cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
-    # TODO: remove YUM code, fully migrate to DNF
     backends = {
-        "yum": pungi_wrapper.get_pungi_cmd,
         "dnf": pungi_wrapper.get_pungi_cmd_dnf,
     }
     get_cmd = backends[compose.conf["gather_backend"]]
@@ -245,17 +242,6 @@ def resolve_deps(compose, arch, variant, source_name=None):
     with temp_dir(prefix="pungi_") as work_dir:
         run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)
 
-    # Clean up tmp dir
-    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
-    yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
-    if os.path.isdir(yumroot_dir):
-        try:
-            shutil.rmtree(yumroot_dir)
-        except Exception as e:
-            compose.log_warning(
-                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
-            )
-
     with open(pungi_log, "r") as f:
         packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)
 
@@ -15,7 +15,6 @@
 
 
 import os
-import shutil
 
 from kobo.shortcuts import run
 
@@ -76,7 +75,6 @@ def get_pkgset_from_repos(compose):
         pungi_dir = compose.paths.work.pungi_download_dir(arch)
 
         backends = {
-            "yum": pungi.get_pungi_cmd,
             "dnf": pungi.get_pungi_cmd_dnf,
         }
         get_cmd = backends[compose.conf["gather_backend"]]
@@ -93,8 +91,6 @@ def get_pkgset_from_repos(compose):
             cache_dir=compose.paths.work.pungi_cache_dir(arch=arch),
             profiler=profiler,
         )
-        if compose.conf["gather_backend"] == "yum":
-            cmd.append("--force")
 
         # TODO: runroot
         run(cmd, logfile=pungi_log, show_cmd=True, stdout=False)
@@ -111,17 +107,6 @@ def get_pkgset_from_repos(compose):
             flist.append(dst)
             pool.queue_put((src, dst))
 
-        # Clean up tmp dir
-        # Workaround for rpm not honoring sgid bit which only appears when yum is used.
-        yumroot_dir = os.path.join(pungi_dir, "work", arch, "yumroot")
-        if os.path.isdir(yumroot_dir):
-            try:
-                shutil.rmtree(yumroot_dir)
-            except Exception as e:
-                compose.log_warning(
-                    "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
-                )
-
     msg = "Linking downloaded pkgset packages"
     compose.log_info("[BEGIN] %s" % msg)
     pool.start()
@@ -101,7 +101,6 @@ def run_repoclosure(compose):
 
 
 def _delete_repoclosure_cache_dirs(compose):
-    if "dnf" == compose.conf["repoclosure_backend"]:
     from dnf.const import SYSTEM_CACHEDIR
     from dnf.util import am_i_root
     from dnf.yum.misc import getCacheDir
@@ -110,10 +109,6 @@ def _delete_repoclosure_cache_dirs(compose):
         top_cache_dir = SYSTEM_CACHEDIR
     else:
         top_cache_dir = getCacheDir()
-    else:
-        from yum.misc import getCacheDir
-
-        top_cache_dir = getCacheDir()
 
     for name in os.listdir(top_cache_dir):
         if name.startswith(compose.compose_id):
@@ -1,304 +0,0 @@
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; version 2 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, see <https://gnu.org/licenses/>.
-
-from __future__ import absolute_import
-from __future__ import print_function
-
-import os
-import sys
-
-from argparse import ArgumentParser, Action
-
-from pungi import get_full_version
-import pungi.gather
-import pungi.config
-import pungi.ks
-
-
-def get_arguments(config):
-    parser = ArgumentParser()
-
-    class SetConfig(Action):
-        def __call__(self, parser, namespace, value, option_string=None):
-            config.set("pungi", self.dest, value)
-
-    parser.add_argument("--version", action="version", version=get_full_version())
-
-    # Pulled in from config file to be cli options as part of pykickstart conversion
-    parser.add_argument(
-        "--destdir",
-        dest="destdir",
-        action=SetConfig,
-        help="destination directory (defaults to current directory)",
-    )
-    parser.add_argument(
-        "--cachedir",
-        dest="cachedir",
-        action=SetConfig,
-        help="package cache directory (defaults to /var/cache/pungi)",
-    )
-    parser.add_argument(
-        "--selfhosting",
-        action="store_true",
-        dest="selfhosting",
-        help="build a self-hosting tree by following build dependencies (optional)",
-    )
-    parser.add_argument(
-        "--fulltree",
-        action="store_true",
-        dest="fulltree",
-        help="build a tree that includes all packages built from corresponding source rpms (optional)",  # noqa: E501
-    )
-    parser.add_argument(
-        "--nosource",
-        action="store_true",
-        dest="nosource",
-        help="disable gathering of source packages (optional)",
-    )
-    parser.add_argument(
-        "--nodebuginfo",
-        action="store_true",
-        dest="nodebuginfo",
-        help="disable gathering of debuginfo packages (optional)",
-    )
-    parser.add_argument(
-        "--nodownload",
-        action="store_true",
-        dest="nodownload",
-        help="disable downloading of packages. instead, print the package URLs (optional)",  # noqa: E501
-    )
-    parser.add_argument(
-        "--nogreedy",
-        action="store_true",
-        dest="nogreedy",
-        help="disable pulling of all providers of package dependencies (optional)",
-    )
-    parser.add_argument(
-        "--nodeps",
-        action="store_false",
-        dest="resolve_deps",
-        default=True,
-        help="disable resolving dependencies",
-    )
-    parser.add_argument(
-        "--force",
-        default=False,
-        action="store_true",
-        help="Force reuse of an existing destination directory (will overwrite files)",
-    )
-    parser.add_argument(
-        "--nohash",
-        default=False,
-        action="store_true",
-        help="disable hashing the Packages trees",
-    )
-    parser.add_argument(
-        "--full-archlist",
-        action="store_true",
-        help="Use the full arch list for x86_64 (include i686, i386, etc.)",
-    )
-    parser.add_argument("--arch", help="Override default (uname based) arch")
-    parser.add_argument(
-        "--greedy", metavar="METHOD", help="Greedy method; none, all, build"
-    )
-    parser.add_argument(
-        "--multilib",
-        action="append",
-        metavar="METHOD",
-        help="Multilib method; can be specified multiple times; recommended: devel, runtime",  # noqa: E501
-    )
-    parser.add_argument(
-        "--lookaside-repo",
-        action="append",
-        dest="lookaside_repos",
-        metavar="NAME",
-        help="Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)",  # noqa: E501
-    )
-    parser.add_argument(
-        "--workdirbase",
-        dest="workdirbase",
-        action=SetConfig,
-        help="base working directory (defaults to destdir + /work)",
-    )
-    parser.add_argument(
-        "--no-dvd",
-        default=False,
-        action="store_true",
-        dest="no_dvd",
-        help="Do not make a install DVD/CD only the netinstall image and the tree",
-    )
-    parser.add_argument("--lorax-conf", help="Path to lorax.conf file (optional)")
-    parser.add_argument(
-        "-i",
-        "--installpkgs",
-        default=[],
-        action="append",
-        metavar="STRING",
-        help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",  # noqa: E501
-    )
-    parser.add_argument(
-        "--multilibconf",
-        default=None,
-        action=SetConfig,
-        help="Path to multilib conf files. Default is /usr/share/pungi/multilib/",
-    )
-
-    parser.add_argument(
-        "-c",
-        "--config",
-        dest="config",
-        required=True,
-        help="Path to kickstart config file",
-    )
-    parser.add_argument(
-        "-G",
-        action="store_true",
-        default=False,
-        dest="do_gather",
-        help="Flag to enable processing the Gather stage",
-    )
-
-    parser.add_argument(
-        "--pungirc",
-        dest="pungirc",
-        default="~/.pungirc",
-        action=SetConfig,
-        help="Read pungi options from config file ",
-    )
-
-    return parser.parse_args()
-
-
-def main():
-    config = pungi.config.Config()
-    opts = get_arguments(config)
-
-    # Read the config to create "new" defaults
-    # reparse command line options so they take precedence
-    config = pungi.config.Config(pungirc=opts.pungirc)
-    opts = get_arguments(config)
-
-    # Set up the kickstart parser and pass in the kickstart file we were handed
-    ksparser = pungi.ks.get_ksparser(ks_path=opts.config)
-
-    config.set("pungi", "force", str(opts.force))
-
-    if config.get("pungi", "workdirbase") == "/work":
-        config.set("pungi", "workdirbase", "%s/work" % config.get("pungi", "destdir"))
-    # Set up our directories
-    if not os.path.exists(config.get("pungi", "destdir")):
-        try:
-            os.makedirs(config.get("pungi", "destdir"))
-        except OSError:
-            print(
-                "Error: Cannot create destination dir %s"
-                % config.get("pungi", "destdir"),
-                file=sys.stderr,
-            )
-            sys.exit(1)
-    else:
-        print("Warning: Reusing existing destination directory.")
-
-    if not os.path.exists(config.get("pungi", "workdirbase")):
-        try:
-            os.makedirs(config.get("pungi", "workdirbase"))
-        except OSError:
-            print(
-                "Error: Cannot create working base dir %s"
-                % config.get("pungi", "workdirbase"),
-                file=sys.stderr,
-            )
-            sys.exit(1)
-    else:
-        print("Warning: Reusing existing working base directory.")
-
-    cachedir = config.get("pungi", "cachedir")
-
-    if not os.path.exists(cachedir):
-        try:
-            os.makedirs(cachedir)
-        except OSError:
-            print("Error: Cannot create cache dir %s" % cachedir, file=sys.stderr)
-            sys.exit(1)
-
-    # Set debuginfo flag
-    if opts.nodebuginfo:
-        config.set("pungi", "debuginfo", "False")
-    if opts.greedy:
-        config.set("pungi", "greedy", opts.greedy)
-    else:
-        # XXX: compatibility
-        if opts.nogreedy:
-            config.set("pungi", "greedy", "none")
-        else:
-            config.set("pungi", "greedy", "all")
-    config.set("pungi", "resolve_deps", str(bool(opts.resolve_deps)))
-    if opts.nohash:
-        config.set("pungi", "nohash", "True")
-    if opts.full_archlist:
-        config.set("pungi", "full_archlist", "True")
-    if opts.arch:
-        config.set("pungi", "arch", opts.arch)
-    if opts.multilib:
-        config.set("pungi", "multilib", " ".join(opts.multilib))
-    if opts.lookaside_repos:
-        config.set("pungi", "lookaside_repos", " ".join(opts.lookaside_repos))
-    config.set("pungi", "fulltree", str(bool(opts.fulltree)))
-    config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
-    config.set("pungi", "nosource", str(bool(opts.nosource)))
-    config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))
-
-    # Actually do work.
-    mypungi = pungi.gather.Pungi(config, ksparser)
-
-    with mypungi.yumlock:
-        mypungi._inityum()  # initialize the yum object for things that need it
-        mypungi.gather()
-        if opts.nodownload:
-            for line in mypungi.list_packages():
-                flags_str = ",".join(line["flags"])
-                if flags_str:
-                    flags_str = "(%s)" % flags_str
-                sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
-            sys.stdout.flush()
-        else:
-            mypungi.downloadPackages()
-            mypungi.makeCompsFile()
-        if not opts.nodebuginfo:
-            mypungi.getDebuginfoList()
-            if opts.nodownload:
-                for line in mypungi.list_debuginfo():
-                    flags_str = ",".join(line["flags"])
-                    if flags_str:
-                        flags_str = "(%s)" % flags_str
-                    sys.stdout.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
-                sys.stdout.flush()
-            else:
-                mypungi.downloadDebuginfo()
-        if not opts.nosource:
-            if opts.nodownload:
-                for line in mypungi.list_srpms():
-                    flags_str = ",".join(line["flags"])
-                    if flags_str:
-                        flags_str = "(%s)" % flags_str
-                    sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
-                sys.stdout.flush()
-            else:
-                mypungi.downloadSRPMs()
-
-    print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
-    if not opts.nodebuginfo:
-        print("DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2))
-    if not opts.nosource:
-        print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
-
-    print("All done!")
@@ -105,74 +105,6 @@ class PungiWrapper(object):
 
         kickstart.close()
 
-    def get_pungi_cmd(
-        self,
-        config,
-        destdir,
-        name,
-        version=None,
-        flavor=None,
-        selfhosting=False,
-        fulltree=False,
-        greedy=None,
-        nodeps=False,
-        nodownload=True,
-        full_archlist=False,
-        arch=None,
-        cache_dir=None,
-        lookaside_repos=None,
-        multilib_methods=None,
-        profiler=False,
-    ):
-        cmd = ["pungi"]
-
-        # Gather stage
-        cmd.append("-G")
-
-        # path to a kickstart file
-        cmd.append("--config=%s" % config)
-
-        # destdir is optional in Pungi (defaults to current dir), but
-        # want it mandatory here
-        cmd.append("--destdir=%s" % destdir)
-
-        # turn selfhosting on
-        if selfhosting:
-            cmd.append("--selfhosting")
-
-        # NPLB
-        if fulltree:
-            cmd.append("--fulltree")
-
-        greedy = greedy or "none"
-        cmd.append("--greedy=%s" % greedy)
-
-        if nodeps:
-            cmd.append("--nodeps")
-
-        # don't download packages, just print paths
-        if nodownload:
-            cmd.append("--nodownload")
-
-        if full_archlist:
-            cmd.append("--full-archlist")
-
-        if arch:
-            cmd.append("--arch=%s" % arch)
-
-        if multilib_methods:
-            for i in multilib_methods:
-                cmd.append("--multilib=%s" % i)
-
-        if cache_dir:
-            cmd.append("--cachedir=%s" % cache_dir)
-
-        if lookaside_repos:
-            for i in lookaside_repos:
-                cmd.append("--lookaside-repo=%s" % i)
-
-        return cmd
-
     def get_pungi_cmd_dnf(
         self,
         config,
@@ -258,68 +190,3 @@ class PungiWrapper(object):
                 broken_deps.setdefault(match.group(2), set()).add(match.group(1))
 
         return packages, broken_deps, missing_comps
-
-    def run_pungi(
-        self,
-        ks_file,
-        destdir,
-        name,
-        selfhosting=False,
-        fulltree=False,
-        greedy="",
-        cache_dir=None,
-        arch="",
-        multilib_methods=[],
-        nodeps=False,
-        lookaside_repos=[],
-    ):
-        """
-        This is a replacement for get_pungi_cmd that runs it in-process. Not
-        all arguments are supported.
-        """
-        from .. import ks, gather, config
-
-        ksparser = ks.get_ksparser(ks_path=ks_file)
-        cfg = config.Config()
-        cfg.set("pungi", "destdir", destdir)
-        cfg.set("pungi", "fulltree", str(fulltree))
-        cfg.set("pungi", "selfhosting", str(selfhosting))
-        cfg.set("pungi", "cachedir", cache_dir)
-        cfg.set("pungi", "full_archlist", "True")
-        cfg.set("pungi", "workdirbase", "%s/work" % destdir)
-        cfg.set("pungi", "greedy", greedy)
-        cfg.set("pungi", "nosource", "False")
-        cfg.set("pungi", "nodebuginfo", "False")
-        cfg.set("pungi", "force", "False")
-        cfg.set("pungi", "resolve_deps", str(not nodeps))
-        if arch:
-            cfg.set("pungi", "arch", arch)
-        if multilib_methods:
-            cfg.set("pungi", "multilib", " ".join(multilib_methods))
-        if lookaside_repos:
-            cfg.set("pungi", "lookaside_repos", " ".join(lookaside_repos))
-
-        mypungi = gather.Pungi(cfg, ksparser)
-
-        with open(os.path.join(destdir, "out"), "w") as f:
-            with mypungi.yumlock:
-                mypungi._inityum()
-                mypungi.gather()
-
-                for line in mypungi.list_packages():
-                    flags_str = ",".join(line["flags"])
-                    if flags_str:
-                        flags_str = "(%s)" % flags_str
-                    f.write("RPM%s: %s\n" % (flags_str, line["path"]))
-                mypungi.makeCompsFile()
-                mypungi.getDebuginfoList()
-                for line in mypungi.list_debuginfo():
-                    flags_str = ",".join(line["flags"])
-                    if flags_str:
-                        flags_str = "(%s)" % flags_str
-                    f.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
-                for line in mypungi.list_srpms():
-                    flags_str = ",".join(line["flags"])
-                    if flags_str:
-                        flags_str = "(%s)" % flags_str
-                    f.write("SRPM%s: %s\n" % (flags_str, line["path"]))
@@ -19,13 +19,8 @@ import os
 from kobo.shortcuts import force_list
 
 
-def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
+def get_repoclosure_cmd(backend="dnf", arch=None, repos=None, lookaside=None):
     cmds = {
-        "yum": {
-            "cmd": ["/usr/bin/repoclosure", "--tempcache"],
-            "repoarg": "--repoid=%s",
-            "lookaside": "--lookaside=%s",
-        },
         "dnf": {
             "cmd": ["dnf", "repoclosure"],
             "repoarg": "--repo=%s",
@@ -44,14 +39,13 @@ def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
         for i in arches:
             cmd.append("--arch=%s" % i)
 
-        if backend == "dnf" and arches:
+        if arches:
             cmd.append("--forcearch=%s" % arches[0])
 
     repos = repos or {}
     for repo_id, repo_path in repos.items():
         cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
         cmd.append(cmds[backend]["repoarg"] % repo_id)
-        if backend == "dnf":
         # For dnf we want to add all repos with the --repo option (which
         # enables only those and not any system repo), and the repos to
         # check are also listed with the --check option.
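An illustrative sketch of how the remaining repoclosure code path is exercised after this change (the call below is an assumption pieced together from the hunks and tests in this diff, not a quote of the source): only the dnf backend can be requested, and the command always starts with ["dnf", "repoclosure"].

# hypothetical call into pungi.wrappers.repoclosure after this change
cmd = get_repoclosure_cmd(
    backend="dnf",
    arch=["x86_64", "noarch"],
    repos={"my-repo": "/mnt/koji/repo"},
    lookaside={"fedora": "http://kojipkgs.fp.o/repo"},
)
# repos are added with --repofrompath and enabled via --repo=...;
# per the comment above, the repos to check are also listed with --check.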
 1  setup.py
@@ -30,7 +30,6 @@ setup(
     entry_points={
         "console_scripts": [
             "comps_filter = pungi.scripts.comps_filter:main",
-            "pungi = pungi.scripts.pungi:main",
             "pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
             "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
             "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
@@ -1,27 +0,0 @@
-FROM centos:7
-LABEL \
-    name="Pungi test" \
-    description="Run tests using tox with Python 2" \
-    vendor="Pungi developers" \
-    license="MIT"
-
-RUN yum -y update && yum -y install epel-release && yum -y install \
-    git \
-    libmodulemd2 \
-    make \
-    python3 \
-    python-createrepo_c \
-    python-gobject-base \
-    python-gssapi \
-    python-libcomps \
-    pykickstart \
-    && yum clean all
-
-# python-tox in yum repo is too old, let's install latest version
-RUN pip3 install tox
-
-WORKDIR /src
-
-COPY . .
-
-CMD ["tox", "-e", "py27"]
 1  tests/Jenkinsfile (vendored)
@@ -40,7 +40,6 @@ git fetch proposed
 git checkout origin/master
 git merge --no-ff "proposed/$params.BRANCH" -m "Merge PR"
 podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit
-podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test-py2 tox -r -e py27
 """
 sh "cat job.sh"
 sh "ssh -o StrictHostKeyChecking=no root@$hostname mkdir $remote_dir"
@@ -748,15 +748,6 @@ class StatusTest(unittest.TestCase):
         self.compose.conf["createrepo_database"] = True
         self.assertTrue(self.compose.should_create_yum_database)
 
-    def test_no_database_with_yum_backend(self):
-        self.compose.conf["gather_backend"] = "yum"
-        self.assertTrue(self.compose.should_create_yum_database)
-
-    def test_no_database_with_yum_backend_config_override(self):
-        self.compose.conf["gather_backend"] = "yum"
-        self.compose.conf["createrepo_database"] = False
-        self.assertFalse(self.compose.should_create_yum_database)
-
 
 class DumpContainerMetadataTest(unittest.TestCase):
     def setUp(self):
@@ -271,16 +271,6 @@ class GatherConfigTestCase(ConfigTestCase):
         self.assertValidation(cfg, [])
         self.assertEqual(cfg["gather_backend"], "dnf")
 
-    def test_yum_backend_is_default_on_py2(self):
-        cfg = load_config(
-            pkgset_source="koji",
-            pkgset_koji_tag="f27",
-        )
-
-        with mock.patch("six.PY2", new=True):
-            self.assertValidation(cfg, [])
-        self.assertEqual(cfg["gather_backend"], "yum")
-
     def test_yum_backend_is_rejected_on_py3(self):
         cfg = load_config(
             pkgset_source="koji",
@@ -464,7 +454,7 @@ class RepoclosureTestCase(ConfigTestCase):
             repoclosure_backend="fnd",  # Intentionally with a typo
         )
 
-        options = ["yum", "dnf"] if six.PY2 else ["dnf"]
+        options = ["dnf"]
         self.assertValidation(
             cfg,
             [
@@ -25,11 +25,6 @@ try:
 except ImportError:
     HAS_DNF = False
 
-if six.PY2:
-    HAS_YUM = True
-else:
-    HAS_YUM = False
-
 
 def convert_pkg_map(data):
     """
@@ -2137,71 +2132,6 @@ class DepsolvingBase(object):
         self.assertEqual(pkg_map["debuginfo"], [])
 
 
-@unittest.skipUnless(HAS_YUM, "YUM only available on Python 2")
-class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
-    def setUp(self):
-        super(PungiYumDepsolvingTestCase, self).setUp()
-        self.ks = os.path.join(self.tmp_dir, "ks")
-        self.out = os.path.join(self.tmp_dir, "out")
-        self.cwd = os.path.join(self.tmp_dir, "cwd")
-        os.mkdir(self.cwd)
-        self.old_cwd = os.getcwd()
-        os.chdir(self.cwd)
-
-        logger = logging.getLogger("Pungi")
-        if not logger.handlers:
-            formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
-            console = logging.StreamHandler(sys.stdout)
-            console.setFormatter(formatter)
-            console.setLevel(logging.INFO)
-            logger.addHandler(console)
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        super(PungiYumDepsolvingTestCase, self).tearDown()
-
-    def go(
-        self,
-        packages,
-        groups,
-        lookaside=None,
-        prepopulate=None,
-        fulltree_excludes=None,
-        multilib_blacklist=None,
-        multilib_whitelist=None,
-        **kwargs
-    ):
-        """
-        Write a kickstart with given packages and groups, then run the
-        depsolving and parse the output.
-        """
-        p = PungiWrapper()
-        repos = {"repo": self.repo}
-        if lookaside:
-            repos["lookaside"] = lookaside
-            kwargs["lookaside_repos"] = ["lookaside"]
-        p.write_kickstart(
-            self.ks,
-            repos,
-            groups,
-            packages,
-            prepopulate=prepopulate,
-            multilib_whitelist=multilib_whitelist,
-            multilib_blacklist=multilib_blacklist,
-            fulltree_excludes=fulltree_excludes,
-        )
-        kwargs.setdefault("cache_dir", self.tmp_dir)
-        # Unless the test specifies an arch, we need to default to x86_64.
-        # Otherwise the arch of current machine will be used, which will cause
-        # failure most of the time.
-        kwargs.setdefault("arch", "x86_64")
-
-        p.run_pungi(self.ks, self.tmp_dir, "DP", **kwargs)
-        with open(self.out, "r") as f:
-            pkg_map, self.broken_deps, _ = p.parse_log(f)
-        return convert_pkg_map(pkg_map)
-
-
 def convert_dnf_packages(pkgs, flags):
     convert_table = {
         # Hawkey returns nosrc package as src
@@ -1,31 +1,14 @@
 # -*- coding: utf-8 -*-
 
 
 try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-
     from unittest import mock
+except ImportError:
+    import mock
 import six
 
 import pungi.phases.repoclosure as repoclosure_phase
 from tests.helpers import DummyCompose, PungiTestCase, mk_boom
 
-try:
-    import dnf  # noqa: F401
-
-    HAS_DNF = True
-except ImportError:
-    HAS_DNF = False
-
-try:
-    import yum  # noqa: F401
-
-    HAS_YUM = True
-except ImportError:
-    HAS_YUM = False
-
 
 class TestRepoclosure(PungiTestCase):
     def setUp(self):
@@ -49,53 +32,6 @@ class TestRepoclosure(PungiTestCase):
 
         self.assertEqual(mock_grc.call_args_list, [])
 
-    @unittest.skipUnless(HAS_YUM, "YUM is not available")
-    @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
-    @mock.patch("pungi.phases.repoclosure.run")
-    def test_repoclosure_default_backend(self, mock_run, mock_grc):
-        with mock.patch("six.PY2", new=True):
-            compose = DummyCompose(self.topdir, {})
-
-        repoclosure_phase.run_repoclosure(compose)
-
-        six.assertCountEqual(
-            self,
-            mock_grc.call_args_list,
-            [
-                mock.call(
-                    backend="yum",
-                    arch=["amd64", "x86_64", "noarch"],
-                    lookaside={},
-                    repos=self._get_repo(compose.compose_id, "Everything", "amd64"),
-                ),
-                mock.call(
-                    backend="yum",
-                    arch=["amd64", "x86_64", "noarch"],
-                    lookaside={},
-                    repos=self._get_repo(compose.compose_id, "Client", "amd64"),
-                ),
-                mock.call(
-                    backend="yum",
-                    arch=["amd64", "x86_64", "noarch"],
-                    lookaside={},
-                    repos=self._get_repo(compose.compose_id, "Server", "amd64"),
-                ),
-                mock.call(
-                    backend="yum",
-                    arch=["x86_64", "noarch"],
-                    lookaside={},
-                    repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
-                ),
-                mock.call(
-                    backend="yum",
-                    arch=["x86_64", "noarch"],
-                    lookaside={},
-                    repos=self._get_repo(compose.compose_id, "Everything", "x86_64"),
-                ),
-            ],
-        )
-
-    @unittest.skipUnless(HAS_DNF, "DNF is not available")
     @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
     @mock.patch("pungi.phases.repoclosure.run")
     def test_repoclosure_dnf_backend(self, mock_run, mock_grc):
@@ -179,7 +115,6 @@ class TestRepoclosure(PungiTestCase):
         with self.assertRaises(RuntimeError):
             repoclosure_phase.run_repoclosure(compose)
 
-    @unittest.skipUnless(HAS_DNF, "DNF is not available")
     @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
     @mock.patch("pungi.phases.repoclosure.run")
     def test_repoclosure_overwrite_options_creates_correct_commands(
@@ -9,11 +9,6 @@ from . import helpers
 
 
 class RepoclosureWrapperTestCase(helpers.BaseTestCase):
-    def test_minimal_command(self):
-        self.assertEqual(
-            rc.get_repoclosure_cmd(), ["/usr/bin/repoclosure", "--tempcache"]
-        )
-
     def test_minimal_dnf_command(self):
         self.assertEqual(rc.get_repoclosure_cmd(backend="dnf"), ["dnf", "repoclosure"])
 
@@ -23,37 +18,6 @@ class RepoclosureWrapperTestCase(helpers.BaseTestCase):
 
         self.assertEqual(str(ctx.exception), "Unknown repoclosure backend: rpm")
 
-    def test_multiple_arches(self):
-        self.assertEqual(
-            rc.get_repoclosure_cmd(arch=["x86_64", "i686", "noarch"]),
-            [
-                "/usr/bin/repoclosure",
-                "--tempcache",
-                "--arch=x86_64",
-                "--arch=i686",
-                "--arch=noarch",
-            ],
-        )
-
-    def test_full_command(self):
-        repos = {"my-repo": "/mnt/koji/repo"}
-        lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
-
-        cmd = rc.get_repoclosure_cmd(arch="x86_64", repos=repos, lookaside=lookaside)
-        self.assertEqual(cmd[0], "/usr/bin/repoclosure")
-        six.assertCountEqual(
-            self,
-            cmd[1:],
-            [
-                "--tempcache",
-                "--arch=x86_64",
-                "--repofrompath=my-repo,file:///mnt/koji/repo",
-                "--repofrompath=fedora,http://kojipkgs.fp.o/repo",
-                "--repoid=my-repo",
-                "--lookaside=fedora",
-            ],
-        )
-
     def test_full_dnf_command(self):
         repos = {"my-repo": "/mnt/koji/repo"}
         lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
@@ -103,44 +67,6 @@ class RepoclosureWrapperTestCase(helpers.BaseTestCase):
             ],
         )
 
-    def test_expand_repo(self):
-        repos = {
-            "local": "/mnt/koji/repo",
-            "remote": "http://kojipkgs.fp.o/repo",
-        }
-        cmd = rc.get_repoclosure_cmd(repos=repos)
-        self.assertEqual(cmd[0], "/usr/bin/repoclosure")
-        six.assertCountEqual(
-            self,
-            cmd[1:],
-            [
-                "--tempcache",
-                "--repofrompath=local,file:///mnt/koji/repo",
-                "--repofrompath=remote,http://kojipkgs.fp.o/repo",
-                "--repoid=local",
-                "--repoid=remote",
-            ],
-        )
-
-    def test_expand_lookaside(self):
-        repos = {
-            "local": "/mnt/koji/repo",
-            "remote": "http://kojipkgs.fp.o/repo",
-        }
-        cmd = rc.get_repoclosure_cmd(lookaside=repos)
-        self.assertEqual(cmd[0], "/usr/bin/repoclosure")
-        six.assertCountEqual(
-            self,
-            cmd[1:],
-            [
-                "--tempcache",
-                "--repofrompath=local,file:///mnt/koji/repo",
-                "--repofrompath=remote,http://kojipkgs.fp.o/repo",
-                "--lookaside=local",
-                "--lookaside=remote",
-            ],
-        )
-
 
 class FusExtractorTestCase(helpers.PungiTestCase):
     def setUp(self):
@@ -6,20 +6,6 @@ import os
 import pungi.phases.test as test_phase
 from tests.helpers import DummyCompose, PungiTestCase, touch, FIXTURE_DIR
 
-try:
-    import dnf  # noqa: F401
-
-    HAS_DNF = True
-except ImportError:
-    HAS_DNF = False
-
-try:
-    import yum  # noqa: F401
-
-    HAS_YUM = True
-except ImportError:
-    HAS_YUM = False
-
 
 PAD = b"\0" * 100
 UNBOOTABLE_ISO = (b"\0" * 0x8001) + b"CD001" + PAD