Fix flake8 complaints - E501

E501 line too long (92 > 88 characters)
E501 line too long (103 > 88 characters)
...

JIRA: COMPOSE-4108
Signed-off-by: Haibo Lin <hlin@redhat.com>
This commit is contained in:
Haibo Lin 2020-02-06 15:09:32 +08:00
parent 3eddcfccd8
commit c0193c9fca
55 changed files with 337 additions and 275 deletions

View File

@ -291,45 +291,48 @@ def _extend_with_default_and_alias(validator_class, offline=False):
if "alias" in subschema:
if subschema["alias"] in instance:
msg = (
"WARNING: Config option '%s' is deprecated and now an alias to '%s', "
"please use '%s' instead. "
"WARNING: Config option '%s' is deprecated and "
"now an alias to '%s', please use '%s' instead. "
"In:\n%s" % (subschema["alias"], property, property, instance)
)
yield ConfigOptionWarning(msg)
if property in instance:
msg = (
"ERROR: Config option '%s' is an alias of '%s', only one can be used."
% (subschema["alias"], property)
"ERROR: Config option '%s' is an alias of '%s', "
"only one can be used." % (subschema["alias"], property)
)
yield ConfigOptionError(msg)
instance.pop(subschema["alias"])
else:
instance.setdefault(property, instance.pop(subschema["alias"]))
# update instance for append option
# If append is defined in schema, append values from append options to property. If property
# is not present in instance, set it to empty list, and append the values from append options.
# If append is defined in schema, append values from append
# options to property. If property is not present in instance,
# set it to empty list, and append the values from append options.
# Note: property's schema must support a list of values.
if "append" in subschema:
appends = force_list(subschema["append"])
for append in appends:
if append in instance:
msg = (
"WARNING: Config option '%s' is deprecated, its value will be appended to option '%s'. "
"WARNING: Config option '%s' is deprecated, "
"its value will be appended to option '%s'. "
"In:\n%s" % (append, property, instance)
)
yield ConfigOptionWarning(msg)
if property in instance:
msg = (
"WARNING: Value from config option '%s' is now appended to option '%s'."
% (append, property)
"WARNING: Value from config option '%s' is "
"now appended to option '%s'." % (append, property)
)
yield ConfigOptionWarning(msg)
instance[property] = force_list(instance[property])
instance[property].extend(force_list(instance.pop(append)))
else:
msg = (
"WARNING: Config option '%s' is not found, but '%s' is specified, value from '%s' "
"is now added as '%s'."
"WARNING: Config option '%s' is not found, "
"but '%s' is specified, "
"value from '%s' is now added as '%s'."
% (property, append, append, property)
)
yield ConfigOptionWarning(msg)
@ -559,7 +562,7 @@ def make_schema():
"release_version": {"type": "string"},
"release_type": {"type": "string", "enum": RELEASE_TYPES, "default": "ga"},
"release_is_layered": {
"deprecated": "remove it. It's layered if there's configuration for base product"
"deprecated": "remove it. It's layered if there's configuration for base product" # noqa: E501
},
"release_internal": {"type": "boolean", "default": False},
"release_discinfo_description": {"type": "string"},
@ -570,7 +573,7 @@ def make_schema():
"base_product_version": {"type": "string"},
"base_product_type": {"type": "string", "default": "ga"},
"runroot": {
"deprecated": "remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally",
"deprecated": "remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally", # noqa: E501
},
"global_runroot_method": {"type": "string", "enum": RUNROOT_TYPES},
"runroot_method": {
@ -600,7 +603,7 @@ def make_schema():
"check_deps": {"type": "boolean", "default": True},
"require_all_comps_packages": {"type": "boolean", "default": False},
"bootable": {
"deprecated": "remove it. Setting buildinstall_method option if you want a bootable installer"
"deprecated": "remove it. Setting buildinstall_method option if you want a bootable installer" # noqa: E501
},
"gather_method": {
"oneOf": [
@ -802,8 +805,8 @@ def make_schema():
"image_volid_layered_product_formats": {
"$ref": "#/definitions/list_of_strings",
"default": [
"{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",
"{release_short}-{version} {base_product_short}-{base_product_version} {arch}",
"{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}", # noqa: E501
"{release_short}-{version} {base_product_short}-{base_product_version} {arch}", # noqa: E501
],
},
"restricted_volid": {"type": "boolean", "default": False},
@ -829,7 +832,7 @@ def make_schema():
"media_checksum_base_filename": {"type": "string", "default": ""},
"filter_system_release_packages": {"type": "boolean", "default": True},
"keep_original_comps": {
"deprecated": "remove <groups> tag from respective variant in variants XML"
"deprecated": "remove <groups> tag from respective variant in variants XML" # noqa: E501
},
"link_type": {
"type": "string",
@ -1061,7 +1064,7 @@ def make_schema():
"subvariant": {"type": "string"},
"format": {
"anyOf": [
# The variant with explicit extension is deprecated.
# The variant with explicit extension is deprecated. # noqa: E501
{"$ref": "#/definitions/string_pairs"},
{"$ref": "#/definitions/strings"},
]

View File

@ -46,13 +46,7 @@ def emit(f, cmd):
print(" ".join([quote(x) for x in cmd]), file=f)
FIND_TEMPLATE_SNIPPET = """
if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
TEMPLATE=/usr/share/lorax;
fi
""".replace(
"\n", ""
)
FIND_TEMPLATE_SNIPPET = """if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then TEMPLATE=/usr/share/lorax; fi""" # noqa: E501
def make_image(f, opts):

View File

@ -301,7 +301,8 @@ class Pungi(PungiBase):
# greedy methods:
# * none: only best match package
# * all: all packages matching a provide
# * build: best match package + all other packages from the same SRPM having the same provide
# * build: best match package + all other packages from
# the same SRPM having the same provide
self.greedy_method = self.config.get("pungi", "greedy")
self.lookaside_repos = self.config.get("pungi", "lookaside_repos").split(" ")
@ -318,12 +319,12 @@ class Pungi(PungiBase):
self.sourcerpm_srpmpo_map = {}
# flags
self.input_packages = (
set()
) # packages specified in %packages kickstart section including those defined via comps groups
self.comps_packages = (
set()
) # packages specified in %packages kickstart section *indirectly* via comps groups
# packages specified in %packages kickstart section including
# those defined via comps groups
self.input_packages = set()
# packages specified in %packages kickstart section
# *indirectly* via comps groups
self.comps_packages = set()
self.prepopulate_packages = (
set()
) # packages specified in %prepopulate kickstart section
@ -724,7 +725,8 @@ class Pungi(PungiBase):
found = False
for dep in deps:
if dep in self.po_list:
# HACK: there can be builds in the input list on which we want to apply the "build" greedy rules
# HACK: there can be builds in the input list on
# which we want to apply the "build" greedy rules
if (
self.greedy_method == "build"
and dep.sourcerpm not in self.completed_greedy_build
@ -878,7 +880,8 @@ class Pungi(PungiBase):
break
if found:
msg = (
"Added multilib package %s.%s (repo: %s) for package %s.%s (method: %s)"
"Added multilib package %s.%s (repo: %s) for "
"package %s.%s (method: %s)"
% (
match.name,
match.arch,
@ -973,7 +976,8 @@ class Pungi(PungiBase):
except yum.Errors.GroupsError:
# no groups or no comps at all
self.logger.warning(
"Could not get langpacks due to missing comps in repodata or --ignoregroups=true option."
"Could not get langpacks due to missing comps in repodata "
"or --ignoregroups=true option."
)
self.langpacks = []
@ -1018,14 +1022,16 @@ class Pungi(PungiBase):
if "core" in [i.groupid for i in self.ayum.comps.groups]:
if "core" not in [i.name for i in self.ksparser.handler.packages.groupList]:
self.logger.warning(
"The @core group is no longer added by default; Please add @core to the kickstart if you want it in."
"The @core group is no longer added by default; Please add "
"@core to the kickstart if you want it in."
)
if "base" in [i.groupid for i in self.ayum.comps.groups]:
if "base" not in [i.name for i in self.ksparser.handler.packages.groupList]:
if self.ksparser.handler.packages.addBase:
self.logger.warning(
"The --nobase kickstart option is no longer supported; Please add @base to the kickstart if you want it in."
"The --nobase kickstart option is no longer supported; "
"Please add @base to the kickstart if you want it in."
)
# Check to see if we want all the defaults
@ -1059,7 +1065,8 @@ class Pungi(PungiBase):
multilib = True
if self.greedy_method == "all" and name == "system-release":
# HACK: handles a special case, when system-release virtual provide is specified in the greedy mode
# HACK: handles a special case, when system-release virtual
# provide is specified in the greedy mode
matches = self.ayum.whatProvides(name, None, None).returnPackages()
else:
exactmatched, matched, unmatched = yum.packages.parsePackages(
@ -1131,7 +1138,8 @@ class Pungi(PungiBase):
for txmbr in self.ayum.tsInfo:
if not txmbr.po in self.po_list:
if not is_package(txmbr.po):
# we don't want sources which can be pulled in, because 'src' arch is part of self.valid_arches
# we don't want sources which can be pulled in,
# because 'src' arch is part of self.valid_arches
continue
if not txmbr.isDep:
continue
@ -1182,7 +1190,8 @@ class Pungi(PungiBase):
continue
def get_srpm_po(self, po):
"""Given a package object, get a package object for the corresponding source rpm."""
"""Given a package object, get a package object for the
corresponding source rpm."""
# return srpm_po from cache if available
srpm_po = self.sourcerpm_srpmpo_map.get(po.sourcerpm, None)
@ -1315,11 +1324,15 @@ class Pungi(PungiBase):
if not include_native:
# if there's no native package already pulled in...
if has_native and not include_multilib:
# include all native packages, but only if we're not pulling multilib already
# SCENARIO: a noarch package was already pulled in and there are x86_64 and i686 packages -> we want x86_64 in to complete the package set
# include all native packages, but only if we're not pulling
# multilib already
# SCENARIO: a noarch package was already pulled in and there
# are x86_64 and i686 packages -> we want x86_64 in to complete
# the package set
include_native = True
elif has_multilib:
# SCENARIO: a noarch package was already pulled in and there are no x86_64 packages; we want i686 in to complete the package set
# SCENARIO: a noarch package was already pulled in and there are
# no x86_64 packages; we want i686 in to complete the package set
include_multilib = True
for po in self.excludePackages(self.bin_by_src[srpm_po]):
@ -1405,7 +1418,8 @@ class Pungi(PungiBase):
# Ensure the pkgdir exists, force if requested, and make sure we clean it out
if relpkgdir.endswith("SRPMS"):
# Since we share source dirs with other arches don't clean, but do allow us to use it
# Since we share source dirs with other arches don't clean, but
# do allow us to use it
pungi.util._ensuredir(pkgdir, self.logger, force=True, clean=False)
else:
pungi.util._ensuredir(
@ -1433,7 +1447,8 @@ class Pungi(PungiBase):
target = os.path.join(pkgdir, basename)
else:
target = os.path.join(pkgdir, po.name[0].lower(), basename)
# Make sure we have the hashed dir available to link into we only want dirs there to corrospond to packages
# Make sure we have the hashed dir available to link into; we
# only want dirs there to correspond to packages
# that we are including, so we cannot just do A-Z 0-9
pungi.util._ensuredir(
os.path.join(pkgdir, po.name[0].lower()),
@ -1504,7 +1519,8 @@ class Pungi(PungiBase):
ourcomps.write(self.ayum.comps.xml())
ourcomps.close()
# Disable this until https://bugzilla.redhat.com/show_bug.cgi?id=442097 is fixed.
# Disable this until https://bugzilla.redhat.com/show_bug.cgi?id=442097
# is fixed.
# Run the xslt filter over our comps file
# compsfilter = ['/usr/bin/xsltproc', '--novalid']
# compsfilter.append('-o')
@ -1819,13 +1835,15 @@ class Pungi(PungiBase):
cmd.append("--isfinal")
cmd.extend(["--volid", self._shortenVolID()])
# on ppc64 we need to tell lorax to only use ppc64 packages so that the media will run on all 64 bit ppc boxes
# on ppc64 we need to tell lorax to only use ppc64 packages so that
# the media will run on all 64 bit ppc boxes
if self.tree_arch == "ppc64":
cmd.extend(["--buildarch", "ppc64"])
elif self.tree_arch == "ppc64le":
cmd.extend(["--buildarch", "ppc64le"])
# Only supported mac hardware is x86 make sure we only enable mac support on arches that need it
# The only supported mac hardware is x86; make sure we only enable
# mac support on arches that need it
if self.tree_arch in ["x86_64"] and not self.is_nomacboot:
cmd.append("--macboot")
else:

View File

@ -616,7 +616,8 @@ class Gather(GatherBase):
@Profiler("Gather.add_conditional_packages()")
def add_conditional_packages(self):
"""
For each binary package add their conditional dependencies as specified in comps.
For each binary package add their conditional dependencies
as specified in comps.
Return newly added packages.
"""
added = set()

View File

@ -27,7 +27,8 @@ class SimpleAcyclicOrientedGraph(object):
self._graph[start].append(end)
self._all_nodes.add(start)
self._all_nodes.add(end)
# try to find opposite direction path (from end to start) to detect newly created cycle
# try to find opposite direction path (from end to start)
# to detect newly created cycle
path = SimpleAcyclicOrientedGraph.find_path(self._graph, end, start)
if path:
raise ValueError("There is a cycle in the graph: %s" % path)
@ -95,7 +96,9 @@ class SimpleAcyclicOrientedGraph(object):
# orphan node = no edge is connected with this node
orphans = self._all_nodes - self.get_active_nodes()
if orphans:
break # restart iteration not to set size self._all_nodes during iteration
# restart iteration not to set size self._all_nodes
# during iteration
break
for orphan in orphans:
if orphan not in spanning_line:
spanning_line.insert(0, orphan)

View File

@ -240,12 +240,20 @@ def compose_to_composeinfo(compose):
).rstrip("/")
"""
# XXX: not suported (yet?)
debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
debug_iso_dir = (
compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
)
if debug_iso_dir:
var.debug_iso_dir[arch] = relative_path(debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
debug_jigdo_dir = compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
var.debug_iso_dir[arch] = relative_path(
debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
).rstrip("/")
debug_jigdo_dir = (
compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
)
if debug_jigdo_dir:
var.debug_jigdo_dir[arch] = relative_path(debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.debug_jigdo_dir[arch] = relative_path(
debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
).rstrip("/")
"""
for v in variant.get_variants(recursive=False):

View File

@ -160,7 +160,8 @@ class WorkPaths(object):
Examples:
work/global/pungi-cache
"""
# WARNING: Using the same cache dir with repos of the same names may lead to a race condition
# WARNING: Using the same cache dir with repos of the same names
# may lead to a race condition.
# We should use per arch variant cache dirs to workaround this.
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi-cache")
if variant:

View File

@ -538,7 +538,8 @@ class BuildinstallThread(WorkerThread):
# This should avoid a possible race condition with multiple processes
# trying to get a kerberos ticket at the same time.
# Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
# Kerberos authentication failed:
# Permission denied in replay cache code (-1765328215)
time.sleep(num * 3)
# Start the runroot task.

View File

@ -114,8 +114,8 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
if bootable and not self.bi.succeeded(variant, arch):
self.logger.warning(
"ISO should be bootable, but buildinstall failed. Skipping for %s.%s"
% (variant, arch)
"ISO should be bootable, but buildinstall failed. "
"Skipping for %s.%s" % (variant, arch)
)
continue
@ -430,7 +430,8 @@ def split_iso(compose, arch, variant, no_split=False, logger=None):
result = ms.split()
if no_split and result[0]["size"] > split_size:
logger.warning(
"ISO for %s.%s does not fit on single media! It is %s bytes too big. (Total size: %s B)"
"ISO for %s.%s does not fit on single media! It is %s bytes too big. "
"(Total size: %s B)"
% (variant.uid, arch, result[0]["size"] - split_size, result[0]["size"])
)
return result

View File

@ -213,7 +213,8 @@ def create_variant_repo(
log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
run(cmd, logfile=log_file, show_cmd=True)
# productinfo is not supported by modifyrepo in any way
# this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
# this is a HACK to make CDN happy (dmach: at least I think,
# need to confirm with dgregor)
shutil.copy2(
product_id_path, os.path.join(repo_dir, "repodata", "productid")
)

View File

@ -85,14 +85,15 @@ class GatherPhase(PhaseBase):
if variant.modules:
errors.append("Modular compose requires libmodulemd package.")
# check whether variants from configuration value 'variant_as_lookaside' are correct
# check whether variants from configuration value
# 'variant_as_lookaside' are correct
variant_as_lookaside = self.compose.conf.get("variant_as_lookaside", [])
all_variants = self.compose.all_variants
for (requiring, required) in variant_as_lookaside:
if requiring in all_variants and required not in all_variants:
errors.append(
"variant_as_lookaside: variant %r doesn't exist but is required by %r"
% (required, requiring)
"variant_as_lookaside: variant %r doesn't exist but is "
"required by %r" % (required, requiring)
)
if errors:
@ -566,7 +567,8 @@ def _trim_variants(
for pkg_type, pkgs in move_to_parent_pkgs.items():
for pkg in pkgs:
compose.log_debug(
"Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"
"Moving package to parent "
"(arch: %s, variant: %s, pkg_type: %s): %s"
% (
arch,
variant.uid,
@ -673,8 +675,8 @@ def get_prepopulate_packages(compose, arch, variant, include_arch=True):
pkg_name, pkg_arch = split_name_arch(i)
if pkg_arch not in get_compatible_arches(arch, multilib=True):
raise ValueError(
"Incompatible package arch '%s' for tree arch '%s' in prepopulate package '%s'"
% (pkg_arch, arch, pkg_name)
"Incompatible package arch '%s' for tree arch '%s' "
"in prepopulate package '%s'" % (pkg_arch, arch, pkg_name)
)
if include_arch:
result.add(i)
@ -691,8 +693,8 @@ def get_additional_packages(compose, arch, variant):
arch, multilib=True
):
raise ValueError(
"Incompatible package arch '%s' for tree arch '%s' in additional package '%s'"
% (pkg_arch, arch, pkg_name)
"Incompatible package arch '%s' for tree arch '%s' in "
"additional package '%s'" % (pkg_arch, arch, pkg_name)
)
result.add((pkg_name, pkg_arch))
return result

View File

@ -161,8 +161,8 @@ def write_pungi_config(
if not groups and not packages_str and not prepopulate:
raise RuntimeError(
"No packages included in %s.%s (no comps groups, no input packages, no prepopulate)"
% (variant.uid, arch)
"No packages included in %s.%s "
"(no comps groups, no input packages, no prepopulate)" % (variant.uid, arch)
)
pungi_wrapper.write_kickstart(
@ -206,7 +206,8 @@ def resolve_deps(compose, arch, variant, source_name=None):
# addon
if variant.type in ["addon", "layered-product"]:
# packages having SRPM in parent variant are excluded from fulltree (via %fulltree-excludes)
# packages having SRPM in parent variant are excluded from
# fulltree (via %fulltree-excludes)
fulltree = True
selfhosting = False

View File

@ -72,8 +72,8 @@ class ImageBuildPhase(
install_tree_source = self.compose.all_variants.get(install_tree_from)
if not install_tree_source:
raise RuntimeError(
"There is no variant %s to get install tree from when building image for %s."
% (install_tree_from, variant.uid)
"There is no variant %s to get install tree from "
"when building image for %s." % (install_tree_from, variant.uid)
)
return translate_path(
self.compose,
@ -236,7 +236,8 @@ class CreateImageBuildThread(WorkerThread):
)
# avoid race conditions?
# Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
# Kerberos authentication failed:
# Permission denied in replay cache code (-1765328215)
time.sleep(num * 3)
output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
self.pool.log_debug("build-image outputs: %s" % (output))

View File

@ -124,7 +124,7 @@ def write_arch_comps(compose, arch):
)
UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file"
UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file" # noqa: E501
def get_lookaside_groups(compose, variant):

View File

@ -210,7 +210,8 @@ class CreateLiveImageThread(WorkerThread):
)
# avoid race conditions?
# Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
# Kerberos authentication failed:
# Permission denied in replay cache code (-1765328215)
time.sleep(num * 3)
output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)

View File

@ -47,8 +47,8 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
variant = self.compose.all_variants[variant_uid]
except KeyError:
raise RuntimeError(
"There is no variant %s to get repo from when building live media for %s."
% (variant_uid, variant.uid)
"There is no variant %s to get repo from when building "
"live media for %s." % (variant_uid, variant.uid)
)
return translate_path(
self.compose,

View File

@ -90,7 +90,7 @@ class OstreeInstallerThread(WorkerThread):
)
repos = get_repo_urls(
None, # compose==None. Special value says that method should ignore deprecated variant-type repo
None, # compose==None. Special value says that method should ignore deprecated variant-type repo # noqa: E501
shortcuts.force_list(config["repo"]) + self.baseurls,
arch=arch,
logger=self.pool,

View File

@ -137,7 +137,8 @@ class PackageSetBase(kobo.log.LoggingBase):
def get_error(sigkeys, infos):
return (
"RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s"
"RPM(s) not found for sigs: %s. Check log for details. "
"Unsigned packages:\n%s"
% (
sigkeys,
"\n".join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
@ -181,7 +182,8 @@ class PackageSetBase(kobo.log.LoggingBase):
return self.rpms_by_arch
def subset(self, primary_arch, arch_list, exclusive_noarch=True):
"""Create a subset of this package set that only includes packages compatible with"""
"""Create a subset of this package set that only includes
packages compatible with"""
pkgset = PackageSetBase(
self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
)

View File

@ -544,8 +544,8 @@ def _get_modules_from_koji_tags(
# There are some module names that were listed in configuration and not
# found in any tag...
raise RuntimeError(
"Configuration specified patterns (%s) that don't match any modules in the configured tags."
% ", ".join(expected_modules)
"Configuration specified patterns (%s) that don't match "
"any modules in the configured tags." % ", ".join(expected_modules)
)

View File

@ -68,7 +68,7 @@ def main():
default=[],
action="append",
metavar="GROUPID",
help="keep this group in environments even if they are not defined in the comps",
help="keep this group in environments even if they are not defined in the comps", # noqa: E501
)
parser.add_argument(
"--no-cleanup",

View File

@ -94,7 +94,7 @@ def get_arguments(config):
"--fulltree",
action="store_true",
dest="fulltree",
help="build a tree that includes all packages built from corresponding source rpms (optional)",
help="build a tree that includes all packages built from corresponding source rpms (optional)", # noqa: E501
)
parser.add_argument(
"--nosource",
@ -112,7 +112,7 @@ def get_arguments(config):
"--nodownload",
action="store_true",
dest="nodownload",
help="disable downloading of packages. instead, print the package URLs (optional)",
help="disable downloading of packages. instead, print the package URLs (optional)", # noqa: E501
)
parser.add_argument(
"--norelnotes",
@ -150,7 +150,7 @@ def get_arguments(config):
"--isfinal",
default=False,
action="store_true",
help="Specify this is a GA tree, which causes betanag to be turned off during install",
help="Specify this is a GA tree, which causes betanag to be turned off during install", # noqa: E501
)
parser.add_argument(
"--nohash",
@ -171,14 +171,14 @@ def get_arguments(config):
"--multilib",
action="append",
metavar="METHOD",
help="Multilib method; can be specified multiple times; recommended: devel, runtime",
help="Multilib method; can be specified multiple times; recommended: devel, runtime", # noqa: E501
)
parser.add_argument(
"--lookaside-repo",
action="append",
dest="lookaside_repos",
metavar="NAME",
help="Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)",
help="Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)", # noqa: E501
)
parser.add_argument(
"--workdirbase",
@ -200,7 +200,7 @@ def get_arguments(config):
default=[],
action="append",
metavar="STRING",
help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",
help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)", # noqa: E501
)
parser.add_argument(
"--multilibconf",
@ -275,7 +275,7 @@ def get_arguments(config):
dest="rootfs_size",
action=SetConfig,
default=False,
help="Size of root filesystem in GiB. If not specified, use lorax default value",
help="Size of root filesystem in GiB. If not specified, use lorax default value", # noqa: E501
)
parser.add_argument(
@ -341,7 +341,7 @@ def main():
enforcing = False
if enforcing:
print(
"WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."
"WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled." # noqa: E501
)
print("Consider running with setenforce 0.")

View File

@ -52,7 +52,7 @@ def get_parser():
group.add_argument(
"--fulltree",
action="store_true",
help="build a tree that includes all packages built from corresponding source rpms (optional)",
help="build a tree that includes all packages built from corresponding source rpms (optional)", # noqa: E501
)
group.add_argument(
"--greedy",

View File

@ -51,7 +51,7 @@ def main():
)
parser.add_argument(
"--label",
help="specify compose label (example: Snapshot-1.0); required for production composes",
help="specify compose label (example: Snapshot-1.0); required for production composes", # noqa: E501
)
parser.add_argument(
"--no-label",
@ -71,7 +71,7 @@ def main():
dest="old_composes",
default=[],
action="append",
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.", # noqa: E501
)
parser.add_argument("--config", help="Config file", required=True)
parser.add_argument(
@ -149,7 +149,7 @@ def main():
metavar="STATUS",
action="append",
default=[],
help="only create latest symbol link to this compose when compose status matches specified status",
help="only create latest symbol link to this compose when compose status matches specified status", # noqa: E501
)
parser.add_argument(
"--print-output-dir",
@ -241,7 +241,7 @@ def main():
# Remove when all config files are up to date
if "productimg" in opts.skip_phase or "productimg" in opts.just_phase:
print(
"WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option",
"WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option", # noqa: E501
file=sys.stderr,
)
for err in errors[:]:
@ -402,8 +402,9 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
.rstrip("\n")
)
except IOError:
# Filename is not print intentionally in case someone puts password directly into the option
err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"
# Filename is not print intentionally in case someone puts
# password directly into the option
err_msg = "Cannot load password from file specified by 'signing_key_password_file' option" # noqa: E501
compose.log_error(err_msg)
print(err_msg)
raise RuntimeError(err_msg)
@ -479,7 +480,8 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
latest_link = False
if create_latest_link:
if latest_link_status is None:
# create latest symbol link by default if latest_link_status is not specified
# create latest symbol link by default if latest_link_status
# is not specified
latest_link = True
else:
latest_link_status = [s.upper() for s in latest_link_status]
@ -487,7 +489,8 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
latest_link = True
else:
compose.log_warning(
"Compose status (%s) doesn't match with specified latest-link-status (%s), not create latest link."
"Compose status (%s) doesn't match with specified "
"latest-link-status (%s), not create latest link."
% (compose.get_status(), str(latest_link_status))
)

View File

@ -240,7 +240,8 @@ def get_arch_variant_data(conf, var_name, arch, variant, keys=None):
if conf_arch != "*" and conf_arch != arch:
continue
if conf_arch == "*" and arch == "src":
# src is excluded from '*' and needs to be explicitly added to the mapping
# src is excluded from '*' and needs to be explicitly
# added to the mapping
continue
if keys is not None:
keys.add(conf_variant)
@ -834,8 +835,8 @@ def get_repo_urls(compose, repos, arch="$basearch", logger=None):
if repo is None:
if logger:
logger.log_warning(
"Variant-type source repository is deprecated and will be ignored during 'OSTreeInstaller' phase: %s"
% (repo)
"Variant-type source repository is deprecated and will "
"be ignored during 'OSTreeInstaller' phase: %s" % (repo)
)
else:
urls.append(repo)
@ -897,8 +898,8 @@ def get_repo_dicts(repos, logger=None):
if repo_dict == {}:
if logger:
logger.log_warning(
"Variant-type source repository is deprecated and will be ignored during 'OSTree' phase: %s"
% (repo)
"Variant-type source repository is deprecated and will "
"be ignored during 'OSTree' phase: %s" % (repo)
)
else:
repo_dicts.append(repo_dict)

View File

@ -112,7 +112,8 @@ class CompsFilter(object):
def filter_environments(self, arch, variant, only_arch=False):
"""
Filter environments according to arch.
If only_arch is set, then only environments for the specified arch are preserved.
If only_arch is set, then only environments for the specified
arch are preserved.
Multiple arches separated by comma can be specified in the XML.
"""
self._filter_elements_by_attr("/comps/environment", "arch", arch, only_arch)
@ -265,7 +266,7 @@ class CompsWrapper(object):
stripped_pkg = pkg.strip()
if pkg != stripped_pkg:
errors.append(
"Package name %s in group '%s' contains leading or trailing whitespace"
"Package name %s in group '%s' contains leading or trailing whitespace" # noqa: E501
% (stripped_pkg, group)
)
@ -308,7 +309,7 @@ class CompsWrapper(object):
for pkg in group.packages:
if pkg.type == libcomps.PACKAGE_TYPE_UNKNOWN:
raise RuntimeError(
"Failed to process comps file. Package %s in group %s has unknown type"
"Failed to process comps file. Package %s in group %s has unknown type" # noqa: E501
% (pkg.name, group.id)
)

View File

@ -477,7 +477,8 @@ def mount(image, logger=None, use_guestmount=True):
cmd = ["mount", "-o", "loop", image, mount_dir]
ret, out = run(cmd, env=env, can_fail=True, universal_newlines=True)
if ret != 0:
# The mount command failed, something is wrong. Log the output and raise an exception.
# The mount command failed, something is wrong.
# Log the output and raise an exception.
if logger:
logger.error(
"Command %s exited with %s and output:\n%s" % (cmd, ret, out)

View File

@ -257,11 +257,12 @@ class KojiWrapper(object):
):
"""
@param config_options
@param conf_file_dest - a destination in compose workdir for the conf file to be written
@param conf_file_dest - a destination in compose workdir for
the conf file to be written
@param wait=True
@param scratch=False
"""
# Usage: koji image-build [options] <name> <version> <target> <install-tree-url> <arch> [<arch>...]
# Usage: koji image-build [options] <name> <version> <target> <install-tree-url> <arch> [<arch>...] # noqa: E501
sub_command = "image-build"
# The minimum set of options
min_options = (
@ -303,7 +304,7 @@ class KojiWrapper(object):
return cmd
def get_live_media_cmd(self, options, wait=True):
# Usage: koji spin-livemedia [options] <name> <version> <target> <arch> <kickstart-file>
# Usage: koji spin-livemedia [options] <name> <version> <target> <arch> <kickstart-file> # noqa: E501
cmd = self._get_cmd("spin-livemedia")
for key in ("name", "version", "target", "arch", "ksfile"):
@ -353,8 +354,8 @@ class KojiWrapper(object):
specfile=None,
ksurl=None,
):
# Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
# Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>
# Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file> # noqa: E501
# Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file> # noqa: E501
# Examples:
# * name: RHEL-7.0
# * name: Satellite-6.0.1-RHEL-6
@ -408,7 +409,8 @@ class KojiWrapper(object):
cmd.append("--release=%s" % release)
# IMPORTANT: all --opts have to be provided *before* args
# Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
# Usage:
# koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
cmd.append(name)
cmd.append(version)
@ -445,7 +447,8 @@ class KojiWrapper(object):
if retcode == 0 or not (
self._has_connection_error(output) or self._has_offline_error(output)
):
# Task finished for reason other than connection error or server offline error.
# Task finished for reason other than connection error
# or server offline error.
return retcode, output
attempt += 1
@ -612,7 +615,8 @@ class KojiWrapper(object):
self.koji_module.pathinfo.taskrelpath(task_info["id"]),
)
# TODO: Maybe use different approach for non-scratch builds - see get_image_path()
# TODO: Maybe use different approach for non-scratch
# builds - see get_image_path()
# Get list of filenames that should be returned
result_files = task_result["rpms"]
@ -675,19 +679,22 @@ class KojiWrapper(object):
self, koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None
):
"""
Calls the `koji_session_fnc` using Koji multicall feature N times based on the list of
arguments passed in `list_of_args` and `list_of_kwargs`.
Returns list of responses sorted the same way as input args/kwargs. In case of error,
the error message is logged and None is returned.
Calls the `koji_session_fnc` using Koji multicall feature N times based on
the list of arguments passed in `list_of_args` and `list_of_kwargs`.
Returns list of responses sorted the same way as input args/kwargs.
In case of error, the error message is logged and None is returned.
For example to get the package ids of "httpd" and "apr" packages:
ids = multicall_map(session, session.getPackageID, ["httpd", "apr"])
# ids is now [280, 632]
:param KojiSessions koji_session: KojiSession to use for multicall.
:param object koji_session_fnc: Python object representing the KojiSession method to call.
:param list list_of_args: List of args which are passed to each call of koji_session_fnc.
:param list list_of_kwargs: List of kwargs which are passed to each call of koji_session_fnc.
:param object koji_session_fnc: Python object representing the
KojiSession method to call.
:param list list_of_args: List of args which are passed to each
call of koji_session_fnc.
:param list list_of_kwargs: List of kwargs which are passed to
each call of koji_session_fnc.
"""
if list_of_args is None and list_of_kwargs is None:
raise ValueError("One of list_of_args or list_of_kwargs must be set.")
@ -729,22 +736,23 @@ class KojiWrapper(object):
results = []
# For the response specification, see
# https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic
# https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic # noqa: E501
# Relevant part of this:
# Multicall returns an array of responses. There will be one response for each call in
# the original array. The result will either be a one-item array containing the result value,
# Multicall returns an array of responses. There will be one response
# for each call in the original array. The result will either be
# a one-item array containing the result value,
# or a struct of the form found inside the standard <fault> element.
for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
if type(response) == list:
if not response:
raise ValueError(
"Empty list returned for multicall of method %r with args %r, %r"
"Empty list returned for multicall of method %r with args %r, %r" # noqa: E501
% (koji_session_fnc, args, kwargs)
)
results.append(response[0])
else:
raise ValueError(
"Unexpected data returned for multicall of method %r with args %r, %r: %r"
"Unexpected data returned for multicall of method %r with args %r, %r: %r" # noqa: E501
% (koji_session_fnc, args, kwargs, response)
)

View File

@ -114,7 +114,7 @@ class LoraxWrapper(object):
brand=None,
):
# RHEL 6 compatibility
# Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>
# Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root> # noqa: E501
brand = brand or "redhat"
# HACK: ignore provided release

View File

@ -132,7 +132,8 @@ class PungiWrapper(object):
# path to a kickstart file
cmd.append("--config=%s" % config)
# destdir is optional in Pungi (defaults to current dir), but want it mandatory here
# destdir is optional in Pungi (defaults to current dir), but
# want it mandatory here
cmd.append("--destdir=%s" % destdir)
# name

View File

@ -311,7 +311,7 @@ class Variant(object):
return self.uid
def __repr__(self):
return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(
return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format( # noqa: E501
self
)
@ -350,12 +350,14 @@ class Variant(object):
if self.type != "variant":
raise RuntimeError("Only 'variant' can contain another variants.")
if variant.id == self.id:
# due to os/<variant.id> path -- addon id would conflict with parent variant id
# due to os/<variant.id> path -- addon id would conflict with
# parent variant id
raise RuntimeError(
"Child variant id must be different than parent variant id: %s"
% variant.id
)
# sometimes an addon or layered product can be part of multiple variants with different set of arches
# sometimes an addon or layered product can be part of multiple
# variants with different set of arches
arches = sorted(set(self.arches).intersection(set(variant.arches)))
if self.arches and not arches:
raise RuntimeError(

View File

@ -235,7 +235,8 @@ class UnifiedISO(object):
# override paths
arch_ti[variant.uid].repository = variant.uid
arch_ti[variant.uid].packages = variant.uid
# set to None, replace with source_*; requires productmd changes or upstream version
# set to None, replace with source_*; requires productmd
# changes or upstream version
# arch_ti[variant.uid].source_repository = variant.uid
# arch_ti[variant.uid].source_packages = variant.uid
@ -387,7 +388,8 @@ class UnifiedISO(object):
run(iso.get_manifest_cmd(iso_path))
img = productmd.images.Image(im)
# temporary path, just a file name; to be replaced with variant specific path
# temporary path, just a file name; to be replaced with
# variant specific path
img.path = os.path.basename(iso_path)
img.mtime = int(os.stat(iso_path).st_mtime)
img.size = os.path.getsize(iso_path)

View File

@ -42,7 +42,7 @@ setup(
"pungi-make-ostree = pungi.ostree:main",
"pungi-notification-report-progress = pungi.scripts.report_progress:main",
"pungi-orchestrate = pungi_utils.orchestrator:main",
"pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",
"pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main", # noqa: E501
"pungi-koji = pungi.scripts.pungi_koji:cli_main",
"pungi-gather = pungi.scripts.pungi_gather:cli_main",
"pungi-config-dump = pungi.scripts.config_dump:cli_main",

View File

@ -1407,10 +1407,10 @@ class BuildinstallThreadTestCase(PungiTestCase):
compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway."
"[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call(
"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details."
"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details." # noqa: E501
% self.topdir
),
]
@ -1448,10 +1448,10 @@ class BuildinstallThreadTestCase(PungiTestCase):
compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Buildinstall (variant Server, arch x86_64) failed, but going on anyway."
"[FAIL] Buildinstall (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call(
"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details."
"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details." # noqa: E501
% self.topdir
),
]

View File

@ -217,7 +217,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
@ -270,7 +270,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
@ -297,12 +297,12 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(errors), 1)
self.assertRegexpMatches(
errors[0],
r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*",
r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*", # noqa: E501
)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
@ -384,11 +384,11 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'",
r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@ -426,11 +426,11 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@ -472,19 +472,19 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 4)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,", # noqa: E501
)
self.assertRegexpMatches(
warnings[2],
r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'",
r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'", # noqa: E501
)
self.assertRegexpMatches(
warnings[3],
r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.",
r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.", # noqa: E501
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(
@ -534,11 +534,11 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*",
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*", # noqa: E501
)
self.assertEqual(
config.get("live_images")[0][1]["armhfp"]["repo"], "Everything"

View File

@ -544,7 +544,7 @@ class ComposeTestCase(unittest.TestCase):
mock.call("Excluding variant Live: filtered by configuration."),
mock.call("Excluding variant Crashy: all its arches are filtered."),
mock.call(
"Excluding variant Server-ResilientStorage: filtered by configuration."
"Excluding variant Server-ResilientStorage: filtered by configuration." # noqa: E501
),
mock.call(
"Excluding variant Server-Gluster: filtered by configuration."
@ -635,7 +635,7 @@ class StatusTest(unittest.TestCase):
),
mock.call(
20,
"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.",
"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.", # noqa: E501
),
],
any_order=True,

View File

@ -73,7 +73,7 @@ class ReleaseConfigTestCase(ConfigTestCase):
self.assertValidation(
cfg,
warnings=[
"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."
"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product." # noqa: E501
],
)
@ -158,7 +158,7 @@ class RunrootConfigTestCase(ConfigTestCase):
self.assertValidation(
cfg,
warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally." # noqa: E501
],
)
@ -168,7 +168,7 @@ class RunrootConfigTestCase(ConfigTestCase):
self.assertValidation(
cfg,
warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally." # noqa: E501
],
)
@ -180,7 +180,7 @@ class BuildinstallConfigTestCase(ConfigTestCase):
self.assertValidation(
cfg,
warnings=[
"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer."
"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer." # noqa: E501
],
)
@ -298,7 +298,7 @@ class OstreeConfigTestCase(ConfigTestCase):
{
"x86_64": {
"treefile": "fedora-atomic-docker-host.json",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git", # noqa: E501
"repo": "Everything",
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
"version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
@ -329,18 +329,18 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl" # noqa: E501
],
"add_template_var": [
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
"add_arch_template": [
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl" # noqa: E501
],
"rootfs_size": "3",
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
@ -364,7 +364,7 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl" # noqa: E501
],
"add_template_var": [
"ostree_osname=fedora-atomic",
@ -372,7 +372,7 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
],
"add_arch_template": 15,
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
@ -420,7 +420,7 @@ class TestRegexValidation(ConfigTestCase):
def test_incorrect_regular_expression(self):
cfg = load_config(PKGSET_REPOS, multilib=[("^*$", {"*": []})])
msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat"
msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat" # noqa: E501
if six.PY3:
msg += " at position 1"
self.assertValidation(cfg, [msg], [])

View File

@ -850,7 +850,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]
@ -896,7 +896,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call(
"Runroot task failed: 1234. See %s for more details."
@ -987,7 +987,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]

View File

@ -56,7 +56,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
]
)
@ -113,7 +113,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
["/usr/bin/isohybrid", "--uefi", "DP-1.0-20160405.t.3-x86_64.iso"]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
]
)
@ -165,7 +165,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
),
" ".join(["/usr/bin/isohybrid", "DP-1.0-20160405.t.3-i386.iso"]),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-i386.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest", # noqa: E501
]
)
@ -218,7 +218,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest", # noqa: E501
]
)
@ -262,7 +262,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-s390x.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest", # noqa: E501
]
)
@ -314,7 +314,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest", # noqa: E501
]
)
@ -373,7 +373,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
" ".join(
[
"jigdo-file",

View File

@ -745,9 +745,9 @@ class TestCreateVariantRepo(PungiTestCase):
deltas=True,
oldpackagedirs=[
self.topdir
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/a",
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/a", # noqa: E501
self.topdir
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/b",
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/b", # noqa: E501
],
use_xz=False,
extra_args=[],
@ -1168,7 +1168,7 @@ class TestCreateVariantRepo(PungiTestCase):
modules_metadata = ModulesMetadata(compose)
modulemd_filename.return_value = "Server/x86_64/os/repodata/3511d16a723e1bd69826e591508f07e377d2212769b59178a9-modules.yaml.gz"
modulemd_filename.return_value = "Server/x86_64/os/repodata/3511d16a723e1bd69826e591508f07e377d2212769b59178a9-modules.yaml.gz" # noqa: E501
create_variant_repo(
compose,
"x86_64",
@ -1270,7 +1270,7 @@ class TestGetProductIds(PungiTestCase):
self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
self.assertRegexpMatches(
str(ctx.exception),
r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)",
r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)", # noqa: E501
)
@mock.patch("pungi.phases.createrepo.get_dir_from_scm")

View File

@ -1176,7 +1176,7 @@ class DepsolvingBase(object):
def test_bash_multilib_exclude(self):
# test if excluding a package really works
# NOTE: dummy-bash-doc would pull x86_64 bash in (we want noarch pulling 64bit deps in composes)
# NOTE: dummy-bash-doc would pull x86_64 bash in (we want noarch pulling 64bit deps in composes) # noqa: E501
packages = [
"dummy-bash.+",
"-dummy-bash-doc",

View File

@ -146,7 +146,7 @@ class TestWritePungiConfig(helpers.PungiTestCase):
)
self.assertEqual(
str(ctx.exception),
"No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)",
"No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)", # noqa: E501
)
self.assertEqual(PungiWrapper.return_value.mock_calls, [])
@ -188,7 +188,7 @@ class TestCheckDeps(helpers.PungiTestCase):
self.compose.log_error.call_args_list,
[
mock.call(
"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']"
"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']" # noqa: E501
% (self.variant, self.arch)
)
],

View File

@ -24,7 +24,8 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
spanning_line = self.g.prune_graph()
self.assertEqual(4, len(spanning_line))
# 'Base' as a lookaside should be at the end of the spanning line, order of others is not crucial
# 'Base' as a lookaside should be at the end of the spanning line,
# order of others is not crucial
self.assertEqual("Base", spanning_line[-1])
def test_complex_graph(self):

View File

@ -28,7 +28,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -110,7 +110,7 @@ class TestImageBuildPhase(PungiTestCase):
compose = DummyCompose(
self.topdir,
{
"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
"image_build_target": "f24",
"image_build_version": "Rawhide",
@ -173,7 +173,7 @@ class TestImageBuildPhase(PungiTestCase):
compose = DummyCompose(
self.topdir,
{
"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
"image_build_target": "f24",
"image_build": {
@ -241,7 +241,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -277,7 +277,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -346,7 +346,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -414,7 +414,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -488,7 +488,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -555,7 +555,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -598,7 +598,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -641,7 +641,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -681,7 +681,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -745,7 +745,7 @@ class TestImageBuildPhase(PungiTestCase):
"name": "Fedora-Docker-Base",
"target": "f24",
"version": "Rawhide",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"kickstart": "fedora-docker-base.ks",
"distro": "Fedora-20",
"disk_size": 3,
@ -873,7 +873,7 @@ class TestCreateImageBuildThread(PungiTestCase):
mock.call(
koji_wrapper.get_image_build_cmd.return_value,
log_file=self.topdir
+ "/logs/amd64-x86_64/imagebuild-Client-KDE-docker-qcow2.amd64-x86_64.log",
+ "/logs/amd64-x86_64/imagebuild-Client-KDE-docker-qcow2.amd64-x86_64.log", # noqa: E501
)
],
)
@ -885,25 +885,25 @@ class TestCreateImageBuildThread(PungiTestCase):
mock.call.link(
"/koji/task/1235/Fedora-Docker-Base-20160103.amd64.qcow2",
self.topdir
+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2",
+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2", # noqa: E501
link_type="hardlink-or-copy",
),
mock.call.link(
"/koji/task/1235/Fedora-Docker-Base-20160103.amd64.tar.gz",
self.topdir
+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.gz",
+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.gz", # noqa: E501
link_type="hardlink-or-copy",
),
mock.call.link(
"/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.qcow2",
self.topdir
+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2",
+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2", # noqa: E501
link_type="hardlink-or-copy",
),
mock.call.link(
"/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.tar.gz",
self.topdir
+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.gz",
+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.gz", # noqa: E501
link_type="hardlink-or-copy",
),
],
@ -992,14 +992,14 @@ class TestCreateImageBuildThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway."
"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway." # noqa: E501
),
mock.call(
"ImageBuild task failed: 1234. See %s for more details."
% (
os.path.join(
self.topdir,
"logs/amd64-x86_64/imagebuild-Client-Client-docker-qcow2.amd64-x86_64.log",
"logs/amd64-x86_64/imagebuild-Client-Client-docker-qcow2.amd64-x86_64.log", # noqa: E501
)
)
),
@ -1046,7 +1046,7 @@ class TestCreateImageBuildThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway."
"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]

View File

@ -107,7 +107,7 @@ class TestImageChecksumPhase(PungiTestCase):
{
"media_checksums": ["sha256"],
"media_checksum_one_file": True,
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s",
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s", # noqa: E501
},
)
compose.compose_label = "Alpha-1.0"
@ -121,7 +121,7 @@ class TestImageChecksumPhase(PungiTestCase):
dump_checksums.assert_called_once_with(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM",
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM", # noqa: E501
set([("image.iso", 123, "sha256", "cafebabe")]),
)
cc.assert_called_once_with(
@ -137,7 +137,7 @@ class TestImageChecksumPhase(PungiTestCase):
self.topdir,
{
"media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s",
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s", # noqa: E501
},
)
@ -189,7 +189,7 @@ class TestImageChecksumPhase(PungiTestCase):
self.topdir,
{
"media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}",
"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}", # noqa: E501
},
)

View File

@ -80,7 +80,8 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.wrappers.iso.run")
def test_mount_iso(self, mock_run, mock_unmount):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('1' - guestmount is not available)
# value determines type of the mount/unmount
# command ('1' - guestmount is not available)
# for approach as a root, pair commands mount-umount are used
mock_run.side_effect = [(1, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
@ -99,7 +100,8 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.wrappers.iso.run")
def test_guestmount(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# value determines type of the mount/unmount
# command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
@ -118,7 +120,8 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.wrappers.iso.run")
def test_guestmount_cleans_up_cache(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# value determines type of the mount/unmount
# command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
@ -139,7 +142,8 @@ class TestIsoUtils(unittest.TestCase):
self, mock_run, mock_unmount, mock_rmtree
):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# value determines type of the mount/unmount
# command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:

View File

@ -160,16 +160,16 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
"id": 563977,
"state": 1,
},
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/i386/os/",
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/i386/os/", # noqa: E501
{
"disk_size": "3",
"distro": "Fedora-20",
"format": ["qcow2", "raw-xz"],
"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks",
"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks", # noqa: E501
"release": "20160103",
"repo": [
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/",
"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/",
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/", # noqa: E501
"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/", # noqa: E501
],
"scratch": True,
},
@ -216,16 +216,16 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
"id": 563977,
"state": 1,
},
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/x86_64/os/",
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/x86_64/os/", # noqa: E501
{
"disk_size": "3",
"distro": "Fedora-20",
"format": ["qcow2", "raw-xz"],
"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks",
"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks", # noqa: E501
"release": "20160103",
"repo": [
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/",
"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/",
"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/", # noqa: E501
"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/", # noqa: E501
],
"scratch": True,
},
@ -593,7 +593,7 @@ class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
self.assertEqual(cmd[-2], "s390x")
self.assertEqual(
cmd[-1],
"rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' && chmod -R a+r '/output dir' /foo && chown -R 1010 '/output dir' /foo",
"rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' && chmod -R a+r '/output dir' /foo && chown -R 1010 '/output dir' /foo", # noqa: E501
)
six.assertCountEqual(
self,

View File

@ -91,7 +91,7 @@ class TestLiveImagesPhase(PungiTestCase):
compose.variants["Client"],
disc_num=None,
disc_type="live",
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s",
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s", # noqa: E501
)
],
)
@ -345,7 +345,7 @@ class TestLiveImagesPhase(PungiTestCase):
{
"amd64": {
"kickstart": "test.ks",
"ksurl": "https://git.example.com/kickstarts.git?#CAFEBABE",
"ksurl": "https://git.example.com/kickstarts.git?#CAFEBABE", # noqa: E501
"repo": ["http://example.com/repo/", "Everything"],
"type": "appliance",
"target": "f27",
@ -618,7 +618,7 @@ class TestLiveImagesPhase(PungiTestCase):
compose.variants["Client"],
disc_num=None,
disc_type="Live",
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s",
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s", # noqa: E501
)
],
)
@ -695,7 +695,7 @@ class TestCreateLiveImageThread(PungiTestCase):
write_manifest_cmd = " && ".join(
[
"cd " + self.topdir + "/compose/Client/amd64/iso",
"isoinfo -R -f -i image-name | grep -v '/TRANS.TBL$' | sort >> image-name.manifest",
"isoinfo -R -f -i image-name | grep -v '/TRANS.TBL$' | sort >> image-name.manifest", # noqa: E501
]
)
self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])
@ -806,7 +806,7 @@ class TestCreateLiveImageThread(PungiTestCase):
write_manifest_cmd = " && ".join(
[
"cd " + self.topdir + "/compose/Client/amd64/iso",
"isoinfo -R -f -i image.iso | grep -v '/TRANS.TBL$' | sort >> image.iso.manifest",
"isoinfo -R -f -i image.iso | grep -v '/TRANS.TBL$' | sort >> image.iso.manifest", # noqa: E501
]
)
self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])
@ -998,10 +998,10 @@ class TestCreateLiveImageThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."
"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway." # noqa: E501
),
mock.call(
"LiveImage task failed: 123. See %s/logs/amd64/liveimage-None-None-xyz.amd64.log for more details."
"LiveImage task failed: 123. See %s/logs/amd64/liveimage-None-None-xyz.amd64.log for more details." # noqa: E501
% self.topdir
),
]
@ -1046,7 +1046,7 @@ class TestCreateLiveImageThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."
"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]

View File

@ -634,7 +634,7 @@ class TestLiveMediaThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."
"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway." # noqa: E501
),
mock.call(
"Live media task failed: 1234. See %s for more details."
@ -709,7 +709,7 @@ class TestLiveMediaThread(PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."
"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]

View File

@ -126,13 +126,13 @@ ARCHIVES = [
"checksum_type": 0,
"extra": {
"docker": {
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7", # noqa: E501
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e", # noqa: E501
"repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
},
"image": {"arch": "x86_64"},
},
"filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",
"filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz", # noqa: E501
"id": 1436049,
"metadata_only": False,
"size": 174038795,
@ -155,8 +155,8 @@ METADATA = {
"filename": ARCHIVES[0]["filename"],
"size": ARCHIVES[0]["size"],
"docker": {
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7", # noqa: E501
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e", # noqa: E501
"repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
},
"image": {"arch": "x86_64"},
@ -179,7 +179,7 @@ SCRATCH_METADATA = {
{
"koji_task": 12345,
"repositories": [
"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632", # noqa: E501
],
}
]
@ -288,7 +288,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
config["osbs"] = {"^Server$": cfg}
errors, warnings = checks.validate(config, offline=True)
self.assertIn(
"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas"
"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas" # noqa: E501
% cfg,
errors,
)

View File

@ -236,7 +236,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
self.compose.supported = False
pool = mock.Mock()
cfg = {
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"release": "20160321.n.0",
}
koji = KojiWrapper.return_value
@ -324,7 +324,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
cfg = {
"release": "20160321.n.0",
"repo": [
"Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"Everything", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"https://example.com/extra-repo1.repo",
"https://example.com/extra-repo2.repo",
],
@ -369,8 +369,8 @@ class OstreeThreadTest(helpers.PungiTestCase):
cfg = {
"release": "20160321.n.0",
"repo": [
"Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"Server", # this variant-type repo is deprecated, in result will be replaced with default repo
"Everything", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"Server", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"https://example.com/extra-repo1.repo",
"https://example.com/extra-repo2.repo",
],
@ -493,7 +493,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
):
pool = mock.Mock()
cfg = {
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"release": "20160321.n.0",
"add_template": ["some_file.txt"],
"add_arch_template": ["other_file.txt"],
@ -562,7 +562,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
):
pool = mock.Mock()
cfg = {
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo # noqa: E501
"release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
"installpkgs": ["fedora-productimg-atomic"],
"add_template": [
@ -576,7 +576,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
],
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
@ -606,13 +606,13 @@ class OstreeThreadTest(helpers.PungiTestCase):
isfinal=True,
extra=[
"--installpkgs=fedora-productimg-atomic",
"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",
"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",
"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl", # noqa: E501
"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl", # noqa: E501
"--add-template-var=ostree_osname=fedora-atomic",
"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", # noqa: E501
"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"--add-arch-template-var=ostree_osname=fedora-atomic",
"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", # noqa: E501
"--logfile=%s/%s/lorax.log" % (self.topdir, LOG_PATH),
],
weight=123,
@ -633,7 +633,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
):
pool = mock.Mock()
cfg = {
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo
"repo": "Everything", # this variant-type repo is deprecated, in result will be replaced with default repo. # noqa: E501
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": [
@ -647,7 +647,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
],
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
@ -677,13 +677,13 @@ class OstreeThreadTest(helpers.PungiTestCase):
isfinal=True,
extra=[
"--installpkgs=fedora-productimg-atomic",
"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",
"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",
"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl", # noqa: E501
"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl", # noqa: E501
"--add-template-var=ostree_osname=fedora-atomic",
"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", # noqa: E501
"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
"--add-arch-template-var=ostree_osname=fedora-atomic",
"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", # noqa: E501
"--logfile=%s/%s/lorax.log" % (self.topdir, LOG_PATH),
],
weight=123,
@ -713,7 +713,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."
"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]
@ -748,7 +748,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
pool._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."
"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call(
"Runroot task failed: 1234. See %s/%s/runroot.log for more details."

View File

@ -200,7 +200,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
},
{
"name": "http:__example.com_work__basearch_comps_repo_Everything",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything", # noqa: E501
},
]
}
@ -295,7 +295,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
self.compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call(
"Runroot task failed: 1234. See %s for more details."
@ -322,7 +322,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
self.compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway." # noqa: E501
),
mock.call("BOOM"),
]
@ -675,7 +675,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
cfg = {
"repo": [ # Variant type repos will not be included into extra_config. This part of the config is deprecated
"repo": [ # Variant type repos will not be included into extra_config. This part of the config is deprecated # noqa: E501
"Everything", # do not include
{
"name": "repo_a",
@ -704,7 +704,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
with open(extra_config_file, "r") as extra_config_fd:
extra_config = json.load(extra_config_fd)
self.assertTrue(extra_config.get("keep_original_sources", False))
# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository
# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository # noqa: E501
self.assertEqual(len(extra_config.get("repo", [])), 3)
self.assertEqual(
extra_config.get("repo").pop()["baseurl"],

View File

@ -397,7 +397,7 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
"--add-template=/path/to/lorax.tmpl",
"--add-arch-template=/path/to/lorax-embed.tmpl",
"--add-template-var=ostree_osname=dummy",
"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",
"--add-arch-template-var=ostree_repo=http://www.example.com/ostree", # noqa: E501
"--rootfs-size=None",
self.output,
],
@ -463,7 +463,7 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
"--add-template-var=ostree_osname=dummy-atomic",
"--add-template-var=ostree_ref=dummy/x86_64/docker",
"--add-arch-template-var=ostree_osname=dummy-atomic",
"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",
"--add-arch-template-var=ostree_repo=http://www.example.com/ostree", # noqa: E501
"--rootfs-size=None",
self.output,
],

View File

@ -295,7 +295,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
)
figure = re.compile(
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501
re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure)
@ -320,7 +320,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)
figure = re.compile(
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$", # noqa: E501
re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure)

View File

@ -148,5 +148,5 @@ class FusExtractorTestCase(helpers.PungiTestCase):
self.assertFileContent(
self.output,
"Problem 1/1\n - nothing provides foo\nProblem 1/1\n - nothing provides quux\n",
"Problem 1/1\n - nothing provides foo\nProblem 1/1\n - nothing provides quux\n", # noqa: E501
)

View File

@ -867,7 +867,7 @@ class GetRepoFuncsTestCase(unittest.TestCase):
def test_get_repo_dicts(self):
repos = [
"http://example.com/repo",
"Server", # this repo format is deprecated (and will not be included into final repo_dict)
"Server", # this repo format is deprecated (and will not be included into final repo_dict) # noqa: E501
{"baseurl": "Client"}, # this repo format is deprecated
{"baseurl": "ftp://example.com/linux/repo"},
{"name": "testrepo", "baseurl": "ftp://example.com/linux/repo"},