Fix flake8 complaints - E501

    E501 line too long (92 > 88 characters)
    E501 line too long (103 > 88 characters)
    ...

JIRA: COMPOSE-4108
Signed-off-by: Haibo Lin <hlin@redhat.com>

parent 3eddcfccd8
commit c0193c9fca
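The fixes below follow two patterns. Where a message can be split, the change relies on Python's implicit concatenation of adjacent string literals inside parentheses; where a line cannot reasonably be broken (shell snippets, long URLs, argparse help texts), it appends a "# noqa: E501" marker so flake8 skips the length check for that line only. A minimal sketch of both patterns (the option names are illustrative, not taken from the commit; the URL is the one quoted in a hunk below):

    # 1) Implicit concatenation: adjacent literals are joined at compile
    #    time, so the value is identical to one long literal, but every
    #    source line stays under the limit (88 characters here).
    old_opt, new_opt = "bootable", "buildinstall_method"
    msg = (
        "WARNING: Config option '%s' is deprecated, "
        "its value will be appended to option '%s'." % (old_opt, new_opt)
    )
    assert "deprecated, its value" in msg

    # 2) A noqa marker: flake8 skips E501 for this single line.
    SPEC_URL = "https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic"  # noqa: E501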
@@ -291,45 +291,48 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             if "alias" in subschema:
                 if subschema["alias"] in instance:
                     msg = (
-                        "WARNING: Config option '%s' is deprecated and now an alias to '%s', "
-                        "please use '%s' instead. "
+                        "WARNING: Config option '%s' is deprecated and "
+                        "now an alias to '%s', please use '%s' instead. "
                         "In:\n%s" % (subschema["alias"], property, property, instance)
                     )
                     yield ConfigOptionWarning(msg)
                     if property in instance:
                         msg = (
-                            "ERROR: Config option '%s' is an alias of '%s', only one can be used."
-                            % (subschema["alias"], property)
+                            "ERROR: Config option '%s' is an alias of '%s', "
+                            "only one can be used." % (subschema["alias"], property)
                         )
                         yield ConfigOptionError(msg)
                         instance.pop(subschema["alias"])
                     else:
                         instance.setdefault(property, instance.pop(subschema["alias"]))
             # update instance for append option
-            # If append is defined in schema, append values from append options to property. If property
-            # is not present in instance, set it to empty list, and append the values from append options.
+            # If append is defined in schema, append values from append
+            # options to property. If property is not present in instance,
+            # set it to empty list, and append the values from append options.
             # Note: property's schema must support a list of values.
             if "append" in subschema:
                 appends = force_list(subschema["append"])
                 for append in appends:
                     if append in instance:
                         msg = (
-                            "WARNING: Config option '%s' is deprecated, its value will be appended to option '%s'. "
+                            "WARNING: Config option '%s' is deprecated, "
+                            "its value will be appended to option '%s'. "
                             "In:\n%s" % (append, property, instance)
                         )
                         yield ConfigOptionWarning(msg)
                         if property in instance:
                             msg = (
-                                "WARNING: Value from config option '%s' is now appended to option '%s'."
-                                % (append, property)
+                                "WARNING: Value from config option '%s' is "
+                                "now appended to option '%s'." % (append, property)
                             )
                             yield ConfigOptionWarning(msg)
                             instance[property] = force_list(instance[property])
                             instance[property].extend(force_list(instance.pop(append)))
                         else:
                             msg = (
-                                "WARNING: Config option '%s' is not found, but '%s' is specified, value from '%s' "
-                                "is now added as '%s'."
+                                "WARNING: Config option '%s' is not found, "
+                                "but '%s' is specified, "
+                                "value from '%s' is now added as '%s'."
                                 % (property, append, append, property)
                             )
                             yield ConfigOptionWarning(msg)
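For context, the alias handling these strings belong to works roughly as follows (a simplified standalone sketch, not pungi's actual validator; the option names in the usage lines are hypothetical):

    def resolve_alias(property, subschema, instance):
        # Warn when the deprecated alias is present; error when both the
        # alias and the real option are given; otherwise move the value over.
        warnings, errors = [], []
        alias = subschema.get("alias")
        if alias and alias in instance:
            warnings.append(
                "WARNING: Config option '%s' is deprecated and "
                "now an alias to '%s', please use '%s' instead."
                % (alias, property, property)
            )
            if property in instance:
                errors.append(
                    "ERROR: Config option '%s' is an alias of '%s', "
                    "only one can be used." % (alias, property)
                )
                instance.pop(alias)
            else:
                instance.setdefault(property, instance.pop(alias))
        return warnings, errors

    conf = {"old_name": True}
    warnings, errors = resolve_alias("new_name", {"alias": "old_name"}, conf)
    assert conf == {"new_name": True} and warnings and not errors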
@@ -559,7 +562,7 @@ def make_schema():
             "release_version": {"type": "string"},
             "release_type": {"type": "string", "enum": RELEASE_TYPES, "default": "ga"},
             "release_is_layered": {
-                "deprecated": "remove it. It's layered if there's configuration for base product"
+                "deprecated": "remove it. It's layered if there's configuration for base product"  # noqa: E501
             },
             "release_internal": {"type": "boolean", "default": False},
             "release_discinfo_description": {"type": "string"},
@@ -570,7 +573,7 @@ def make_schema():
             "base_product_version": {"type": "string"},
             "base_product_type": {"type": "string", "default": "ga"},
             "runroot": {
-                "deprecated": "remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally",
+                "deprecated": "remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally",  # noqa: E501
             },
             "global_runroot_method": {"type": "string", "enum": RUNROOT_TYPES},
             "runroot_method": {
@@ -600,7 +603,7 @@ def make_schema():
             "check_deps": {"type": "boolean", "default": True},
             "require_all_comps_packages": {"type": "boolean", "default": False},
             "bootable": {
-                "deprecated": "remove it. Setting buildinstall_method option if you want a bootable installer"
+                "deprecated": "remove it. Setting buildinstall_method option if you want a bootable installer"  # noqa: E501
             },
             "gather_method": {
                 "oneOf": [
@@ -802,8 +805,8 @@ def make_schema():
             "image_volid_layered_product_formats": {
                 "$ref": "#/definitions/list_of_strings",
                 "default": [
-                    "{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",
-                    "{release_short}-{version} {base_product_short}-{base_product_version} {arch}",
+                    "{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",  # noqa: E501
+                    "{release_short}-{version} {base_product_short}-{base_product_version} {arch}",  # noqa: E501
                 ],
             },
             "restricted_volid": {"type": "boolean", "default": False},
@@ -829,7 +832,7 @@ def make_schema():
             "media_checksum_base_filename": {"type": "string", "default": ""},
             "filter_system_release_packages": {"type": "boolean", "default": True},
             "keep_original_comps": {
-                "deprecated": "remove <groups> tag from respective variant in variants XML"
+                "deprecated": "remove <groups> tag from respective variant in variants XML"  # noqa: E501
             },
             "link_type": {
                 "type": "string",
@@ -1061,7 +1064,7 @@ def make_schema():
                 "subvariant": {"type": "string"},
                 "format": {
                     "anyOf": [
-                        # The variant with explicit extension is deprecated.
+                        # The variant with explicit extension is deprecated.  # noqa: E501
                         {"$ref": "#/definitions/string_pairs"},
                         {"$ref": "#/definitions/strings"},
                     ]
@@ -46,13 +46,7 @@ def emit(f, cmd):
     print(" ".join([quote(x) for x in cmd]), file=f)


-FIND_TEMPLATE_SNIPPET = """
-if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
-TEMPLATE=/usr/share/lorax;
-fi
-""".replace(
-    "\n", ""
-)
+FIND_TEMPLATE_SNIPPET = """if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then TEMPLATE=/usr/share/lorax; fi"""  # noqa: E501


 def make_image(f, opts):
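Collapsing FIND_TEMPLATE_SNIPPET to one noqa'd line works because the old form stripped its own newlines at import time anyway. Note that str.replace("\n", "") deletes each newline without substituting a space, so the one-line literal has to carry its own separators. A small sketch of that pitfall (stand-in strings, not the real snippet):

    # replace() removes the newline and inserts nothing in its place...
    joined = "\nthen\nfi\n".replace("\n", "")
    assert joined == "thenfi"  # the two words run together

    # ...so the single-line form spells out the spacing itself:
    snippet = """if true; then TEMPLATE=/usr/share/lorax; fi"""
    assert " then " in snippet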
@@ -301,7 +301,8 @@ class Pungi(PungiBase):
         # greedy methods:
         # * none: only best match package
         # * all: all packages matching a provide
-        # * build: best match package + all other packages from the same SRPM having the same provide
+        # * build: best match package + all other packages from
+        #   the same SRPM having the same provide
         self.greedy_method = self.config.get("pungi", "greedy")

         self.lookaside_repos = self.config.get("pungi", "lookaside_repos").split(" ")
@@ -318,12 +319,12 @@ class Pungi(PungiBase):
         self.sourcerpm_srpmpo_map = {}

         # flags
-        self.input_packages = (
-            set()
-        )  # packages specified in %packages kickstart section including those defined via comps groups
-        self.comps_packages = (
-            set()
-        )  # packages specified in %packages kickstart section *indirectly* via comps groups
+        # packages specified in %packages kickstart section including
+        # those defined via comps groups
+        self.input_packages = set()
+        # packages specified in %packages kickstart section
+        # *indirectly* via comps groups
+        self.comps_packages = set()
         self.prepopulate_packages = (
             set()
         )  # packages specified in %prepopulate kickstart section
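The input_packages/comps_packages rewrite above is a formatting-driven refactor: a long trailing comment had forced the formatter to wrap a trivial set() assignment into a parenthesized block, and moving the comment above the assignment restores the one-line form. The same move in miniature (illustrative names):

    # Before: the trailing comment makes the line too long, so it gets wrapped.
    input_packages = (
        set()
    )  # packages specified in %packages kickstart section including comps groups

    # After: comment above, assignment back to one short line.
    # packages specified in %packages kickstart section including comps groups
    input_packages = set()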
@@ -724,7 +725,8 @@ class Pungi(PungiBase):
             found = False
             for dep in deps:
                 if dep in self.po_list:
-                    # HACK: there can be builds in the input list on which we want to apply the "build" greedy rules
+                    # HACK: there can be builds in the input list on
+                    # which we want to apply the "build" greedy rules
                     if (
                         self.greedy_method == "build"
                         and dep.sourcerpm not in self.completed_greedy_build
@@ -878,7 +880,8 @@ class Pungi(PungiBase):
                     break
             if found:
                 msg = (
-                    "Added multilib package %s.%s (repo: %s) for package %s.%s (method: %s)"
+                    "Added multilib package %s.%s (repo: %s) for "
+                    "package %s.%s (method: %s)"
                     % (
                         match.name,
                         match.arch,
@@ -973,7 +976,8 @@ class Pungi(PungiBase):
         except yum.Errors.GroupsError:
             # no groups or no comps at all
             self.logger.warning(
-                "Could not get langpacks due to missing comps in repodata or --ignoregroups=true option."
+                "Could not get langpacks due to missing comps in repodata "
+                "or --ignoregroups=true option."
             )
             self.langpacks = []

@@ -1018,14 +1022,16 @@ class Pungi(PungiBase):
         if "core" in [i.groupid for i in self.ayum.comps.groups]:
             if "core" not in [i.name for i in self.ksparser.handler.packages.groupList]:
                 self.logger.warning(
-                    "The @core group is no longer added by default; Please add @core to the kickstart if you want it in."
+                    "The @core group is no longer added by default; Please add "
+                    "@core to the kickstart if you want it in."
                 )

         if "base" in [i.groupid for i in self.ayum.comps.groups]:
             if "base" not in [i.name for i in self.ksparser.handler.packages.groupList]:
                 if self.ksparser.handler.packages.addBase:
                     self.logger.warning(
-                        "The --nobase kickstart option is no longer supported; Please add @base to the kickstart if you want it in."
+                        "The --nobase kickstart option is no longer supported; "
+                        "Please add @base to the kickstart if you want it in."
                     )

         # Check to see if we want all the defaults
@@ -1059,7 +1065,8 @@ class Pungi(PungiBase):
             multilib = True

         if self.greedy_method == "all" and name == "system-release":
-            # HACK: handles a special case, when system-release virtual provide is specified in the greedy mode
+            # HACK: handles a special case, when system-release virtual
+            # provide is specified in the greedy mode
             matches = self.ayum.whatProvides(name, None, None).returnPackages()
         else:
             exactmatched, matched, unmatched = yum.packages.parsePackages(
@@ -1131,7 +1138,8 @@ class Pungi(PungiBase):
         for txmbr in self.ayum.tsInfo:
             if not txmbr.po in self.po_list:
                 if not is_package(txmbr.po):
-                    # we don't want sources which can be pulled in, because 'src' arch is part of self.valid_arches
+                    # we don't want sources which can be pulled in,
+                    # because 'src' arch is part of self.valid_arches
                     continue
                 if not txmbr.isDep:
                     continue
@@ -1182,7 +1190,8 @@ class Pungi(PungiBase):
                 continue

     def get_srpm_po(self, po):
-        """Given a package object, get a package object for the corresponding source rpm."""
+        """Given a package object, get a package object for the
+        corresponding source rpm."""

         # return srpm_po from cache if available
         srpm_po = self.sourcerpm_srpmpo_map.get(po.sourcerpm, None)
@@ -1315,11 +1324,15 @@ class Pungi(PungiBase):
         if not include_native:
             # if there's no native package already pulled in...
             if has_native and not include_multilib:
-                # include all native packages, but only if we're not pulling multilib already
-                # SCENARIO: a noarch package was already pulled in and there are x86_64 and i686 packages -> we want x86_64 in to complete the package set
+                # include all native packages, but only if we're not pulling
+                # multilib already
+                # SCENARIO: a noarch package was already pulled in and there
+                # are x86_64 and i686 packages -> we want x86_64 in to complete
+                # the package set
                 include_native = True
             elif has_multilib:
-                # SCENARIO: a noarch package was already pulled in and there are no x86_64 packages; we want i686 in to complete the package set
+                # SCENARIO: a noarch package was already pulled in and there are
+                # no x86_64 packages; we want i686 in to complete the package set
                 include_multilib = True

         for po in self.excludePackages(self.bin_by_src[srpm_po]):
@@ -1405,7 +1418,8 @@ class Pungi(PungiBase):

         # Ensure the pkgdir exists, force if requested, and make sure we clean it out
         if relpkgdir.endswith("SRPMS"):
-            # Since we share source dirs with other arches don't clean, but do allow us to use it
+            # Since we share source dirs with other arches don't clean, but
+            # do allow us to use it
             pungi.util._ensuredir(pkgdir, self.logger, force=True, clean=False)
         else:
             pungi.util._ensuredir(
@@ -1433,7 +1447,8 @@ class Pungi(PungiBase):
             target = os.path.join(pkgdir, basename)
         else:
             target = os.path.join(pkgdir, po.name[0].lower(), basename)
-            # Make sure we have the hashed dir available to link into we only want dirs there to corrospond to packages
+            # Make sure we have the hashed dir available to link into we
+            # only want dirs there to corrospond to packages
             # that we are including so we can not just do A-Z 0-9
             pungi.util._ensuredir(
                 os.path.join(pkgdir, po.name[0].lower()),
@@ -1504,7 +1519,8 @@ class Pungi(PungiBase):
         ourcomps.write(self.ayum.comps.xml())
         ourcomps.close()

-        # Disable this until https://bugzilla.redhat.com/show_bug.cgi?id=442097 is fixed.
+        # Disable this until https://bugzilla.redhat.com/show_bug.cgi?id=442097
+        # is fixed.
         # Run the xslt filter over our comps file
         # compsfilter = ['/usr/bin/xsltproc', '--novalid']
         # compsfilter.append('-o')
@@ -1819,13 +1835,15 @@ class Pungi(PungiBase):
             cmd.append("--isfinal")
         cmd.extend(["--volid", self._shortenVolID()])

-        # on ppc64 we need to tell lorax to only use ppc64 packages so that the media will run on all 64 bit ppc boxes
+        # on ppc64 we need to tell lorax to only use ppc64 packages so that
+        # the media will run on all 64 bit ppc boxes
         if self.tree_arch == "ppc64":
             cmd.extend(["--buildarch", "ppc64"])
         elif self.tree_arch == "ppc64le":
             cmd.extend(["--buildarch", "ppc64le"])

-        # Only supported mac hardware is x86 make sure we only enable mac support on arches that need it
+        # Only supported mac hardware is x86 make sure we only enable mac
+        # support on arches that need it
         if self.tree_arch in ["x86_64"] and not self.is_nomacboot:
             cmd.append("--macboot")
         else:
@@ -616,7 +616,8 @@ class Gather(GatherBase):
     @Profiler("Gather.add_conditional_packages()")
     def add_conditional_packages(self):
         """
-        For each binary package add their conditional dependencies as specified in comps.
+        For each binary package add their conditional dependencies
+        as specified in comps.
         Return newly added packages.
         """
         added = set()
@@ -27,7 +27,8 @@ class SimpleAcyclicOrientedGraph(object):
         self._graph[start].append(end)
         self._all_nodes.add(start)
         self._all_nodes.add(end)
-        # try to find opposite direction path (from end to start) to detect newly created cycle
+        # try to find opposite direction path (from end to start)
+        # to detect newly created cycle
         path = SimpleAcyclicOrientedGraph.find_path(self._graph, end, start)
         if path:
             raise ValueError("There is a cycle in the graph: %s" % path)
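The comment being rewrapped here describes the cycle check itself: an edge start -> end is rejected whenever a path already exists in the opposite direction. A compact standalone sketch of that idea (a toy depth-first search, not pungi's find_path implementation):

    def find_path(graph, start, end, visited=None):
        # Depth-first search returning any path from start to end, or None.
        visited = visited or set()
        if start == end:
            return [start]
        visited.add(start)
        for node in graph.get(start, []):
            if node not in visited:
                path = find_path(graph, node, end, visited)
                if path:
                    return [start] + path
        return None

    graph = {"a": ["b"], "b": ["c"]}
    # Adding the edge c -> a would close a cycle, because a path from
    # a to c already exists; this is what raises ValueError above.
    assert find_path(graph, "a", "c") == ["a", "b", "c"]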
@@ -95,7 +96,9 @@ class SimpleAcyclicOrientedGraph(object):
             # orphan node = no edge is connected with this node
             orphans = self._all_nodes - self.get_active_nodes()
             if orphans:
-                break  # restart iteration not to set size self._all_nodes during iteration
+                # restart iteration not to set size self._all_nodes
+                # during iteration
+                break
         for orphan in orphans:
             if orphan not in spanning_line:
                 spanning_line.insert(0, orphan)
@@ -240,12 +240,20 @@ def compose_to_composeinfo(compose):
             ).rstrip("/")
         """
         # XXX: not suported (yet?)
-        debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
+        debug_iso_dir = (
+            compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
+        )
         if debug_iso_dir:
-            var.debug_iso_dir[arch] = relative_path(debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
-        debug_jigdo_dir = compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
+            var.debug_iso_dir[arch] = relative_path(
+                debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
+            ).rstrip("/")
+        debug_jigdo_dir = (
+            compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
+        )
         if debug_jigdo_dir:
-            var.debug_jigdo_dir[arch] = relative_path(debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
+            var.debug_jigdo_dir[arch] = relative_path(
+                debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
+            ).rstrip("/")
         """

         for v in variant.get_variants(recursive=False):
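Both rewrites in the hunk above use the same device: wrapping the right-hand side or the argument list in parentheses lets an expression span several short lines without backslash continuations. A hedged sketch with stand-in helpers (the real calls live on compose.paths):

    import os.path

    def debug_iso_dir(arch):  # stand-in for compose.paths.compose.debug_iso_dir
        return "/compose/debug/%s/iso" % arch

    def relative_path(path, root):  # stand-in for pungi's helper
        return os.path.relpath(path, root)

    # Parentheses let both the `or ""` default and the call arguments
    # wrap cleanly; the value is the same as the one-line original.
    debug_dir = (
        debug_iso_dir("x86_64") or ""
    )
    if debug_dir:
        rel = relative_path(
            debug_dir, "/compose".rstrip("/") + "/"
        ).rstrip("/")
        assert rel == "debug/x86_64/iso"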
@@ -160,7 +160,8 @@ class WorkPaths(object):
         Examples:
             work/global/pungi-cache
         """
-        # WARNING: Using the same cache dir with repos of the same names may lead to a race condition
+        # WARNING: Using the same cache dir with repos of the same names
+        # may lead to a race condition.
         # We should use per arch variant cache dirs to workaround this.
         path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi-cache")
         if variant:
@@ -538,7 +538,8 @@ class BuildinstallThread(WorkerThread):

             # This should avoid a possible race condition with multiple processes
             # trying to get a kerberos ticket at the same time.
-            # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
+            # Kerberos authentication failed:
+            # Permission denied in replay cache code (-1765328215)
             time.sleep(num * 3)

         # Start the runroot task.
@@ -114,8 +114,8 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):

             if bootable and not self.bi.succeeded(variant, arch):
                 self.logger.warning(
-                    "ISO should be bootable, but buildinstall failed. Skipping for %s.%s"
-                    % (variant, arch)
+                    "ISO should be bootable, but buildinstall failed. "
+                    "Skipping for %s.%s" % (variant, arch)
                 )
                 continue

@@ -430,7 +430,8 @@ def split_iso(compose, arch, variant, no_split=False, logger=None):
     result = ms.split()
     if no_split and result[0]["size"] > split_size:
         logger.warning(
-            "ISO for %s.%s does not fit on single media! It is %s bytes too big. (Total size: %s B)"
+            "ISO for %s.%s does not fit on single media! It is %s bytes too big. "
+            "(Total size: %s B)"
             % (variant.uid, arch, result[0]["size"] - split_size, result[0]["size"])
         )
     return result
@@ -213,7 +213,8 @@ def create_variant_repo(
         log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
         run(cmd, logfile=log_file, show_cmd=True)
         # productinfo is not supported by modifyrepo in any way
-        # this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
+        # this is a HACK to make CDN happy (dmach: at least I think,
+        # need to confirm with dgregor)
         shutil.copy2(
             product_id_path, os.path.join(repo_dir, "repodata", "productid")
         )
@@ -85,14 +85,15 @@ class GatherPhase(PhaseBase):
                 if variant.modules:
                     errors.append("Modular compose requires libmodulemd package.")

-        # check whether variants from configuration value 'variant_as_lookaside' are correct
+        # check whether variants from configuration value
+        # 'variant_as_lookaside' are correct
         variant_as_lookaside = self.compose.conf.get("variant_as_lookaside", [])
         all_variants = self.compose.all_variants
         for (requiring, required) in variant_as_lookaside:
             if requiring in all_variants and required not in all_variants:
                 errors.append(
-                    "variant_as_lookaside: variant %r doesn't exist but is required by %r"
-                    % (required, requiring)
+                    "variant_as_lookaside: variant %r doesn't exist but is "
+                    "required by %r" % (required, requiring)
                 )

         if errors:
@@ -566,7 +567,8 @@ def _trim_variants(
     for pkg_type, pkgs in move_to_parent_pkgs.items():
         for pkg in pkgs:
             compose.log_debug(
-                "Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"
+                "Moving package to parent "
+                "(arch: %s, variant: %s, pkg_type: %s): %s"
                 % (
                     arch,
                     variant.uid,
@@ -673,8 +675,8 @@ def get_prepopulate_packages(compose, arch, variant, include_arch=True):
         pkg_name, pkg_arch = split_name_arch(i)
         if pkg_arch not in get_compatible_arches(arch, multilib=True):
             raise ValueError(
-                "Incompatible package arch '%s' for tree arch '%s' in prepopulate package '%s'"
-                % (pkg_arch, arch, pkg_name)
+                "Incompatible package arch '%s' for tree arch '%s' "
+                "in prepopulate package '%s'" % (pkg_arch, arch, pkg_name)
             )
         if include_arch:
             result.add(i)
@@ -691,8 +693,8 @@ def get_additional_packages(compose, arch, variant):
             arch, multilib=True
         ):
             raise ValueError(
-                "Incompatible package arch '%s' for tree arch '%s' in additional package '%s'"
-                % (pkg_arch, arch, pkg_name)
+                "Incompatible package arch '%s' for tree arch '%s' in "
+                "additional package '%s'" % (pkg_arch, arch, pkg_name)
            )
         result.add((pkg_name, pkg_arch))
     return result
@@ -161,8 +161,8 @@ def write_pungi_config(

     if not groups and not packages_str and not prepopulate:
         raise RuntimeError(
-            "No packages included in %s.%s (no comps groups, no input packages, no prepopulate)"
-            % (variant.uid, arch)
+            "No packages included in %s.%s "
+            "(no comps groups, no input packages, no prepopulate)" % (variant.uid, arch)
         )

     pungi_wrapper.write_kickstart(
@@ -206,7 +206,8 @@ def resolve_deps(compose, arch, variant, source_name=None):

     # addon
     if variant.type in ["addon", "layered-product"]:
-        # packages having SRPM in parent variant are excluded from fulltree (via %fulltree-excludes)
+        # packages having SRPM in parent variant are excluded from
+        # fulltree (via %fulltree-excludes)
         fulltree = True
         selfhosting = False

@@ -72,8 +72,8 @@ class ImageBuildPhase(
         install_tree_source = self.compose.all_variants.get(install_tree_from)
         if not install_tree_source:
             raise RuntimeError(
-                "There is no variant %s to get install tree from when building image for %s."
-                % (install_tree_from, variant.uid)
+                "There is no variant %s to get install tree from "
+                "when building image for %s." % (install_tree_from, variant.uid)
             )
         return translate_path(
             self.compose,
@@ -236,7 +236,8 @@ class CreateImageBuildThread(WorkerThread):
             )

             # avoid race conditions?
-            # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
+            # Kerberos authentication failed:
+            # Permission denied in replay cache code (-1765328215)
             time.sleep(num * 3)
         output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
         self.pool.log_debug("build-image outputs: %s" % (output))
@@ -124,7 +124,7 @@ def write_arch_comps(compose, arch):
     )


-UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file"
+UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file"  # noqa: E501


 def get_lookaside_groups(compose, variant):
@@ -210,7 +210,8 @@ class CreateLiveImageThread(WorkerThread):
             )

             # avoid race conditions?
-            # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
+            # Kerberos authentication failed:
+            # Permission denied in replay cache code (-1765328215)
             time.sleep(num * 3)

         output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
@@ -47,8 +47,8 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
             variant = self.compose.all_variants[variant_uid]
         except KeyError:
             raise RuntimeError(
-                "There is no variant %s to get repo from when building live media for %s."
-                % (variant_uid, variant.uid)
+                "There is no variant %s to get repo from when building "
+                "live media for %s." % (variant_uid, variant.uid)
             )
         return translate_path(
             self.compose,
@@ -90,7 +90,7 @@ class OstreeInstallerThread(WorkerThread):
         )

         repos = get_repo_urls(
-            None,  # compose==None. Special value says that method should ignore deprecated variant-type repo
+            None,  # compose==None. Special value says that method should ignore deprecated variant-type repo  # noqa: E501
             shortcuts.force_list(config["repo"]) + self.baseurls,
             arch=arch,
             logger=self.pool,
@@ -137,7 +137,8 @@ class PackageSetBase(kobo.log.LoggingBase):

         def get_error(sigkeys, infos):
             return (
-                "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s"
+                "RPM(s) not found for sigs: %s. Check log for details. "
+                "Unsigned packages:\n%s"
                 % (
                     sigkeys,
                     "\n".join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
@@ -181,7 +182,8 @@ class PackageSetBase(kobo.log.LoggingBase):
         return self.rpms_by_arch

     def subset(self, primary_arch, arch_list, exclusive_noarch=True):
-        """Create a subset of this package set that only includes packages compatible with"""
+        """Create a subset of this package set that only includes
+        packages compatible with"""
         pkgset = PackageSetBase(
             self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
         )
@@ -544,8 +544,8 @@ def _get_modules_from_koji_tags(
         # There are some module names that were listed in configuration and not
         # found in any tag...
         raise RuntimeError(
-            "Configuration specified patterns (%s) that don't match any modules in the configured tags."
-            % ", ".join(expected_modules)
+            "Configuration specified patterns (%s) that don't match "
+            "any modules in the configured tags." % ", ".join(expected_modules)
         )


@@ -68,7 +68,7 @@ def main():
         default=[],
         action="append",
         metavar="GROUPID",
-        help="keep this group in environments even if they are not defined in the comps",
+        help="keep this group in environments even if they are not defined in the comps",  # noqa: E501
     )
     parser.add_argument(
         "--no-cleanup",
@@ -94,7 +94,7 @@ def get_arguments(config):
         "--fulltree",
         action="store_true",
         dest="fulltree",
-        help="build a tree that includes all packages built from corresponding source rpms (optional)",
+        help="build a tree that includes all packages built from corresponding source rpms (optional)",  # noqa: E501
     )
     parser.add_argument(
         "--nosource",
@@ -112,7 +112,7 @@ def get_arguments(config):
         "--nodownload",
         action="store_true",
         dest="nodownload",
-        help="disable downloading of packages. instead, print the package URLs (optional)",
+        help="disable downloading of packages. instead, print the package URLs (optional)",  # noqa: E501
     )
     parser.add_argument(
         "--norelnotes",
@@ -150,7 +150,7 @@ def get_arguments(config):
         "--isfinal",
         default=False,
         action="store_true",
-        help="Specify this is a GA tree, which causes betanag to be turned off during install",
+        help="Specify this is a GA tree, which causes betanag to be turned off during install",  # noqa: E501
     )
     parser.add_argument(
         "--nohash",
@@ -171,14 +171,14 @@ def get_arguments(config):
         "--multilib",
         action="append",
         metavar="METHOD",
-        help="Multilib method; can be specified multiple times; recommended: devel, runtime",
+        help="Multilib method; can be specified multiple times; recommended: devel, runtime",  # noqa: E501
     )
     parser.add_argument(
         "--lookaside-repo",
         action="append",
         dest="lookaside_repos",
         metavar="NAME",
-        help="Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)",
+        help="Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)",  # noqa: E501
     )
     parser.add_argument(
         "--workdirbase",
@@ -200,7 +200,7 @@ def get_arguments(config):
         default=[],
         action="append",
         metavar="STRING",
-        help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",
+        help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",  # noqa: E501
     )
     parser.add_argument(
         "--multilibconf",
@@ -275,7 +275,7 @@ def get_arguments(config):
         dest="rootfs_size",
         action=SetConfig,
         default=False,
-        help="Size of root filesystem in GiB. If not specified, use lorax default value",
+        help="Size of root filesystem in GiB. If not specified, use lorax default value",  # noqa: E501
     )

     parser.add_argument(
@@ -341,7 +341,7 @@ def main():
         enforcing = False
     if enforcing:
         print(
-            "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."
+            "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."  # noqa: E501
         )
         print("Consider running with setenforce 0.")

@@ -52,7 +52,7 @@ def get_parser():
     group.add_argument(
         "--fulltree",
         action="store_true",
-        help="build a tree that includes all packages built from corresponding source rpms (optional)",
+        help="build a tree that includes all packages built from corresponding source rpms (optional)",  # noqa: E501
     )
     group.add_argument(
         "--greedy",
@@ -51,7 +51,7 @@ def main():
     )
     parser.add_argument(
         "--label",
-        help="specify compose label (example: Snapshot-1.0); required for production composes",
+        help="specify compose label (example: Snapshot-1.0); required for production composes",  # noqa: E501
     )
     parser.add_argument(
         "--no-label",
@@ -71,7 +71,7 @@ def main():
         dest="old_composes",
         default=[],
         action="append",
-        help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
+        help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",  # noqa: E501
     )
     parser.add_argument("--config", help="Config file", required=True)
     parser.add_argument(
@@ -149,7 +149,7 @@ def main():
         metavar="STATUS",
         action="append",
         default=[],
-        help="only create latest symbol link to this compose when compose status matches specified status",
+        help="only create latest symbol link to this compose when compose status matches specified status",  # noqa: E501
     )
     parser.add_argument(
         "--print-output-dir",
@@ -241,7 +241,7 @@ def main():
     # Remove when all config files are up to date
     if "productimg" in opts.skip_phase or "productimg" in opts.just_phase:
         print(
-            "WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option",
+            "WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option",  # noqa: E501
             file=sys.stderr,
         )
     for err in errors[:]:
@@ -402,8 +402,9 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
             .rstrip("\n")
         )
     except IOError:
-        # Filename is not print intentionally in case someone puts password directly into the option
-        err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"
+        # Filename is not print intentionally in case someone puts
+        # password directly into the option
+        err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"  # noqa: E501
         compose.log_error(err_msg)
         print(err_msg)
         raise RuntimeError(err_msg)
@@ -479,7 +480,8 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
     latest_link = False
     if create_latest_link:
         if latest_link_status is None:
-            # create latest symbol link by default if latest_link_status is not specified
+            # create latest symbol link by default if latest_link_status
+            # is not specified
             latest_link = True
         else:
             latest_link_status = [s.upper() for s in latest_link_status]
@@ -487,7 +489,8 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
                 latest_link = True
             else:
                 compose.log_warning(
-                    "Compose status (%s) doesn't match with specified latest-link-status (%s), not create latest link."
+                    "Compose status (%s) doesn't match with specified "
+                    "latest-link-status (%s), not create latest link."
                     % (compose.get_status(), str(latest_link_status))
                 )

@@ -240,7 +240,8 @@ def get_arch_variant_data(conf, var_name, arch, variant, keys=None):
         if conf_arch != "*" and conf_arch != arch:
             continue
         if conf_arch == "*" and arch == "src":
-            # src is excluded from '*' and needs to be explicitly added to the mapping
+            # src is excluded from '*' and needs to be explicitly
+            # added to the mapping
             continue
         if keys is not None:
             keys.add(conf_variant)
@@ -834,8 +835,8 @@ def get_repo_urls(compose, repos, arch="$basearch", logger=None):
         if repo is None:
             if logger:
                 logger.log_warning(
-                    "Variant-type source repository is deprecated and will be ignored during 'OSTreeInstaller' phase: %s"
-                    % (repo)
+                    "Variant-type source repository is deprecated and will "
+                    "be ignored during 'OSTreeInstaller' phase: %s" % (repo)
                 )
         else:
             urls.append(repo)
@@ -897,8 +898,8 @@ def get_repo_dicts(repos, logger=None):
         if repo_dict == {}:
             if logger:
                 logger.log_warning(
-                    "Variant-type source repository is deprecated and will be ignored during 'OSTree' phase: %s"
-                    % (repo)
+                    "Variant-type source repository is deprecated and will "
+                    "be ignored during 'OSTree' phase: %s" % (repo)
                 )
         else:
             repo_dicts.append(repo_dict)
@@ -112,7 +112,8 @@ class CompsFilter(object):
     def filter_environments(self, arch, variant, only_arch=False):
         """
         Filter environments according to arch.
-        If only_arch is set, then only environments for the specified arch are preserved.
+        If only_arch is set, then only environments for the specified
+        arch are preserved.
         Multiple arches separated by comma can be specified in the XML.
         """
         self._filter_elements_by_attr("/comps/environment", "arch", arch, only_arch)
@@ -265,7 +266,7 @@ class CompsWrapper(object):
             stripped_pkg = pkg.strip()
             if pkg != stripped_pkg:
                 errors.append(
-                    "Package name %s in group '%s' contains leading or trailing whitespace"
+                    "Package name %s in group '%s' contains leading or trailing whitespace"  # noqa: E501
                     % (stripped_pkg, group)
                 )

@@ -308,7 +309,7 @@ class CompsWrapper(object):
         for pkg in group.packages:
             if pkg.type == libcomps.PACKAGE_TYPE_UNKNOWN:
                 raise RuntimeError(
-                    "Failed to process comps file. Package %s in group %s has unknown type"
+                    "Failed to process comps file. Package %s in group %s has unknown type"  # noqa: E501
                     % (pkg.name, group.id)
                 )

@@ -477,7 +477,8 @@ def mount(image, logger=None, use_guestmount=True):
         cmd = ["mount", "-o", "loop", image, mount_dir]
         ret, out = run(cmd, env=env, can_fail=True, universal_newlines=True)
         if ret != 0:
-            # The mount command failed, something is wrong. Log the output and raise an exception.
+            # The mount command failed, something is wrong.
+            # Log the output and raise an exception.
             if logger:
                 logger.error(
                     "Command %s exited with %s and output:\n%s" % (cmd, ret, out)
@@ -257,11 +257,12 @@ class KojiWrapper(object):
     ):
         """
         @param config_options
-        @param conf_file_dest - a destination in compose workdir for the conf file to be written
+        @param conf_file_dest - a destination in compose workdir for
+               the conf file to be written
         @param wait=True
         @param scratch=False
         """
-        # Usage: koji image-build [options] <name> <version> <target> <install-tree-url> <arch> [<arch>...]
+        # Usage: koji image-build [options] <name> <version> <target> <install-tree-url> <arch> [<arch>...]  # noqa: E501
         sub_command = "image-build"
         # The minimum set of options
         min_options = (
@@ -303,7 +304,7 @@ class KojiWrapper(object):
         return cmd

     def get_live_media_cmd(self, options, wait=True):
-        # Usage: koji spin-livemedia [options] <name> <version> <target> <arch> <kickstart-file>
+        # Usage: koji spin-livemedia [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
         cmd = self._get_cmd("spin-livemedia")

         for key in ("name", "version", "target", "arch", "ksfile"):
@@ -353,8 +354,8 @@ class KojiWrapper(object):
         specfile=None,
         ksurl=None,
     ):
-        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
-        # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>
+        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
+        # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
         # Examples:
         # * name: RHEL-7.0
         # * name: Satellite-6.0.1-RHEL-6
@@ -408,7 +409,8 @@ class KojiWrapper(object):
             cmd.append("--release=%s" % release)

         # IMPORTANT: all --opts have to be provided *before* args
-        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
+        # Usage:
+        # koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>

         cmd.append(name)
         cmd.append(version)
@@ -445,7 +447,8 @@ class KojiWrapper(object):
             if retcode == 0 or not (
                 self._has_connection_error(output) or self._has_offline_error(output)
             ):
-                # Task finished for reason other than connection error or server offline error.
+                # Task finished for reason other than connection error
+                # or server offline error.
                 return retcode, output

             attempt += 1
@@ -612,7 +615,8 @@ class KojiWrapper(object):
             self.koji_module.pathinfo.taskrelpath(task_info["id"]),
         )

-        # TODO: Maybe use different approach for non-scratch builds - see get_image_path()
+        # TODO: Maybe use different approach for non-scratch
+        # builds - see get_image_path()

         # Get list of filenames that should be returned
         result_files = task_result["rpms"]
@@ -675,19 +679,22 @@ class KojiWrapper(object):
         self, koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None
     ):
         """
-        Calls the `koji_session_fnc` using Koji multicall feature N times based on the list of
-        arguments passed in `list_of_args` and `list_of_kwargs`.
-        Returns list of responses sorted the same way as input args/kwargs. In case of error,
-        the error message is logged and None is returned.
+        Calls the `koji_session_fnc` using Koji multicall feature N times based on
+        the list of arguments passed in `list_of_args` and `list_of_kwargs`.
+        Returns list of responses sorted the same way as input args/kwargs.
+        In case of error, the error message is logged and None is returned.

         For example to get the package ids of "httpd" and "apr" packages:
             ids = multicall_map(session, session.getPackageID, ["httpd", "apr"])
             # ids is now [280, 632]

         :param KojiSessions koji_session: KojiSession to use for multicall.
-        :param object koji_session_fnc: Python object representing the KojiSession method to call.
-        :param list list_of_args: List of args which are passed to each call of koji_session_fnc.
-        :param list list_of_kwargs: List of kwargs which are passed to each call of koji_session_fnc.
+        :param object koji_session_fnc: Python object representing the
+            KojiSession method to call.
+        :param list list_of_args: List of args which are passed to each
+            call of koji_session_fnc.
+        :param list list_of_kwargs: List of kwargs which are passed to
+            each call of koji_session_fnc.
         """
         if list_of_args is None and list_of_kwargs is None:
             raise ValueError("One of list_of_args or list_of_kwargs must be set.")
@@ -729,22 +736,23 @@ class KojiWrapper(object):
 results = []

 # For the response specification, see
-# https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic
+# https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic # noqa: E501
 # Relevant part of this:
-# Multicall returns an array of responses. There will be one response for each call in
-# the original array. The result will either be a one-item array containing the result value,
+# Multicall returns an array of responses. There will be one response
+# for each call in the original array. The result will either be
+# a one-item array containing the result value,
 # or a struct of the form found inside the standard <fault> element.
 for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
 if type(response) == list:
 if not response:
 raise ValueError(
-"Empty list returned for multicall of method %r with args %r, %r"
+"Empty list returned for multicall of method %r with args %r, %r" # noqa: E501
 % (koji_session_fnc, args, kwargs)
 )
 results.append(response[0])
 else:
 raise ValueError(
-"Unexpected data returned for multicall of method %r with args %r, %r: %r"
+"Unexpected data returned for multicall of method %r with args %r, %r: %r" # noqa: E501
 % (koji_session_fnc, args, kwargs, response)
 )

@@ -114,7 +114,7 @@ class LoraxWrapper(object):
 brand=None,
 ):
 # RHEL 6 compatibility
-# Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>
+# Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root> # noqa: E501

 brand = brand or "redhat"
 # HACK: ignore provided release
@@ -132,7 +132,8 @@ class PungiWrapper(object):
 # path to a kickstart file
 cmd.append("--config=%s" % config)

-# destdir is optional in Pungi (defaults to current dir), but want it mandatory here
+# destdir is optional in Pungi (defaults to current dir), but
+# want it mandatory here
 cmd.append("--destdir=%s" % destdir)

 # name
@@ -311,7 +311,7 @@ class Variant(object):
 return self.uid

 def __repr__(self):
-return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(
+return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format( # noqa: E501
 self
 )

@@ -350,12 +350,14 @@ class Variant(object):
 if self.type != "variant":
 raise RuntimeError("Only 'variant' can contain another variants.")
 if variant.id == self.id:
-# due to os/<variant.id> path -- addon id would conflict with parent variant id
+# due to os/<variant.id> path -- addon id would conflict with
+# parent variant id
 raise RuntimeError(
 "Child variant id must be different than parent variant id: %s"
 % variant.id
 )
-# sometimes an addon or layered product can be part of multiple variants with different set of arches
+# sometimes an addon or layered product can be part of multiple
+# variants with different set of arches
 arches = sorted(set(self.arches).intersection(set(variant.arches)))
 if self.arches and not arches:
 raise RuntimeError(
@@ -235,7 +235,8 @@ class UnifiedISO(object):
 # override paths
 arch_ti[variant.uid].repository = variant.uid
 arch_ti[variant.uid].packages = variant.uid
-# set to None, replace with source_*; requires productmd changes or upstream version
+# set to None, replace with source_*; requires productmd
+# changes or upstream version
 # arch_ti[variant.uid].source_repository = variant.uid
 # arch_ti[variant.uid].source_packages = variant.uid

@@ -387,7 +388,8 @@ class UnifiedISO(object):
 run(iso.get_manifest_cmd(iso_path))

 img = productmd.images.Image(im)
-# temporary path, just a file name; to be replaced with variant specific path
+# temporary path, just a file name; to be replaced with
+# variant specific path
 img.path = os.path.basename(iso_path)
 img.mtime = int(os.stat(iso_path).st_mtime)
 img.size = os.path.getsize(iso_path)
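Throughout the commit the E501 fix takes one of two forms: wrap the line, or append a targeted # noqa: E501 where wrapping would mangle an unsplittable token (URLs, log-message literals that tests match verbatim). A minimal illustration, assuming flake8 enforces max-line-length 88 as the error messages indicate; old, new, and SPEC_URL are placeholder names, not code from the repository:

    # Fix 1: wrap via implicit string concatenation; the runtime value is unchanged.
    msg = (
        "WARNING: Config option '%s' is deprecated, "
        "please use '%s' instead." % (old, new)
    )
    # Fix 2: suppress E501 on this line only; all other flake8 checks still apply.
    SPEC_URL = "https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic"  # noqa: E501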
setup.py | 2
@@ -42,7 +42,7 @@ setup(
 "pungi-make-ostree = pungi.ostree:main",
 "pungi-notification-report-progress = pungi.scripts.report_progress:main",
 "pungi-orchestrate = pungi_utils.orchestrator:main",
-"pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",
+"pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main", # noqa: E501
 "pungi-koji = pungi.scripts.pungi_koji:cli_main",
 "pungi-gather = pungi.scripts.pungi_gather:cli_main",
 "pungi-config-dump = pungi.scripts.config_dump:cli_main",
@@ -1407,10 +1407,10 @@ class BuildinstallThreadTestCase(PungiTestCase):
 compose._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway."
+"[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway." # noqa: E501
 ),
 mock.call(
-"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details."
+"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details." # noqa: E501
 % self.topdir
 ),
 ]
@@ -1448,10 +1448,10 @@ class BuildinstallThreadTestCase(PungiTestCase):
 compose._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Buildinstall (variant Server, arch x86_64) failed, but going on anyway."
+"[FAIL] Buildinstall (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
 ),
 mock.call(
-"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details."
+"Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details." # noqa: E501
 % self.topdir
 ),
 ]
@@ -217,7 +217,7 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 1)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
+r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")

@@ -270,7 +270,7 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 1)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
+r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")

@@ -297,12 +297,12 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(errors), 1)
 self.assertRegexpMatches(
 errors[0],
-r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*",
+r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*", # noqa: E501
 )
 self.assertEqual(len(warnings), 1)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
+r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")

@@ -384,11 +384,11 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 2)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
+r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[1],
-r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'",
+r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")
 self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@@ -426,11 +426,11 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 2)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
+r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[1],
-r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
+r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")
 self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@@ -472,19 +472,19 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 4)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
+r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[1],
-r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
+r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[2],
-r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'",
+r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[3],
-r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.",
+r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.", # noqa: E501
 )
 self.assertEqual(config.get("release_name", None), "dummy product")
 self.assertEqual(
@@ -534,11 +534,11 @@ class TestSchemaValidator(unittest.TestCase):
 self.assertEqual(len(warnings), 2)
 self.assertRegexpMatches(
 warnings[0],
-r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
+r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*", # noqa: E501
 )
 self.assertRegexpMatches(
 warnings[1],
-r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*",
+r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*", # noqa: E501
 )
 self.assertEqual(
 config.get("live_images")[0][1]["armhfp"]["repo"], "Everything"
@@ -544,7 +544,7 @@ class ComposeTestCase(unittest.TestCase):
 mock.call("Excluding variant Live: filtered by configuration."),
 mock.call("Excluding variant Crashy: all its arches are filtered."),
 mock.call(
-"Excluding variant Server-ResilientStorage: filtered by configuration."
+"Excluding variant Server-ResilientStorage: filtered by configuration." # noqa: E501
 ),
 mock.call(
 "Excluding variant Server-Gluster: filtered by configuration."
@@ -635,7 +635,7 @@ class StatusTest(unittest.TestCase):
 ),
 mock.call(
 20,
-"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.",
+"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.", # noqa: E501
 ),
 ],
 any_order=True,
@@ -73,7 +73,7 @@ class ReleaseConfigTestCase(ConfigTestCase):
 self.assertValidation(
 cfg,
 warnings=[
-"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."
+"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product." # noqa: E501
 ],
 )

@@ -158,7 +158,7 @@ class RunrootConfigTestCase(ConfigTestCase):
 self.assertValidation(
 cfg,
 warnings=[
-"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
+"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally." # noqa: E501
 ],
 )

@@ -168,7 +168,7 @@ class RunrootConfigTestCase(ConfigTestCase):
 self.assertValidation(
 cfg,
 warnings=[
-"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
+"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally." # noqa: E501
 ],
 )

@@ -180,7 +180,7 @@ class BuildinstallConfigTestCase(ConfigTestCase):
 self.assertValidation(
 cfg,
 warnings=[
-"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer."
+"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer." # noqa: E501
 ],
 )

@@ -298,7 +298,7 @@ class OstreeConfigTestCase(ConfigTestCase):
 {
 "x86_64": {
 "treefile": "fedora-atomic-docker-host.json",
-"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
+"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git", # noqa: E501
 "repo": "Everything",
 "ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
 "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
@@ -329,18 +329,18 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
 "release": None,
 "installpkgs": ["fedora-productimg-atomic"],
 "add_template": [
-"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
+"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl" # noqa: E501
 ],
 "add_template_var": [
 "ostree_osname=fedora-atomic",
 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
 ],
 "add_arch_template": [
-"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
+"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl" # noqa: E501
 ],
 "rootfs_size": "3",
 "add_arch_template_var": [
-"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
 "ostree_osname=fedora-atomic",
 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
 ],
@@ -364,7 +364,7 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
 "release": None,
 "installpkgs": ["fedora-productimg-atomic"],
 "add_template": [
-"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
+"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl" # noqa: E501
 ],
 "add_template_var": [
 "ostree_osname=fedora-atomic",
@@ -372,7 +372,7 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
 ],
 "add_arch_template": 15,
 "add_arch_template_var": [
-"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", # noqa: E501
 "ostree_osname=fedora-atomic",
 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
 ],
@@ -420,7 +420,7 @@ class TestRegexValidation(ConfigTestCase):
 def test_incorrect_regular_expression(self):
 cfg = load_config(PKGSET_REPOS, multilib=[("^*$", {"*": []})])

-msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat"
+msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat" # noqa: E501
 if six.PY3:
 msg += " at position 1"
 self.assertValidation(cfg, [msg], [])
@@ -850,7 +850,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
+"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -896,7 +896,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
+"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
 ),
 mock.call(
 "Runroot task failed: 1234. See %s for more details."
@@ -987,7 +987,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway."
+"[FAIL] Iso (variant Server, arch x86_64) failed, but going on anyway." # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -56,7 +56,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
 ]
 )

@@ -113,7 +113,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ["/usr/bin/isohybrid", "--uefi", "DP-1.0-20160405.t.3-x86_64.iso"]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
 ]
 )

@@ -165,7 +165,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ),
 " ".join(["/usr/bin/isohybrid", "DP-1.0-20160405.t.3-i386.iso"]),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-i386.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest", # noqa: E501
 ]
 )

@@ -218,7 +218,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest", # noqa: E501
 ]
 )

@@ -262,7 +262,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-s390x.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest", # noqa: E501
 ]
 )

@@ -314,7 +314,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest", # noqa: E501
 ]
 )

@@ -373,7 +373,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
 ]
 ),
 " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
-"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
+"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest", # noqa: E501
 " ".join(
 [
 "jigdo-file",
@@ -745,9 +745,9 @@ class TestCreateVariantRepo(PungiTestCase):
 deltas=True,
 oldpackagedirs=[
 self.topdir
-+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/a",
++ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/a", # noqa: E501
 self.topdir
-+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/b",
++ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages/b", # noqa: E501
 ],
 use_xz=False,
 extra_args=[],
@@ -1168,7 +1168,7 @@ class TestCreateVariantRepo(PungiTestCase):

 modules_metadata = ModulesMetadata(compose)

-modulemd_filename.return_value = "Server/x86_64/os/repodata/3511d16a723e1bd69826e591508f07e377d2212769b59178a9-modules.yaml.gz"
+modulemd_filename.return_value = "Server/x86_64/os/repodata/3511d16a723e1bd69826e591508f07e377d2212769b59178a9-modules.yaml.gz" # noqa: E501
 create_variant_repo(
 compose,
 "x86_64",
@@ -1270,7 +1270,7 @@ class TestGetProductIds(PungiTestCase):
 self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
 self.assertRegexpMatches(
 str(ctx.exception),
-r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)",
+r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)", # noqa: E501
 )

 @mock.patch("pungi.phases.createrepo.get_dir_from_scm")
@@ -1176,7 +1176,7 @@ class DepsolvingBase(object):

 def test_bash_multilib_exclude(self):
 # test if excluding a package really works
-# NOTE: dummy-bash-doc would pull x86_64 bash in (we want noarch pulling 64bit deps in composes)
+# NOTE: dummy-bash-doc would pull x86_64 bash in (we want noarch pulling 64bit deps in composes) # noqa: E501
 packages = [
 "dummy-bash.+",
 "-dummy-bash-doc",
@@ -146,7 +146,7 @@ class TestWritePungiConfig(helpers.PungiTestCase):
 )
 self.assertEqual(
 str(ctx.exception),
-"No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)",
+"No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)", # noqa: E501
 )
 self.assertEqual(PungiWrapper.return_value.mock_calls, [])

@@ -188,7 +188,7 @@ class TestCheckDeps(helpers.PungiTestCase):
 self.compose.log_error.call_args_list,
 [
 mock.call(
-"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']"
+"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']" # noqa: E501
 % (self.variant, self.arch)
 )
 ],
@@ -24,7 +24,8 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
 spanning_line = self.g.prune_graph()

 self.assertEqual(4, len(spanning_line))
-# 'Base' as a lookaside should be at the end of the spanning line, order of others is not crucial
+# 'Base' as a lookaside should be at the end of the spanning line,
+# order of others is not crucial
 self.assertEqual("Base", spanning_line[-1])

 def test_complex_graph(self):
@@ -28,7 +28,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -110,7 +110,7 @@ class TestImageBuildPhase(PungiTestCase):
 compose = DummyCompose(
 self.topdir,
 {
-"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
 "image_build_target": "f24",
 "image_build_version": "Rawhide",
@@ -173,7 +173,7 @@ class TestImageBuildPhase(PungiTestCase):
 compose = DummyCompose(
 self.topdir,
 {
-"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
 "image_build_target": "f24",
 "image_build": {
@@ -241,7 +241,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -277,7 +277,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -346,7 +346,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -414,7 +414,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -488,7 +488,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -555,7 +555,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -598,7 +598,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -641,7 +641,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -681,7 +681,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -745,7 +745,7 @@ class TestImageBuildPhase(PungiTestCase):
 "name": "Fedora-Docker-Base",
 "target": "f24",
 "version": "Rawhide",
-"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git",
+"ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
 "kickstart": "fedora-docker-base.ks",
 "distro": "Fedora-20",
 "disk_size": 3,
@@ -873,7 +873,7 @@ class TestCreateImageBuildThread(PungiTestCase):
 mock.call(
 koji_wrapper.get_image_build_cmd.return_value,
 log_file=self.topdir
-+ "/logs/amd64-x86_64/imagebuild-Client-KDE-docker-qcow2.amd64-x86_64.log",
++ "/logs/amd64-x86_64/imagebuild-Client-KDE-docker-qcow2.amd64-x86_64.log", # noqa: E501
 )
 ],
 )
@@ -885,25 +885,25 @@ class TestCreateImageBuildThread(PungiTestCase):
 mock.call.link(
 "/koji/task/1235/Fedora-Docker-Base-20160103.amd64.qcow2",
 self.topdir
-+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2",
++ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2", # noqa: E501
 link_type="hardlink-or-copy",
 ),
 mock.call.link(
 "/koji/task/1235/Fedora-Docker-Base-20160103.amd64.tar.gz",
 self.topdir
-+ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.gz",
++ "/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.gz", # noqa: E501
 link_type="hardlink-or-copy",
 ),
 mock.call.link(
 "/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.qcow2",
 self.topdir
-+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2",
++ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2", # noqa: E501
 link_type="hardlink-or-copy",
 ),
 mock.call.link(
 "/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.tar.gz",
 self.topdir
-+ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.gz",
++ "/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.gz", # noqa: E501
 link_type="hardlink-or-copy",
 ),
 ],
@@ -992,14 +992,14 @@ class TestCreateImageBuildThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway."
+"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway." # noqa: E501
 ),
 mock.call(
 "ImageBuild task failed: 1234. See %s for more details."
 % (
 os.path.join(
 self.topdir,
-"logs/amd64-x86_64/imagebuild-Client-Client-docker-qcow2.amd64-x86_64.log",
+"logs/amd64-x86_64/imagebuild-Client-Client-docker-qcow2.amd64-x86_64.log", # noqa: E501
 )
 )
 ),
@@ -1046,7 +1046,7 @@ class TestCreateImageBuildThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway."
+"[FAIL] Image build (variant Client, arch *, subvariant Client) failed, but going on anyway." # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -107,7 +107,7 @@ class TestImageChecksumPhase(PungiTestCase):
 {
 "media_checksums": ["sha256"],
 "media_checksum_one_file": True,
-"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s",
+"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s", # noqa: E501
 },
 )
 compose.compose_label = "Alpha-1.0"
@@ -121,7 +121,7 @@ class TestImageChecksumPhase(PungiTestCase):

 dump_checksums.assert_called_once_with(
 self.topdir
-+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM",
++ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM", # noqa: E501
 set([("image.iso", 123, "sha256", "cafebabe")]),
 )
 cc.assert_called_once_with(
@@ -137,7 +137,7 @@ class TestImageChecksumPhase(PungiTestCase):
 self.topdir,
 {
 "media_checksums": ["md5", "sha256"],
-"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s",
+"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s", # noqa: E501
 },
 )

@@ -189,7 +189,7 @@ class TestImageChecksumPhase(PungiTestCase):
 self.topdir,
 {
 "media_checksums": ["md5", "sha256"],
-"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}",
+"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}", # noqa: E501
 },
 )

@@ -80,7 +80,8 @@ class TestIsoUtils(unittest.TestCase):
 @mock.patch("pungi.wrappers.iso.run")
 def test_mount_iso(self, mock_run, mock_unmount):
 # first tuple is return value for command 'which guestmount'
-# value determines type of the mount/unmount command ('1' - guestmount is not available)
+# value determines type of the mount/unmount
+# command ('1' - guestmount is not available)
 # for approach as a root, pair commands mount-umount are used
 mock_run.side_effect = [(1, ""), (0, "")]
 with iso.mount("dummy") as temp_dir:
@@ -99,7 +100,8 @@ class TestIsoUtils(unittest.TestCase):
 @mock.patch("pungi.wrappers.iso.run")
 def test_guestmount(self, mock_run, mock_unmount, mock_rmtree):
 # first tuple is return value for command 'which guestmount'
-# value determines type of the mount/unmount command ('0' - guestmount is available)
+# value determines type of the mount/unmount
+# command ('0' - guestmount is available)
 # for approach as a non-root, pair commands guestmount-fusermount are used
 mock_run.side_effect = [(0, ""), (0, "")]
 with iso.mount("dummy") as temp_dir:
@@ -118,7 +120,8 @@ class TestIsoUtils(unittest.TestCase):
 @mock.patch("pungi.wrappers.iso.run")
 def test_guestmount_cleans_up_cache(self, mock_run, mock_unmount, mock_rmtree):
 # first tuple is return value for command 'which guestmount'
-# value determines type of the mount/unmount command ('0' - guestmount is available)
+# value determines type of the mount/unmount
+# command ('0' - guestmount is available)
 # for approach as a non-root, pair commands guestmount-fusermount are used
 mock_run.side_effect = [(0, ""), (0, "")]
 with iso.mount("dummy") as temp_dir:
@@ -139,7 +142,8 @@ class TestIsoUtils(unittest.TestCase):
 self, mock_run, mock_unmount, mock_rmtree
 ):
 # first tuple is return value for command 'which guestmount'
-# value determines type of the mount/unmount command ('0' - guestmount is available)
+# value determines type of the mount/unmount
+# command ('0' - guestmount is available)
 # for approach as a non-root, pair commands guestmount-fusermount are used
 mock_run.side_effect = [(0, ""), (0, "")]
 with iso.mount("dummy") as temp_dir:
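The comment repeated in these tests describes the mocked call order: the first run() result stands for `which guestmount`, and its exit code selects the mount strategy. A sketch of that selection, under stated assumptions (placeholder command lines and function name; the real logic lives in pungi.wrappers.iso and may differ in detail):

    import subprocess

    def pick_mount_commands(image, mount_point):
        # Exit code 0 from `which guestmount` means guestmount is available:
        # use the non-root guestmount/fusermount pair; otherwise fall back
        # to the root-only mount/umount pair.
        if subprocess.call(["which", "guestmount"]) == 0:
            mount = ["guestmount", "-a", image, "-m", "/dev/sda", mount_point]
            unmount = ["fusermount", "-u", mount_point]
        else:
            mount = ["mount", "-o", "loop", image, mount_point]
            unmount = ["umount", mount_point]
        return mount, unmount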
@@ -160,16 +160,16 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
 "id": 563977,
 "state": 1,
 },
-"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/i386/os/",
+"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/i386/os/", # noqa: E501
 {
 "disk_size": "3",
 "distro": "Fedora-20",
 "format": ["qcow2", "raw-xz"],
-"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks",
+"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks", # noqa: E501
 "release": "20160103",
 "repo": [
-"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/",
-"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/",
+"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/", # noqa: E501
+"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/", # noqa: E501
 ],
 "scratch": True,
 },
@@ -216,16 +216,16 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
 "id": 563977,
 "state": 1,
 },
-"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/x86_64/os/",
+"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/x86_64/os/", # noqa: E501
 {
 "disk_size": "3",
 "distro": "Fedora-20",
 "format": ["qcow2", "raw-xz"],
-"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks",
+"kickstart": "work/cli-image/1451798116.800155.wYJWTVHw/fedora-cloud-base-2878aa0.ks", # noqa: E501
 "release": "20160103",
 "repo": [
-"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/",
-"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/",
+"http://infrastructure.fedoraproject.org/pub/alt/releases/23/Cloud/$arch/os/", # noqa: E501
+"http://infrastructure.fedoraproject.org/pub/fedora/linux/updates/23/$arch/", # noqa: E501
 ],
 "scratch": True,
 },
@@ -593,7 +593,7 @@ class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
 self.assertEqual(cmd[-2], "s390x")
 self.assertEqual(
 cmd[-1],
-"rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' && chmod -R a+r '/output dir' /foo && chown -R 1010 '/output dir' /foo",
+"rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' && chmod -R a+r '/output dir' /foo && chown -R 1010 '/output dir' /foo", # noqa: E501
 )
 six.assertCountEqual(
 self,
@ -91,7 +91,7 @@ class TestLiveImagesPhase(PungiTestCase):
|
|||||||
compose.variants["Client"],
|
compose.variants["Client"],
|
||||||
disc_num=None,
|
disc_num=None,
|
||||||
disc_type="live",
|
disc_type="live",
|
||||||
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s",
|
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s", # noqa: E501
|
||||||
)
|
)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@ -345,7 +345,7 @@ class TestLiveImagesPhase(PungiTestCase):
|
|||||||
{
|
{
|
||||||
"amd64": {
|
"amd64": {
|
||||||
"kickstart": "test.ks",
|
"kickstart": "test.ks",
|
||||||
"ksurl": "https://git.example.com/kickstarts.git?#CAFEBABE",
|
"ksurl": "https://git.example.com/kickstarts.git?#CAFEBABE", # noqa: E501
|
||||||
"repo": ["http://example.com/repo/", "Everything"],
|
"repo": ["http://example.com/repo/", "Everything"],
|
||||||
"type": "appliance",
|
"type": "appliance",
|
||||||
"target": "f27",
|
"target": "f27",
|
||||||
@ -618,7 +618,7 @@ class TestLiveImagesPhase(PungiTestCase):
|
|||||||
compose.variants["Client"],
|
compose.variants["Client"],
|
||||||
disc_num=None,
|
disc_num=None,
|
||||||
disc_type="Live",
|
disc_type="Live",
|
||||||
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s",
|
format="%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s", # noqa: E501
|
||||||
)
|
)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@ -695,7 +695,7 @@ class TestCreateLiveImageThread(PungiTestCase):
|
|||||||
write_manifest_cmd = " && ".join(
|
write_manifest_cmd = " && ".join(
|
||||||
[
|
[
|
||||||
"cd " + self.topdir + "/compose/Client/amd64/iso",
|
"cd " + self.topdir + "/compose/Client/amd64/iso",
|
||||||
"isoinfo -R -f -i image-name | grep -v '/TRANS.TBL$' | sort >> image-name.manifest",
|
"isoinfo -R -f -i image-name | grep -v '/TRANS.TBL$' | sort >> image-name.manifest", # noqa: E501
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])
|
self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])
|
||||||
@@ -806,7 +806,7 @@ class TestCreateLiveImageThread(PungiTestCase):
 write_manifest_cmd = " && ".join(
 [
 "cd " + self.topdir + "/compose/Client/amd64/iso",
-"isoinfo -R -f -i image.iso | grep -v '/TRANS.TBL$' | sort >> image.iso.manifest",
+"isoinfo -R -f -i image.iso | grep -v '/TRANS.TBL$' | sort >> image.iso.manifest",  # noqa: E501
 ]
 )
 self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])
@@ -998,10 +998,10 @@ class TestCreateLiveImageThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."
+"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."  # noqa: E501
 ),
 mock.call(
-"LiveImage task failed: 123. See %s/logs/amd64/liveimage-None-None-xyz.amd64.log for more details."
+"LiveImage task failed: 123. See %s/logs/amd64/liveimage-None-None-xyz.amd64.log for more details."  # noqa: E501
 % self.topdir
 ),
 ]
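Several of these hunks assert on logger output through `unittest.mock`. A self-contained sketch of the `assert_has_calls` idiom they rely on (messages shortened for illustration):

from unittest import mock

logger = mock.Mock()
logger.error("[FAIL] Live ... failed, but going on anyway.")
logger.error("LiveImage task failed: 123.")

# Passes when the listed calls appear consecutively, in order, among the
# mock's recorded calls; extra calls before or after the run are allowed.
logger.error.assert_has_calls(
    [
        mock.call("[FAIL] Live ... failed, but going on anyway."),
        mock.call("LiveImage task failed: 123."),
    ]
)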
@@ -1046,7 +1046,7 @@ class TestCreateLiveImageThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."
+"[FAIL] Live (variant Client, arch amd64, subvariant Client) failed, but going on anyway."  # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -634,7 +634,7 @@ class TestLiveMediaThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."
+"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."  # noqa: E501
 ),
 mock.call(
 "Live media task failed: 1234. See %s for more details."
@@ -709,7 +709,7 @@ class TestLiveMediaThread(PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."
+"[FAIL] Live media (variant Server, arch *, subvariant KDE) failed, but going on anyway."  # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -126,13 +126,13 @@ ARCHIVES = [
 "checksum_type": 0,
 "extra": {
 "docker": {
-"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
-"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
+"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",  # noqa: E501
+"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",  # noqa: E501
 "repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
 },
 "image": {"arch": "x86_64"},
 },
-"filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",
+"filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",  # noqa: E501
 "id": 1436049,
 "metadata_only": False,
 "size": 174038795,
@@ -155,8 +155,8 @@ METADATA = {
 "filename": ARCHIVES[0]["filename"],
 "size": ARCHIVES[0]["size"],
 "docker": {
-"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
-"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
+"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",  # noqa: E501
+"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",  # noqa: E501
 "repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
 },
 "image": {"arch": "x86_64"},
@@ -179,7 +179,7 @@ SCRATCH_METADATA = {
 {
 "koji_task": 12345,
 "repositories": [
-"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
+"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",  # noqa: E501
 ],
 }
 ]
@@ -288,7 +288,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
 config["osbs"] = {"^Server$": cfg}
 errors, warnings = checks.validate(config, offline=True)
 self.assertIn(
-"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas"
+"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas"  # noqa: E501
 % cfg,
 errors,
 )
@@ -236,7 +236,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 self.compose.supported = False
 pool = mock.Mock()
 cfg = {
-"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
 "release": "20160321.n.0",
 }
 koji = KojiWrapper.return_value
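In hunks like the one above the suppression is appended after an existing inline comment. That works because flake8 scans the whole physical line for a `# noqa` marker rather than requiring it to be the only comment on the line. A hedged sketch:

cfg = {
    # flake8 still finds the trailing "# noqa: E501" even though another
    # inline comment precedes it on the same physical line.
    "repo": "Everything",  # deprecated variant-type repo  # noqa: E501
    "release": "20160321.n.0",
}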
@@ -324,7 +324,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 cfg = {
 "release": "20160321.n.0",
 "repo": [
-"Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
 "https://example.com/extra-repo1.repo",
 "https://example.com/extra-repo2.repo",
 ],
@@ -369,8 +369,8 @@ class OstreeThreadTest(helpers.PungiTestCase):
 cfg = {
 "release": "20160321.n.0",
 "repo": [
-"Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
-"Server",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
+"Server",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
 "https://example.com/extra-repo1.repo",
 "https://example.com/extra-repo2.repo",
 ],
@@ -493,7 +493,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 ):
 pool = mock.Mock()
 cfg = {
-"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
 "release": "20160321.n.0",
 "add_template": ["some_file.txt"],
 "add_arch_template": ["other_file.txt"],
@@ -562,7 +562,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 ):
 pool = mock.Mock()
 cfg = {
-"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo  # noqa: E501
 "release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
 "installpkgs": ["fedora-productimg-atomic"],
 "add_template": [
@@ -576,7 +576,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 "/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
 ],
 "add_arch_template_var": [
-"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",  # noqa: E501
 "ostree_osname=fedora-atomic",
 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
 ],
@@ -606,13 +606,13 @@ class OstreeThreadTest(helpers.PungiTestCase):
 isfinal=True,
 extra=[
 "--installpkgs=fedora-productimg-atomic",
-"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",
-"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",
+"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",  # noqa: E501
+"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",  # noqa: E501
 "--add-template-var=ostree_osname=fedora-atomic",
-"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
-"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",  # noqa: E501
+"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",  # noqa: E501
 "--add-arch-template-var=ostree_osname=fedora-atomic",
-"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
+"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",  # noqa: E501
 "--logfile=%s/%s/lorax.log" % (self.topdir, LOG_PATH),
 ],
 weight=123,
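The long `--add-*` flags above are built from the config lists shown in the earlier hunks, one flag per list entry. A hedged sketch of that assembly (the function and parameter names are illustrative, not Pungi's):

def lorax_extra_args_sketch(template_vars, arch_template_vars):
    # Each config value becomes one long CLI flag, which is why these
    # lines exceed the length limit and carry noqa in the real tests.
    extra = ["--add-template-var=%s" % v for v in template_vars]
    extra += ["--add-arch-template-var=%s" % v for v in arch_template_vars]
    return extra

print(lorax_extra_args_sketch(
    ["ostree_osname=fedora-atomic"],
    ["ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/"],
))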
@@ -633,7 +633,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 ):
 pool = mock.Mock()
 cfg = {
-"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo
+"repo": "Everything",  # this variant-type repo is deprecated, in result will be replaced with default repo.  # noqa: E501
 "release": None,
 "installpkgs": ["fedora-productimg-atomic"],
 "add_template": [
@@ -647,7 +647,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 "/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
 ],
 "add_arch_template_var": [
-"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",  # noqa: E501
 "ostree_osname=fedora-atomic",
 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
 ],
@@ -677,13 +677,13 @@ class OstreeThreadTest(helpers.PungiTestCase):
 isfinal=True,
 extra=[
 "--installpkgs=fedora-productimg-atomic",
-"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",
-"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",
+"--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl",  # noqa: E501
+"--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl",  # noqa: E501
 "--add-template-var=ostree_osname=fedora-atomic",
-"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
-"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
+"--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",  # noqa: E501
+"--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",  # noqa: E501
 "--add-arch-template-var=ostree_osname=fedora-atomic",
-"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
+"--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",  # noqa: E501
 "--logfile=%s/%s/lorax.log" % (self.topdir, LOG_PATH),
 ],
 weight=123,
@@ -713,7 +713,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."
+"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."  # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -748,7 +748,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
 pool._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."
+"[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway."  # noqa: E501
 ),
 mock.call(
 "Runroot task failed: 1234. See %s/%s/runroot.log for more details."
@@ -200,7 +200,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
 },
 {
 "name": "http:__example.com_work__basearch_comps_repo_Everything",
-"baseurl": "http://example.com/work/$basearch/comps_repo_Everything",
+"baseurl": "http://example.com/work/$basearch/comps_repo_Everything",  # noqa: E501
 },
 ]
 }
@@ -295,7 +295,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
 self.compose._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
+"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."  # noqa: E501
 ),
 mock.call(
 "Runroot task failed: 1234. See %s for more details."
@@ -322,7 +322,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
 self.compose._logger.error.assert_has_calls(
 [
 mock.call(
-"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
+"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."  # noqa: E501
 ),
 mock.call("BOOM"),
 ]
@@ -675,7 +675,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
 koji.run_runroot_cmd.side_effect = self._mock_runroot(0)

 cfg = {
-"repo": [  # Variant type repos will not be included into extra_config. This part of the config is deprecated
+"repo": [  # Variant type repos will not be included into extra_config. This part of the config is deprecated  # noqa: E501
 "Everything",  # do not include
 {
 "name": "repo_a",
@@ -704,7 +704,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
 with open(extra_config_file, "r") as extra_config_fd:
 extra_config = json.load(extra_config_fd)
 self.assertTrue(extra_config.get("keep_original_sources", False))
-# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository
+# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository  # noqa: E501
 self.assertEqual(len(extra_config.get("repo", [])), 3)
 self.assertEqual(
 extra_config.get("repo").pop()["baseurl"],
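The hunk above asserts against a JSON file written by the phase under test. A self-contained sketch of that round-trip, assuming placeholder file contents (nothing below is taken from Pungi itself):

import json
import tempfile

# Write a stand-in extra_config and read it back, the way the test does.
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as fd:
    json.dump(
        {
            "keep_original_sources": True,
            "repo": [{"baseurl": "http://example.com/repo"}],
        },
        fd,
    )
    path = fd.name

with open(path, "r") as extra_config_fd:
    extra_config = json.load(extra_config_fd)

assert extra_config.get("keep_original_sources", False)
assert len(extra_config.get("repo", [])) == 1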
@@ -397,7 +397,7 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
 "--add-template=/path/to/lorax.tmpl",
 "--add-arch-template=/path/to/lorax-embed.tmpl",
 "--add-template-var=ostree_osname=dummy",
-"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",
+"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",  # noqa: E501
 "--rootfs-size=None",
 self.output,
 ],
@@ -463,7 +463,7 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
 "--add-template-var=ostree_osname=dummy-atomic",
 "--add-template-var=ostree_ref=dummy/x86_64/docker",
 "--add-arch-template-var=ostree_osname=dummy-atomic",
-"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",
+"--add-arch-template-var=ostree_repo=http://www.example.com/ostree",  # noqa: E501
 "--rootfs-size=None",
 self.output,
 ],
@@ -295,7 +295,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
 )

 figure = re.compile(
-r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
+r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",  # noqa: E501
 re.DOTALL,
 )
 self.assertRegexpMatches(str(ctx.exception), figure)
@@ -320,7 +320,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
 pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)

 figure = re.compile(
-r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
+r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",  # noqa: E501
 re.DOTALL,
 )
 self.assertRegexpMatches(str(ctx.exception), figure)
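These two hunks match a multi-line exception message with a single regex, which only works because of the `re.DOTALL` flag. A small sketch of the mechanism (the message text is shortened for illustration):

import re

figure = re.compile(r"^RPM\(s\) not found.+details.+bash-4\.3\.42-4\.fc24$", re.DOTALL)
message = "RPM(s) not found for sigs: X\nCheck log for details.\nbash-4.3.42-4.fc24"

# With re.DOTALL, ".+" also consumes the newlines, so one pattern can
# span the whole multi-line message; without the flag, the match fails.
assert figure.match(message)
assert re.match(r"^RPM\(s\) not found.+fc24$", message) is None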
@@ -148,5 +148,5 @@ class FusExtractorTestCase(helpers.PungiTestCase):

 self.assertFileContent(
 self.output,
-"Problem 1/1\n - nothing provides foo\nProblem 1/1\n - nothing provides quux\n",
+"Problem 1/1\n - nothing provides foo\nProblem 1/1\n - nothing provides quux\n",  # noqa: E501
 )
@@ -867,7 +867,7 @@ class GetRepoFuncsTestCase(unittest.TestCase):
 def test_get_repo_dicts(self):
 repos = [
 "http://example.com/repo",
-"Server",  # this repo format is deprecated (and will not be included into final repo_dict)
+"Server",  # this repo format is deprecated (and will not be included into final repo_dict)  # noqa: E501
 {"baseurl": "Client"},  # this repo format is deprecated
 {"baseurl": "ftp://example.com/linux/repo"},
 {"name": "testrepo", "baseurl": "ftp://example.com/linux/repo"},
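For context, the test above feeds mixed repo specifications through a normalization helper. A hedged sketch of the documented part of that behavior — URL strings become dicts and bare variant names are dropped — written as an illustration, not Pungi's actual get_repo_dicts() implementation (deprecated dict forms like {"baseurl": "Client"} are handled by the real code in ways not shown here):

def get_repo_dicts_sketch(repos):
    result = []
    for repo in repos:
        if isinstance(repo, str):
            if "://" in repo:
                result.append({"baseurl": repo})
            # bare variant names like "Server" are deprecated and dropped
        else:
            result.append(repo)
    return result

print(get_repo_dicts_sketch(["http://example.com/repo", "Server"]))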