Format code

Code did not get properly formatted while Jenkins was unavailable.

Signed-off-by: Haibo Lin <hlin@redhat.com>
Haibo Lin 2021-03-02 18:19:05 +08:00
parent 735bfaa0d6
commit b217470464
27 changed files with 214 additions and 89 deletions
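Note: the diff below is consistent with running the tree through an opinionated formatter such as black; that is an assumption, since the commit message only says the code missed formatting while Jenkins, which normally gates it, was unusable. A minimal sketch of the kind of local check that would catch this, assuming black is installed (the script is illustrative and not part of this commit):

import subprocess
import sys


def formatting_is_clean(path="."):
    """Return True when `black --check` would change nothing under `path`."""
    # black exits 0 when everything is already formatted and 1 when it
    # would reformat at least one file.
    result = subprocess.run(["black", "--check", path])
    return result.returncode == 0


if __name__ == "__main__":
    sys.exit(0 if formatting_is_clean() else 1)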

View File

@@ -131,8 +131,8 @@ def getArchList(thisarch=None):  # pragma: no cover
 def _try_read_cpuinfo():  # pragma: no cover
-    """ Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
-    mounted). """
+    """Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
+    mounted)."""
     try:
         with open("/proc/cpuinfo", "r") as f:
             return f.readlines()
@@ -141,8 +141,8 @@ def _try_read_cpuinfo():  # pragma: no cover
 def _parse_auxv():  # pragma: no cover
-    """ Read /proc/self/auxv and parse it into global dict for easier access
-    later on, very similar to what rpm does. """
+    """Read /proc/self/auxv and parse it into global dict for easier access
+    later on, very similar to what rpm does."""
     # In case we can't open and read /proc/self/auxv, just return
     try:
         with open("/proc/self/auxv", "rb") as f:
@@ -326,8 +326,8 @@ def getMultiArchInfo(arch=canonArch):  # pragma: no cover
 def getBaseArch(myarch=None):  # pragma: no cover
     """returns 'base' arch for myarch, if specified, or canonArch if not.
     base arch is the arch before noarch in the arches dict if myarch is not
     a key in the multilibArches."""
     if not myarch:
         myarch = canonArch
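Note: the hunks in this file, and several below, touch only docstrings: the space after the opening quotes is dropped, a closing quote pair on its own line is merged into the last text line, and continuation lines are re-indented. Where a hunk shows no marked lines at all, the change was whitespace-only and the rendered diff cannot show it. A hypothetical, self-contained illustration (not taken from Pungi):

# Hypothetical example of the docstring clean-up pattern; the function
# names are illustrative and do not exist in Pungi.
def example_before():
    """ Docstring with a space after the opening quotes and a
            continuation line indented past the docstring. """


def example_after():
    """Docstring with the extra spaces removed and the
    continuation line re-indented."""


print(example_before.__doc__)
print(example_after.__doc__)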

View File

@@ -75,8 +75,7 @@ def is_isohybrid_needed(conf):
 def is_genisoimage_needed(conf):
-    """This is only needed locally for createiso without runroot.
-    """
+    """This is only needed locally for createiso without runroot."""
     runroot_tag = conf.get("runroot_tag", "")
     if runroot_tag or conf.get("createiso_use_xorrisofs"):
         return False

View File

@@ -519,7 +519,7 @@ class Pungi(PungiBase):
     def verifyCachePkg(self, po, path):  # Stolen from yum
         """check the package checksum vs the cache
         return True if pkg is good, False if not"""
         (csum_type, csum) = po.returnIdSum()
@@ -682,7 +682,7 @@ class Pungi(PungiBase):
     def get_package_deps(self, po):
         """Add the dependencies for a given package to the
         transaction info"""
         added = set()
         if po.repoid in self.lookaside_repos:
             # Don't resolve deps for stuff in lookaside.
@@ -911,7 +911,7 @@ class Pungi(PungiBase):
     def getPackagesFromGroup(self, group):
         """Get a list of package names from a ksparser group object
         Returns a list of package names"""
         packages = []
@@ -951,7 +951,7 @@ class Pungi(PungiBase):
     def _addDefaultGroups(self, excludeGroups=None):
         """Cycle through the groups and return at list of the ones that ara
         default."""
         excludeGroups = excludeGroups or []
         # This is mostly stolen from anaconda.
@@ -1217,8 +1217,8 @@ class Pungi(PungiBase):
     def createSourceHashes(self):
         """Create two dicts - one that maps binary POs to source POs, and
         one that maps a single source PO to all binary POs it produces.
         Requires yum still configured."""
         self.src_by_bin = {}
         self.bin_by_src = {}
         self.logger.info("Generating source <-> binary package mappings")
@@ -1232,8 +1232,8 @@ class Pungi(PungiBase):
     def add_srpms(self, po_list=None):
         """Cycle through the list of package objects and
         find the sourcerpm for them. Requires yum still
         configured and a list of package objects"""
         srpms = set()
         po_list = po_list or self.po_list
@@ -1275,9 +1275,9 @@ class Pungi(PungiBase):
     def add_fulltree(self, srpm_po_list=None):
         """Cycle through all package objects, and add any
         that correspond to a source rpm that we are including.
         Requires yum still configured and a list of package
         objects."""
         self.logger.info("Completing package set")
@@ -1357,8 +1357,8 @@ class Pungi(PungiBase):
     def getDebuginfoList(self):
         """Cycle through the list of package objects and find
         debuginfo rpms for them. Requires yum still
         configured and a list of package objects"""
         added = set()
         for po in self.all_pkgs:
@@ -1398,7 +1398,7 @@ class Pungi(PungiBase):
     def _downloadPackageList(self, polist, relpkgdir):
         """Cycle through the list of package objects and
         download them from their respective repos."""
         for pkg in sorted(polist):
             repo = self.ayum.repos.getRepo(pkg.repoid)
@@ -1533,7 +1533,7 @@ class Pungi(PungiBase):
     @yumlocked
     def downloadSRPMs(self):
         """Cycle through the list of srpms and
         find the package objects for them, Then download them."""
         # do the downloads
         self._downloadPackageList(self.srpm_po_list, os.path.join("source", "SRPMS"))
@@ -1541,7 +1541,7 @@ class Pungi(PungiBase):
     @yumlocked
     def downloadDebuginfo(self):
         """Cycle through the list of debuginfo rpms and
         download them."""
         # do the downloads
         self._downloadPackageList(
@@ -1980,7 +1980,7 @@ class Pungi(PungiBase):
     def doGetRelnotes(self):
         """Get extra files from packages in the tree to put in the topdir of
         the tree."""
         docsdir = os.path.join(self.workdir, "docs")
         relnoterpms = self.config.get("pungi", "relnotepkgs").split()

View File

@@ -54,8 +54,7 @@ class SimpleAcyclicOrientedGraph(object):
         return False if node in self._graph else True
     def remove_final_endpoint(self, node):
-        """
-        """
+        """"""
         remove_start_points = []
         for start, ends in self._graph.items():
             if node in ends:

View File

@@ -96,7 +96,12 @@ class RunOSBuildThread(WorkerThread):
         self.can_fail = can_fail
         self.num = num
         with util.failable(
-            compose, can_fail, variant, "*", "osbuild", logger=self.pool._logger,
+            compose,
+            can_fail,
+            variant,
+            "*",
+            "osbuild",
+            logger=self.pool._logger,
         ):
             self.worker(
                 compose, variant, config, arches, version, release, target, repo
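Note: from this point on, most hunks show the same mechanical change: a call or signature that ends with a trailing comma is split so that every argument sits on its own line. This matches the "magic trailing comma" behaviour of the black formatter (an assumption; the commit does not name the tool). A hypothetical, self-contained illustration:

def add(a, b, c):
    return a + b + c


# Before formatting the call fit on one line, but the trailing comma after
# the last argument tells the formatter to keep one argument per line:
#     total = add(1, 2, 3,)
# After formatting:
total = add(
    1,
    2,
    3,
)
print(total)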

View File

@@ -16,7 +16,10 @@ def parse_args():
     parser = argparse.ArgumentParser(add_help=True)
     parser.add_argument(
-        "compose", metavar="<compose-path>", nargs=1, help="path to compose",
+        "compose",
+        metavar="<compose-path>",
+        nargs=1,
+        help="path to compose",
     )
     parser.add_argument(
         "--arch",

View File

@@ -18,13 +18,18 @@ from pungi.util import temp_dir
 def get_parser():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "--profiler", action="store_true",
+        "--profiler",
+        action="store_true",
     )
     parser.add_argument(
-        "--arch", required=True,
+        "--arch",
+        required=True,
     )
     parser.add_argument(
-        "--config", metavar="PATH", required=True, help="path to kickstart config file",
+        "--config",
+        metavar="PATH",
+        required=True,
+        help="path to kickstart config file",
     )
     parser.add_argument(
         "--download-to",
@@ -42,7 +47,9 @@ def get_parser():
     group = parser.add_argument_group("Gather options")
     group.add_argument(
-        "--nodeps", action="store_true", help="disable resolving dependencies",
+        "--nodeps",
+        action="store_true",
+        help="disable resolving dependencies",
     )
     group.add_argument(
         "--selfhosting",
@@ -61,7 +68,9 @@ def get_parser():
         choices=["none", "all", "build"],
     )
     group.add_argument(
-        "--multilib", metavar="[METHOD]", action="append",
+        "--multilib",
+        metavar="[METHOD]",
+        action="append",
     )
     group.add_argument(
         "--tempdir",

View File

@@ -941,7 +941,7 @@ def get_repo_dicts(repos, logger=None):
 def version_generator(compose, gen):
     """If ``gen`` is a known generator, create a value. Otherwise return
     the argument value unchanged.
     """
     if gen == "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN":
         return "%s.%s" % (compose.image_version, compose.image_release)
@@ -963,8 +963,8 @@ def version_generator(compose, gen):
 def retry(timeout=120, interval=30, wait_on=Exception):
-    """ A decorator that allows to retry a section of code until success or
+    """A decorator that allows to retry a section of code until success or
     timeout.
     """
     def wrapper(function):

View File

@@ -355,7 +355,10 @@ class CompsWrapper(object):
             if environment.option_ids:
                 append_grouplist(
-                    doc, env_node, set(environment.option_ids), "optionlist",
+                    doc,
+                    env_node,
+                    set(environment.option_ids),
+                    "optionlist",
                 )
         if self.comps.langpacks:

View File

@@ -26,7 +26,12 @@ Pungi).
 def get_cmd(
-    conf_file, arch, repos, lookasides, platform=None, filter_packages=None,
+    conf_file,
+    arch,
+    repos,
+    lookasides,
+    platform=None,
+    filter_packages=None,
 ):
     cmd = ["fus", "--verbose", "--arch", arch]

View File

@@ -25,7 +25,7 @@ class JigdoWrapper(kobo.log.LoggingBase):
         self, image, files, output_dir, cache=None, no_servers=False, report=None
     ):
         """
         files: [{"path", "label", "uri"}]
         """
         cmd = ["jigdo-file", "make-template"]

View File

@@ -202,7 +202,14 @@ class KojiWrapper(object):
         return cmd
     def get_pungi_ostree_cmd(
-        self, target, arch, args, channel=None, packages=None, mounts=None, weight=None,
+        self,
+        target,
+        arch,
+        args,
+        channel=None,
+        packages=None,
+        mounts=None,
+        weight=None,
     ):
         cmd = self._get_cmd("pungi-ostree", "--nowait", "--task-id")
@@ -322,9 +329,11 @@ class KojiWrapper(object):
             "ksurl",
             "distro",
         )
-        assert set(min_options).issubset(set(config_options["image-build"].keys())), (
-            "image-build requires at least %s got '%s'"
-            % (", ".join(min_options), config_options)
+        assert set(min_options).issubset(
+            set(config_options["image-build"].keys())
+        ), "image-build requires at least %s got '%s'" % (
+            ", ".join(min_options),
+            config_options,
         )
         cfg_parser = configparser.ConfigParser()
         for section, opts in config_options.items():

View File

@@ -302,8 +302,7 @@ def block_on(parts, name):
 def check_finished_processes(processes):
-    """Walk through all active processes and check if something finished.
-    """
+    """Walk through all active processes and check if something finished."""
     for proc in processes.keys():
         proc.poll()
         if proc.returncode is not None:

View File

@@ -215,7 +215,10 @@ class DummyCompose(object):
         self.log_warning = mock.Mock()
         self.get_image_name = mock.Mock(return_value="image-name")
         self.image = mock.Mock(
-            path="Client/i386/iso/image.iso", can_fail=False, size=123, _max_size=None,
+            path="Client/i386/iso/image.iso",
+            can_fail=False,
+            size=123,
+            _max_size=None,
         )
         self.im = mock.Mock(images={"Client": {"amd64": [self.image]}})
         self.old_composes = []
@@ -302,7 +305,10 @@ def mk_boom(cls=Exception, msg="BOOM"):
     return b
-PKGSET_REPOS = dict(pkgset_source="repos", pkgset_repos={},)
+PKGSET_REPOS = dict(
+    pkgset_source="repos",
+    pkgset_repos={},
+)
 BASE_CONFIG = dict(
     release_short="test",

View File

@@ -1920,7 +1920,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
     )
     def test_reuse_old_buildinstall_result_no_old_compose(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = None
@@ -1935,7 +1936,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
     )
     def test_reuse_old_buildinstall_result_different_cmd(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
@@ -1958,7 +1960,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
    )
     def test_reuse_old_buildinstall_result_different_installed_pkgs(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = {
@@ -1978,7 +1981,9 @@ class BuildinstallThreadTestCase(PungiTestCase):
     )
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     def test_reuse_old_buildinstall_result_different_buildroot_rpms(
-        self, KojiWrapperMock, load_old_buildinstall_metadata,
+        self,
+        KojiWrapperMock,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = {

View File

@@ -22,7 +22,9 @@ class ConfigTestCase(unittest.TestCase):
 class PkgsetConfigTestCase(ConfigTestCase):
     def test_validate_minimal_pkgset_koji(self):
-        cfg = load_config(pkgset_source="koji",)
+        cfg = load_config(
+            pkgset_source="koji",
+        )
         self.assertValidation(cfg)
@@ -36,7 +38,9 @@ class PkgsetConfigTestCase(ConfigTestCase):
     def test_pkgset_mismatch_repos(self):
         cfg = load_config(
-            pkgset_source="repos", pkgset_koji_tag="f25", pkgset_koji_inherit=False,
+            pkgset_source="repos",
+            pkgset_koji_tag="f25",
+            pkgset_koji_inherit=False,
         )
         self.assertValidation(
@@ -51,7 +55,10 @@ class PkgsetConfigTestCase(ConfigTestCase):
         )
     def test_pkgset_mismatch_koji(self):
-        cfg = load_config(pkgset_source="koji", pkgset_repos={"whatever": "/foo"},)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_repos={"whatever": "/foo"},
+        )
         self.assertValidation(
             cfg, [checks.CONFLICTS.format("pkgset_source", "koji", "pkgset_repos")]
@@ -78,7 +85,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
     def test_only_config_base_product_name(self):
-        cfg = load_config(PKGSET_REPOS, base_product_name="Prod",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_name="Prod",
+        )
         self.assertValidation(
             cfg,
@@ -99,7 +109,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
     def test_only_config_base_product_short(self):
-        cfg = load_config(PKGSET_REPOS, base_product_short="bp",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_short="bp",
+        )
         self.assertValidation(
             cfg,
@@ -118,7 +131,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
     def test_only_config_base_product_version(self):
-        cfg = load_config(PKGSET_REPOS, base_product_version="1.0",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_version="1.0",
+        )
         self.assertValidation(
             cfg,
@@ -141,19 +157,28 @@ class ReleaseConfigTestCase(ConfigTestCase):
 class ImageNameConfigTestCase(ConfigTestCase):
     def test_image_name_simple_string(self):
-        cfg = load_config(PKGSET_REPOS, image_name_format="foobar",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            image_name_format="foobar",
+        )
         self.assertValidation(cfg, [])
     def test_image_name_variant_mapping(self):
-        cfg = load_config(PKGSET_REPOS, image_name_format={"^Server$": "foobar"},)
+        cfg = load_config(
+            PKGSET_REPOS,
+            image_name_format={"^Server$": "foobar"},
+        )
         self.assertValidation(cfg, [])
 class RunrootConfigTestCase(ConfigTestCase):
     def test_set_runroot_true(self):
-        cfg = load_config(PKGSET_REPOS, runroot=True,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            runroot=True,
+        )
         self.assertValidation(
             cfg,
@@ -163,7 +188,10 @@ class RunrootConfigTestCase(ConfigTestCase):
         )
     def test_set_runroot_false(self):
-        cfg = load_config(PKGSET_REPOS, runroot=False,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            runroot=False,
+        )
         self.assertValidation(
             cfg,
@@ -175,7 +203,10 @@ class RunrootConfigTestCase(ConfigTestCase):
 class BuildinstallConfigTestCase(ConfigTestCase):
     def test_bootable_deprecated(self):
-        cfg = load_config(PKGSET_REPOS, bootable=True,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            bootable=True,
+        )
         self.assertValidation(
             cfg,
@@ -185,7 +216,10 @@ class BuildinstallConfigTestCase(ConfigTestCase):
         )
     def test_buildinstall_method_without_bootable(self):
-        cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            buildinstall_method="lorax",
+        )
         self.assertValidation(cfg, [])
@@ -231,7 +265,9 @@ class BuildinstallConfigTestCase(ConfigTestCase):
 class CreaterepoConfigTestCase(ConfigTestCase):
     def test_validate_minimal_pkgset_koji(self):
         cfg = load_config(
-            pkgset_source="koji", pkgset_koji_tag="f25", product_id_allow_missing=True,
+            pkgset_source="koji",
+            pkgset_koji_tag="f25",
+            product_id_allow_missing=True,
         )
         self.assertValidation(
@@ -242,14 +278,20 @@ class CreaterepoConfigTestCase(ConfigTestCase):
 class GatherConfigTestCase(ConfigTestCase):
     def test_dnf_backend_is_default_on_py3(self):
-        cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+        )
         with mock.patch("six.PY2", new=False):
             self.assertValidation(cfg, [])
         self.assertEqual(cfg["gather_backend"], "dnf")
     def test_yum_backend_is_default_on_py2(self):
-        cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+        )
         with mock.patch("six.PY2", new=True):
             self.assertValidation(cfg, [])
@@ -257,7 +299,9 @@ class GatherConfigTestCase(ConfigTestCase):
     def test_yum_backend_is_rejected_on_py3(self):
         cfg = load_config(
-            pkgset_source="koji", pkgset_koji_tag="f27", gather_backend="yum",
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+            gather_backend="yum",
         )
         with mock.patch("six.PY2", new=False):
@@ -402,7 +446,10 @@ class LiveMediaConfigTestCase(ConfigTestCase):
         self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
     def test_global_config_null_release(self):
-        cfg = load_config(PKGSET_REPOS, live_media_release=None,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            live_media_release=None,
+        )
         self.assertValidation(cfg)
@@ -429,7 +476,8 @@ class TestRegexValidation(ConfigTestCase):
 class RepoclosureTestCase(ConfigTestCase):
     def test_invalid_backend(self):
         cfg = load_config(
-            PKGSET_REPOS, repoclosure_backend="fnd",  # Intentionally with a typo
+            PKGSET_REPOS,
+            repoclosure_backend="fnd",  # Intentionally with a typo
         )
         options = ["yum", "dnf"] if six.PY2 else ["dnf"]
@@ -445,7 +493,10 @@ class RepoclosureTestCase(ConfigTestCase):
 class VariantAsLookasideTestCase(ConfigTestCase):
     def test_empty(self):
         variant_as_lookaside = []
-        cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            variant_as_lookaside=variant_as_lookaside,
+        )
         self.assertValidation(cfg)
     def test_basic(self):
@@ -454,14 +505,20 @@ class VariantAsLookasideTestCase(ConfigTestCase):
             ("Server", "Client"),
             ("Everything", "Spin"),
         ]
-        cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            variant_as_lookaside=variant_as_lookaside,
+        )
         self.assertValidation(cfg)
 class SkipPhasesTestCase(ConfigTestCase):
     def test_empty(self):
         skip_phases = []
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertValidation(cfg)
     def test_basic(self):
@@ -469,7 +526,10 @@ class SkipPhasesTestCase(ConfigTestCase):
             "buildinstall",
             "gather",
         ]
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertValidation(cfg)
     def test_bad_phase_name(self):
@@ -477,5 +537,8 @@ class SkipPhasesTestCase(ConfigTestCase):
             "gather",
             "non-existing-phase_name",
         ]
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertNotEqual(checks.validate(cfg), ([], []))

View File

@@ -158,7 +158,9 @@ def make_mocked_modifyrepo_cmd(tc, module_artifacts):
         for ms in module_streams:
             tc.assertIn(ms.get_stream_name(), module_artifacts)
             six.assertCountEqual(
-                tc, ms.get_rpm_artifacts(), module_artifacts[ms.get_stream_name()],
+                tc,
+                ms.get_rpm_artifacts(),
+                module_artifacts[ms.get_stream_name()],
             )
     return mocked_modifyrepo_cmd

View File

@@ -596,7 +596,9 @@ class GetExtraFilesTest(helpers.PungiTestCase):
             get_file.call_args_list,
             [
                 mock.call(
-                    cfg1, os.path.join(self.dir, "legalese"), compose=self.compose,
+                    cfg1,
+                    os.path.join(self.dir, "legalese"),
+                    compose=self.compose,
                 ),
                 mock.call(cfg2, self.dir, compose=self.compose),
             ],
@@ -832,7 +834,8 @@ class GetIsoContentsTest(helpers.PungiTestCase):
                 ["Client"],
                 os.path.join(self.topdir, "compose/Server/source/tree/.treeinfo"),
                 os.path.join(
-                    self.topdir, "work/src/Server/extra-iso-extra-files/.treeinfo",
+                    self.topdir,
+                    "work/src/Server/extra-iso-extra-files/.treeinfo",
                 ),
             ),
         ],

View File

@@ -147,7 +147,8 @@ class TestParseOutput(unittest.TestCase):
         touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n")
         packages, modules = fus.parse_output(self.file)
         self.assertEqual(
-            packages, set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
+            packages,
+            set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
         )
         self.assertEqual(modules, set())

View File

@@ -2620,5 +2620,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
         six.assertCountEqual(self, pkg_map["rpm"], [])
         six.assertCountEqual(self, pkg_map["srpm"], [])
         six.assertCountEqual(
-            self, pkg_map["debuginfo"], ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
+            self,
+            pkg_map["debuginfo"],
+            ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
         )

View File

@@ -350,7 +350,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             ],
         )
         self.assertEqual(
-            wc.call_args_list, [mock.call(self.config1, ["mod:master"], [])],
+            wc.call_args_list,
+            [mock.call(self.config1, ["mod:master"], [])],
         )
         self.assertEqual(
             gc.call_args_list,
@@ -454,7 +455,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             ],
         )
         self.assertEqual(
-            wc.call_args_list, [mock.call(self.config1, [], ["pkg"])],
+            wc.call_args_list,
+            [mock.call(self.config1, [], ["pkg"])],
         )
         self.assertEqual(
             gc.call_args_list,

View File

@@ -168,7 +168,10 @@ class ImageContainerThreadTest(helpers.PungiTestCase):
             [
                 mock.call.login(),
                 mock.call.koji_proxy.buildContainer(
-                    cfg["url"], cfg["target"], opts, priority=None,
+                    cfg["url"],
+                    cfg["target"],
+                    opts,
+                    priority=None,
                 ),
                 mock.call.watch_task(
                     12345,

View File

@@ -39,7 +39,9 @@ class KojiWrapperBaseTestCase(unittest.TestCase):
         koji.get_profile_module = mock.Mock(
             return_value=mock.Mock(
                 config=DumbMock(
-                    server="koji.example.com", authtype="kerberos", cert="",
+                    server="koji.example.com",
+                    authtype="kerberos",
+                    cert="",
                 ),
                 pathinfo=mock.Mock(
                     work=mock.Mock(return_value="/koji"),

View File

@@ -53,7 +53,8 @@ class OSBSPhaseTest(helpers.PungiTestCase):
         self.assertEqual(data, phase.pool.registries)
         self.assertEqual(
-            compose.notifier.call_args_list, [],
+            compose.notifier.call_args_list,
+            [],
         )

View File

@@ -204,7 +204,8 @@ class TestRunrootKoji(helpers.PungiTestCase):
     def setUp(self):
         super(TestRunrootKoji, self).setUp()
         self.compose = helpers.DummyCompose(
-            self.topdir, {"runroot": True, "runroot_tag": "f28-build"},
+            self.topdir,
+            {"runroot": True, "runroot_tag": "f28-build"},
        )
         self.runroot = Runroot(self.compose)

View File

@@ -267,7 +267,8 @@ class TestCheckImageSanity(PungiTestCase):
     @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
     def test_too_big_unified_strict(self):
         compose = DummyCompose(
-            self.topdir, {"createiso_max_size_is_strict": [(".*", {"*": True})]},
+            self.topdir,
+            {"createiso_max_size_is_strict": [(".*", {"*": True})]},
         )
         compose.image.format = "iso"
         compose.image.bootable = False

View File

@@ -183,7 +183,8 @@ class TestGitRefResolver(unittest.TestCase):
     def test_resolver_offline_branch(self, mock_resolve_url, mock_resolve_ref):
         resolver = util.GitUrlResolver(offline=True)
         self.assertEqual(
-            resolver("http://example.com/repo.git", "master"), "master",
+            resolver("http://example.com/repo.git", "master"),
+            "master",
         )
         self.assertEqual(mock_resolve_url.call_args_list, [])
         self.assertEqual(mock_resolve_ref.call_args_list, [])
@@ -935,7 +936,8 @@ class TestVersionGenerator(unittest.TestCase):
     def test_version_from_version(self):
         self.assertEqual(
-            util.version_generator(self.compose, "!VERSION_FROM_VERSION"), "8",
+            util.version_generator(self.compose, "!VERSION_FROM_VERSION"),
+            "8",
         )