Port to Python 3

This should make all tests pass on both Python 2 and Python 3.

The unittest2 package is required on Python 2.6 and Python 3.

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
Lubomír Sedlář 2017-09-05 10:01:21 +02:00
parent 3088df8e60
commit ed22e07ef9
50 changed files with 203 additions and 208 deletions

View File

@ -11,7 +11,6 @@ import locale
import datetime
import getpass
import socket
import time
import pipes
import json
@ -475,7 +474,7 @@ if __name__ == "__main__":
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
COMPOSE.write_status("DOOMED")
import kobo.tback
with open(tb_path, "w") as f:
with open(tb_path, "wb") as f:
f.write(kobo.tback.Traceback().get_traceback())
else:
print("Exception: %s" % ex)

View File

@ -13,7 +13,7 @@ def get_full_version():
if os.path.isdir(os.path.join(location, '.git')):
import subprocess
proc = subprocess.Popen(['git', '--git-dir=%s/.git' % location, 'describe', '--tags'],
stdout=subprocess.PIPE)
stdout=subprocess.PIPE, universal_newlines=True)
output, _ = proc.communicate()
return re.sub(r'-1.fc\d\d?', '', output.strip().replace('pungi-', ''))
else:
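
Note: subprocess pipes return bytes on Python 3 by default; universal_newlines=True makes communicate() hand back str on both versions, so the re.sub() above keeps working. A short sketch (assumes git is installed):

    import subprocess

    # universal_newlines=True decodes stdout using the locale encoding,
    # giving str on both Python 2 and Python 3.
    proc = subprocess.Popen(['git', '--version'],
                            stdout=subprocess.PIPE, universal_newlines=True)
    output, _ = proc.communicate()
    print(output.strip())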

View File

@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
@ -152,7 +151,7 @@ def _validate_requires(schema, conf, valid_options):
def has_default(x):
return schema['properties'].get(x, {}).get('default') == conf[x]
for name, opt in valid_options.iteritems():
for name, opt in valid_options.items():
value = conf.get(name)
errors.extend(_check_dep(name, value, opt.get('conflicts', []),
@ -178,7 +177,7 @@ def validate(config):
DefaultValidator = _extend_with_default_and_alias(jsonschema.Draft4Validator)
validator = DefaultValidator(schema,
{'array': (tuple, list),
'regex': (str, unicode)})
'regex': six.string_types})
errors = []
warnings = []
for error in validator.iter_errors(config):
@ -239,7 +238,7 @@ def _extend_with_default_and_alias(validator_class):
Hook the instance and yield errors and warnings.
"""
errors = []
for property, subschema in properties.iteritems():
for property, subschema in properties.items():
# update instance for alias option
# If alias option for the property is present and property is not specified,
# update the property in instance with value from alias option.
@ -285,7 +284,7 @@ def _extend_with_default_and_alias(validator_class):
Assign default values to options that have them defined and are not
specified.
"""
for property, subschema in properties.iteritems():
for property, subschema in properties.items():
if "default" in subschema and property not in instance:
instance.setdefault(property, subschema["default"])
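
Note: two patterns recur throughout this commit. dict.iteritems()/itervalues() are gone in Python 3, while items()/values() exist on both (a list on 2, a view on 3, either fine for read-only iteration); and unicode no longer exists, so type checks go through six.string_types. A minimal sketch with made-up values:

    import six

    defaults = {'release_name': 'Dummy', 'release_version': '1.0'}

    # items() iterates on both major versions; iteritems() would raise
    # AttributeError on Python 3.
    for name, value in defaults.items():
        print(name, value)

    # six.string_types is (basestring,) on Python 2 and (str,) on Python 3.
    assert isinstance(r'^Server-', six.string_types)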

View File

@ -236,7 +236,7 @@ class Compose(kobo.log.LoggingBase):
def get_variants(self, types=None, arch=None):
result = []
for i in self.variants.itervalues():
for i in self.variants.values():
if (not types or i.type in types) and (not arch or arch in i.arches):
result.append(i)
result.extend(i.get_variants(types=types, arch=arch))
@ -257,7 +257,7 @@ class Compose(kobo.log.LoggingBase):
return self._status_file
def _log_failed_deliverables(self):
for kind, data in self.failed_deliverables.iteritems():
for kind, data in self.failed_deliverables.items():
for variant, arch, subvariant in data:
self.log_info('Failed %s on variant <%s>, arch <%s>, subvariant <%s>.'
% (kind, variant, arch, subvariant))

View File

@ -242,12 +242,12 @@ class Gather(GatherBase):
# return package with shortest name, alphabetically ordered
result = list(result)
result.sort(lambda x, y: cmp(x.name, y.name))
result.sort(lambda x, y: cmp(len(x.name), len(y.name)))
result.sort(key=lambda x: x.name)
result.sort(key=lambda x: len(x.name))
# best arch
arches = self.dnf.arch_wrapper.all_arches
result.sort(lambda x, y: cmp(arches.index(x.arch), arches.index(y.arch)))
result.sort(key=lambda x: arches.index(x.arch))
match = result[0]
if self.opts.greedy_method == "build" and req:
@ -405,7 +405,7 @@ class Gather(GatherBase):
for po in pkgs:
packages_by_name.setdefault(po.name, []).append(po)
for name, packages in packages_by_name.iteritems():
for name, packages in packages_by_name.items():
pkgs = self._get_best_package(packages)
if pkgs:
added.update(pkgs)
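
Note: list.sort(cmp=...) and the cmp() builtin are gone in Python 3. Because Python's sort is stable, running two key-based sorts (secondary criterion first) reproduces the old two-comparator ordering. Illustration with hypothetical names:

    # Secondary key first, primary key second: stability preserves the
    # alphabetical order among names of equal length.
    names = ['foobar', 'foo', 'qux', 'baz']
    names.sort()            # alphabetical (secondary)
    names.sort(key=len)     # by length (primary)
    print(names)            # ['baz', 'foo', 'qux', 'foobar']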

View File

@ -176,7 +176,7 @@ def write_compose_info(compose):
for variant in ci_copy.variants.variants.values():
for field in variant.paths._fields:
field_paths = getattr(variant.paths, field)
for arch, dirpath in field_paths.iteritems():
for arch, dirpath in field_paths.items():
dirpath = os.path.join(compose.paths.compose.topdir(), dirpath)
if not (os.path.isdir(dirpath) and os.listdir(dirpath)):
field_paths[arch] = None

View File

@ -53,7 +53,7 @@ class Multilib(object):
return False
if pkg.name in self.whitelist:
return 'whitelist'
for method, cls in self.methods.iteritems():
for method, cls in self.methods.items():
if cls.select(pkg):
return method
return False

View File

@ -130,11 +130,11 @@ def create_variant_repo(compose, arch, variant, pkg_type):
manifest = productmd.rpms.Rpms()
manifest.load(manifest_file)
for rpms_arch, data in manifest.rpms.get(variant.uid, {}).iteritems():
for rpms_arch, data in manifest.rpms.get(variant.uid, {}).items():
if arch is not None and arch != rpms_arch:
continue
for srpm_data in data.itervalues():
for rpm_nevra, rpm_data in srpm_data.iteritems():
for srpm_data in data.values():
for rpm_nevra, rpm_data in srpm_data.items():
if types[pkg_type][0] != rpm_data['category']:
continue
path = os.path.join(compose.topdir, "compose", rpm_data["path"])
@ -185,7 +185,7 @@ def create_variant_repo(compose, arch, variant, pkg_type):
if arch in variant.arch_mmds:
import yaml
modules = []
for mmd in variant.arch_mmds[arch].itervalues():
for mmd in variant.arch_mmds[arch].values():
# Create copy of architecture specific mmd to filter out packages
# which are not part of this particular repo.
repo_mmd = copy.deepcopy(mmd)

View File

@ -31,7 +31,7 @@ from pungi.arch import split_name_arch, get_compatible_arches
def get_gather_source(name):
import pungi.phases.gather.sources
from source import GatherSourceContainer
from .source import GatherSourceContainer
GatherSourceContainer.register_module(pungi.phases.gather.sources)
container = GatherSourceContainer()
return container["GatherSource%s" % name]
@ -39,7 +39,7 @@ def get_gather_source(name):
def get_gather_method(name):
import pungi.phases.gather.methods
from method import GatherMethodContainer
from .method import GatherMethodContainer
GatherMethodContainer.register_module(pungi.phases.gather.methods)
container = GatherMethodContainer()
return container["GatherMethod%s" % name]
@ -100,7 +100,7 @@ def get_parent_pkgs(arch, variant, result_dict):
result = _mk_pkg_map(iterable_class=set)
if variant.parent is None:
return result
for pkg_type, pkgs in result_dict.get(arch, {}).get(variant.parent.uid, {}).iteritems():
for pkg_type, pkgs in result_dict.get(arch, {}).get(variant.parent.uid, {}).items():
for pkg in pkgs:
nvra = parse_nvra(pkg["path"])
result[pkg_type].add((nvra["name"], nvra["arch"]))
@ -142,7 +142,7 @@ def write_packages(compose, arch, variant, pkg_map, path_prefix):
msg = "Writing package list (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
for pkg_type, pkgs in pkg_map.iteritems():
for pkg_type, pkgs in pkg_map.items():
file_name = compose.paths.work.package_list(arch=arch, variant=variant, pkg_type=pkg_type)
with open(file_name, "w") as pkg_list:
for pkg in pkgs:
@ -188,7 +188,7 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
addon_pkgs = _mk_pkg_map(iterable_class=set)
move_to_parent_pkgs = _mk_pkg_map()
removed_pkgs = _mk_pkg_map()
for pkg_type, pkgs in pkg_map.iteritems():
for pkg_type, pkgs in pkg_map.items():
new_pkgs = []
for pkg in pkgs:
@ -262,13 +262,13 @@ def _trim_variants(result, compose, variant_type, remove_pkgs=None, move_to_pare
compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=remove_pkgs)
# update all_addon_pkgs
for pkg_type, pkgs in included_packages.iteritems():
for pkg_type, pkgs in included_packages.items():
all_included_packages.setdefault(pkg_type, set()).update(pkgs)
if move_to_parent:
# move packages to parent
parent_pkg_map = result[arch][variant.parent.uid]
for pkg_type, pkgs in move_to_parent_pkgs.iteritems():
for pkg_type, pkgs in move_to_parent_pkgs.items():
for pkg in pkgs:
compose.log_debug("Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"
% (arch, variant.uid, pkg_type, os.path.basename(pkg["path"])))
@ -355,7 +355,7 @@ def get_prepopulate_packages(compose, arch, variant, include_arch=True):
variants = [variant.uid] if variant else prepopulate_data.keys()
for var in variants:
for build, packages in prepopulate_data.get(var, {}).get(arch, {}).iteritems():
for build, packages in prepopulate_data.get(var, {}).get(arch, {}).items():
for i in packages:
pkg_name, pkg_arch = split_name_arch(i)
if pkg_arch not in get_compatible_arches(arch, multilib=True):
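
Note: Python 3 removed implicit relative imports, so `from source import ...` resolves only against sys.path, never the current package; the explicit `from .source import ...` form works on both versions. A hypothetical two-file layout mirroring the change (sketch, not a single runnable script):

    # File: gather/source.py (hypothetical)
    class GatherSourceContainer(object):
        pass

    # File: gather/__init__.py (hypothetical)
    # 'from source import ...' was an implicit relative import, valid only
    # on Python 2; the dotted form below works on both interpreters.
    from .source import GatherSourceContainer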

View File

@ -48,12 +48,12 @@ class GatherSourceJson(pungi.phases.gather.source.GatherSourceBase):
if variant is None:
# get all packages for all variants
for variant_uid in mapping:
for pkg_name, pkg_arches in mapping[variant_uid][arch].iteritems():
for pkg_name, pkg_arches in mapping[variant_uid][arch].items():
for pkg_arch in pkg_arches:
packages.add((pkg_name, pkg_arch))
else:
# get packages for a particular variant
for pkg_name, pkg_arches in mapping[variant.uid][arch].iteritems():
for pkg_name, pkg_arches in mapping[variant.uid][arch].items():
for pkg_arch in pkg_arches:
packages.add((pkg_name, pkg_arch))
return packages, set()

View File

@ -194,7 +194,7 @@ class CreateImageBuildThread(WorkerThread):
paths = koji_wrapper.get_image_paths(output["task_id"])
for arch, paths in paths.iteritems():
for arch, paths in paths.items():
for path in paths:
# format is list of tuples [('qcow2', '.qcow2'), ('raw-xz', 'raw.xz'),]
for format, suffix in cmd['format']:

View File

@ -89,7 +89,7 @@ def _compute_checksums(results, cache, variant, arch, path, images,
# digest from first run..
cache[full_path] = shortcuts.compute_file_checksums(full_path, checksum_types)
digests = cache[full_path]
for checksum, digest in digests.iteritems():
for checksum, digest in digests.items():
# Update metadata with the checksum
image.add_checksum(None, checksum, digest)
# If not turned off, create the file-specific checksum file
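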
@ -112,7 +112,7 @@ def _compute_checksums(results, cache, variant, arch, path, images,
def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen):
results = defaultdict(set)
cache = {}
for (variant, arch, path), images in get_images(topdir, im).iteritems():
for (variant, arch, path), images in get_images(topdir, im).items():
_compute_checksums(results, cache, variant, arch, path, images,
checksum_types, base_checksum_name_gen, one_file)

View File

@ -134,7 +134,7 @@ class LiveMediaThread(WorkerThread):
paths = koji_wrapper.get_image_paths(output['task_id'])
for arch, paths in paths.iteritems():
for arch, paths in paths.items():
for path in paths:
if path.endswith('.iso'):
image_infos.append({'path': path, 'arch': arch})

View File

@ -23,8 +23,8 @@ class PkgsetPhase(PhaseBase):
def run(self):
pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
from source import PkgsetSourceContainer
import sources
from .source import PkgsetSourceContainer
from . import sources
PkgsetSourceContainer.register_module(sources)
container = PkgsetSourceContainer()
SourceClass = container[pkgset_source]

View File

@ -15,7 +15,7 @@
import os
import cPickle as pickle
from six.moves import cPickle as pickle
import json
import re
from kobo.shortcuts import force_list
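
Note: cPickle exists only on Python 2; Python 3 folded the C accelerator into pickle itself. six.moves.cPickle resolves to the right module on either interpreter, both here and in the identical change in the next file. Sketch with made-up data:

    from six.moves import cPickle as pickle

    # Same module as cPickle on Python 2 and pickle on Python 3.
    blob = pickle.dumps({'bash-4.3': ['x86_64', 'src']})
    print(pickle.loads(blob))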

View File

@ -15,7 +15,7 @@
import os
import cPickle as pickle
from six.moves import cPickle as pickle
from kobo.shortcuts import run

View File

@ -109,7 +109,7 @@ def check(compose, variant, arch, image):
can_fail = getattr(image, 'can_fail', False)
with failable(compose, can_fail, variant, arch, deliverable,
subvariant=image.subvariant):
with open(path) as f:
with open(path, 'rb') as f:
iso = is_iso(f)
if image.format == 'iso' and not iso:
raise RuntimeError('%s does not look like an ISO file' % path)
@ -132,16 +132,16 @@ def _check_magic(f, offset, bytes):
def is_iso(f):
return _check_magic(f, 0x8001, 'CD001')
return _check_magic(f, 0x8001, b'CD001')
def has_mbr(f):
return _check_magic(f, 0x1fe, '\x55\xAA')
return _check_magic(f, 0x1fe, b'\x55\xAA')
def has_gpt(f):
return _check_magic(f, 0x200, 'EFI PART')
return _check_magic(f, 0x200, b'EFI PART')
def has_eltorito(f):
return _check_magic(f, 0x8801, 'CD001\1EL TORITO SPECIFICATION')
return _check_magic(f, 0x8801, b'CD001\1EL TORITO SPECIFICATION')
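
Note: the image files are now opened in binary mode (see the `open(path, 'rb')` hunk above), so read() returns bytes and the magic constants must be bytes literals; on Python 3, comparing bytes against 'CD001' is simply False. A simplified stand-in for _check_magic:

    import io

    def check_magic(f, offset, magic):  # simplified _check_magic stand-in
        f.seek(offset)
        return f.read(len(magic)) == magic

    # A fake ISO header: the 'CD001' signature at offset 0x8001.
    buf = io.BytesIO(b'\0' * 0x8001 + b'CD001')
    print(check_magic(buf, 0x8001, b'CD001'))   # True
    print(check_magic(buf, 0x8001, u'CD001'))   # False on Python 3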

View File

@ -24,12 +24,12 @@ import hashlib
import errno
import pipes
import re
import urlparse
import contextlib
import traceback
import tempfile
import time
import functools
from six.moves import urllib, range
from kobo.shortcuts import run, force_list
from productmd.common import get_major_version
@ -251,7 +251,7 @@ def resolve_git_url(url):
Raises RuntimeError if there was an error. Most likely cause is failure to
run git command.
"""
r = urlparse.urlsplit(url)
r = urllib.parse.urlsplit(url)
ref = _get_git_ref(r.fragment)
if not ref:
return url
@ -260,7 +260,7 @@ def resolve_git_url(url):
# the final result must use original scheme.
scheme = r.scheme.replace('git+', '')
baseurl = urlparse.urlunsplit((scheme, r.netloc, r.path, '', ''))
baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, '', ''))
_, output = git_ls_remote(baseurl, ref)
lines = [line for line in output.split('\n') if line]
@ -274,9 +274,9 @@ def resolve_git_url(url):
raise RuntimeError('Failed to resolve %s', url)
fragment = lines[0].split()[0]
result = urlparse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
if '?#' in url:
# The urlparse library drops empty query string. This hack puts it back in.
# The urllib library drops empty query string. This hack puts it back in.
result = result.replace('#', '?#')
return result
@ -309,7 +309,7 @@ def get_variant_data(conf, var_name, variant, keys=None):
:rtype: a list of values
"""
result = []
for conf_variant, conf_data in conf.get(var_name, {}).iteritems():
for conf_variant, conf_data in conf.get(var_name, {}).items():
if not re.match(conf_variant, variant.uid):
continue
if keys is not None:
@ -322,7 +322,7 @@ def get_variant_data(conf, var_name, variant, keys=None):
def _apply_substitutions(compose, volid):
for k, v in compose.conf['volume_id_substitutions'].iteritems():
for k, v in compose.conf['volume_id_substitutions'].items():
volid = volid.replace(k, v)
return volid
@ -565,16 +565,16 @@ def recursive_file_list(directory):
def levenshtein(a, b):
"""Compute Levenshtein edit distance between two strings."""
mat = [[0 for _ in xrange(len(a) + 1)] for _ in xrange(len(b) + 1)]
mat = [[0 for _ in range(len(a) + 1)] for _ in range(len(b) + 1)]
for i in xrange(len(a) + 1):
for i in range(len(a) + 1):
mat[0][i] = i
for j in xrange(len(b) + 1):
for j in range(len(b) + 1):
mat[j][0] = j
for j in xrange(1, len(b) + 1):
for i in xrange(1, len(a) + 1):
for j in range(1, len(b) + 1):
for i in range(1, len(a) + 1):
cost = 0 if a[i - 1] == b[j - 1] else 1
mat[j][i] = min(mat[j - 1][i] + 1,
mat[j][i - 1] + 1,
@ -616,7 +616,7 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
If both path and logger are specified, more debugging information will be
printed in case of failure.
"""
for i in xrange(max_retries):
for i in range(max_retries):
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode == 0:
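
Note: the urlparse module was merged into urllib.parse in Python 3; six.moves.urllib papers over the split. Likewise xrange() is gone, and plain range() covers both versions. Sketch with a hypothetical URL:

    from six.moves import urllib

    # six.moves.urllib.parse is urlparse on Python 2, urllib.parse on 3.
    r = urllib.parse.urlsplit('git+https://example.com/repo.git#my-branch')
    print(r.scheme)     # git+https
    print(r.fragment)   # my-branch
    print(urllib.parse.urlunsplit(
        (r.scheme.replace('git+', ''), r.netloc, r.path, '', '')))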

View File

@ -225,7 +225,7 @@ def append(doc, parent, elem, content=None, lang=None, **kwargs):
node.appendChild(doc.createTextNode(content))
if lang:
node.setAttribute("xml:lang", lang)
for attr, value in kwargs.iteritems():
for attr, value in kwargs.items():
node.setAttribute(attr, value)
parent.appendChild(node)
return node

View File

@ -19,6 +19,7 @@ import sys
import pipes
from fnmatch import fnmatch
import contextlib
from functools import cmp_to_key
from kobo.shortcuts import force_list, relative_path, run
from pungi import util
@ -340,7 +341,7 @@ def write_graft_points(file_name, h, exclude=None):
seen_dirs.add(dn)
f = open(file_name, "w")
for i in sorted(result, cmp=cmp_graft_points):
for i in sorted(result, key=cmp_to_key(cmp_graft_points)):
# make sure all files required for boot come first,
# otherwise there may be problems with booting (large LBA address, etc.)
found = False
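
Note: sorted() lost its cmp argument in Python 3; functools.cmp_to_key (available since 2.7/3.2) adapts an old-style comparator such as cmp_graft_points into a key function. Sketch with a hypothetical comparator:

    from functools import cmp_to_key

    def compare(a, b):             # hypothetical old-style comparator
        return (a > b) - (a < b)   # the removed cmp() builtin, spelled out

    print(sorted(['b', 'c', 'a'], key=cmp_to_key(compare)))   # ['a', 'b', 'c']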

View File

@ -23,7 +23,7 @@ import contextlib
import koji
from kobo.shortcuts import run
from ConfigParser import ConfigParser
from six.moves import configparser
from .. import util
from ..arch_utils import getBaseArch
@ -166,10 +166,10 @@ class KojiWrapper(object):
# The minimum set of options
min_options = ("name", "version", "target", "install_tree", "arches", "format", "kickstart", "ksurl", "distro")
assert set(min_options).issubset(set(config_options['image-build'].keys())), "image-build requires at least %s got '%s'" % (", ".join(min_options), config_options)
cfg_parser = ConfigParser()
for section, opts in config_options.iteritems():
cfg_parser = configparser.ConfigParser()
for section, opts in config_options.items():
cfg_parser.add_section(section)
for option, value in opts.iteritems():
for option, value in opts.items():
cfg_parser.set(section, option, value)
fd = open(conf_file_dest, "w")
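
Note: the ConfigParser module was renamed configparser in Python 3; six.moves.configparser imports whichever exists. Building a config the same way (hypothetical values):

    from six.moves import configparser

    cfg_parser = configparser.ConfigParser()
    cfg_parser.add_section('image-build')
    cfg_parser.set('image-build', 'name', 'Fedora-Cloud')
    cfg_parser.set('image-build', 'target', 'f25-candidate')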

View File

@ -50,7 +50,7 @@ class PungiWrapper(object):
kickstart = open(ks_path, "w")
# repos
for repo_name, repo_url in repos.items() + lookaside_repos.items():
for repo_name, repo_url in list(repos.items()) + list(lookaside_repos.items()):
if "://" not in repo_url:
repo_url = "file://" + os.path.abspath(repo_url)
repo_str = "repo --name=%s --baseurl=%s" % (repo_name, repo_url)
@ -214,7 +214,7 @@ class PungiWrapper(object):
missing_comps = set()
for line in f:
for file_type, pattern in PACKAGES_RE.iteritems():
for file_type, pattern in PACKAGES_RE.items():
match = pattern.match(line)
if match:
item = {}
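
Note: dict.items() returns a view on Python 3, and views do not support +; wrapping both sides in list() restores the Python 2 concatenation. Sketch with hypothetical repos:

    repos = {'base': 'http://example.com/os'}
    lookaside = {'extra': 'http://example.com/extra'}

    # repos.items() + lookaside.items() raises TypeError on Python 3.
    for name, url in list(repos.items()) + list(lookaside.items()):
        print(name, url)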

View File

@ -36,7 +36,7 @@ def get_repoclosure_cmd(backend='yum', arch=None, repos=None, lookaside=None):
cmd.append("--arch=%s" % i)
repos = repos or {}
for repo_id, repo_path in repos.iteritems():
for repo_id, repo_path in repos.items():
cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
cmd.append(cmds[backend]['repoarg'] % repo_id)
if backend == 'dnf':
@ -46,7 +46,7 @@ def get_repoclosure_cmd(backend='yum', arch=None, repos=None, lookaside=None):
cmd.append('--check=%s' % repo_id)
lookaside = lookaside or {}
for repo_id, repo_path in lookaside.iteritems():
for repo_id, repo_path in lookaside.items():
cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
cmd.append(cmds[backend]['lookaside'] % repo_id)

View File

@ -15,15 +15,9 @@
from __future__ import print_function
import os
import sys
import copy
import lxml.etree
# HACK: define cmp in python3
if sys.version_info[0] == 3:
def cmp(a, b):
return (a > b) - (a < b)
from functools import total_ordering
def get_variants_dtd(logger=None):
@ -203,6 +197,7 @@ class VariantsXmlParser(object):
return result
@total_ordering
class Variant(object):
def __init__(self, id, name, type, arches, groups, environments=None,
buildinstallpackages=None, is_empty=False, parent=None,
@ -216,10 +211,9 @@ class Variant(object):
self.name = name
self.type = type
self.arches = sorted(copy.deepcopy(arches))
self.groups = sorted(copy.deepcopy(groups), lambda x, y: cmp(x["name"], y["name"]))
self.environments = sorted(copy.deepcopy(environments), lambda x, y: cmp(x["name"], y["name"]))
self.modules = sorted(copy.deepcopy(modules),
lambda x, y: cmp(x["name"], y["name"]))
self.groups = sorted(copy.deepcopy(groups), key=lambda x: x["name"])
self.environments = sorted(copy.deepcopy(environments), key=lambda x: x["name"])
self.modules = sorted(copy.deepcopy(modules), key=lambda x: x["name"])
self.buildinstallpackages = sorted(buildinstallpackages)
self.variants = {}
self.parent = parent
@ -238,19 +232,18 @@ class Variant(object):
def __repr__(self):
return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(self)
def __cmp__(self, other):
# variant < addon, layered-product < optional
if self.type == other.type:
return cmp(self.uid, other.uid)
if self.type == "variant":
return -1
if other.type == "variant":
return 1
if self.type == "optional":
return 1
if other.type == "optional":
return -1
return cmp(self.uid, other.uid)
def __eq__(self, other):
return self.type == other.type and self.uid == other.uid
def __ne__(self, other):
return not (self == other)
def __lt__(self, other):
ORDERING = {'variant': 0, 'addon': 1, 'layered-product': 1, 'optional': 2}
return (ORDERING[self.type], self.uid) < (ORDERING[other.type], other.uid)
def __hash__(self):
return hash((self.type, self.uid))
@property
def uid(self):
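
Note: Python 3 never calls __cmp__; ordering needs the rich comparison methods. functools.total_ordering derives the rest from __eq__ plus __lt__, and mapping the variant types to ranks keeps the old "variant < addon, layered-product < optional" rule. A trimmed sketch of the same idea:

    from functools import total_ordering

    @total_ordering
    class Variant(object):          # trimmed stand-in for the real class
        ORDERING = {'variant': 0, 'addon': 1, 'layered-product': 1, 'optional': 2}

        def __init__(self, type, uid):
            self.type, self.uid = type, uid

        def __eq__(self, other):
            return self.type == other.type and self.uid == other.uid

        def __lt__(self, other):
            return ((self.ORDERING[self.type], self.uid) <
                    (self.ORDERING[other.type], other.uid))

    print(Variant('variant', 'Server') < Variant('optional', 'Server'))   # True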

View File

@ -220,7 +220,7 @@ class UnifiedISO(object):
def createrepo(self):
# remove old repomd.xml checksums from treeinfo
for arch, ti in self.treeinfo.iteritems():
for arch, ti in self.treeinfo.items():
print("Removing old repomd.xml checksums from treeinfo: {0}".format(arch))
for i in ti.checksums.checksums.keys():
if "repomd.xml" in i:
@ -257,7 +257,7 @@ class UnifiedISO(object):
ti.checksums.add(os.path.relpath(repomd_path, tree_dir), 'sha256', root_dir=tree_dir)
# write treeinfo
for arch, ti in self.treeinfo.iteritems():
for arch, ti in self.treeinfo.items():
print("Writing treeinfo: {0}".format(arch))
ti_path = os.path.join(self.temp_dir, "trees", arch, ".treeinfo")
makedirs(os.path.dirname(ti_path))
@ -265,7 +265,7 @@ class UnifiedISO(object):
def discinfo(self):
# write discinfo and media repo
for arch, ti in self.treeinfo.iteritems():
for arch, ti in self.treeinfo.items():
di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")
description = "%s %s" % (ti.release.name, ti.release.version)
if ti.release.is_layered:

View File

@ -10,6 +10,7 @@ import tempfile
import shutil
import errno
import imp
import six
from pungi.util import get_arch_variant_data
from pungi import paths, checks
@ -42,7 +43,7 @@ class MockVariant(mock.Mock):
return self.uid
def get_variants(self, arch=None, types=None):
return [v for v in self.variants.values()
return [v for v in list(self.variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)]
@ -117,7 +118,7 @@ class DummyCompose(object):
self.variants['Server'].variants['HA'] = self.all_variants['Server-HA']
def get_variants(self, arch=None, types=None):
return [v for v in self.all_variants.values()
return [v for v in list(self.all_variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)]
def can_fail(self, variant, arch, deliverable):
@ -126,7 +127,7 @@ class DummyCompose(object):
def get_arches(self):
result = set()
for variant in self.variants.itervalues():
for variant in list(self.variants.values()):
result |= set(variant.arches)
return sorted(result)
@ -142,6 +143,8 @@ def touch(path, content=None):
os.makedirs(os.path.dirname(path))
except OSError:
pass
if not isinstance(content, six.binary_type):
content = content.encode()
with open(path, 'wb') as f:
f.write(content)
return path
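
Note: files opened with 'wb' accept only bytes, so the helper encodes any text it receives; six.binary_type is str on Python 2 and bytes on Python 3. The guard in isolation (helper name hypothetical):

    import six

    def ensure_bytes(content):      # hypothetical helper name
        if not isinstance(content, six.binary_type):
            content = content.encode()
        return content

    print(ensure_bytes(u'hello'))   # b'hello'
    print(ensure_bytes(b'raw'))     # passed through unchanged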

View File

@ -1,9 +1,8 @@
import mock
import unittest
import six
import pungi
from helpers import load_bin
from tests.helpers import load_bin
cli = load_bin("pungi-koji")
@ -12,8 +11,13 @@ class PungiKojiTestCase(unittest.TestCase):
@mock.patch('sys.argv', new=['prog', '--version'])
@mock.patch('sys.stderr', new_callable=six.StringIO)
@mock.patch('sys.stdout', new_callable=six.StringIO)
@mock.patch('pungi_cli_fake_pungi-koji.get_full_version', return_value='a-b-c.111')
def test_version(self, get_full_version, stderr):
def test_version(self, get_full_version, stdout, stderr):
with self.assertRaises(SystemExit):
cli.main()
self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n')
# Python 2.7 prints the version to stderr, 3.4+ to stdout.
if six.PY3:
self.assertMultiLineEqual(stdout.getvalue(), 'a-b-c.111\n')
else:
self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n')

View File

@ -840,5 +840,6 @@ class TestTweakConfigs(PungiTestCase):
f.read().strip(),
':LABEL=new\\\\x20volid ks=hd:LABEL=new\\\\x20volid:/ks.cfg')
if __name__ == "__main__":
unittest.main()

View File

@ -1,4 +1,3 @@
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import mock
@ -8,7 +7,7 @@ except ImportError:
import unittest
import os
import sys
import StringIO
from six import StringIO
import kobo.conf
@ -26,7 +25,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
def custom_exists(path):
return False
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = custom_exists
result = checks.check({})
@ -35,7 +34,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
self.assertFalse(result)
def test_all_deps_ok(self):
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'x86_64'
with mock.patch('os.path.exists') as exists:
@ -50,7 +49,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'create_jigdo': False
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'x86_64'
with mock.patch('os.path.exists') as exists:
@ -67,7 +66,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'runroot': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
result = checks.check(conf)
@ -81,7 +80,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'runroot': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
result = checks.check(conf)
@ -96,14 +95,14 @@ class CheckDependenciesTestCase(unittest.TestCase):
'runroot': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'armhfp'
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
result = checks.check(conf)
self.assertRegexpMatches(out.getvalue(), r'^Not checking.*Expect failures.*$')
self.assertRegex(out.getvalue(), r'^Not checking.*Expect failures.*$')
self.assertTrue(result)
def test_isohybrid_not_needed_in_runroot(self):
@ -111,7 +110,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'runroot': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
result = checks.check(conf)
@ -124,7 +123,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'runroot': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])
result = checks.check(conf)
@ -139,7 +138,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'bootable': True,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])
result = checks.check(conf)
@ -148,7 +147,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
self.assertFalse(result)
def test_requires_modifyrepo(self):
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/modifyrepo'])
result = checks.check({})
@ -157,7 +156,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
self.assertFalse(result)
def test_requires_createrepo_c(self):
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])
result = checks.check({})
@ -170,7 +169,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
'createrepo_c': False,
}
with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])
result = checks.check(conf)
@ -228,7 +227,7 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@ -276,7 +275,7 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@ -300,9 +299,9 @@ class TestSchemaValidator(unittest.TestCase):
config = self._load_conf_from_string(string)
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 1)
self.assertRegexpMatches(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*")
self.assertRegex(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*")
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@ -341,8 +340,8 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertRegex(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertRegex(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os")
@ -381,8 +380,8 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'")
self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegex(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'")
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@ -420,8 +419,8 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@ -463,10 +462,10 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 4)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertRegexpMatches(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'")
self.assertRegexpMatches(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.")
self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertRegex(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'")
self.assertRegex(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.")
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server", "Client"])
@ -516,8 +515,8 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*")
self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*")
self.assertEqual(config.get("live_images")[0][1]['armhfp']['repo'], 'Everything')
@ -550,7 +549,3 @@ class TestUmask(unittest.TestCase):
[mock.call.warning('Unusually strict umask detected (0%03o), '
'expect files with broken permissions.', 0o044)]
)
if __name__ == "__main__":
unittest.main()
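
Note: assertRegexpMatches and assertRaisesRegexp were renamed assertRegex and assertRaisesRegex in Python 3.2, and the old spellings are deprecated. The unittest2 fallback mentioned in the commit message provides the new names on old interpreters. Sketch:

    try:
        import unittest2 as unittest    # backports assertRegex to Python 2.6
    except ImportError:
        import unittest

    class RegexExample(unittest.TestCase):
        def test_warning_prefix(self):
            # assertRegex replaces the deprecated assertRegexpMatches.
            self.assertRegex('WARNING: option is deprecated', r'^WARNING:')

    if __name__ == '__main__':
        unittest.main()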

View File

@ -234,9 +234,9 @@ class ComposeTestCase(unittest.TestCase):
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Crashy', 'Live', 'Server'])
self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])
self.assertItemsEqual(compose.variants['Client'].arches,
['i386', 'x86_64'])
@ -278,9 +278,9 @@ class ComposeTestCase(unittest.TestCase):
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Live', 'Server'])
self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])
self.assertItemsEqual(compose.variants['Client'].arches,
['x86_64'])
@ -324,7 +324,7 @@ class ComposeTestCase(unittest.TestCase):
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Server'])
self.assertItemsEqual(compose.variants['Client'].arches,
['i386', 'x86_64'])
@ -364,7 +364,7 @@ class ComposeTestCase(unittest.TestCase):
compose = Compose(conf, self.tmp_dir, logger=logger)
compose.read_variants()
self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Server'])
self.assertItemsEqual(compose.variants['Client'].arches,
['x86_64'])

View File

@ -8,6 +8,7 @@ except ImportError:
import unittest
import os
import six
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
@ -391,11 +392,10 @@ class TestRegexValidation(ConfigTestCase):
cfg = load_config(PKGSET_REPOS,
multilib=[('^*$', {'*': []})])
self.assertValidation(
cfg,
['Failed validation in multilib.0.0: incorrect regular '
'expression: nothing to repeat'],
[])
msg = 'Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat'
if six.PY3:
msg += ' at position 1'
self.assertValidation(cfg, [msg], [])
class RepoclosureTestCase(ConfigTestCase):

View File

@ -1,8 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import mock
import os
import subprocess
@ -27,9 +25,6 @@ class ConfigValidateScriptTest(helpers.PungiTestCase):
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
self.assertEqual(b'', stdout)
self.assertEqual(b'', stderr)
self.assertEqual(0, p.returncode)
self.assertEqual('', stdout)
self.assertEqual('', stderr)
if __name__ == '__main__':
unittest.main()

View File

@ -31,6 +31,7 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
pool = ThreadPool.return_value
phase = createiso.CreateisoPhase(compose)
phase.logger = mock.Mock()
phase.run()
self.assertEqual(len(pool.add.call_args_list), 0)
@ -88,6 +89,7 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
pool = ThreadPool.return_value
phase = createiso.CreateisoPhase(compose)
phase.logger = mock.Mock()
phase.run()
self.assertEqual(prepare_iso.call_args_list,
@ -151,6 +153,7 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
pool = ThreadPool.return_value
phase = createiso.CreateisoPhase(compose)
phase.logger = mock.Mock()
phase.run()
self.assertItemsEqual(
@ -633,7 +636,7 @@ class DummySize(object):
self.sizes = sizes
def __call__(self, path):
for fragment, size in self.sizes.iteritems():
for fragment, size in self.sizes.items():
if fragment in path:
return size
return 0

View File

@ -6,7 +6,7 @@ import mock
import os
import sys
import StringIO
from six.moves import StringIO
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
@ -19,7 +19,7 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
def setUp(self):
super(CreateIsoScriptTest, self).setUp()
self.outdir = os.path.join(self.topdir, 'isos')
self.out = StringIO.StringIO()
self.out = StringIO()
self.maxDiff = None
def assertScript(self, cmds):

View File

@ -715,6 +715,7 @@ class ANYSingleton(object):
def __repr__(self):
return u'ANY'
ANY = ANYSingleton()
@ -728,7 +729,7 @@ class TestGetProductIds(PungiTestCase):
def assertProductIds(self, mapping):
pids = glob.glob(self.compose.paths.work.product_id('*', '*'))
expected = set()
for variant, arches in mapping.iteritems():
for variant, arches in mapping.items():
for arch in arches:
expected.add(os.path.join(self.topdir, 'work', arch,
'product_id',
@ -800,8 +801,8 @@ class TestGetProductIds(PungiTestCase):
get_productids_from_scm(self.compose)
self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, ANY)])
self.assertEqual(str(ctx.exception),
'No product certificate found (arch: amd64, variant: Everything)')
self.assertRegex(str(ctx.exception),
r'No product certificate found \(arch: amd64, variant: (Everything|Client)\)')
@mock.patch('pungi.phases.createrepo.get_dir_from_scm')
def test_multiple_matching(self, get_dir_from_scm):
@ -822,8 +823,8 @@ class TestGetProductIds(PungiTestCase):
get_productids_from_scm(self.compose)
self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, ANY)])
self.assertRegexpMatches(str(ctx.exception),
'Multiple product certificates found.+')
self.assertRegex(str(ctx.exception),
'Multiple product certificates found.+')
if __name__ == "__main__":

View File

@ -159,7 +159,7 @@ class TestCopyFiles(helpers.PungiTestCase):
extra_files.copy_extra_files(
compose, [cfg], 'x86_64', compose.variants['Server'], package_sets)
self.assertRegexpMatches(str(ctx.exception), 'No.*package.*matching bad-server\*.*')
self.assertRegex(str(ctx.exception), 'No.*package.*matching bad-server\*.*')
self.assertEqual(len(get_file_from_scm.call_args_list), 0)
self.assertEqual(get_dir_from_scm.call_args_list, [])

View File

@ -9,6 +9,7 @@ except ImportError:
import os
import tempfile
import shutil
import six
import sys
import logging
@ -27,6 +28,11 @@ try:
except ImportError:
HAS_DNF = False
if six.PY2:
HAS_YUM = True
else:
HAS_YUM = False
def convert_pkg_map(data):
"""
@ -1627,6 +1633,7 @@ class DepsolvingBase(object):
])
@unittest.skipUnless(HAS_YUM, 'YUM only available on Python 2')
class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def setUp(self):
@ -1789,6 +1796,3 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-debuginfo-1.10-5.x86_64", [PkgFlag.lookaside])
if __name__ == "__main__":
unittest.main()
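
Note: yum has no Python 3 bindings, so the yum-based depsolving tests only run on Python 2; the six.PY2 guard plus skipUnless keeps the suite green on both interpreters. The pattern in isolation:

    import unittest
    import six

    HAS_YUM = six.PY2   # yum is importable only under Python 2

    @unittest.skipUnless(HAS_YUM, 'YUM only available on Python 2')
    class YumDepsolvingExample(unittest.TestCase):
        def test_placeholder(self):
            self.assertTrue(True)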

View File

@ -34,6 +34,9 @@ class MockPkg(object):
def __repr__(self):
return self.nvr
def __lt__(self, another):
return self.nvr < another.nvr
def _join(a, *rest):
res = copy.deepcopy(a)
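
Note: Python 2 could sort arbitrary objects via an implicit fallback ordering; Python 3 raises TypeError unless __lt__ is defined, which the tests hit when sorting MockPkg instances. In isolation:

    class MockPkg(object):          # trimmed stand-in for the test helper
        def __init__(self, nvr):
            self.nvr = nvr

        def __lt__(self, another):
            # Required for sorted() on Python 3.
            return self.nvr < another.nvr

    pkgs = sorted([MockPkg('b-1-1.noarch'), MockPkg('a-1-1.noarch')])
    print([p.nvr for p in pkgs])    # ['a-1-1.noarch', 'b-1-1.noarch']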

View File

@ -83,8 +83,8 @@ class TestInitPhase(PungiTestCase):
phase = init.InitPhase(compose)
phase.run()
self.assertEqual(write_global.mock_calls, [])
self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])
self.assertItemsEqual(write_global.mock_calls, [])
self.assertItemsEqual(write_prepopulate.mock_calls, [mock.call(compose)])
self.assertItemsEqual(write_arch.mock_calls, [])
self.assertItemsEqual(create_comps.mock_calls, [])
self.assertItemsEqual(write_variant.mock_calls, [])

View File

@ -18,7 +18,7 @@ from pungi.wrappers.kojiwrapper import KojiWrapper, get_buildroot_rpms
class DumbMock(object):
def __init__(self, **kwargs):
for key, value in kwargs.iteritems():
for key, value in kwargs.items():
setattr(self, key, value)

View File

@ -328,7 +328,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)
with self.assertRaisesRegexp(RuntimeError, r'no.+Missing.+when building.+Server'):
with self.assertRaisesRegex(RuntimeError, r'no.+Missing.+when building.+Server'):
phase.run()
@mock.patch('pungi.phases.livemedia_phase.ThreadPool')
@ -353,7 +353,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)
with self.assertRaisesRegexp(RuntimeError, r'There is no variant Missing to get repo from.'):
with self.assertRaisesRegex(RuntimeError, r'There is no variant Missing to get repo from.'):
phase.run()
@mock.patch('pungi.util.resolve_git_url')

View File

@ -1,4 +1,3 @@
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
try:
@ -59,7 +58,3 @@ class LoraxWrapperTest(unittest.TestCase):
'--add-arch-template-var=va1', '--add-arch-template-var=va2',
'--logfile=/tmp/lorax.log',
'/mnt/output_dir'])
if __name__ == "__main__":
unittest.main()

View File

@ -6,7 +6,10 @@ import json
import mock
import os
import sys
import unittest
try:
import unittest2 as unittest
except ImportError:
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

View File

@ -443,7 +443,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
with self.assertRaises(RuntimeError) as ctx:
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.assertRegexpMatches(str(ctx.exception), r"task 12345 failed: see .+ for details")
self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")
@mock.patch('pungi.util.resolve_git_url')
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')

View File

@ -2,7 +2,6 @@
# -*- coding: utf-8 -*-
import unittest
import mock
import os
@ -360,6 +359,3 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
'--add-arch-template-var=ostree_repo=http://www.example.com/ostree',
'--rootfs-size=None',
self.output])])
if __name__ == '__main__':
unittest.main()

View File

@ -121,7 +121,7 @@ class FakePool(object):
class PkgsetCompareMixin(object):
def assertPkgsetEqual(self, actual, expected):
for k, v1 in expected.iteritems():
for k, v1 in expected.items():
self.assertIn(k, actual)
v2 = actual.pop(k)
self.assertItemsEqual(v1, v2)
@ -148,7 +148,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
helpers.touch(os.path.join(self.topdir, filename))
def assertPkgsetEqual(self, actual, expected):
for k, v1 in expected.iteritems():
for k, v1 in expected.items():
self.assertIn(k, actual)
v2 = actual.pop(k)
self.assertItemsEqual(v1, v2)
@ -259,8 +259,8 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
self.assertRegexpMatches(str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+')
self.assertRegex(str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+')
def test_can_not_find_any_package(self):
pkgset = pkgsets.KojiPackageSet(self.koji_wrapper, ['cafebabe', None], arches=['x86_64'])
@ -272,8 +272,8 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
self.assertRegexpMatches(str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+')
self.assertRegex(str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+')
def test_packages_attribute(self):
self._touch_files([

View File

@ -94,11 +94,11 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
self.koji_wrapper = mock.Mock()
self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle')
@mock.patch('cPickle.dumps')
@mock.patch('six.moves.cPickle.dumps')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')
def test_populate(self, KojiPackageSet, pickle_dumps):
pickle_dumps.return_value = 'DATA'
pickle_dumps.return_value = b'DATA'
orig_pkgset = KojiPackageSet.return_value
@ -117,7 +117,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
with open(self.pkgset_path) as f:
self.assertEqual(f.read(), 'DATA')
@mock.patch('cPickle.dumps')
@mock.patch('six.moves.cPickle.dumps')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')
def test_populate_with_multiple_koji_tags(self, KojiPackageSet, pickle_dumps):
self.compose = helpers.DummyCompose(self.topdir, {
@ -126,7 +126,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
})
self.compose.DEBUG = False
pickle_dumps.return_value = 'DATA'
pickle_dumps.return_value = b'DATA'
orig_pkgset = KojiPackageSet.return_value
@ -147,7 +147,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
with open(self.pkgset_path) as f:
self.assertEqual(f.read(), 'DATA')
@mock.patch('cPickle.load')
@mock.patch('six.moves.cPickle.load')
def test_populate_in_debug_mode(self, pickle_load):
helpers.touch(self.pkgset_path, 'DATA')
self.compose.DEBUG = True
@ -167,7 +167,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
[mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
remove_path_prefix='/prefix')])
@mock.patch('cPickle.dumps')
@mock.patch('six.moves.cPickle.dumps')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list')
def test_populate_packages_to_gather(self, save_file_list, popuplate,
@ -182,7 +182,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
]
})
self.compose.DEBUG = False
pickle_dumps.return_value = 'DATA'
pickle_dumps.return_value = b'DATA'
pkgset = source_koji.populate_global_pkgset(
self.compose, self.koji_wrapper, '/prefix', 123456)

View File

@ -213,7 +213,7 @@ class GitSCMTestCase(SCMBaseTest):
self.destdir)
self.assertStructure(retval, ['some_file.txt'])
self.assertEqual(1, len(commands))
self.assertRegexpMatches(
self.assertRegex(
commands[0],
r'/usr/bin/git clone --depth 1 --branch=master https://example.com/git/repo.git /tmp/.+')
@ -308,7 +308,7 @@ class GitSCMTestCase(SCMBaseTest):
self.destdir)
self.assertStructure(retval, ['first', 'second'])
self.assertRegexpMatches(
self.assertRegex(
commands[0],
r'/usr/bin/git clone --depth 1 --branch=master https://example.com/git/repo.git /tmp/.+')
self.assertEqual(commands[1:], ['make'])

View File

@ -17,12 +17,12 @@ import pungi.phases.test as test_phase
from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom
PAD = '\0' * 100
UNBOOTABLE_ISO = ('\0' * 0x8001) + 'CD001' + PAD
ISO_WITH_MBR = ('\0' * 0x1fe) + '\x55\xAA' + ('\0' * 0x7e01) + 'CD001' + PAD
ISO_WITH_GPT = ('\0' * 0x200) + 'EFI PART' + ('\0' * 0x7df9) + 'CD001' + PAD
ISO_WITH_MBR_AND_GPT = ('\0' * 0x1fe) + '\x55\xAAEFI PART' + ('\0' * 0x7df9) + 'CD001' + PAD
ISO_WITH_TORITO = ('\0' * 0x8001) + 'CD001' + ('\0' * 0x7fa) + '\0CD001\1EL TORITO SPECIFICATION' + PAD
PAD = b'\0' * 100
UNBOOTABLE_ISO = (b'\0' * 0x8001) + b'CD001' + PAD
ISO_WITH_MBR = (b'\0' * 0x1fe) + b'\x55\xAA' + (b'\0' * 0x7e01) + b'CD001' + PAD
ISO_WITH_GPT = (b'\0' * 0x200) + b'EFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD
ISO_WITH_MBR_AND_GPT = (b'\0' * 0x1fe) + b'\x55\xAAEFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD
ISO_WITH_TORITO = (b'\0' * 0x8001) + b'CD001' + (b'\0' * 0x7fa) + b'\0CD001\1EL TORITO SPECIFICATION' + PAD
class TestCheckImageSanity(PungiTestCase):
@ -187,7 +187,7 @@ class TestRepoclosure(PungiTestCase):
})
test_phase.run_repoclosure(compose)
self.assertItemsEqual(mock_grc.call_args_list, [])
self.assertEqual(mock_grc.call_args_list, [])
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')

View File

@ -5,7 +5,7 @@ import mock
import os
import shutil
import sys
from ConfigParser import SafeConfigParser
from six.moves.configparser import SafeConfigParser
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
@ -26,14 +26,14 @@ class TestUnifiedIsos(PungiTestCase):
compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
isos = unified_isos.UnifiedISO(compose_path)
self.assertEqual(isos.compose_path, compose_path)
self.assertRegexpMatches(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
self.assertRegex(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
def test_can_find_compose_subdir(self):
isos = unified_isos.UnifiedISO(os.path.join(self.topdir, COMPOSE_ID))
self.assertEqual(isos.compose_path, os.path.join(self.topdir, COMPOSE_ID, 'compose'))
self.assertRegexpMatches(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
self.assertRegex(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
@mock.patch('os.rename')
def test_dump_manifest(self, rename):