Format tests with black

JIRA: COMPOSE-4086
Signed-off-by: Haibo Lin <hlin@redhat.com>
This commit is contained in:
Haibo Lin 2020-01-22 18:02:22 +08:00
parent ef33d00f5b
commit 38142d30ba
51 changed files with 13767 additions and 9180 deletions

View File

@ -76,6 +76,7 @@ setup(
] ]
}, },
tests_require = [ tests_require = [
"black",
"mock", "mock",
"nose", "nose",
"nose-cov", "nose-cov",

View File

@ -23,23 +23,25 @@ from pungi.module_util import Modulemd
class BaseTestCase(unittest.TestCase): class BaseTestCase(unittest.TestCase):
def assertFilesEqual(self, fn1, fn2): def assertFilesEqual(self, fn1, fn2):
with open(fn1, 'rb') as f1: with open(fn1, "rb") as f1:
lines1 = f1.read().decode('utf-8').splitlines() lines1 = f1.read().decode("utf-8").splitlines()
with open(fn2, 'rb') as f2: with open(fn2, "rb") as f2:
lines2 = f2.read().decode('utf-8').splitlines() lines2 = f2.read().decode("utf-8").splitlines()
diff = '\n'.join(difflib.unified_diff(lines1, lines2, diff = "\n".join(
fromfile='EXPECTED', tofile='ACTUAL')) difflib.unified_diff(lines1, lines2, fromfile="EXPECTED", tofile="ACTUAL")
self.assertEqual(diff, '', 'Files differ:\n' + diff) )
self.assertEqual(diff, "", "Files differ:\n" + diff)
def assertFileContent(self, fn, expected): def assertFileContent(self, fn, expected):
with open(fn, 'rb') as f: with open(fn, "rb") as f:
lines = f.read().decode('utf-8').splitlines() lines = f.read().decode("utf-8").splitlines()
diff = '\n'.join(difflib.unified_diff( diff = "\n".join(
lines, expected.splitlines(), fromfile='EXPECTED', tofile='ACTUAL') difflib.unified_diff(
lines, expected.splitlines(), fromfile="EXPECTED", tofile="ACTUAL"
) )
self.assertEqual(diff, '', 'Files differ:\n' + diff) )
self.assertEqual(diff, "", "Files differ:\n" + diff)
class PungiTestCase(BaseTestCase): class PungiTestCase(BaseTestCase):
@ -72,7 +74,7 @@ class PungiTestCase(BaseTestCase):
class MockVariant(mock.Mock): class MockVariant(mock.Mock):
def __init__(self, is_empty=False, name=None, *args, **kwargs): def __init__(self, is_empty=False, name=None, *args, **kwargs):
super(MockVariant, self).__init__(*args, is_empty=is_empty, **kwargs) super(MockVariant, self).__init__(*args, is_empty=is_empty, **kwargs)
self.parent = kwargs.get('parent', None) self.parent = kwargs.get("parent", None)
self.arch_mmds = {} self.arch_mmds = {}
self.module_uid_to_koji_tag = {} self.module_uid_to_koji_tag = {}
self.variants = {} self.variants = {}
@ -85,8 +87,11 @@ class MockVariant(mock.Mock):
return self.uid return self.uid
def get_variants(self, arch=None, types=None): def get_variants(self, arch=None, types=None):
return [v for v in list(self.variants.values()) return [
if (not arch or arch in v.arches) and (not types or v.type in types)] v
for v in list(self.variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)
]
def get_modules(self, arch=None, types=None): def get_modules(self, arch=None, types=None):
return [] return []
@ -133,22 +138,19 @@ class IterableMock(mock.Mock):
class DummyCompose(object): class DummyCompose(object):
def __init__(self, topdir, config): def __init__(self, topdir, config):
self.supported = True self.supported = True
self.compose_date = '20151203' self.compose_date = "20151203"
self.compose_type_suffix = '.t' self.compose_type_suffix = ".t"
self.compose_type = 'test' self.compose_type = "test"
self.compose_respin = 0 self.compose_respin = 0
self.compose_id = 'Test-20151203.0.t' self.compose_id = "Test-20151203.0.t"
self.compose_label = None self.compose_label = None
self.compose_label_major_version = None self.compose_label_major_version = None
self.image_release = '20151203.t.0' self.image_release = "20151203.t.0"
self.image_version = '25' self.image_version = "25"
self.ci_base = mock.Mock( self.ci_base = mock.Mock(
release_id='Test-1.0', release_id="Test-1.0",
release=mock.Mock( release=mock.Mock(
short='test', short="test", version="1.0", is_layered=False, type_suffix=""
version='1.0',
is_layered=False,
type_suffix=''
), ),
) )
self.topdir = topdir self.topdir = topdir
@ -157,12 +159,27 @@ class DummyCompose(object):
self.paths = paths.Paths(self) self.paths = paths.Paths(self)
self.has_comps = True self.has_comps = True
self.variants = { self.variants = {
'Server': MockVariant(uid='Server', arches=['x86_64', 'amd64'], "Server": MockVariant(
type='variant', id='Server', name='Server'), uid="Server",
'Client': MockVariant(uid='Client', arches=['amd64'], arches=["x86_64", "amd64"],
type='variant', id='Client', name='Client'), type="variant",
'Everything': MockVariant(uid='Everything', arches=['x86_64', 'amd64'], id="Server",
type='variant', id='Everything', name='Everything'), name="Server",
),
"Client": MockVariant(
uid="Client",
arches=["amd64"],
type="variant",
id="Client",
name="Client",
),
"Everything": MockVariant(
uid="Everything",
arches=["x86_64", "amd64"],
type="variant",
id="Everything",
name="Everything",
),
} }
self.all_variants = self.variants.copy() self.all_variants = self.variants.copy()
@ -174,13 +191,13 @@ class DummyCompose(object):
self.log_error = mock.Mock() self.log_error = mock.Mock()
self.log_debug = mock.Mock() self.log_debug = mock.Mock()
self.log_warning = mock.Mock() self.log_warning = mock.Mock()
self.get_image_name = mock.Mock(return_value='image-name') self.get_image_name = mock.Mock(return_value="image-name")
self.image = mock.Mock( self.image = mock.Mock(
path='Client/i386/iso/image.iso', can_fail=False, size=123, _max_size=None, path="Client/i386/iso/image.iso", can_fail=False, size=123, _max_size=None,
) )
self.im = mock.Mock(images={'Client': {'amd64': [self.image]}}) self.im = mock.Mock(images={"Client": {"amd64": [self.image]}})
self.old_composes = [] self.old_composes = []
self.config_dir = '/home/releng/config' self.config_dir = "/home/releng/config"
self.notifier = None self.notifier = None
self.attempt_deliverable = mock.Mock() self.attempt_deliverable = mock.Mock()
self.fail_deliverable = mock.Mock() self.fail_deliverable = mock.Mock()
@ -189,23 +206,32 @@ class DummyCompose(object):
self.cache_region = None self.cache_region = None
def setup_optional(self): def setup_optional(self):
self.all_variants['Server-optional'] = MockVariant( self.all_variants["Server-optional"] = MockVariant(
uid='Server-optional', arches=['x86_64'], type='optional') uid="Server-optional", arches=["x86_64"], type="optional"
self.all_variants['Server-optional'].parent = self.variants['Server'] )
self.variants['Server'].variants['optional'] = self.all_variants['Server-optional'] self.all_variants["Server-optional"].parent = self.variants["Server"]
self.variants["Server"].variants["optional"] = self.all_variants[
"Server-optional"
]
def setup_addon(self): def setup_addon(self):
self.all_variants['Server-HA'] = MockVariant( self.all_variants["Server-HA"] = MockVariant(
uid='Server-HA', arches=['x86_64'], type='addon', is_empty=False) uid="Server-HA", arches=["x86_64"], type="addon", is_empty=False
self.all_variants['Server-HA'].parent = self.variants['Server'] )
self.variants['Server'].variants['HA'] = self.all_variants['Server-HA'] self.all_variants["Server-HA"].parent = self.variants["Server"]
self.variants["Server"].variants["HA"] = self.all_variants["Server-HA"]
def get_variants(self, arch=None, types=None): def get_variants(self, arch=None, types=None):
return [v for v in list(self.all_variants.values()) return [
if (not arch or arch in v.arches) and (not types or v.type in types)] v
for v in list(self.all_variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)
]
def can_fail(self, variant, arch, deliverable): def can_fail(self, variant, arch, deliverable):
failable = get_arch_variant_data(self.conf, 'failable_deliverables', arch, variant) failable = get_arch_variant_data(
self.conf, "failable_deliverables", arch, variant
)
return deliverable in failable return deliverable in failable
def get_arches(self): def get_arches(self):
@ -221,19 +247,19 @@ class DummyCompose(object):
def touch(path, content=None): def touch(path, content=None):
"""Helper utility that creates an dummy file in given location. Directories """Helper utility that creates an dummy file in given location. Directories
will be created.""" will be created."""
content = content or (path + '\n') content = content or (path + "\n")
try: try:
os.makedirs(os.path.dirname(path)) os.makedirs(os.path.dirname(path))
except OSError: except OSError:
pass pass
if not isinstance(content, six.binary_type): if not isinstance(content, six.binary_type):
content = content.encode() content = content.encode()
with open(path, 'wb') as f: with open(path, "wb") as f:
f.write(content) f.write(content)
return path return path
FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixtures') FIXTURE_DIR = os.path.join(os.path.dirname(__file__), "fixtures")
def copy_fixture(fixture_name, dest): def copy_fixture(fixture_name, dest):
@ -243,27 +269,25 @@ def copy_fixture(fixture_name, dest):
def boom(*args, **kwargs): def boom(*args, **kwargs):
raise Exception('BOOM') raise Exception("BOOM")
def mk_boom(cls=Exception, msg='BOOM'): def mk_boom(cls=Exception, msg="BOOM"):
def b(*args, **kwargs): def b(*args, **kwargs):
raise cls(msg) raise cls(msg)
return b return b
PKGSET_REPOS = dict( PKGSET_REPOS = dict(pkgset_source="repos", pkgset_repos={},)
pkgset_source='repos',
pkgset_repos={},
)
BASE_CONFIG = dict( BASE_CONFIG = dict(
release_short='test', release_short="test",
release_name='Test', release_name="Test",
release_version='1.0', release_version="1.0",
variants_file='variants.xml', variants_file="variants.xml",
createrepo_checksum='sha256', createrepo_checksum="sha256",
gather_method='deps', gather_method="deps",
) )

View File

@ -6,49 +6,66 @@ import unittest
import os import os
import sys import sys
from pungi.arch import (get_compatible_arches, get_valid_arches, get_valid_multilib_arches, from pungi.arch import (
is_excluded, is_valid_arch, split_name_arch) get_compatible_arches,
get_valid_arches,
get_valid_multilib_arches,
is_excluded,
is_valid_arch,
split_name_arch,
)
class TestArch(unittest.TestCase): class TestArch(unittest.TestCase):
def test_i386(self): def test_i386(self):
arches = get_valid_arches("i386") arches = get_valid_arches("i386")
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch']) self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch"])
arches = get_valid_arches("i386", multilib=False) arches = get_valid_arches("i386", multilib=False)
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch']) self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch"])
arches = get_valid_arches("i386", add_src=True) arches = get_valid_arches("i386", add_src=True)
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch', 'src']) self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch", "src"])
def test_x86_64(self): def test_x86_64(self):
arches = get_valid_arches("x86_64") arches = get_valid_arches("x86_64")
self.assertEqual(arches, ['x86_64', 'athlon', 'i686', 'i586', 'i486', 'i386', 'noarch']) self.assertEqual(
arches, ["x86_64", "athlon", "i686", "i586", "i486", "i386", "noarch"]
)
arches = get_valid_arches("x86_64", multilib=False) arches = get_valid_arches("x86_64", multilib=False)
self.assertEqual(arches, ['x86_64', 'noarch']) self.assertEqual(arches, ["x86_64", "noarch"])
arches = get_valid_arches("x86_64", add_src=True) arches = get_valid_arches("x86_64", add_src=True)
self.assertEqual(arches, ['x86_64', 'athlon', 'i686', 'i586', 'i486', 'i386', 'noarch', 'src']) self.assertEqual(
arches,
["x86_64", "athlon", "i686", "i586", "i486", "i386", "noarch", "src"],
)
def test_armhfp(self): def test_armhfp(self):
arches = get_valid_arches("armhfp") arches = get_valid_arches("armhfp")
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch']) self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch"])
arches = get_valid_arches("armhfp", multilib=False) arches = get_valid_arches("armhfp", multilib=False)
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch']) self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch"])
arches = get_valid_arches("armhfp", add_src=True) arches = get_valid_arches("armhfp", add_src=True)
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch', 'src']) self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch", "src"])
def test_get_compatible_arches(self): def test_get_compatible_arches(self):
self.assertEqual(get_compatible_arches("noarch"), ["noarch"]) self.assertEqual(get_compatible_arches("noarch"), ["noarch"])
self.assertEqual(get_compatible_arches("i386"), get_valid_arches("i386")) self.assertEqual(get_compatible_arches("i386"), get_valid_arches("i386"))
self.assertEqual(get_compatible_arches("i586"), get_valid_arches("i386")) self.assertEqual(get_compatible_arches("i586"), get_valid_arches("i386"))
self.assertEqual(get_compatible_arches("x86_64"), get_valid_arches("x86_64", multilib=False)) self.assertEqual(
self.assertEqual(get_compatible_arches("ppc64p7"), get_valid_arches("ppc64", multilib=False)) get_compatible_arches("x86_64"), get_valid_arches("x86_64", multilib=False)
self.assertEqual(get_compatible_arches("armhfp"), get_valid_arches("armv7hnl", multilib=False)) )
self.assertEqual(
get_compatible_arches("ppc64p7"), get_valid_arches("ppc64", multilib=False)
)
self.assertEqual(
get_compatible_arches("armhfp"),
get_valid_arches("armv7hnl", multilib=False),
)
def test_is_valid_arch(self): def test_is_valid_arch(self):
self.assertEqual(is_valid_arch("i386"), True) self.assertEqual(is_valid_arch("i386"), True)
@ -63,29 +80,38 @@ class TestArch(unittest.TestCase):
self.assertEqual(split_name_arch("package"), ("package", None)) self.assertEqual(split_name_arch("package"), ("package", None))
self.assertEqual(split_name_arch("package.x86_64"), ("package", "x86_64")) self.assertEqual(split_name_arch("package.x86_64"), ("package", "x86_64"))
self.assertEqual(split_name_arch("package.foo"), ("package.foo", None)) self.assertEqual(split_name_arch("package.foo"), ("package.foo", None))
self.assertEqual(split_name_arch("i386"), ("i386", None)) # we suppose that $name is never empty self.assertEqual(
split_name_arch("i386"), ("i386", None)
) # we suppose that $name is never empty
def test_get_valid_multilib_arches(self): def test_get_valid_multilib_arches(self):
self.assertEqual(get_valid_multilib_arches("noarch"), []) self.assertEqual(get_valid_multilib_arches("noarch"), [])
self.assertEqual(get_valid_multilib_arches("athlon"), []) self.assertEqual(get_valid_multilib_arches("athlon"), [])
self.assertEqual(get_valid_multilib_arches("x86_64"), ['athlon', 'i686', 'i586', 'i486', 'i386']) self.assertEqual(
get_valid_multilib_arches("x86_64"),
["athlon", "i686", "i586", "i486", "i386"],
)
class TestExclusiveExcludeArch(unittest.TestCase): class TestExclusiveExcludeArch(unittest.TestCase):
def test_no_exclude(self): def test_no_exclude(self):
pkg = mock.Mock(excludearch=[], exclusivearch=[], file_name='pkg.rpm') pkg = mock.Mock(excludearch=[], exclusivearch=[], file_name="pkg.rpm")
self.assertFalse(is_excluded(pkg, ['x86_64'])) self.assertFalse(is_excluded(pkg, ["x86_64"]))
def test_exclude_arch(self): def test_exclude_arch(self):
log = mock.Mock() log = mock.Mock()
pkg = mock.Mock(excludearch=['x86_64'], exclusivearch=[], file_name='pkg.rpm') pkg = mock.Mock(excludearch=["x86_64"], exclusivearch=[], file_name="pkg.rpm")
self.assertTrue(is_excluded(pkg, ['x86_64'], logger=log)) self.assertTrue(is_excluded(pkg, ["x86_64"], logger=log))
self.assertEqual(log.mock_calls, self.assertEqual(
[mock.call.debug("Excluding (EXCLUDEARCH: ['x86_64']): pkg.rpm")]) log.mock_calls,
[mock.call.debug("Excluding (EXCLUDEARCH: ['x86_64']): pkg.rpm")],
)
def test_exclusive_arch(self): def test_exclusive_arch(self):
log = mock.Mock() log = mock.Mock()
pkg = mock.Mock(excludearch=[], exclusivearch=['aarch64'], file_name='pkg.rpm') pkg = mock.Mock(excludearch=[], exclusivearch=["aarch64"], file_name="pkg.rpm")
self.assertTrue(is_excluded(pkg, ['x86_64'], logger=log)) self.assertTrue(is_excluded(pkg, ["x86_64"], logger=log))
self.assertEqual(log.mock_calls, self.assertEqual(
[mock.call.debug("Excluding (EXCLUSIVEARCH: ['aarch64']): pkg.rpm")]) log.mock_calls,
[mock.call.debug("Excluding (EXCLUSIVEARCH: ['aarch64']): pkg.rpm")],
)

View File

@ -1,4 +1,5 @@
import mock import mock
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -9,17 +10,16 @@ from pungi.scripts.pungi_koji import cli_main
class PungiKojiTestCase(unittest.TestCase): class PungiKojiTestCase(unittest.TestCase):
@mock.patch("sys.argv", new=["prog", "--version"])
@mock.patch('sys.argv', new=['prog', '--version']) @mock.patch("sys.stderr", new_callable=six.StringIO)
@mock.patch('sys.stderr', new_callable=six.StringIO) @mock.patch("sys.stdout", new_callable=six.StringIO)
@mock.patch('sys.stdout', new_callable=six.StringIO) @mock.patch("pungi.scripts.pungi_koji.get_full_version", return_value="a-b-c.111")
@mock.patch('pungi.scripts.pungi_koji.get_full_version', return_value='a-b-c.111')
def test_version(self, get_full_version, stdout, stderr): def test_version(self, get_full_version, stdout, stderr):
with self.assertRaises(SystemExit) as cm: with self.assertRaises(SystemExit) as cm:
cli_main() cli_main()
self.assertEqual(cm.exception.code, 0) self.assertEqual(cm.exception.code, 0)
# Python 2.7 prints the version to stderr, 3.4+ to stdout. # Python 2.7 prints the version to stderr, 3.4+ to stdout.
if six.PY3: if six.PY3:
self.assertMultiLineEqual(stdout.getvalue(), 'a-b-c.111\n') self.assertMultiLineEqual(stdout.getvalue(), "a-b-c.111\n")
else: else:
self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n') self.assertMultiLineEqual(stderr.getvalue(), "a-b-c.111\n")

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import mock import mock
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -15,7 +16,6 @@ from pungi import checks
class CheckDependenciesTestCase(unittest.TestCase): class CheckDependenciesTestCase(unittest.TestCase):
def dont_find(self, paths): def dont_find(self, paths):
return lambda path: path not in paths return lambda path: path not in paths
@ -23,149 +23,147 @@ class CheckDependenciesTestCase(unittest.TestCase):
def custom_exists(path): def custom_exists(path):
return False return False
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = custom_exists exists.side_effect = custom_exists
result = checks.check({}) result = checks.check({})
self.assertGreater(len(out.getvalue().strip().split('\n')), 1) self.assertGreater(len(out.getvalue().strip().split("\n")), 1)
self.assertFalse(result) self.assertFalse(result)
def test_all_deps_ok(self): def test_all_deps_ok(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine: with mock.patch("platform.machine") as machine:
machine.return_value = 'x86_64' machine.return_value = "x86_64"
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find([]) exists.side_effect = self.dont_find([])
result = checks.check({}) result = checks.check({})
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_does_not_require_jigdo_if_not_configured(self): def test_does_not_require_jigdo_if_not_configured(self):
conf = { conf = {"create_jigdo": False}
'create_jigdo': False
}
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine: with mock.patch("platform.machine") as machine:
machine.return_value = 'x86_64' machine.return_value = "x86_64"
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/jigdo-lite']) exists.side_effect = self.dont_find(["/usr/bin/jigdo-lite"])
result = checks.check(conf) result = checks.check(conf)
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_isohybrid_not_required_without_productimg_phase(self): def test_isohybrid_not_required_without_productimg_phase(self):
conf = { conf = {
'bootable': True, "bootable": True,
'productimg': False, "productimg": False,
'runroot_tag': 'dummy_tag', "runroot_tag": "dummy_tag",
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid']) exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf) result = checks.check(conf)
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_isohybrid_not_required_on_not_bootable(self): def test_isohybrid_not_required_on_not_bootable(self):
conf = { conf = {
'bootable': False, "bootable": False,
'runroot_tag': 'dummy_tag', "runroot_tag": "dummy_tag",
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid']) exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf) result = checks.check(conf)
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_isohybrid_not_required_on_arm(self): def test_isohybrid_not_required_on_arm(self):
conf = { conf = {
'buildinstall_method': 'lorax', "buildinstall_method": "lorax",
'runroot_tag': '', "runroot_tag": "",
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine: with mock.patch("platform.machine") as machine:
machine.return_value = 'armhfp' machine.return_value = "armhfp"
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid']) exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf) result = checks.check(conf)
self.assertRegexpMatches(out.getvalue(), r'^Not checking.*Expect failures.*$') self.assertRegexpMatches(out.getvalue(), r"^Not checking.*Expect failures.*$")
self.assertTrue(result) self.assertTrue(result)
def test_isohybrid_not_needed_in_runroot(self): def test_isohybrid_not_needed_in_runroot(self):
conf = { conf = {
'runroot_tag': 'dummy_tag', "runroot_tag": "dummy_tag",
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid']) exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf) result = checks.check(conf)
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_genisoimg_not_needed_in_runroot(self): def test_genisoimg_not_needed_in_runroot(self):
conf = { conf = {
'runroot_tag': 'dummy_tag', "runroot_tag": "dummy_tag",
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/genisoimage']) exists.side_effect = self.dont_find(["/usr/bin/genisoimage"])
result = checks.check(conf) result = checks.check(conf)
self.assertEqual('', out.getvalue()) self.assertEqual("", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
def test_requires_modifyrepo(self): def test_requires_modifyrepo(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/modifyrepo']) exists.side_effect = self.dont_find(["/usr/bin/modifyrepo"])
result = checks.check({'createrepo_c': False}) result = checks.check({"createrepo_c": False})
self.assertIn('createrepo', out.getvalue()) self.assertIn("createrepo", out.getvalue())
self.assertFalse(result) self.assertFalse(result)
def test_requires_modifyrepo_c(self): def test_requires_modifyrepo_c(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/modifyrepo_c']) exists.side_effect = self.dont_find(["/usr/bin/modifyrepo_c"])
result = checks.check({'createrepo_c': True}) result = checks.check({"createrepo_c": True})
self.assertIn('createrepo_c', out.getvalue()) self.assertIn("createrepo_c", out.getvalue())
self.assertFalse(result) self.assertFalse(result)
def test_requires_createrepo_c(self): def test_requires_createrepo_c(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c']) exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
result = checks.check({}) result = checks.check({})
self.assertIn('createrepo_c', out.getvalue()) self.assertIn("createrepo_c", out.getvalue())
self.assertFalse(result) self.assertFalse(result)
def test_doesnt_require_createrepo_c_if_configured(self): def test_doesnt_require_createrepo_c_if_configured(self):
conf = { conf = {
'createrepo_c': False, "createrepo_c": False,
} }
with mock.patch('sys.stdout', new_callable=StringIO) as out: with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists: with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c']) exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
result = checks.check(conf) result = checks.check(conf)
self.assertNotIn('createrepo_c', out.getvalue()) self.assertNotIn("createrepo_c", out.getvalue())
self.assertTrue(result) self.assertTrue(result)
@ -175,7 +173,7 @@ class TestSchemaValidator(unittest.TestCase):
conf.load_from_string(string) conf.load_from_string(string)
return conf return conf
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_property(self, make_schema): def test_property(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -198,7 +196,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 0) self.assertEqual(len(warnings), 0)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_alias_property(self, make_schema): def test_alias_property(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -218,10 +216,13 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1) self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*") self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_required_is_missing(self, make_schema): def test_required_is_missing(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -241,11 +242,13 @@ class TestSchemaValidator(unittest.TestCase):
config = self._load_conf_from_string(string) config = self._load_conf_from_string(string)
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 1) self.assertEqual(len(errors), 1)
self.assertIn("Failed validation in : 'release_name' is a required property", errors) self.assertIn(
"Failed validation in : 'release_name' is a required property", errors
)
self.assertEqual(len(warnings), 1) self.assertEqual(len(warnings), 1)
self.assertIn("WARNING: Unrecognized config option: name.", warnings) self.assertIn("WARNING: Unrecognized config option: name.", warnings)
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_required_is_in_alias(self, make_schema): def test_required_is_in_alias(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -266,10 +269,13 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1) self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*") self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_redundant_alias(self, make_schema): def test_redundant_alias(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -290,12 +296,18 @@ class TestSchemaValidator(unittest.TestCase):
config = self._load_conf_from_string(string) config = self._load_conf_from_string(string)
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 1) self.assertEqual(len(errors), 1)
self.assertRegexpMatches(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*") self.assertRegexpMatches(
errors[0],
r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*",
)
self.assertEqual(len(warnings), 1) self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*") self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_properties_in_deep(self, make_schema): def test_properties_in_deep(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -303,15 +315,10 @@ class TestSchemaValidator(unittest.TestCase):
"type": "object", "type": "object",
"properties": { "properties": {
"release_name": {"type": "string", "alias": "product_name"}, "release_name": {"type": "string", "alias": "product_name"},
"keys": { "keys": {"type": "array", "items": {"type": "string"}},
"type": "array",
"items": {"type": "string"},
},
"foophase": { "foophase": {
"type": "object", "type": "object",
"properties": { "properties": {"repo": {"type": "string", "alias": "tree"}},
"repo": {"type": "string", "alias": "tree"},
},
"additionalProperties": False, "additionalProperties": False,
"required": ["repo"], "required": ["repo"],
}, },
@ -331,22 +338,27 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2) self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*") self.assertRegexpMatches(
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*") warnings[0],
r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os") self.assertEqual(
config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os"
)
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_append_option(self, make_schema): def test_append_option(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration", "title": "Pungi Configuration",
"type": "object", "type": "object",
"definitions": { "definitions": {
"list_of_strings": { "list_of_strings": {"type": "array", "items": {"type": "string"}},
"type": "array",
"items": {"type": "string"},
},
"strings": { "strings": {
"anyOf": [ "anyOf": [
{"type": "string"}, {"type": "string"},
@ -356,7 +368,7 @@ class TestSchemaValidator(unittest.TestCase):
}, },
"properties": { "properties": {
"release_name": {"type": "string"}, "release_name": {"type": "string"},
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"} "repo": {"$ref": "#/definitions/strings", "append": "repo_from"},
}, },
"additionalProperties": False, "additionalProperties": False,
} }
@ -371,22 +383,25 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2) self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*") self.assertRegexpMatches(
self.assertRegexpMatches(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'") warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"]) self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_append_to_nonexist_option(self, make_schema): def test_append_to_nonexist_option(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration", "title": "Pungi Configuration",
"type": "object", "type": "object",
"definitions": { "definitions": {
"list_of_strings": { "list_of_strings": {"type": "array", "items": {"type": "string"}},
"type": "array",
"items": {"type": "string"},
},
"strings": { "strings": {
"anyOf": [ "anyOf": [
{"type": "string"}, {"type": "string"},
@ -396,7 +411,7 @@ class TestSchemaValidator(unittest.TestCase):
}, },
"properties": { "properties": {
"release_name": {"type": "string"}, "release_name": {"type": "string"},
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"} "repo": {"$ref": "#/definitions/strings", "append": "repo_from"},
}, },
"additionalProperties": False, "additionalProperties": False,
} }
@ -410,22 +425,25 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2) self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*") self.assertRegexpMatches(
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,") warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"]) self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_multiple_appends(self, make_schema): def test_multiple_appends(self, make_schema):
schema = { schema = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration", "title": "Pungi Configuration",
"type": "object", "type": "object",
"definitions": { "definitions": {
"list_of_strings": { "list_of_strings": {"type": "array", "items": {"type": "string"}},
"type": "array",
"items": {"type": "string"},
},
"strings": { "strings": {
"anyOf": [ "anyOf": [
{"type": "string"}, {"type": "string"},
@ -437,8 +455,8 @@ class TestSchemaValidator(unittest.TestCase):
"release_name": {"type": "string"}, "release_name": {"type": "string"},
"repo": { "repo": {
"$ref": "#/definitions/strings", "$ref": "#/definitions/strings",
"append": ["repo_from", "source_repo_from"] "append": ["repo_from", "source_repo_from"],
} },
}, },
"additionalProperties": False, "additionalProperties": False,
} }
@ -453,14 +471,28 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 4) self.assertEqual(len(warnings), 4)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*") self.assertRegexpMatches(
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,") warnings[0],
self.assertRegexpMatches(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'") r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
self.assertRegexpMatches(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.") )
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
)
self.assertRegexpMatches(
warnings[2],
r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'",
)
self.assertRegexpMatches(
warnings[3],
r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.",
)
self.assertEqual(config.get("release_name", None), "dummy product") self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server", "Client"]) self.assertEqual(
config.get("repo", None), ["http://url/to/repo", "Server", "Client"]
)
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_anyof_validator_not_raise_our_warnings_as_error(self, make_schema): def test_anyof_validator_not_raise_our_warnings_as_error(self, make_schema):
# https://pagure.io/pungi/issue/598 # https://pagure.io/pungi/issue/598
schema = { schema = {
@ -470,26 +502,21 @@ class TestSchemaValidator(unittest.TestCase):
"definitions": { "definitions": {
"live_image_config": { "live_image_config": {
"type": "object", "type": "object",
"properties": { "properties": {"repo": {"type": "string", "append": "repo_from"}},
"repo": {
"type": "string",
"append": "repo_from",
},
},
}, },
}, },
"properties": { "properties": {
"live_images": checks._variant_arch_mapping({ "live_images": checks._variant_arch_mapping(
{
"anyOf": [ "anyOf": [
{"$ref": "#/definitions/live_image_config"}, {"$ref": "#/definitions/live_image_config"},
{ {
"type": "array", "type": "array",
"items": { "items": {"$ref": "#/definitions/live_image_config"},
"$ref": "#/definitions/live_image_config" },
}
}
] ]
}), }
),
}, },
} }
make_schema.return_value = schema make_schema.return_value = schema
@ -506,12 +533,20 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config) errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0) self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2) self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*") self.assertRegexpMatches(
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*") warnings[0],
self.assertEqual(config.get("live_images")[0][1]['armhfp']['repo'], 'Everything') r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*",
)
self.assertEqual(
config.get("live_images")[0][1]["armhfp"]["repo"], "Everything"
)
@mock.patch("pungi.util.resolve_git_url") @mock.patch("pungi.util.resolve_git_url")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_resolve_url(self, make_schema, resolve_git_url): def test_resolve_url(self, make_schema, resolve_git_url):
resolve_git_url.return_value = "git://example.com/repo.git#CAFE" resolve_git_url.return_value = "git://example.com/repo.git#CAFE"
make_schema.return_value = { make_schema.return_value = {
@ -527,7 +562,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(config["foo"], resolve_git_url.return_value) self.assertEqual(config["foo"], resolve_git_url.return_value)
@mock.patch("pungi.util.resolve_git_url") @mock.patch("pungi.util.resolve_git_url")
@mock.patch('pungi.checks.make_schema') @mock.patch("pungi.checks.make_schema")
def test_resolve_url_when_offline(self, make_schema, resolve_git_url): def test_resolve_url_when_offline(self, make_schema, resolve_git_url):
make_schema.return_value = { make_schema.return_value = {
"$schema": "http://json-schema.org/draft-04/schema#", "$schema": "http://json-schema.org/draft-04/schema#",
@ -594,8 +629,13 @@ class TestUmask(unittest.TestCase):
checks.check_umask(logger) checks.check_umask(logger)
self.assertEqual( self.assertEqual(
logger.mock_calls, logger.mock_calls,
[mock.call.warning('Unusually strict umask detected (0%03o), ' [
'expect files with broken permissions.', 0o044)] mock.call.warning(
"Unusually strict umask detected (0%03o), "
"expect files with broken permissions.",
0o044,
)
],
) )

View File

@ -2,6 +2,7 @@
import logging import logging
import mock import mock
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -18,7 +19,9 @@ from pungi.compose import Compose
class ConfigWrapper(dict): class ConfigWrapper(dict):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(ConfigWrapper, self).__init__(*args, **kwargs) super(ConfigWrapper, self).__init__(*args, **kwargs)
self._open_file = '%s/fixtures/config.conf' % os.path.abspath(os.path.dirname(__file__)) self._open_file = "%s/fixtures/config.conf" % os.path.abspath(
os.path.dirname(__file__)
)
class ComposeTestCase(unittest.TestCase): class ComposeTestCase(unittest.TestCase):
@ -28,17 +31,19 @@ class ComposeTestCase(unittest.TestCase):
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmp_dir) shutil.rmtree(self.tmp_dir)
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_setup_logger(self, ci): def test_setup_logger(self, ci):
conf = {} conf = {}
logger = logging.getLogger('test_setup_logger') logger = logging.getLogger("test_setup_logger")
compose = Compose(conf, self.tmp_dir, logger=logger) compose = Compose(conf, self.tmp_dir, logger=logger)
self.assertEqual(len(logger.handlers), 2) self.assertEqual(len(logger.handlers), 2)
pungi_log = logger.handlers[0].stream.name pungi_log = logger.handlers[0].stream.name
exclude_arch_log = logger.handlers[1].stream.name exclude_arch_log = logger.handlers[1].stream.name
self.assertEqual(os.path.basename(pungi_log), 'pungi.global.log') self.assertEqual(os.path.basename(pungi_log), "pungi.global.log")
self.assertEqual(os.path.basename(exclude_arch_log), 'excluding-arch.global.log') self.assertEqual(
os.path.basename(exclude_arch_log), "excluding-arch.global.log"
)
msg = "test log" msg = "test log"
compose.log_info(msg) compose.log_info(msg)
@ -52,240 +57,322 @@ class ComposeTestCase(unittest.TestCase):
with open(exclude_arch_log) as f: with open(exclude_arch_log) as f:
self.assertTrue(msg in f.read()) self.assertTrue(msg in f.read())
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_can_fail(self, ci): def test_can_fail(self, ci):
conf = { conf = {
'failable_deliverables': [ "failable_deliverables": [
('^.*$', { (
'*': ['buildinstall'], "^.*$",
'i386': ['buildinstall', 'live', 'iso'], {"*": ["buildinstall"], "i386": ["buildinstall", "live", "iso"]},
}), ),
] ]
} }
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
variant = mock.Mock(uid='Server') variant = mock.Mock(uid="Server")
self.assertTrue(compose.can_fail(variant, 'x86_64', 'buildinstall')) self.assertTrue(compose.can_fail(variant, "x86_64", "buildinstall"))
self.assertFalse(compose.can_fail(variant, 'x86_64', 'live')) self.assertFalse(compose.can_fail(variant, "x86_64", "live"))
self.assertTrue(compose.can_fail(variant, 'i386', 'live')) self.assertTrue(compose.can_fail(variant, "i386", "live"))
self.assertFalse(compose.can_fail(None, 'x86_64', 'live')) self.assertFalse(compose.can_fail(None, "x86_64", "live"))
self.assertTrue(compose.can_fail(None, 'i386', 'live')) self.assertTrue(compose.can_fail(None, "i386", "live"))
self.assertTrue(compose.can_fail(variant, '*', 'buildinstall')) self.assertTrue(compose.can_fail(variant, "*", "buildinstall"))
self.assertFalse(compose.can_fail(variant, '*', 'live')) self.assertFalse(compose.can_fail(variant, "*", "live"))
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name(self, ci): def test_get_image_name(self, ci):
conf = {} conf = {}
variant = mock.Mock(uid='Server', type='variant') variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id' ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'RC-1.0' ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = '1' ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0' ci.return_value.release.version = "3.0"
ci.return_value.release.short = 'rel_short' ci.return_value.release.short = "rel_short"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
keys = ['arch', 'compose_id', 'date', 'disc_num', 'disc_type', keys = [
'label', 'label_major_version', 'release_short', 'respin', "arch",
'suffix', 'type', 'type_suffix', 'variant', 'version'] "compose_id",
format = '-'.join(['%(' + k + ')s' for k in keys]) "date",
name = compose.get_image_name('x86_64', variant, format=format, "disc_num",
disc_num=7, disc_type='live', suffix='.iso') "disc_type",
"label",
"label_major_version",
"release_short",
"respin",
"suffix",
"type",
"type_suffix",
"variant",
"version",
]
format = "-".join(["%(" + k + ")s" for k in keys])
name = compose.get_image_name(
"x86_64",
variant,
format=format,
disc_num=7,
disc_type="live",
suffix=".iso",
)
self.assertEqual(name, '-'.join(['x86_64', 'compose_id', '20160107', '7', 'live', self.assertEqual(
'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly', name,
'.n', 'Server', '3.0'])) "-".join(
[
"x86_64",
"compose_id",
"20160107",
"7",
"live",
"RC-1.0",
"1",
"rel_short",
"2",
".iso",
"nightly",
".n",
"Server",
"3.0",
]
),
)
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_variant_mapping(self, ci): def test_get_image_name_variant_mapping(self, ci):
conf = {"image_name_format": {"^Server$": "whatever"}} conf = {"image_name_format": {"^Server$": "whatever"}}
variant = mock.Mock(uid='Server', type='variant') variant = mock.Mock(uid="Server", type="variant")
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
name = compose.get_image_name( name = compose.get_image_name(
'x86_64', variant, disc_num=7, disc_type='live', suffix='.iso' "x86_64", variant, disc_num=7, disc_type="live", suffix=".iso"
) )
self.assertEqual(name, "whatever") self.assertEqual(name, "whatever")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_variant_mapping_no_match(self, ci): def test_get_image_name_variant_mapping_no_match(self, ci):
conf = {"image_name_format": {"^Client$": "whatever"}} conf = {"image_name_format": {"^Client$": "whatever"}}
variant = mock.Mock(uid='Server', type='variant') variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.id = 'compose_id' ci.return_value.compose.id = "compose_id"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
name = compose.get_image_name( name = compose.get_image_name(
'x86_64', variant, disc_num=7, disc_type='live', suffix='.iso' "x86_64", variant, disc_num=7, disc_type="live", suffix=".iso"
) )
self.assertEqual(name, "compose_id-Server-x86_64-live7.iso") self.assertEqual(name, "compose_id-Server-x86_64-live7.iso")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_layered_product(self, ci): def test_get_image_name_layered_product(self, ci):
conf = {} conf = {}
variant = mock.Mock(uid='Server-LP', type='layered-product') variant = mock.Mock(uid="Server-LP", type="layered-product")
variant.parent = mock.Mock(uid='Server') variant.parent = mock.Mock(uid="Server")
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id' ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'RC-1.0' ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = '1' ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0' ci.return_value.release.version = "3.0"
ci.return_value.release.short = 'rel_short' ci.return_value.release.short = "rel_short"
ci.return_value['Server-LP'].compose_id = 'Gluster 1.0' ci.return_value["Server-LP"].compose_id = "Gluster 1.0"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
format = '{compose_id} {variant}' format = "{compose_id} {variant}"
name = compose.get_image_name('x86_64', variant, format=format, name = compose.get_image_name(
disc_num=7, disc_type='live', suffix='.iso') "x86_64",
variant,
format=format,
disc_num=7,
disc_type="live",
suffix=".iso",
)
self.assertEqual(name, 'Gluster 1.0 Server') self.assertEqual(name, "Gluster 1.0 Server")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_type_netinst(self, ci): def test_get_image_name_type_netinst(self, ci):
conf = {} conf = {}
variant = mock.Mock(uid='Server', type='variant') variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id' ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'RC-1.0' ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = '1' ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0' ci.return_value.release.version = "3.0"
ci.return_value.release.short = 'rel_short' ci.return_value.release.short = "rel_short"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
keys = ['arch', 'compose_id', 'date', 'disc_num', 'disc_type', keys = [
'label', 'label_major_version', 'release_short', 'respin', "arch",
'suffix', 'type', 'type_suffix', 'variant', 'version'] "compose_id",
format = '-'.join(['%(' + k + ')s' for k in keys]) "date",
name = compose.get_image_name('x86_64', variant, format=format, "disc_num",
disc_num=7, disc_type='netinst', suffix='.iso') "disc_type",
"label",
"label_major_version",
"release_short",
"respin",
"suffix",
"type",
"type_suffix",
"variant",
"version",
]
format = "-".join(["%(" + k + ")s" for k in keys])
name = compose.get_image_name(
"x86_64",
variant,
format=format,
disc_num=7,
disc_type="netinst",
suffix=".iso",
)
self.assertEqual(name, '-'.join(['x86_64', 'compose_id', '20160107', '7', 'netinst', self.assertEqual(
'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly', name,
'.n', 'Server', '3.0'])) "-".join(
[
"x86_64",
"compose_id",
"20160107",
"7",
"netinst",
"RC-1.0",
"1",
"rel_short",
"2",
".iso",
"nightly",
".n",
"Server",
"3.0",
]
),
)
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_release(self, ci): def test_image_release(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = None ci.return_value.compose.label = None
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '20160107.n.2') self.assertEqual(compose.image_release, "20160107.n.2")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_release_production(self, ci): def test_image_release_production(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'production' ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = '' ci.return_value.compose.type_suffix = ""
ci.return_value.compose.label = None ci.return_value.compose.label = None
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '20160107.2') self.assertEqual(compose.image_release, "20160107.2")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_release_from_label(self, ci): def test_image_release_from_label(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'production' ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'Alpha-1.2' ci.return_value.compose.label = "Alpha-1.2"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '1.2') self.assertEqual(compose.image_release, "1.2")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_version_without_label(self, ci): def test_image_version_without_label(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = None ci.return_value.compose.label = None
ci.return_value.release.version = '25' ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25') self.assertEqual(compose.image_version, "25")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_version_with_label(self, ci): def test_image_version_with_label(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'Alpha-1.2' ci.return_value.compose.label = "Alpha-1.2"
ci.return_value.release.version = '25' ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25_Alpha') self.assertEqual(compose.image_version, "25_Alpha")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_image_version_with_label_rc(self, ci): def test_image_version_with_label_rc(self, ci):
conf = {} conf = {}
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'nightly' ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = 'RC-1.2' ci.return_value.compose.label = "RC-1.2"
ci.return_value.release.version = '25' ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25') self.assertEqual(compose.image_version, "25")
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_without_filter(self, ci): def test_get_variant_arches_without_filter(self, ci):
ci.return_value.compose.id = 'composeid' ci.return_value.compose.id = "composeid"
conf = ConfigWrapper( conf = ConfigWrapper(
variants_file={'scm': 'file', variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
'repo': None, release_name="Test",
'file': 'variants.xml'}, release_version="1.0",
release_name='Test', release_short="test",
release_version='1.0', release_type="ga",
release_short='test',
release_type='ga',
release_internal=False, release_internal=False,
) )
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
compose.read_variants() compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()), self.assertEqual(
['Client', 'Crashy', 'Live', 'Server']) sorted(v.uid for v in compose.variants.values()),
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()), ["Client", "Crashy", "Live", "Server"],
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional']) )
self.assertEqual(
sorted(v.uid for v in compose.variants["Server"].variants.values()),
["Server-Gluster", "Server-ResilientStorage", "Server-optional"],
)
six.assertCountEqual( six.assertCountEqual(
self, compose.variants["Client"].arches, ["i386", "x86_64"] self, compose.variants["Client"].arches, ["i386", "x86_64"]
) )
@ -303,37 +390,48 @@ class ComposeTestCase(unittest.TestCase):
six.assertCountEqual( six.assertCountEqual(
self, self,
compose.variants["Server"].variants["optional"].arches, compose.variants["Server"].variants["optional"].arches,
["s390x", "x86_64"] ["s390x", "x86_64"],
) )
self.assertEqual([v.uid for v in compose.get_variants()], self.assertEqual(
['Client', 'Crashy', 'Live', 'Server', 'Server-Gluster', [v.uid for v in compose.get_variants()],
'Server-ResilientStorage', 'Server-optional']) [
self.assertEqual(compose.get_arches(), ['i386', 'ppc64le', 's390x', 'x86_64']) "Client",
"Crashy",
"Live",
"Server",
"Server-Gluster",
"Server-ResilientStorage",
"Server-optional",
],
)
self.assertEqual(compose.get_arches(), ["i386", "ppc64le", "s390x", "x86_64"])
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_arch_filter(self, ci): def test_get_variant_arches_with_arch_filter(self, ci):
ci.return_value.compose.id = 'composeid' ci.return_value.compose.id = "composeid"
conf = ConfigWrapper( conf = ConfigWrapper(
variants_file={'scm': 'file', variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
'repo': None, release_name="Test",
'file': 'variants.xml'}, release_version="1.0",
release_name='Test', release_short="test",
release_version='1.0', release_type="ga",
release_short='test',
release_type='ga',
release_internal=False, release_internal=False,
tree_arches=['x86_64'], tree_arches=["x86_64"],
) )
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
compose.read_variants() compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()), self.assertEqual(
['Client', 'Live', 'Server']) sorted(v.uid for v in compose.variants.values()),
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()), ["Client", "Live", "Server"],
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional']) )
self.assertEqual(
sorted(v.uid for v in compose.variants["Server"].variants.values()),
["Server-Gluster", "Server-ResilientStorage", "Server-optional"],
)
self.assertEqual(compose.variants["Client"].arches, ["x86_64"]) self.assertEqual(compose.variants["Client"].arches, ["x86_64"])
self.assertEqual(compose.variants["Live"].arches, ["x86_64"]) self.assertEqual(compose.variants["Live"].arches, ["x86_64"])
self.assertEqual(compose.variants["Server"].arches, ["x86_64"]) self.assertEqual(compose.variants["Server"].arches, ["x86_64"])
@ -347,36 +445,43 @@ class ComposeTestCase(unittest.TestCase):
compose.variants["Server"].variants["optional"].arches, ["x86_64"] compose.variants["Server"].variants["optional"].arches, ["x86_64"]
) )
self.assertEqual(compose.get_arches(), ['x86_64']) self.assertEqual(compose.get_arches(), ["x86_64"])
self.assertEqual([v.uid for v in compose.get_variants()], self.assertEqual(
['Client', 'Live', 'Server', 'Server-Gluster', [v.uid for v in compose.get_variants()],
'Server-ResilientStorage', 'Server-optional']) [
"Client",
"Live",
"Server",
"Server-Gluster",
"Server-ResilientStorage",
"Server-optional",
],
)
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_variant_filter(self, ci): def test_get_variant_arches_with_variant_filter(self, ci):
ci.return_value.compose.id = 'composeid' ci.return_value.compose.id = "composeid"
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'production' ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
conf = ConfigWrapper( conf = ConfigWrapper(
variants_file={'scm': 'file', variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
'repo': None, release_name="Test",
'file': 'variants.xml'}, release_version="1.0",
release_name='Test', release_short="test",
release_version='1.0', release_type="ga",
release_short='test',
release_type='ga',
release_internal=False, release_internal=False,
tree_variants=['Server', 'Client', 'Server-Gluster'], tree_variants=["Server", "Client", "Server-Gluster"],
) )
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
compose.read_variants() compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()), self.assertEqual(
['Client', 'Server']) sorted(v.uid for v in compose.variants.values()), ["Client", "Server"]
)
six.assertCountEqual( six.assertCountEqual(
self, compose.variants["Client"].arches, ["i386", "x86_64"] self, compose.variants["Client"].arches, ["i386", "x86_64"]
) )
@ -387,77 +492,84 @@ class ComposeTestCase(unittest.TestCase):
compose.variants["Server"].variants["Gluster"].arches, ["x86_64"] compose.variants["Server"].variants["Gluster"].arches, ["x86_64"]
) )
self.assertEqual(compose.get_arches(), ['i386', 's390x', 'x86_64']) self.assertEqual(compose.get_arches(), ["i386", "s390x", "x86_64"])
self.assertEqual([v.uid for v in compose.get_variants()], self.assertEqual(
['Client', 'Server', 'Server-Gluster']) [v.uid for v in compose.get_variants()],
["Client", "Server", "Server-Gluster"],
)
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_both_filters(self, ci): def test_get_variant_arches_with_both_filters(self, ci):
ci.return_value.compose.id = 'composeid' ci.return_value.compose.id = "composeid"
ci.return_value.compose.respin = 2 ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107' ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = 'production' ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = '.n' ci.return_value.compose.type_suffix = ".n"
logger = mock.Mock() logger = mock.Mock()
logger.handlers = [] logger.handlers = []
conf = ConfigWrapper( conf = ConfigWrapper(
variants_file={'scm': 'file', variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
'repo': None, release_name="Test",
'file': 'variants.xml'}, release_version="1.0",
release_name='Test', release_short="test",
release_version='1.0', release_type="ga",
release_short='test',
release_type='ga',
release_internal=False, release_internal=False,
tree_variants=['Server', 'Client', 'Server-optional'], tree_variants=["Server", "Client", "Server-optional"],
tree_arches=['x86_64'], tree_arches=["x86_64"],
) )
compose = Compose(conf, self.tmp_dir, logger=logger) compose = Compose(conf, self.tmp_dir, logger=logger)
compose.read_variants() compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()), self.assertEqual(
['Client', 'Server']) sorted(v.uid for v in compose.variants.values()), ["Client", "Server"]
)
self.assertEqual(compose.variants["Client"].arches, ["x86_64"]) self.assertEqual(compose.variants["Client"].arches, ["x86_64"])
self.assertEqual(compose.variants["Server"].arches, ["x86_64"]) self.assertEqual(compose.variants["Server"].arches, ["x86_64"])
self.assertEqual( self.assertEqual(
compose.variants["Server"].variants["optional"].arches, ["x86_64"] compose.variants["Server"].variants["optional"].arches, ["x86_64"]
) )
self.assertEqual(compose.get_arches(), ['x86_64']) self.assertEqual(compose.get_arches(), ["x86_64"])
self.assertEqual([v.uid for v in compose.get_variants()], self.assertEqual(
['Client', 'Server', 'Server-optional']) [v.uid for v in compose.get_variants()],
["Client", "Server", "Server-optional"],
)
six.assertCountEqual( six.assertCountEqual(
self, self,
logger.info.call_args_list, logger.info.call_args_list,
[mock.call('Excluding variant Live: filtered by configuration.'), [
mock.call('Excluding variant Crashy: all its arches are filtered.'), mock.call("Excluding variant Live: filtered by configuration."),
mock.call('Excluding variant Server-ResilientStorage: filtered by configuration.'), mock.call("Excluding variant Crashy: all its arches are filtered."),
mock.call('Excluding variant Server-Gluster: filtered by configuration.')] mock.call(
"Excluding variant Server-ResilientStorage: filtered by configuration."
),
mock.call(
"Excluding variant Server-Gluster: filtered by configuration."
),
],
) )
@mock.patch('pungi.compose.ComposeInfo') @mock.patch("pungi.compose.ComposeInfo")
def test_mkdtemp(self, ci): def test_mkdtemp(self, ci):
ci.return_value.compose.id = 'composeid' ci.return_value.compose.id = "composeid"
conf = ConfigWrapper( conf = ConfigWrapper(
variants_file={'scm': 'file', variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
'repo': None, release_name="Test",
'file': 'variants.xml'}, release_version="1.0",
release_name='Test', release_short="test",
release_version='1.0', release_type="ga",
release_short='test',
release_type='ga',
release_internal=False, release_internal=False,
tree_variants=['Server', 'Client', 'Server-optional'], tree_variants=["Server", "Client", "Server-optional"],
tree_arches=['x86_64'], tree_arches=["x86_64"],
) )
compose = Compose(conf, self.tmp_dir) compose = Compose(conf, self.tmp_dir)
d = compose.mkdtemp() d = compose.mkdtemp()
self.assertTrue(os.path.isdir(d)) self.assertTrue(os.path.isdir(d))
d = compose.mkdtemp(prefix='tweak_buildinstall') d = compose.mkdtemp(prefix="tweak_buildinstall")
self.assertTrue(os.path.isdir(d)) self.assertTrue(os.path.isdir(d))
@ -466,7 +578,7 @@ class StatusTest(unittest.TestCase):
self.tmp_dir = tempfile.mkdtemp() self.tmp_dir = tempfile.mkdtemp()
self.logger = mock.Mock() self.logger = mock.Mock()
self.logger.handlers = [] self.logger.handlers = []
with mock.patch('pungi.compose.ComposeInfo'): with mock.patch("pungi.compose.ComposeInfo"):
self.compose = Compose({}, self.tmp_dir, logger=self.logger) self.compose = Compose({}, self.tmp_dir, logger=self.logger)
def tearDown(self): def tearDown(self):
@ -477,78 +589,82 @@ class StatusTest(unittest.TestCase):
self.assertIsNone(status) self.assertIsNone(status)
def test_get_status_existing(self): def test_get_status_existing(self):
with open(os.path.join(self.tmp_dir, 'STATUS'), 'w') as f: with open(os.path.join(self.tmp_dir, "STATUS"), "w") as f:
f.write('FOOBAR') f.write("FOOBAR")
self.assertEqual(self.compose.get_status(), 'FOOBAR') self.assertEqual(self.compose.get_status(), "FOOBAR")
def test_get_status_is_dir(self): def test_get_status_is_dir(self):
os.mkdir(os.path.join(self.tmp_dir, 'STATUS')) os.mkdir(os.path.join(self.tmp_dir, "STATUS"))
self.assertIsNone(self.compose.get_status()) self.assertIsNone(self.compose.get_status())
def test_write_status(self): def test_write_status(self):
self.compose.write_status('DOOMED') self.compose.write_status("DOOMED")
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f: with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), 'DOOMED\n') self.assertEqual(f.read(), "DOOMED\n")
def test_write_non_standard_status(self): def test_write_non_standard_status(self):
self.compose.write_status('FOOBAR') self.compose.write_status("FOOBAR")
self.assertEqual(self.logger.log.call_count, 1) self.assertEqual(self.logger.log.call_count, 1)
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f: with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), 'FOOBAR\n') self.assertEqual(f.read(), "FOOBAR\n")
def test_write_status_on_finished(self): def test_write_status_on_finished(self):
self.compose.write_status('FINISHED') self.compose.write_status("FINISHED")
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
self.compose.write_status('NOT REALLY') self.compose.write_status("NOT REALLY")
def test_write_status_with_failed_deliverables(self): def test_write_status_with_failed_deliverables(self):
self.compose.conf = { self.compose.conf = {
'failable_deliverables': [ "failable_deliverables": [("^.+$", {"*": ["live", "build-image"]})]
('^.+$', {
'*': ['live', 'build-image'],
})
]
} }
variant = mock.Mock(uid='Server') variant = mock.Mock(uid="Server")
self.compose.fail_deliverable(variant, 'x86_64', 'live') self.compose.fail_deliverable(variant, "x86_64", "live")
self.compose.fail_deliverable(None, '*', 'build-image') self.compose.fail_deliverable(None, "*", "build-image")
self.compose.write_status('FINISHED') self.compose.write_status("FINISHED")
self.logger.log.assert_has_calls( self.logger.log.assert_has_calls(
[mock.call(20, 'Failed build-image on variant <>, arch <*>, subvariant <None>.'), [
mock.call(20, 'Failed live on variant <Server>, arch <x86_64>, subvariant <None>.')], mock.call(
any_order=True) 20, "Failed build-image on variant <>, arch <*>, subvariant <None>."
),
mock.call(
20,
"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.",
),
],
any_order=True,
)
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f: with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), 'FINISHED_INCOMPLETE\n') self.assertEqual(f.read(), "FINISHED_INCOMPLETE\n")
def test_calls_notifier(self): def test_calls_notifier(self):
self.compose.notifier = mock.Mock() self.compose.notifier = mock.Mock()
self.compose.write_status('FINISHED') self.compose.write_status("FINISHED")
self.assertTrue(self.compose.notifier.send.call_count, 1) self.assertTrue(self.compose.notifier.send.call_count, 1)
def test_no_database_with_dnf_backend(self): def test_no_database_with_dnf_backend(self):
self.compose.conf['gather_backend'] = 'dnf' self.compose.conf["gather_backend"] = "dnf"
self.assertFalse(self.compose.should_create_yum_database) self.assertFalse(self.compose.should_create_yum_database)
def test_no_database_with_dnf_backend_config_override(self): def test_no_database_with_dnf_backend_config_override(self):
self.compose.conf['gather_backend'] = 'dnf' self.compose.conf["gather_backend"] = "dnf"
self.compose.conf['createrepo_database'] = True self.compose.conf["createrepo_database"] = True
self.assertTrue(self.compose.should_create_yum_database) self.assertTrue(self.compose.should_create_yum_database)
def test_no_database_with_yum_backend(self): def test_no_database_with_yum_backend(self):
self.compose.conf['gather_backend'] = 'yum' self.compose.conf["gather_backend"] = "yum"
self.assertTrue(self.compose.should_create_yum_database) self.assertTrue(self.compose.should_create_yum_database)
def test_no_database_with_yum_backend_config_override(self): def test_no_database_with_yum_backend_config_override(self):
self.compose.conf['gather_backend'] = 'yum' self.compose.conf["gather_backend"] = "yum"
self.compose.conf['createrepo_database'] = False self.compose.conf["createrepo_database"] = False
self.assertFalse(self.compose.should_create_yum_database) self.assertFalse(self.compose.should_create_yum_database)

View File

@ -12,29 +12,40 @@ import sys
from pungi.wrappers.comps import CompsWrapper, CompsFilter, CompsValidationError from pungi.wrappers.comps import CompsWrapper, CompsFilter, CompsValidationError
from tests.helpers import BaseTestCase, FIXTURE_DIR from tests.helpers import BaseTestCase, FIXTURE_DIR
COMPS_FILE = os.path.join(FIXTURE_DIR, 'comps.xml') COMPS_FILE = os.path.join(FIXTURE_DIR, "comps.xml")
COMPS_FORMATTED_FILE = os.path.join(FIXTURE_DIR, 'comps-formatted.xml') COMPS_FORMATTED_FILE = os.path.join(FIXTURE_DIR, "comps-formatted.xml")
COMPS_GROUP_FILE = os.path.join(FIXTURE_DIR, 'comps-group.xml') COMPS_GROUP_FILE = os.path.join(FIXTURE_DIR, "comps-group.xml")
COMPS_ENVIRONMENT_FILE = os.path.join(FIXTURE_DIR, 'comps-env.xml') COMPS_ENVIRONMENT_FILE = os.path.join(FIXTURE_DIR, "comps-env.xml")
COMPS_FILE_WITH_TYPO = os.path.join(FIXTURE_DIR, 'comps-typo.xml') COMPS_FILE_WITH_TYPO = os.path.join(FIXTURE_DIR, "comps-typo.xml")
COMPS_FILE_WITH_WHITESPACE = os.path.join(FIXTURE_DIR, 'comps-ws.xml') COMPS_FILE_WITH_WHITESPACE = os.path.join(FIXTURE_DIR, "comps-ws.xml")
class CompsWrapperTest(BaseTestCase): class CompsWrapperTest(BaseTestCase):
def setUp(self): def setUp(self):
self.file = tempfile.NamedTemporaryFile(prefix='comps-wrapper-test-') self.file = tempfile.NamedTemporaryFile(prefix="comps-wrapper-test-")
def test_get_groups(self): def test_get_groups(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
self.assertEqual( self.assertEqual(
sorted(comps.get_comps_groups()), sorted(comps.get_comps_groups()),
sorted(['core', 'standard', 'text-internet', 'firefox', 'resilient-storage', 'basic-desktop'])) sorted(
[
"core",
"standard",
"text-internet",
"firefox",
"resilient-storage",
"basic-desktop",
]
),
)
def test_get_packages(self): def test_get_packages(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
self.assertEqual( self.assertEqual(
sorted(comps.get_packages('text-internet')), sorted(comps.get_packages("text-internet")),
sorted(['dummy-elinks', 'dummy-tftp'])) sorted(["dummy-elinks", "dummy-tftp"]),
)
def test_get_langpacks(self): def test_get_langpacks(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
@ -44,13 +55,13 @@ class CompsWrapperTest(BaseTestCase):
"aspell": "aspell-%s", "aspell": "aspell-%s",
"firefox": "firefox-langpack-%s", "firefox": "firefox-langpack-%s",
"kdelibs": "kde-l10n-%s", "kdelibs": "kde-l10n-%s",
} },
) )
def test_get_packages_for_non_existing_group(self): def test_get_packages_for_non_existing_group(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
with self.assertRaises(KeyError): with self.assertRaises(KeyError):
comps.get_packages('foo') comps.get_packages("foo")
def test_write_comps(self): def test_write_comps(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
@ -59,44 +70,44 @@ class CompsWrapperTest(BaseTestCase):
def test_filter_groups(self): def test_filter_groups(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
unmatched = comps.filter_groups([ unmatched = comps.filter_groups(
[
{"name": "core", "glob": False, "default": False, "uservisible": True}, {"name": "core", "glob": False, "default": False, "uservisible": True},
{"name": "*a*", "glob": True, "default": None, "uservisible": None}, {"name": "*a*", "glob": True, "default": None, "uservisible": None},
]) ]
)
self.assertEqual(unmatched, set()) self.assertEqual(unmatched, set())
comps.write_comps(target_file=self.file.name) comps.write_comps(target_file=self.file.name)
self.assertFilesEqual(COMPS_GROUP_FILE, self.file.name) self.assertFilesEqual(COMPS_GROUP_FILE, self.file.name)
def test_filter_groups_unused_filter(self): def test_filter_groups_unused_filter(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
unmatched = comps.filter_groups([ unmatched = comps.filter_groups(
{"name": "boom", "glob": False, "default": False, "uservisible": True}, [{"name": "boom", "glob": False, "default": False, "uservisible": True}]
]) )
self.assertEqual(unmatched, set(["boom"])) self.assertEqual(unmatched, set(["boom"]))
def test_filter_environments(self): def test_filter_environments(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
comps.filter_environments([ comps.filter_environments([{"name": "minimal", "display_order": 10}])
{"name": "minimal", "display_order": 10}
])
comps.write_comps(target_file=self.file.name) comps.write_comps(target_file=self.file.name)
self.assertFilesEqual(COMPS_ENVIRONMENT_FILE, self.file.name) self.assertFilesEqual(COMPS_ENVIRONMENT_FILE, self.file.name)
def test_read_display_order(self): def test_read_display_order(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
groups = [ groups = [{"name": "minimal", "display_order": None}]
{"name": "minimal", "display_order": None}
]
comps.filter_environments(groups) comps.filter_environments(groups)
self.assertEqual(groups, [{"name": "minimal", "display_order": 99, "groups": ["core"]}]) self.assertEqual(
groups, [{"name": "minimal", "display_order": 99, "groups": ["core"]}]
)
def test_report_typo_in_package_type(self): def test_report_typo_in_package_type(self):
comps = CompsWrapper(COMPS_FILE_WITH_TYPO) comps = CompsWrapper(COMPS_FILE_WITH_TYPO)
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
comps.write_comps(target_file=self.file.name) comps.write_comps(target_file=self.file.name)
self.assertIn( self.assertIn(
'Package dummy-bash in group core has unknown type', "Package dummy-bash in group core has unknown type", str(ctx.exception)
str(ctx.exception)) )
def test_validate_correct(self): def test_validate_correct(self):
comps = CompsWrapper(COMPS_FILE) comps = CompsWrapper(COMPS_FILE)
@ -121,81 +132,87 @@ class CompsWrapperTest(BaseTestCase):
) )
COMPS_IN_FILE = os.path.join(FIXTURE_DIR, 'comps.xml.in') COMPS_IN_FILE = os.path.join(FIXTURE_DIR, "comps.xml.in")
class CompsFilterTest(unittest.TestCase): class CompsFilterTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.filter = CompsFilter(COMPS_IN_FILE, reindent=True) self.filter = CompsFilter(COMPS_IN_FILE, reindent=True)
self.output = tempfile.NamedTemporaryFile(prefix='comps-filter-test-') self.output = tempfile.NamedTemporaryFile(prefix="comps-filter-test-")
def assertOutput(self, filepath): def assertOutput(self, filepath):
self.filter.write(self.output) self.filter.write(self.output)
self.output.flush() self.output.flush()
with open(self.output.name, 'r') as f: with open(self.output.name, "r") as f:
actual = f.read().strip().replace('utf-8', 'UTF-8') actual = f.read().strip().replace("utf-8", "UTF-8")
with open(filepath, 'r') as f: with open(filepath, "r") as f:
expected = f.read().strip() expected = f.read().strip()
self.maxDiff = None self.maxDiff = None
self.assertEqual(expected, actual) self.assertEqual(expected, actual)
def test_filter_packages(self): def test_filter_packages(self):
self.filter.filter_packages('ppc64le', None) self.filter.filter_packages("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-packages.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-packages.xml"))
def test_filter_packages_with_variant(self): def test_filter_packages_with_variant(self):
self.filter.filter_packages('ppc64le', 'Server') self.filter.filter_packages("ppc64le", "Server")
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-packages-variant.xml')) self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-packages-variant.xml")
)
def test_filter_groups(self): def test_filter_groups(self):
self.filter.filter_groups('ppc64le', None) self.filter.filter_groups("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-groups.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-groups.xml"))
def test_filter_groups_with_variant(self): def test_filter_groups_with_variant(self):
self.filter.filter_groups('ppc64le', 'Server') self.filter.filter_groups("ppc64le", "Server")
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-groups-variant.xml')) self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-groups-variant.xml")
)
def test_filter_environments(self): def test_filter_environments(self):
self.filter.filter_environments('ppc64le', None) self.filter.filter_environments("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-environments.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-environments.xml"))
def test_filter_environments_variant(self): def test_filter_environments_variant(self):
self.filter.filter_environments('ppc64le', 'Client') self.filter.filter_environments("ppc64le", "Client")
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-environments-variant.xml')) self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-environments-variant.xml")
)
def test_remove_categories(self): def test_remove_categories(self):
self.filter.remove_categories() self.filter.remove_categories()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-categories.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-categories.xml"))
def test_remove_langpacks(self): def test_remove_langpacks(self):
self.filter.remove_langpacks() self.filter.remove_langpacks()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-langpacks.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-langpacks.xml"))
def test_remove_translations(self): def test_remove_translations(self):
self.filter.remove_translations() self.filter.remove_translations()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-translations.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-translations.xml"))
def test_remove_environments(self): def test_remove_environments(self):
self.filter.remove_environments() self.filter.remove_environments()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-environments.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-environments.xml"))
def test_cleanup(self): def test_cleanup(self):
self.filter.cleanup() self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup.xml"))
def test_cleanup_after_filter(self): def test_cleanup_after_filter(self):
self.filter.filter_packages('ppc64le', None) self.filter.filter_packages("ppc64le", None)
self.filter.cleanup() self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-filter.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-filter.xml"))
def test_cleanup_after_filter_keep_group(self): def test_cleanup_after_filter_keep_group(self):
self.filter.filter_packages('ppc64le', None) self.filter.filter_packages("ppc64le", None)
self.filter.cleanup(['standard']) self.filter.cleanup(["standard"])
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-keep.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-keep.xml"))
def test_cleanup_all(self): def test_cleanup_all(self):
self.filter.filter_packages('ppc64le', None) self.filter.filter_packages("ppc64le", None)
self.filter.filter_groups('ppc64le', None) self.filter.filter_groups("ppc64le", None)
self.filter.filter_environments('ppc64le', None) self.filter.filter_environments("ppc64le", None)
self.filter.cleanup() self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-all.xml')) self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-all.xml"))

View File

@ -23,49 +23,46 @@ class ConfigTestCase(unittest.TestCase):
class PkgsetConfigTestCase(ConfigTestCase): class PkgsetConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self): def test_validate_minimal_pkgset_koji(self):
cfg = load_config( cfg = load_config(pkgset_source="koji",)
pkgset_source='koji',
)
self.assertValidation(cfg) self.assertValidation(cfg)
def test_validate_minimal_pkgset_repos(self): def test_validate_minimal_pkgset_repos(self):
cfg = load_config( cfg = load_config(
pkgset_source='repos', pkgset_source="repos",
pkgset_repos={'x86_64': '/first', 'ppc64': '/second'}, pkgset_repos={"x86_64": "/first", "ppc64": "/second"},
) )
self.assertValidation(cfg) self.assertValidation(cfg)
def test_pkgset_mismatch_repos(self): def test_pkgset_mismatch_repos(self):
cfg = load_config( cfg = load_config(
pkgset_source='repos', pkgset_source="repos", pkgset_koji_tag="f25", pkgset_koji_inherit=False,
pkgset_koji_tag='f25',
pkgset_koji_inherit=False,
) )
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.REQUIRES.format('pkgset_source', 'repos', 'pkgset_repos'), [
checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_tag'), checks.REQUIRES.format("pkgset_source", "repos", "pkgset_repos"),
checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_inherit')]) checks.CONFLICTS.format("pkgset_source", "repos", "pkgset_koji_tag"),
checks.CONFLICTS.format(
"pkgset_source", "repos", "pkgset_koji_inherit"
),
],
)
def test_pkgset_mismatch_koji(self): def test_pkgset_mismatch_koji(self):
cfg = load_config( cfg = load_config(pkgset_source="koji", pkgset_repos={"whatever": "/foo"},)
pkgset_source='koji',
pkgset_repos={'whatever': '/foo'},
)
self.assertValidation( self.assertValidation(
cfg, cfg, [checks.CONFLICTS.format("pkgset_source", "koji", "pkgset_repos")]
[checks.CONFLICTS.format('pkgset_source', 'koji', 'pkgset_repos')]) )
def test_pkgset_multiple_koji_tags(self): def test_pkgset_multiple_koji_tags(self):
cfg = load_config( cfg = load_config(
pkgset_source='koji', pkgset_source="koji",
pkgset_koji_tag=['f25', 'f25-extra'], pkgset_koji_tag=["f25", "f25-extra"],
pkgset_koji_inherit=False, pkgset_koji_inherit=False,
) )
self.assertValidation(cfg) self.assertValidation(cfg)
@ -73,215 +70,222 @@ class PkgsetConfigTestCase(ConfigTestCase):
class ReleaseConfigTestCase(ConfigTestCase): class ReleaseConfigTestCase(ConfigTestCase):
def test_set_release_is_layered(self): def test_set_release_is_layered(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, release_is_layered=True)
PKGSET_REPOS,
release_is_layered=True
)
self.assertValidation( self.assertValidation(
cfg, cfg,
warnings=[ warnings=[
"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."]) "WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."
],
)
def test_only_config_base_product_name(self): def test_only_config_base_product_name(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, base_product_name="Prod",)
PKGSET_REPOS,
base_product_name='Prod',
)
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.REQUIRES.format('base_product_name', 'Prod', 'base_product_short'), [
checks.REQUIRES.format('base_product_name', 'Prod', 'base_product_version'), checks.REQUIRES.format(
checks.CONFLICTS.format('base_product_short', None, 'base_product_name'), "base_product_name", "Prod", "base_product_short"
checks.CONFLICTS.format('base_product_version', None, 'base_product_name')]) ),
checks.REQUIRES.format(
"base_product_name", "Prod", "base_product_version"
),
checks.CONFLICTS.format(
"base_product_short", None, "base_product_name"
),
checks.CONFLICTS.format(
"base_product_version", None, "base_product_name"
),
],
)
def test_only_config_base_product_short(self): def test_only_config_base_product_short(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, base_product_short="bp",)
PKGSET_REPOS,
base_product_short='bp',
)
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.REQUIRES.format('base_product_short', 'bp', 'base_product_name'), [
checks.REQUIRES.format('base_product_short', 'bp', 'base_product_version'), checks.REQUIRES.format("base_product_short", "bp", "base_product_name"),
checks.CONFLICTS.format('base_product_name', None, 'base_product_short'), checks.REQUIRES.format(
checks.CONFLICTS.format('base_product_version', None, 'base_product_short')]) "base_product_short", "bp", "base_product_version"
),
checks.CONFLICTS.format(
"base_product_name", None, "base_product_short"
),
checks.CONFLICTS.format(
"base_product_version", None, "base_product_short"
),
],
)
def test_only_config_base_product_version(self): def test_only_config_base_product_version(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, base_product_version="1.0",)
PKGSET_REPOS,
base_product_version='1.0',
)
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.REQUIRES.format('base_product_version', '1.0', 'base_product_name'), [
checks.REQUIRES.format('base_product_version', '1.0', 'base_product_short'), checks.REQUIRES.format(
checks.CONFLICTS.format('base_product_name', None, 'base_product_version'), "base_product_version", "1.0", "base_product_name"
checks.CONFLICTS.format('base_product_short', None, 'base_product_version')]) ),
checks.REQUIRES.format(
"base_product_version", "1.0", "base_product_short"
),
checks.CONFLICTS.format(
"base_product_name", None, "base_product_version"
),
checks.CONFLICTS.format(
"base_product_short", None, "base_product_version"
),
],
)
class ImageNameConfigTestCase(ConfigTestCase): class ImageNameConfigTestCase(ConfigTestCase):
def test_image_name_simple_string(self): def test_image_name_simple_string(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, image_name_format="foobar",)
PKGSET_REPOS,
image_name_format="foobar",
)
self.assertValidation(cfg, []) self.assertValidation(cfg, [])
def test_image_name_variant_mapping(self): def test_image_name_variant_mapping(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, image_name_format={"^Server$": "foobar"},)
PKGSET_REPOS,
image_name_format={"^Server$": "foobar"},
)
self.assertValidation(cfg, []) self.assertValidation(cfg, [])
class RunrootConfigTestCase(ConfigTestCase): class RunrootConfigTestCase(ConfigTestCase):
def test_set_runroot_true(self): def test_set_runroot_true(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, runroot=True,)
PKGSET_REPOS,
runroot=True,
)
self.assertValidation( self.assertValidation(
cfg, cfg,
warnings=["WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."]) warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
],
)
def test_set_runroot_false(self): def test_set_runroot_false(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, runroot=False,)
PKGSET_REPOS,
runroot=False,
)
self.assertValidation( self.assertValidation(
cfg, cfg,
warnings=["WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."]) warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
],
)
class BuildinstallConfigTestCase(ConfigTestCase): class BuildinstallConfigTestCase(ConfigTestCase):
def test_bootable_deprecated(self): def test_bootable_deprecated(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, bootable=True,)
PKGSET_REPOS,
bootable=True,
)
self.assertValidation( self.assertValidation(
cfg, cfg,
warnings=['WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer.']) warnings=[
"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer."
],
)
def test_buildinstall_method_without_bootable(self): def test_buildinstall_method_without_bootable(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax",)
PKGSET_REPOS,
buildinstall_method='lorax',
)
self.assertValidation( self.assertValidation(cfg, [])
cfg,
[])
def test_buildinstall_with_lorax_options(self): def test_buildinstall_with_lorax_options(self):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
buildinstall_method='buildinstall', buildinstall_method="buildinstall",
lorax_options=[('^Server$', {})] lorax_options=[("^Server$", {})],
) )
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.CONFLICTS.format('buildinstall_method', 'buildinstall', 'lorax_options')]) [
checks.CONFLICTS.format(
"buildinstall_method", "buildinstall", "lorax_options"
)
],
)
def test_lorax_with_lorax_options(self): def test_lorax_with_lorax_options(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax", lorax_options=[])
PKGSET_REPOS,
buildinstall_method='lorax',
lorax_options=[]
)
self.assertValidation(cfg) self.assertValidation(cfg)
def test_lorax_options_without_bootable_and_method(self): def test_lorax_options_without_bootable_and_method(self):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
lorax_options=[('^Server$', {})], lorax_options=[("^Server$", {})],
buildinstall_kickstart='foo', buildinstall_kickstart="foo",
) )
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.CONFLICTS.format('buildinstall_method', 'None', 'lorax_options'), [
checks.CONFLICTS.format('buildinstall_method', 'None', 'buildinstall_kickstart')]) checks.CONFLICTS.format("buildinstall_method", "None", "lorax_options"),
checks.CONFLICTS.format(
"buildinstall_method", "None", "buildinstall_kickstart"
),
],
)
class CreaterepoConfigTestCase(ConfigTestCase): class CreaterepoConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self): def test_validate_minimal_pkgset_koji(self):
cfg = load_config( cfg = load_config(
pkgset_source='koji', pkgset_source="koji", pkgset_koji_tag="f25", product_id_allow_missing=True,
pkgset_koji_tag="f25",
product_id_allow_missing=True,
) )
self.assertValidation( self.assertValidation(
cfg, cfg,
[checks.CONFLICTS.format('product_id', 'None', 'product_id_allow_missing')]) [checks.CONFLICTS.format("product_id", "None", "product_id_allow_missing")],
)
class GatherConfigTestCase(ConfigTestCase): class GatherConfigTestCase(ConfigTestCase):
def test_dnf_backend_is_default_on_py3(self): def test_dnf_backend_is_default_on_py3(self):
cfg = load_config( cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
pkgset_source='koji',
pkgset_koji_tag='f27',
)
with mock.patch('six.PY2', new=False): with mock.patch("six.PY2", new=False):
self.assertValidation(cfg, []) self.assertValidation(cfg, [])
self.assertEqual(cfg['gather_backend'], 'dnf') self.assertEqual(cfg["gather_backend"], "dnf")
def test_yum_backend_is_default_on_py2(self): def test_yum_backend_is_default_on_py2(self):
cfg = load_config( cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
pkgset_source='koji',
pkgset_koji_tag='f27',
)
with mock.patch('six.PY2', new=True): with mock.patch("six.PY2", new=True):
self.assertValidation(cfg, []) self.assertValidation(cfg, [])
self.assertEqual(cfg['gather_backend'], 'yum') self.assertEqual(cfg["gather_backend"], "yum")
def test_yum_backend_is_rejected_on_py3(self): def test_yum_backend_is_rejected_on_py3(self):
cfg = load_config( cfg = load_config(
pkgset_source='koji', pkgset_source="koji", pkgset_koji_tag="f27", gather_backend="yum",
pkgset_koji_tag='f27',
gather_backend='yum',
) )
with mock.patch('six.PY2', new=False): with mock.patch("six.PY2", new=False):
self.assertValidation( self.assertValidation(
cfg, cfg,
["Failed validation in gather_backend: 'yum' is not one of ['dnf']"]) ["Failed validation in gather_backend: 'yum' is not one of ['dnf']"],
)
class OSBSConfigTestCase(ConfigTestCase): class OSBSConfigTestCase(ConfigTestCase):
def test_validate(self): def test_validate(self):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
osbs={"^Server$": { osbs={
'url': 'http://example.com', "^Server$": {
'target': 'f25-build', "url": "http://example.com",
'git_branch': 'f25', "target": "f25-build",
}} "git_branch": "f25",
}
},
) )
self.assertValidation(cfg) self.assertValidation(cfg)
def test_validate_bad_conf(self): def test_validate_bad_conf(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, osbs="yes please")
PKGSET_REPOS,
osbs='yes please'
)
self.assertNotEqual(checks.validate(cfg), ([], [])) self.assertNotEqual(checks.validate(cfg), ([], []))
@ -291,25 +295,25 @@ class OstreeConfigTestCase(ConfigTestCase):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
ostree=[ ostree=[
("^Atomic$", { (
"^Atomic$",
{
"x86_64": { "x86_64": {
"treefile": "fedora-atomic-docker-host.json", "treefile": "fedora-atomic-docker-host.json",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git", "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"repo": "Everything", "repo": "Everything",
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/", "ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
"version": '!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN', "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
} }
}) },
] )
],
) )
self.assertValidation(cfg) self.assertValidation(cfg)
def test_validate_bad_conf(self): def test_validate_bad_conf(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, ostree="yes please")
PKGSET_REPOS,
ostree='yes please'
)
self.assertNotEqual(checks.validate(cfg), ([], [])) self.assertNotEqual(checks.validate(cfg), ([], []))
@ -319,26 +323,33 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
ostree_installer=[ ostree_installer=[
("^Atomic$", { (
"^Atomic$",
{
"x86_64": { "x86_64": {
"repo": "Everything", "repo": "Everything",
"release": None, "release": None,
"installpkgs": ["fedora-productimg-atomic"], "installpkgs": ["fedora-productimg-atomic"],
"add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"], "add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
],
"add_template_var": [ "add_template_var": [
"ostree_osname=fedora-atomic", "ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
], ],
"add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"], "add_arch_template": [
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
],
"rootfs_size": "3", "rootfs_size": "3",
"add_arch_template_var": [ "add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_osname=fedora-atomic", "ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
] ],
} }
}) },
] )
],
) )
self.assertValidation(cfg) self.assertValidation(cfg)
@ -347,12 +358,16 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
ostree_installer=[ ostree_installer=[
("^Atomic$", { (
"^Atomic$",
{
"x86_64": { "x86_64": {
"repo": "Everything", "repo": "Everything",
"release": None, "release": None,
"installpkgs": ["fedora-productimg-atomic"], "installpkgs": ["fedora-productimg-atomic"],
"add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"], "add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
],
"add_template_var": [ "add_template_var": [
"ostree_osname=fedora-atomic", "ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
@ -362,10 +377,11 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/", "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_osname=fedora-atomic", "ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host", "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
] ],
} }
}) },
] )
],
) )
self.assertNotEqual(checks.validate(cfg), ([], [])) self.assertNotEqual(checks.validate(cfg), ([], []))
@ -376,10 +392,10 @@ class LiveMediaConfigTestCase(ConfigTestCase):
def test_global_config_validation(self, resolve_git_url): def test_global_config_validation(self, resolve_git_url):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS,
live_media_ksurl='git://example.com/repo.git#HEAD', live_media_ksurl="git://example.com/repo.git#HEAD",
live_media_target='f24', live_media_target="f24",
live_media_release='RRR', live_media_release="RRR",
live_media_version='Rawhide', live_media_version="Rawhide",
) )
resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE") resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")
@ -388,53 +404,50 @@ class LiveMediaConfigTestCase(ConfigTestCase):
self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE") self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
def test_global_config_null_release(self): def test_global_config_null_release(self):
cfg = load_config( cfg = load_config(PKGSET_REPOS, live_media_release=None,)
PKGSET_REPOS,
live_media_release=None,
)
self.assertValidation(cfg) self.assertValidation(cfg)
class TestSuggestions(ConfigTestCase): class TestSuggestions(ConfigTestCase):
def test_with_a_typo(self): def test_with_a_typo(self):
cfg = load_config(PKGSET_REPOS, cfg = load_config(PKGSET_REPOS, product_pid=None)
product_pid=None)
self.assertValidation(cfg, [], [checks.UNKNOWN_SUGGEST.format('product_pid', 'product_id')]) self.assertValidation(
cfg, [], [checks.UNKNOWN_SUGGEST.format("product_pid", "product_id")]
)
class TestRegexValidation(ConfigTestCase): class TestRegexValidation(ConfigTestCase):
def test_incorrect_regular_expression(self): def test_incorrect_regular_expression(self):
cfg = load_config(PKGSET_REPOS, cfg = load_config(PKGSET_REPOS, multilib=[("^*$", {"*": []})])
multilib=[('^*$', {'*': []})])
msg = 'Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat' msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat"
if six.PY3: if six.PY3:
msg += ' at position 1' msg += " at position 1"
self.assertValidation(cfg, [msg], []) self.assertValidation(cfg, [msg], [])
class RepoclosureTestCase(ConfigTestCase): class RepoclosureTestCase(ConfigTestCase):
def test_invalid_backend(self): def test_invalid_backend(self):
cfg = load_config( cfg = load_config(
PKGSET_REPOS, PKGSET_REPOS, repoclosure_backend="fnd", # Intentionally with a typo
repoclosure_backend='fnd', # Intentionally with a typo
) )
options = ['yum', 'dnf'] if six.PY2 else ['dnf'] options = ["yum", "dnf"] if six.PY2 else ["dnf"]
self.assertValidation( self.assertValidation(
cfg, cfg,
["Failed validation in repoclosure_backend: 'fnd' is not one of %s" % options]) [
"Failed validation in repoclosure_backend: 'fnd' is not one of %s"
% options
],
)
class VariantAsLookasideTestCase(ConfigTestCase): class VariantAsLookasideTestCase(ConfigTestCase):
def test_empty(self): def test_empty(self):
variant_as_lookaside = [] variant_as_lookaside = []
cfg = load_config( cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
PKGSET_REPOS,
variant_as_lookaside=variant_as_lookaside,
)
self.assertValidation(cfg) self.assertValidation(cfg)
def test_basic(self): def test_basic(self):
@ -443,20 +456,14 @@ class VariantAsLookasideTestCase(ConfigTestCase):
("Server", "Client"), ("Server", "Client"),
("Everything", "Spin"), ("Everything", "Spin"),
] ]
cfg = load_config( cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
PKGSET_REPOS,
variant_as_lookaside=variant_as_lookaside,
)
self.assertValidation(cfg) self.assertValidation(cfg)
class SkipPhasesTestCase(ConfigTestCase): class SkipPhasesTestCase(ConfigTestCase):
def test_empty(self): def test_empty(self):
skip_phases = [] skip_phases = []
cfg = load_config( cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
PKGSET_REPOS,
skip_phases=skip_phases,
)
self.assertValidation(cfg) self.assertValidation(cfg)
def test_basic(self): def test_basic(self):
@ -464,10 +471,7 @@ class SkipPhasesTestCase(ConfigTestCase):
"buildinstall", "buildinstall",
"gather", "gather",
] ]
cfg = load_config( cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
PKGSET_REPOS,
skip_phases=skip_phases,
)
self.assertValidation(cfg) self.assertValidation(cfg)
def test_bad_phase_name(self): def test_bad_phase_name(self):
@ -475,8 +479,5 @@ class SkipPhasesTestCase(ConfigTestCase):
"gather", "gather",
"non-existing-phase_name", "non-existing-phase_name",
] ]
cfg = load_config( cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
PKGSET_REPOS,
skip_phases=skip_phases,
)
self.assertNotEqual(checks.validate(cfg), ([], [])) self.assertNotEqual(checks.validate(cfg), ([], []))

File diff suppressed because it is too large Load Diff

View File

@ -11,206 +11,384 @@ from pungi import createiso
class CreateIsoScriptTest(helpers.PungiTestCase): class CreateIsoScriptTest(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(CreateIsoScriptTest, self).setUp() super(CreateIsoScriptTest, self).setUp()
self.outdir = os.path.join(self.topdir, 'isos') self.outdir = os.path.join(self.topdir, "isos")
self.out = StringIO() self.out = StringIO()
self.maxDiff = None self.maxDiff = None
def assertScript(self, cmds): def assertScript(self, cmds):
script = self.out.getvalue().strip().split('\n') script = self.out.getvalue().strip().split("\n")
self.assertEqual(script[:3], self.assertEqual(script[:3], ["#!/bin/bash", "set -ex", "cd %s" % self.outdir])
['#!/bin/bash',
'set -ex',
'cd %s' % self.outdir])
self.assertEqual(script[3:], cmds) self.assertEqual(script[3:], cmds)
def test_minimal_run(self): def test_minimal_run(self):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso', iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='x86_64', arch="x86_64",
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', [
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-input-charset', 'utf-8', '-x', './lost+found', "/usr/bin/genisoimage",
'-o', 'DP-1.0-20160405.t.3-x86_64.iso', "-untranslated-filenames",
'-graft-points', '-path-list', 'graft-list']), "-volid",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']), "DP-1.0-20160405.t.3",
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest'] "-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
]
) )
def test_bootable_run(self): def test_bootable_run(self):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso', iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='x86_64', arch="x86_64",
buildinstall_method='lorax', buildinstall_method="lorax",
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET, [
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', createiso.FIND_TEMPLATE_SNIPPET,
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-input-charset', 'utf-8', '-x', './lost+found', "/usr/bin/genisoimage",
'-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat', "-untranslated-filenames",
'-no-emul-boot', "-volid",
'-boot-load-size', '4', '-boot-info-table', "DP-1.0-20160405.t.3",
'-eltorito-alt-boot', '-e', 'images/efiboot.img', "-J",
'-no-emul-boot', "-joliet-long",
'-o', 'DP-1.0-20160405.t.3-x86_64.iso', "-rational-rock",
'-graft-points', '-path-list', 'graft-list']), "-translation-table",
' '.join(['/usr/bin/isohybrid', '--uefi', 'DP-1.0-20160405.t.3-x86_64.iso']), "-input-charset",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']), "utf-8",
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest'] "-x",
"./lost+found",
"-b",
"isolinux/isolinux.bin",
"-c",
"isolinux/boot.cat",
"-no-emul-boot",
"-boot-load-size",
"4",
"-boot-info-table",
"-eltorito-alt-boot",
"-e",
"images/efiboot.img",
"-no-emul-boot",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(
["/usr/bin/isohybrid", "--uefi", "DP-1.0-20160405.t.3-x86_64.iso"]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
]
) )
def test_bootable_run_on_i386(self): def test_bootable_run_on_i386(self):
# This will call isohybrid, but not with --uefi switch # This will call isohybrid, but not with --uefi switch
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-i386.iso', iso_name="DP-1.0-20160405.t.3-i386.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='i386', arch="i386",
buildinstall_method='lorax', buildinstall_method="lorax",
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET, [
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', createiso.FIND_TEMPLATE_SNIPPET,
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-input-charset', 'utf-8', '-x', './lost+found', "/usr/bin/genisoimage",
'-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat', "-untranslated-filenames",
'-no-emul-boot', "-volid",
'-boot-load-size', '4', '-boot-info-table', "DP-1.0-20160405.t.3",
'-o', 'DP-1.0-20160405.t.3-i386.iso', "-J",
'-graft-points', '-path-list', 'graft-list']), "-joliet-long",
' '.join(['/usr/bin/isohybrid', 'DP-1.0-20160405.t.3-i386.iso']), "-rational-rock",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-i386.iso']), "-translation-table",
'isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest'] "-input-charset",
"utf-8",
"-x",
"./lost+found",
"-b",
"isolinux/isolinux.bin",
"-c",
"isolinux/boot.cat",
"-no-emul-boot",
"-boot-load-size",
"4",
"-boot-info-table",
"-o",
"DP-1.0-20160405.t.3-i386.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/isohybrid", "DP-1.0-20160405.t.3-i386.iso"]),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-i386.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest",
]
) )
def test_bootable_run_ppc64(self): def test_bootable_run_ppc64(self):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-ppc64.iso', iso_name="DP-1.0-20160405.t.3-ppc64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='ppc64', arch="ppc64",
buildinstall_method='lorax', buildinstall_method="lorax",
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET, [
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', createiso.FIND_TEMPLATE_SNIPPET,
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-x', './lost+found', "/usr/bin/genisoimage",
'-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop', "-untranslated-filenames",
'-allow-multidot', '-chrp-boot', '-map', '$TEMPLATE/config_files/ppc/mapping', "-volid",
'-hfs-bless', '/ppc/mac', "DP-1.0-20160405.t.3",
'-o', 'DP-1.0-20160405.t.3-ppc64.iso', "-J",
'-graft-points', '-path-list', 'graft-list']), "-joliet-long",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso']), "-rational-rock",
'isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest'] "-translation-table",
"-x",
"./lost+found",
"-part",
"-hfs",
"-r",
"-l",
"-sysid",
"PPC",
"-no-desktop",
"-allow-multidot",
"-chrp-boot",
"-map",
"$TEMPLATE/config_files/ppc/mapping",
"-hfs-bless",
"/ppc/mac",
"-o",
"DP-1.0-20160405.t.3-ppc64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
]
) )
def test_bootable_run_on_s390x(self): def test_bootable_run_on_s390x(self):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-s390x.iso', iso_name="DP-1.0-20160405.t.3-s390x.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='s390x', arch="s390x",
buildinstall_method='lorax', buildinstall_method="lorax",
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET, [
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', createiso.FIND_TEMPLATE_SNIPPET,
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-input-charset', 'utf-8', "/usr/bin/genisoimage",
'-x', './lost+found', "-untranslated-filenames",
'-eltorito-boot images/cdboot.img', '-no-emul-boot', "-volid",
'-o', 'DP-1.0-20160405.t.3-s390x.iso', "DP-1.0-20160405.t.3",
'-graft-points', '-path-list', 'graft-list']), "-J",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-s390x.iso']), "-joliet-long",
'isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest'] "-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-eltorito-boot images/cdboot.img",
"-no-emul-boot",
"-o",
"DP-1.0-20160405.t.3-s390x.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-s390x.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest",
]
) )
def test_bootable_run_buildinstall(self): def test_bootable_run_buildinstall(self):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-ppc64.iso', iso_name="DP-1.0-20160405.t.3-ppc64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='ppc64', arch="ppc64",
buildinstall_method='buildinstall', buildinstall_method="buildinstall",
), self.out) ),
self.out,
self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-x', './lost+found',
'-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop',
'-allow-multidot', '-chrp-boot',
'-map', '/usr/lib/anaconda-runtime/boot/mapping',
'-hfs-bless', '/ppc/mac',
'-o', 'DP-1.0-20160405.t.3-ppc64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest']
) )
@mock.patch('sys.stderr') self.assertScript(
@mock.patch('kobo.shortcuts.run') [
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-x",
"./lost+found",
"-part",
"-hfs",
"-r",
"-l",
"-sysid",
"PPC",
"-no-desktop",
"-allow-multidot",
"-chrp-boot",
"-map",
"/usr/lib/anaconda-runtime/boot/mapping",
"-hfs-bless",
"/ppc/mac",
"-o",
"DP-1.0-20160405.t.3-ppc64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
]
)
@mock.patch("sys.stderr")
@mock.patch("kobo.shortcuts.run")
def test_run_with_jigdo_bad_args(self, run, stderr): def test_run_with_jigdo_bad_args(self, run, stderr):
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso', iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='x86_64', arch="x86_64",
jigdo_dir='%s/jigdo' % self.topdir, jigdo_dir="%s/jigdo" % self.topdir,
), self.out) ),
self.out,
)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_with_jigdo(self, run): def test_run_with_jigdo(self, run):
createiso.write_script(createiso.CreateIsoOpts( createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir, output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso', iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid='DP-1.0-20160405.t.3', volid="DP-1.0-20160405.t.3",
graft_points='graft-list', graft_points="graft-list",
arch='x86_64', arch="x86_64",
jigdo_dir='%s/jigdo' % self.topdir, jigdo_dir="%s/jigdo" % self.topdir,
os_tree='%s/os' % self.topdir, os_tree="%s/os" % self.topdir,
), self.out) ),
self.out,
)
self.assertScript( self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames', [
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long', " ".join(
'-rational-rock', '-translation-table', [
'-input-charset', 'utf-8', '-x', './lost+found', "/usr/bin/genisoimage",
'-o', 'DP-1.0-20160405.t.3-x86_64.iso', "-untranslated-filenames",
'-graft-points', '-path-list', 'graft-list']), "-volid",
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']), "DP-1.0-20160405.t.3",
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest', "-J",
' '.join(['jigdo-file', 'make-template', '--force', "-joliet-long",
'--image=%s/isos/DP-1.0-20160405.t.3-x86_64.iso' % self.topdir, "-rational-rock",
'--jigdo=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.jigdo' % self.topdir, "-translation-table",
'--template=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.template' % self.topdir, "-input-charset",
'--no-servers-section', '--report=noprogress', self.topdir + '/os//'])] "utf-8",
"-x",
"./lost+found",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
" ".join(
[
"jigdo-file",
"make-template",
"--force",
"--image=%s/isos/DP-1.0-20160405.t.3-x86_64.iso" % self.topdir,
"--jigdo=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.jigdo"
% self.topdir,
"--template=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.template"
% self.topdir,
"--no-servers-section",
"--report=noprogress",
self.topdir + "/os//",
]
),
]
) )

View File

@ -14,12 +14,11 @@ from pungi.wrappers.createrepo import CreaterepoWrapper
class CreateRepoWrapperTest(unittest.TestCase): class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_c_cmd_minimal(self): def test_get_createrepo_c_cmd_minimal(self):
repo = CreaterepoWrapper() repo = CreaterepoWrapper()
cmd = repo.get_createrepo_cmd('/test/dir') cmd = repo.get_createrepo_cmd("/test/dir")
self.assertEqual(cmd[:2], ['createrepo_c', '/test/dir']) self.assertEqual(cmd[:2], ["createrepo_c", "/test/dir"])
six.assertCountEqual( six.assertCountEqual(
self, cmd[2:], ["--update", "--database", "--unique-md-filenames"] self, cmd[2:], ["--update", "--database", "--unique-md-filenames"]
) )
@ -27,35 +26,75 @@ class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_c_cmd_full(self): def test_get_createrepo_c_cmd_full(self):
repo = CreaterepoWrapper() repo = CreaterepoWrapper()
cmd = repo.get_createrepo_cmd( cmd = repo.get_createrepo_cmd(
'/test/dir', baseurl='http://base.example.com', excludes=['abc', 'xyz'], "/test/dir",
pkglist='/test/pkglist', groupfile='/test/comps', cachedir='/test/cache', baseurl="http://base.example.com",
update=False, update_md_path='/test/md_path', skip_stat=True, checkts=True, excludes=["abc", "xyz"],
split=True, pretty=False, database=False, checksum='sha256', unique_md_filenames=False, pkglist="/test/pkglist",
distro='Fedora', content=['c1', 'c2'], repo=['r1', 'r2'], revision='rev', deltas=True, groupfile="/test/comps",
oldpackagedirs='/test/old', num_deltas=2, workers=3, outputdir='/test/output', cachedir="/test/cache",
update=False,
update_md_path="/test/md_path",
skip_stat=True,
checkts=True,
split=True,
pretty=False,
database=False,
checksum="sha256",
unique_md_filenames=False,
distro="Fedora",
content=["c1", "c2"],
repo=["r1", "r2"],
revision="rev",
deltas=True,
oldpackagedirs="/test/old",
num_deltas=2,
workers=3,
outputdir="/test/output",
use_xz=True, use_xz=True,
extra_args=["--zck", "--zck-primary-dict=/foo/bar"], extra_args=["--zck", "--zck-primary-dict=/foo/bar"],
) )
self.maxDiff = None self.maxDiff = None
self.assertEqual(cmd[:2], ['createrepo_c', '/test/dir']) self.assertEqual(cmd[:2], ["createrepo_c", "/test/dir"])
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[2:], cmd[2:],
["--baseurl=http://base.example.com", "--excludes=abc", "--excludes=xyz", [
"--pkglist=/test/pkglist", "--groupfile=/test/comps", "--cachedir=/test/cache", "--baseurl=http://base.example.com",
"--skip-stat", "--update-md-path=/test/md_path", "--split", "--checkts", "--excludes=abc",
"--checksum=sha256", "--distro=Fedora", "--simple-md-filenames", "--no-database", "--excludes=xyz",
"--content=c1", "--content=c2", "--repo=r1", "--repo=r2", "--revision=rev", "--pkglist=/test/pkglist",
"--deltas", "--oldpackagedirs=/test/old", "--num-deltas=2", "--workers=3", "--groupfile=/test/comps",
"--outputdir=/test/output", "--xz", "--zck", "--zck-primary-dict=/foo/bar"], "--cachedir=/test/cache",
"--skip-stat",
"--update-md-path=/test/md_path",
"--split",
"--checkts",
"--checksum=sha256",
"--distro=Fedora",
"--simple-md-filenames",
"--no-database",
"--content=c1",
"--content=c2",
"--repo=r1",
"--repo=r2",
"--revision=rev",
"--deltas",
"--oldpackagedirs=/test/old",
"--num-deltas=2",
"--workers=3",
"--outputdir=/test/output",
"--xz",
"--zck",
"--zck-primary-dict=/foo/bar",
],
) )
def test_get_createrepo_cmd_minimal(self): def test_get_createrepo_cmd_minimal(self):
repo = CreaterepoWrapper(False) repo = CreaterepoWrapper(False)
cmd = repo.get_createrepo_cmd('/test/dir') cmd = repo.get_createrepo_cmd("/test/dir")
self.assertEqual(cmd[:2], ['createrepo', '/test/dir']) self.assertEqual(cmd[:2], ["createrepo", "/test/dir"])
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[2:], cmd[2:],
@ -65,24 +104,61 @@ class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_cmd_full(self): def test_get_createrepo_cmd_full(self):
repo = CreaterepoWrapper(False) repo = CreaterepoWrapper(False)
cmd = repo.get_createrepo_cmd( cmd = repo.get_createrepo_cmd(
'/test/dir', baseurl='http://base.example.com', excludes=['abc', 'xyz'], "/test/dir",
pkglist='/test/pkglist', groupfile='/test/comps', cachedir='/test/cache', baseurl="http://base.example.com",
update=False, update_md_path='/test/md_path', skip_stat=True, checkts=True, excludes=["abc", "xyz"],
split=True, pretty=False, database=False, checksum='sha256', unique_md_filenames=False, pkglist="/test/pkglist",
distro='Fedora', content=['c1', 'c2'], repo=['r1', 'r2'], revision='rev', deltas=True, groupfile="/test/comps",
oldpackagedirs='/test/old', num_deltas=2, workers=3, outputdir='/test/output' cachedir="/test/cache",
update=False,
update_md_path="/test/md_path",
skip_stat=True,
checkts=True,
split=True,
pretty=False,
database=False,
checksum="sha256",
unique_md_filenames=False,
distro="Fedora",
content=["c1", "c2"],
repo=["r1", "r2"],
revision="rev",
deltas=True,
oldpackagedirs="/test/old",
num_deltas=2,
workers=3,
outputdir="/test/output",
) )
self.maxDiff = None self.maxDiff = None
self.assertEqual(cmd[:2], ['createrepo', '/test/dir']) self.assertEqual(cmd[:2], ["createrepo", "/test/dir"])
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[2:], cmd[2:],
["--baseurl=http://base.example.com", "--excludes=abc", "--excludes=xyz", [
"--pkglist=/test/pkglist", "--groupfile=/test/comps", "--cachedir=/test/cache", "--baseurl=http://base.example.com",
"--skip-stat", "--update-md-path=/test/md_path", "--split", "--checkts", "--excludes=abc",
"--checksum=sha256", "--distro=Fedora", "--simple-md-filenames", "--no-database", "--excludes=xyz",
"--content=c1", "--content=c2", "--repo=r1", "--repo=r2", "--revision=rev", "--pkglist=/test/pkglist",
"--deltas", "--oldpackagedirs=/test/old", "--num-deltas=2", "--workers=3", "--groupfile=/test/comps",
"--outputdir=/test/output"], "--cachedir=/test/cache",
"--skip-stat",
"--update-md-path=/test/md_path",
"--split",
"--checkts",
"--checksum=sha256",
"--distro=Fedora",
"--simple-md-filenames",
"--no-database",
"--content=c1",
"--content=c2",
"--repo=r1",
"--repo=r2",
"--revision=rev",
"--deltas",
"--oldpackagedirs=/test/old",
"--num-deltas=2",
"--workers=3",
"--outputdir=/test/output",
],
) )

File diff suppressed because it is too large Load Diff

View File

@ -13,8 +13,7 @@ from tests import helpers
class TestExtraFilePhase(helpers.PungiTestCase): class TestExtraFilePhase(helpers.PungiTestCase):
@mock.patch("pungi.phases.extra_files.copy_extra_files")
@mock.patch('pungi.phases.extra_files.copy_extra_files')
def test_skips_unless_has_config(self, copy_extra_files): def test_skips_unless_has_config(self, copy_extra_files):
compose = helpers.DummyCompose(self.topdir, {}) compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None compose.just_phases = None
@ -22,15 +21,13 @@ class TestExtraFilePhase(helpers.PungiTestCase):
phase = extra_files.ExtraFilesPhase(compose, mock.Mock()) phase = extra_files.ExtraFilesPhase(compose, mock.Mock())
self.assertTrue(phase.skip()) self.assertTrue(phase.skip())
@mock.patch('pungi.phases.extra_files.copy_extra_files') @mock.patch("pungi.phases.extra_files.copy_extra_files")
def test_runs_copy_files_for_each_variant(self, copy_extra_files): def test_runs_copy_files_for_each_variant(self, copy_extra_files):
cfg = mock.Mock() cfg = mock.Mock()
pkgset_phase = mock.Mock() pkgset_phase = mock.Mock()
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'extra_files': [ self.topdir, {"extra_files": [("^.+$", {"x86_64": [cfg]})]}
('^.+$', {'x86_64': [cfg]}) )
]
})
phase = extra_files.ExtraFilesPhase(compose, pkgset_phase) phase = extra_files.ExtraFilesPhase(compose, pkgset_phase)
phase.run() phase.run()
@ -61,7 +58,6 @@ class TestExtraFilePhase(helpers.PungiTestCase):
class TestCopyFiles(helpers.PungiTestCase): class TestCopyFiles(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestCopyFiles, self).setUp() super(TestCopyFiles, self).setUp()
self.metadata = ExtraFiles() self.metadata = ExtraFiles()
@ -69,24 +65,27 @@ class TestCopyFiles(helpers.PungiTestCase):
self.variant = self.compose.variants["Server"] self.variant = self.compose.variants["Server"]
def test_copy_local_file(self): def test_copy_local_file(self):
tgt = os.path.join(self.topdir, 'file') tgt = os.path.join(self.topdir, "file")
helpers.touch(tgt) helpers.touch(tgt)
cfg = {'scm': 'file', 'file': tgt, 'repo': None} cfg = {"scm": "file", "file": tgt, "repo": None}
extra_files.copy_extra_files( extra_files.copy_extra_files(
self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata
) )
self.assertTrue(os.path.isfile(os.path.join( self.assertTrue(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file'))) os.path.isfile(
os.path.join(self.topdir, "compose", "Server", "x86_64", "os", "file")
)
)
def test_copy_multiple_sources(self): def test_copy_multiple_sources(self):
tgt1 = os.path.join(self.topdir, 'file') tgt1 = os.path.join(self.topdir, "file")
tgt2 = os.path.join(self.topdir, 'gpl') tgt2 = os.path.join(self.topdir, "gpl")
helpers.touch(tgt1) helpers.touch(tgt1)
helpers.touch(tgt2) helpers.touch(tgt2)
cfg1 = {'scm': 'file', 'file': tgt1, 'repo': None} cfg1 = {"scm": "file", "file": tgt1, "repo": None}
cfg2 = {'scm': 'file', 'file': tgt2, 'repo': None, 'target': 'license'} cfg2 = {"scm": "file", "file": tgt2, "repo": None, "target": "license"}
extra_files.copy_extra_files( extra_files.copy_extra_files(
self.compose, self.compose,
@ -94,32 +93,64 @@ class TestCopyFiles(helpers.PungiTestCase):
"x86_64", "x86_64",
self.variant, self.variant,
mock.Mock(), mock.Mock(),
self.metadata self.metadata,
) )
self.assertTrue(os.path.isfile(os.path.join( self.assertTrue(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file'))) os.path.isfile(
self.assertTrue(os.path.isfile(os.path.join( os.path.join(self.topdir, "compose", "Server", "x86_64", "os", "file")
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'license', 'gpl'))) )
)
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "license", "gpl"
)
)
)
def test_copy_local_dir(self): def test_copy_local_dir(self):
helpers.touch(os.path.join(self.topdir, 'src', 'file')) helpers.touch(os.path.join(self.topdir, "src", "file"))
helpers.touch(os.path.join(self.topdir, 'src', 'another')) helpers.touch(os.path.join(self.topdir, "src", "another"))
cfg = {'scm': 'file', 'dir': os.path.join(self.topdir, 'src'), cfg = {
'repo': None, 'target': 'subdir'} "scm": "file",
"dir": os.path.join(self.topdir, "src"),
"repo": None,
"target": "subdir",
}
extra_files.copy_extra_files( extra_files.copy_extra_files(
self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata
) )
self.assertTrue(os.path.isfile(os.path.join( self.assertTrue(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'subdir', 'file'))) os.path.isfile(
self.assertTrue(os.path.isfile(os.path.join( os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'subdir', 'another'))) self.topdir, "compose", "Server", "x86_64", "os", "subdir", "file"
)
)
)
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir,
"compose",
"Server",
"x86_64",
"os",
"subdir",
"another",
)
)
)
@mock.patch('pungi.phases.extra_files.get_file_from_scm') @mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch('pungi.phases.extra_files.get_dir_from_scm') @mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_external_rpm(self, get_dir_from_scm, get_file_from_scm): def test_copy_from_external_rpm(self, get_dir_from_scm, get_file_from_scm):
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'http://example.com/package.rpm'} cfg = {
"scm": "rpm",
"file": "file.txt",
"repo": "http://example.com/package.rpm",
}
get_file_from_scm.side_effect = self.fake_get_file get_file_from_scm.side_effect = self.fake_get_file
@ -129,26 +160,41 @@ class TestCopyFiles(helpers.PungiTestCase):
self.assertEqual(len(get_file_from_scm.call_args_list), 1) self.assertEqual(len(get_file_from_scm.call_args_list), 1)
self.assertEqual(get_dir_from_scm.call_args_list, []) self.assertEqual(get_dir_from_scm.call_args_list, [])
self.assertEqual(self.scm_dict, self.assertEqual(
{'scm': 'rpm', 'file': 'file.txt', 'repo': 'http://example.com/package.rpm'}) self.scm_dict,
{
"scm": "rpm",
"file": "file.txt",
"repo": "http://example.com/package.rpm",
},
)
self.assertTrue(os.path.isfile(os.path.join( self.assertTrue(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file.txt'))) os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "file.txt"
)
)
)
@mock.patch('pungi.phases.extra_files.get_file_from_scm') @mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch('pungi.phases.extra_files.get_dir_from_scm') @mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm): def test_copy_from_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm):
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': '%(variant_uid)s-data*'} cfg = {"scm": "rpm", "file": "file.txt", "repo": "%(variant_uid)s-data*"}
server_po, client_po, src_po = mock.Mock(), mock.Mock(), mock.Mock() server_po, client_po, src_po = mock.Mock(), mock.Mock(), mock.Mock()
server_po.configure_mock(name='Server-data-1.1-1.fc24.x86_64.rpm', server_po.configure_mock(
file_path='/server/location', name="Server-data-1.1-1.fc24.x86_64.rpm",
arch='x86_64') file_path="/server/location",
client_po.configure_mock(name='Client-data-1.1-1.fc24.x86_64.rpm', arch="x86_64",
file_path='/client/location', )
arch='x86_64') client_po.configure_mock(
src_po.configure_mock(name='extra-data-1.1-1.fc24.src.rpm', name="Client-data-1.1-1.fc24.x86_64.rpm",
file_path='/src/location', file_path="/client/location",
arch='src') arch="x86_64",
)
src_po.configure_mock(
name="extra-data-1.1-1.fc24.src.rpm", file_path="/src/location", arch="src"
)
package_sets = [ package_sets = [
{ {
"x86_64": { "x86_64": {
@ -168,21 +214,30 @@ class TestCopyFiles(helpers.PungiTestCase):
self.assertEqual(len(get_file_from_scm.call_args_list), 1) self.assertEqual(len(get_file_from_scm.call_args_list), 1)
self.assertEqual(get_dir_from_scm.call_args_list, []) self.assertEqual(get_dir_from_scm.call_args_list, [])
self.assertEqual(self.scm_dict, self.assertEqual(
{'scm': 'rpm', 'file': 'file.txt', 'repo': ['/server/location']}) self.scm_dict,
{"scm": "rpm", "file": "file.txt", "repo": ["/server/location"]},
)
self.assertTrue(os.path.isfile(os.path.join( self.assertTrue(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file.txt'))) os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "file.txt"
)
)
)
def fake_get_file(self, scm_dict, dest, compose): def fake_get_file(self, scm_dict, dest, compose):
self.scm_dict = scm_dict self.scm_dict = scm_dict
helpers.touch(os.path.join(dest, scm_dict['file'])) helpers.touch(os.path.join(dest, scm_dict["file"]))
return [scm_dict['file']] return [scm_dict["file"]]
@mock.patch('pungi.phases.extra_files.get_file_from_scm') @mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch('pungi.phases.extra_files.get_dir_from_scm') @mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_non_existing_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm): def test_copy_from_non_existing_rpm_in_compose(
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'bad-%(variant_uid_lower)s*'} self, get_dir_from_scm, get_file_from_scm
):
cfg = {"scm": "rpm", "file": "file.txt", "repo": "bad-%(variant_uid_lower)s*"}
package_sets = [{"x86_64": {}}] package_sets = [{"x86_64": {}}]
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
@ -191,7 +246,7 @@ class TestCopyFiles(helpers.PungiTestCase):
) )
self.assertRegexpMatches( self.assertRegexpMatches(
str(ctx.exception), r'No.*package.*matching bad-server\*.*' str(ctx.exception), r"No.*package.*matching bad-server\*.*"
) )
self.assertEqual(len(get_file_from_scm.call_args_list), 0) self.assertEqual(len(get_file_from_scm.call_args_list), 0)

File diff suppressed because it is too large Load Diff

View File

@ -101,12 +101,11 @@ class TestGetCmd(unittest.TestCase):
"x86_64", "x86_64",
"--repo=lookaside-0,lookaside,http:///tmp", "--repo=lookaside-0,lookaside,http:///tmp",
"@conf", "@conf",
] ],
) )
class TestWriteConfig(PungiTestCase): class TestWriteConfig(PungiTestCase):
def test_write_sorted_mix(self): def test_write_sorted_mix(self):
f = os.path.join(self.topdir, "solvables") f = os.path.join(self.topdir, "solvables")
fus.write_config(f, ["moda:master"], ["pkg", "foo"]) fus.write_config(f, ["moda:master"], ["pkg", "foo"])
@ -149,8 +148,7 @@ class TestParseOutput(unittest.TestCase):
touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n") touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n")
packages, modules = fus.parse_output(self.file) packages, modules = fus.parse_output(self.file)
self.assertEqual( self.assertEqual(
packages, packages, set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
) )
self.assertEqual(modules, set()) self.assertEqual(modules, set())

View File

@ -16,9 +16,11 @@ import logging
from six.moves import cStringIO from six.moves import cStringIO
from pungi.wrappers.pungi import PungiWrapper from pungi.wrappers.pungi import PungiWrapper
try: try:
from pungi.dnf_wrapper import DnfWrapper, Conf from pungi.dnf_wrapper import DnfWrapper, Conf
from pungi.gather_dnf import Gather, GatherOptions, PkgFlag from pungi.gather_dnf import Gather, GatherOptions, PkgFlag
HAS_DNF = True HAS_DNF = True
except ImportError: except ImportError:
HAS_DNF = False HAS_DNF = False
@ -36,18 +38,19 @@ def convert_pkg_map(data):
""" """
result = {} result = {}
for pkg_type in data: for pkg_type in data:
result[pkg_type] = sorted(set([os.path.basename(pkg['path']) result[pkg_type] = sorted(
for pkg in data[pkg_type]])) set([os.path.basename(pkg["path"]) for pkg in data[pkg_type]])
)
return result return result
class DepsolvingBase(object): class DepsolvingBase(object):
def setUp(self): def setUp(self):
self.tmp_dir = tempfile.mkdtemp(prefix="test_compose_") self.tmp_dir = tempfile.mkdtemp(prefix="test_compose_")
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/repo") self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/repo")
self.lookaside = os.path.join(os.path.dirname(__file__), self.lookaside = os.path.join(
"fixtures/repos/repo-krb5-lookaside") os.path.dirname(__file__), "fixtures/repos/repo-krb5-lookaside"
)
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmp_dir) shutil.rmtree(self.tmp_dir)
@ -88,8 +91,13 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-kernel", "dummy-kernel",
] ]
pkg_map = self.go(packages, None, greedy="none", fulltree=True, pkg_map = self.go(
fulltree_excludes=['dummy-kernel']) packages,
None,
greedy="none",
fulltree=True,
fulltree_excludes=["dummy-kernel"],
)
self.assertNotIn("dummy-kernel-3.1.0-1.i686.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-kernel-3.1.0-1.i686.rpm", pkg_map["rpm"])
@ -181,9 +189,9 @@ class DepsolvingBase(object):
def test_bash_exclude_debuginfo(self): def test_bash_exclude_debuginfo(self):
packages = [ packages = [
'dummy-bash', "dummy-bash",
'-dummy-bash-debuginfo', "-dummy-bash-debuginfo",
'-dummy-bash-debugsource', "-dummy-bash-debugsource",
] ]
pkg_map = self.go(packages, None, greedy="none") pkg_map = self.go(packages, None, greedy="none")
@ -219,9 +227,9 @@ class DepsolvingBase(object):
def test_bash_multilib_exclude_debuginfo(self): def test_bash_multilib_exclude_debuginfo(self):
packages = [ packages = [
'dummy-bash.+', "dummy-bash.+",
'-dummy-bash-debuginfo', "-dummy-bash-debuginfo",
'-dummy-bash-debugsource', "-dummy-bash-debugsource",
] ]
pkg_map = self.go(packages, None, greedy="none") pkg_map = self.go(packages, None, greedy="none")
@ -439,8 +447,12 @@ class DepsolvingBase(object):
] ]
pkg_map = self.go(packages, None, greedy="none") pkg_map = self.go(packages, None, greedy="none")
self.assertNotIn("dummy-release-client-workstation-1.0.0-1.i686.rpm", pkg_map["rpm"]) self.assertNotIn(
self.assertNotIn("dummy-release-client-workstation-1.0.0-1.x86_64.rpm", pkg_map["rpm"]) "dummy-release-client-workstation-1.0.0-1.i686.rpm", pkg_map["rpm"]
)
self.assertNotIn(
"dummy-release-client-workstation-1.0.0-1.x86_64.rpm", pkg_map["rpm"]
)
self.assertNotIn("dummy-release-client-1.0.0-1.i686.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-release-client-1.0.0-1.i686.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-release-client-1.0.0-1.x86_64.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-release-client-1.0.0-1.x86_64.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-release-server-1.0.0-1.i686.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-release-server-1.0.0-1.i686.rpm", pkg_map["rpm"])
@ -892,10 +904,11 @@ class DepsolvingBase(object):
# #
# By default newer version should be pulled in. # By default newer version should be pulled in.
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit") self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit")
self.lookaside = os.path.join(os.path.dirname(__file__), self.lookaside = os.path.join(
"fixtures/repos/cockpit-lookaside") os.path.dirname(__file__), "fixtures/repos/cockpit-lookaside"
)
packages = [ packages = [
'dummy-cockpit-docker', "dummy-cockpit-docker",
] ]
pkg_map = self.go(packages, None, lookaside=self.lookaside) pkg_map = self.go(packages, None, lookaside=self.lookaside)
@ -920,11 +933,12 @@ class DepsolvingBase(object):
# satisfied by the older version in lookaside. No broken dependencies # satisfied by the older version in lookaside. No broken dependencies
# should be reported. # should be reported.
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit") self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit")
self.lookaside = os.path.join(os.path.dirname(__file__), self.lookaside = os.path.join(
"fixtures/repos/cockpit-lookaside") os.path.dirname(__file__), "fixtures/repos/cockpit-lookaside"
)
packages = [ packages = [
'dummy-cockpit-docker', "dummy-cockpit-docker",
'-dummy-cockpit-system', "-dummy-cockpit-system",
] ]
pkg_map = self.go(packages, None, lookaside=self.lookaside) pkg_map = self.go(packages, None, lookaside=self.lookaside)
@ -982,7 +996,9 @@ class DepsolvingBase(object):
packages = [ packages = [
"Dummy-firefox", "Dummy-firefox",
] ]
pkg_map = self.go(packages, None, greedy="none", selfhosting=True, fulltree=True) pkg_map = self.go(
packages, None, greedy="none", selfhosting=True, fulltree=True
)
self.assertNotIn("Dummy-firefox-16.0.1-2.i686.rpm", pkg_map["rpm"]) self.assertNotIn("Dummy-firefox-16.0.1-2.i686.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-krb5-devel-1.10-5.i686.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-krb5-devel-1.10-5.i686.rpm", pkg_map["rpm"])
@ -1128,7 +1144,7 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-glibc*", "dummy-glibc*",
] ]
pkg_map = self.go(packages, None, multilib_blacklist=['dummy-glibc*']) pkg_map = self.go(packages, None, multilib_blacklist=["dummy-glibc*"])
six.assertCountEqual( six.assertCountEqual(
self, self,
@ -1297,7 +1313,7 @@ class DepsolvingBase(object):
], ],
) )
@unittest.skip('This test is broken') @unittest.skip("This test is broken")
def test_bash_multilib_nogreedy(self): def test_bash_multilib_nogreedy(self):
packages = [ packages = [
"dummy-bash.+", "dummy-bash.+",
@ -1429,9 +1445,7 @@ class DepsolvingBase(object):
"dummy-kmod-ipw3945-xen-1.2.0-4.20.x86_64.rpm", "dummy-kmod-ipw3945-xen-1.2.0-4.20.x86_64.rpm",
], ],
) )
self.assertEqual( self.assertEqual(pkg_map["srpm"], ["dummy-ipw3945-kmod-1.2.0-4.20.src.rpm"])
pkg_map["srpm"], ["dummy-ipw3945-kmod-1.2.0-4.20.src.rpm"]
)
self.assertEqual( self.assertEqual(
pkg_map["debuginfo"], ["dummy-ipw3945-kmod-debuginfo-1.2.0-4.20.x86_64.rpm"] pkg_map["debuginfo"], ["dummy-ipw3945-kmod-debuginfo-1.2.0-4.20.x86_64.rpm"]
) )
@ -1440,8 +1454,13 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-lvm2-devel", "dummy-lvm2-devel",
] ]
pkg_map = self.go(packages, None, greedy="none", fulltree=False, pkg_map = self.go(
multilib_methods=["devel", "runtime"]) packages,
None,
greedy="none",
fulltree=False,
multilib_methods=["devel", "runtime"],
)
six.assertCountEqual( six.assertCountEqual(
self, self,
@ -1518,7 +1537,7 @@ class DepsolvingBase(object):
"dummy-freeipa-server-2.2.0-1.ppc64.rpm", # Important "dummy-freeipa-server-2.2.0-1.ppc64.rpm", # Important
"dummy-selinux-policy-minimal-3.10.0-121.noarch.rpm", "dummy-selinux-policy-minimal-3.10.0-121.noarch.rpm",
"dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", # Important "dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", # Important
"dummy-selinux-policy-targeted-3.10.0-121.noarch.rpm" "dummy-selinux-policy-targeted-3.10.0-121.noarch.rpm",
], ],
) )
six.assertCountEqual( six.assertCountEqual(
@ -1538,7 +1557,9 @@ class DepsolvingBase(object):
] ]
pkg_map = self.go(packages, None, greedy="none", fulltree=False, arch="ppc64") pkg_map = self.go(packages, None, greedy="none", fulltree=False, arch="ppc64")
self.assertNotIn("dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", pkg_map["rpm"]) self.assertNotIn(
"dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", pkg_map["rpm"]
)
six.assertCountEqual( six.assertCountEqual(
self, self,
@ -1559,9 +1580,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], []) self.assertEqual(pkg_map["debuginfo"], [])
def test_selinux_policy_doc_fulltree(self): def test_selinux_policy_doc_fulltree(self):
packages = [ packages = ["dummy-selinux-policy-doc"]
"dummy-selinux-policy-doc"
]
pkg_map = self.go(packages, None, fulltree=True) pkg_map = self.go(packages, None, fulltree=True)
six.assertCountEqual( six.assertCountEqual(
@ -1637,10 +1656,10 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-imsettings", "dummy-imsettings",
] ]
groups = [ groups = ["basic-desktop"]
"basic-desktop" pkg_map = self.go(
] packages, groups, greedy="none", fulltree=False, arch="x86_64"
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, arch="x86_64") )
self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
# prefers gnome over qt (condrequires in @basic-desktop) # prefers gnome over qt (condrequires in @basic-desktop)
@ -1660,11 +1679,10 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-imsettings", "dummy-imsettings",
] ]
groups = [ groups = ["basic-desktop"]
"basic-desktop" pkg_map = self.go(
] packages, groups, greedy="none", fulltree=False, nodeps=True, arch="x86_64"
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, nodeps=True, )
arch="x86_64")
self.assertNotIn("dummy-imsettings-gnome-1.2.9-1.x86_64.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-imsettings-gnome-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"]) self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
@ -1679,10 +1697,10 @@ class DepsolvingBase(object):
"dummy-imsettings", "dummy-imsettings",
"dummy-imsettings-qt", "dummy-imsettings-qt",
] ]
groups = [ groups = ["basic-desktop"]
"basic-desktop" pkg_map = self.go(
] packages, groups, greedy="none", fulltree=False, arch="x86_64"
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, arch="x86_64") )
# prefers gnome over qt (condrequires in @basic-desktop) # prefers gnome over qt (condrequires in @basic-desktop)
six.assertCountEqual( six.assertCountEqual(
@ -1751,8 +1769,9 @@ class DepsolvingBase(object):
packages = [ packages = [
"*", "*",
] ]
pkg_map = self.go(packages, None, lookaside=self.repo, pkg_map = self.go(
nodeps=True, fulltree=True) packages, None, lookaside=self.repo, nodeps=True, fulltree=True
)
self.assertEqual(pkg_map["rpm"], []) self.assertEqual(pkg_map["rpm"], [])
self.assertEqual(pkg_map["srpm"], []) self.assertEqual(pkg_map["srpm"], [])
@ -1764,8 +1783,7 @@ class DepsolvingBase(object):
"-dummy-bas*", "-dummy-bas*",
"dummy-glibc", "dummy-glibc",
] ]
pkg_map = self.go(packages, None, pkg_map = self.go(packages, None, greedy="none", nodeps=True, fulltree=True)
greedy="none", nodeps=True, fulltree=True)
# neither dummy-bash or dummy-basesystem is pulled in # neither dummy-bash or dummy-basesystem is pulled in
six.assertCountEqual( six.assertCountEqual(
@ -1796,10 +1814,7 @@ class DepsolvingBase(object):
six.assertCountEqual( six.assertCountEqual(
self, self,
pkg_map["rpm"], pkg_map["rpm"],
[ ["dummy-atlas-3.8.4-7.x86_64.rpm", "dummy-atlas-devel-3.8.4-7.x86_64.rpm"],
"dummy-atlas-3.8.4-7.x86_64.rpm",
"dummy-atlas-devel-3.8.4-7.x86_64.rpm",
],
) )
self.assertEqual(pkg_map["srpm"], ["dummy-atlas-3.8.4-7.src.rpm"]) self.assertEqual(pkg_map["srpm"], ["dummy-atlas-3.8.4-7.src.rpm"])
self.assertEqual(pkg_map["debuginfo"], []) self.assertEqual(pkg_map["debuginfo"], [])
@ -1827,8 +1842,14 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-atlas-devel", "dummy-atlas-devel",
] ]
pkg_map = self.go(packages, None, greedy="build", multilib_methods=["devel", "runtime"], pkg_map = self.go(
fulltree=False, arch="x86_64") packages,
None,
greedy="build",
multilib_methods=["devel", "runtime"],
fulltree=False,
arch="x86_64",
)
six.assertCountEqual( six.assertCountEqual(
self, self,
@ -1847,8 +1868,14 @@ class DepsolvingBase(object):
packages = [ packages = [
"dummy-atlas-devel.+", "dummy-atlas-devel.+",
] ]
pkg_map = self.go(packages, None, greedy="build", multilib_methods=["devel", "runtime"], pkg_map = self.go(
fulltree=False, arch="x86_64") packages,
None,
greedy="build",
multilib_methods=["devel", "runtime"],
fulltree=False,
arch="x86_64",
)
six.assertCountEqual( six.assertCountEqual(
self, self,
@ -1878,7 +1905,6 @@ class DepsolvingBase(object):
"dummy-atlas-sse-3.8.4-7.i686.rpm", "dummy-atlas-sse-3.8.4-7.i686.rpm",
"dummy-atlas-sse2-3.8.4-7.i686.rpm", "dummy-atlas-sse2-3.8.4-7.i686.rpm",
"dummy-atlas-sse3-3.8.4-7.i686.rpm", "dummy-atlas-sse3-3.8.4-7.i686.rpm",
"dummy-atlas-3.8.4-7.x86_64.rpm", "dummy-atlas-3.8.4-7.x86_64.rpm",
"dummy-atlas-devel-3.8.4-7.x86_64.rpm", "dummy-atlas-devel-3.8.4-7.x86_64.rpm",
"dummy-atlas-sse3-3.8.4-7.x86_64.rpm", "dummy-atlas-sse3-3.8.4-7.x86_64.rpm",
@ -2052,7 +2078,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["srpm"], ["dummy-mingw-qt5-qtbase-5.6.0-1.src.rpm"]) self.assertEqual(pkg_map["srpm"], ["dummy-mingw-qt5-qtbase-5.6.0-1.src.rpm"])
self.assertEqual( self.assertEqual(
pkg_map["debuginfo"], pkg_map["debuginfo"],
["dummy-mingw32-qt5-qtbase-debuginfo-5.6.0-1.noarch.rpm"] ["dummy-mingw32-qt5-qtbase-debuginfo-5.6.0-1.noarch.rpm"],
) )
def test_input_by_wildcard(self): def test_input_by_wildcard(self):
@ -2083,9 +2109,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], []) self.assertEqual(pkg_map["debuginfo"], [])
def test_requires_pre_post(self): def test_requires_pre_post(self):
packages = [ packages = ["dummy-perl"]
"dummy-perl"
]
pkg_map = self.go(packages, None) pkg_map = self.go(packages, None)
six.assertCountEqual( six.assertCountEqual(
@ -2102,8 +2126,8 @@ class DepsolvingBase(object):
def test_multilib_exclude_pattern_does_not_match_noarch(self): def test_multilib_exclude_pattern_does_not_match_noarch(self):
packages = [ packages = [
'dummy-release-notes-en-US', "dummy-release-notes-en-US",
'-dummy-release-notes-en*.+', "-dummy-release-notes-en*.+",
] ]
pkg_map = self.go(packages, None) pkg_map = self.go(packages, None)
@ -2113,9 +2137,8 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], []) self.assertEqual(pkg_map["debuginfo"], [])
@unittest.skipUnless(HAS_YUM, 'YUM only available on Python 2') @unittest.skipUnless(HAS_YUM, "YUM only available on Python 2")
class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase): class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def setUp(self): def setUp(self):
super(PungiYumDepsolvingTestCase, self).setUp() super(PungiYumDepsolvingTestCase, self).setUp()
self.ks = os.path.join(self.tmp_dir, "ks") self.ks = os.path.join(self.tmp_dir, "ks")
@ -2125,9 +2148,9 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
self.old_cwd = os.getcwd() self.old_cwd = os.getcwd()
os.chdir(self.cwd) os.chdir(self.cwd)
logger = logging.getLogger('Pungi') logger = logging.getLogger("Pungi")
if not logger.handlers: if not logger.handlers:
formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s') formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
console = logging.StreamHandler(sys.stdout) console = logging.StreamHandler(sys.stdout)
console.setFormatter(formatter) console.setFormatter(formatter)
console.setLevel(logging.INFO) console.setLevel(logging.INFO)
@ -2137,9 +2160,17 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
os.chdir(self.old_cwd) os.chdir(self.old_cwd)
super(PungiYumDepsolvingTestCase, self).tearDown() super(PungiYumDepsolvingTestCase, self).tearDown()
def go(self, packages, groups, lookaside=None, prepopulate=None, def go(
fulltree_excludes=None, multilib_blacklist=None, self,
multilib_whitelist=None, **kwargs): packages,
groups,
lookaside=None,
prepopulate=None,
fulltree_excludes=None,
multilib_blacklist=None,
multilib_whitelist=None,
**kwargs
):
""" """
Write a kickstart with given packages and groups, then run the Write a kickstart with given packages and groups, then run the
depsolving and parse the output. depsolving and parse the output.
@ -2147,19 +2178,25 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
p = PungiWrapper() p = PungiWrapper()
repos = {"repo": self.repo} repos = {"repo": self.repo}
if lookaside: if lookaside:
repos['lookaside'] = lookaside repos["lookaside"] = lookaside
kwargs['lookaside_repos'] = ['lookaside'] kwargs["lookaside_repos"] = ["lookaside"]
p.write_kickstart(self.ks, repos, groups, packages, prepopulate=prepopulate, p.write_kickstart(
self.ks,
repos,
groups,
packages,
prepopulate=prepopulate,
multilib_whitelist=multilib_whitelist, multilib_whitelist=multilib_whitelist,
multilib_blacklist=multilib_blacklist, multilib_blacklist=multilib_blacklist,
fulltree_excludes=fulltree_excludes) fulltree_excludes=fulltree_excludes,
kwargs.setdefault('cache_dir', self.tmp_dir) )
kwargs.setdefault("cache_dir", self.tmp_dir)
# Unless the test specifies an arch, we need to default to x86_64. # Unless the test specifies an arch, we need to default to x86_64.
# Otherwise the arch of current machine will be used, which will cause # Otherwise the arch of current machine will be used, which will cause
# failure most of the time. # failure most of the time.
kwargs.setdefault('arch', 'x86_64') kwargs.setdefault("arch", "x86_64")
p.run_pungi(self.ks, self.tmp_dir, 'DP', **kwargs) p.run_pungi(self.ks, self.tmp_dir, "DP", **kwargs)
with open(self.out, "r") as f: with open(self.out, "r") as f:
pkg_map, self.broken_deps, _ = p.parse_log(f) pkg_map, self.broken_deps, _ = p.parse_log(f)
return convert_pkg_map(pkg_map) return convert_pkg_map(pkg_map)
@ -2168,7 +2205,7 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def convert_dnf_packages(pkgs, flags): def convert_dnf_packages(pkgs, flags):
convert_table = { convert_table = {
# Hawkey returns nosrc package as src # Hawkey returns nosrc package as src
'dummy-AdobeReader_enu-9.5.1-1.src': 'dummy-AdobeReader_enu-9.5.1-1.nosrc', "dummy-AdobeReader_enu-9.5.1-1.src": "dummy-AdobeReader_enu-9.5.1-1.nosrc",
} }
result = set() result = set()
for p in pkgs: for p in pkgs:
@ -2178,20 +2215,20 @@ def convert_dnf_packages(pkgs, flags):
# Package is coming from lookaside repo, we don't want those in # Package is coming from lookaside repo, we don't want those in
# output. # output.
continue continue
result.add(name + '.rpm') result.add(name + ".rpm")
return sorted(result) return sorted(result)
@unittest.skipUnless(HAS_DNF, 'Dependencies are not available') @unittest.skipUnless(HAS_DNF, "Dependencies are not available")
class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase): class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def setUp(self): def setUp(self):
super(DNFDepsolvingTestCase, self).setUp() super(DNFDepsolvingTestCase, self).setUp()
self.cachedir = os.path.join(self.tmp_dir, 'pungi_dnf_cache') self.cachedir = os.path.join(self.tmp_dir, "pungi_dnf_cache")
self.get_langpacks = False self.get_langpacks = False
logger = logging.getLogger('gather_dnf') logger = logging.getLogger("gather_dnf")
if not logger.handlers: if not logger.handlers:
formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s') formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
console = logging.StreamHandler(sys.stdout) console = logging.StreamHandler(sys.stdout)
console.setFormatter(formatter) console.setFormatter(formatter)
console.setLevel(logging.INFO) console.setLevel(logging.INFO)
@ -2200,29 +2237,32 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
self.maxDiff = None self.maxDiff = None
def go(self, packages, groups, lookaside=None, **kwargs): def go(self, packages, groups, lookaside=None, **kwargs):
arch = kwargs.pop('arch', 'x86_64') arch = kwargs.pop("arch", "x86_64")
if 'greedy' in kwargs: if "greedy" in kwargs:
kwargs['greedy_method'] = kwargs.pop('greedy') kwargs["greedy_method"] = kwargs.pop("greedy")
if 'nodeps' in kwargs: if "nodeps" in kwargs:
kwargs['resolve_deps'] = not kwargs.pop('nodeps') kwargs["resolve_deps"] = not kwargs.pop("nodeps")
if lookaside: if lookaside:
kwargs['lookaside_repos'] = ['lookaside'] kwargs["lookaside_repos"] = ["lookaside"]
self.dnf = self.dnf_instance(arch, lookaside=lookaside, persistdir=self.tmp_dir) self.dnf = self.dnf_instance(arch, lookaside=lookaside, persistdir=self.tmp_dir)
if self.get_langpacks: if self.get_langpacks:
kwargs['langpacks'] = self.dnf.comps_wrapper.get_langpacks() kwargs["langpacks"] = self.dnf.comps_wrapper.get_langpacks()
groups = groups or [] groups = groups or []
exclude_groups = [] exclude_groups = []
_, conditional_packages = self.dnf.comps_wrapper.get_comps_packages(groups, exclude_groups) _, conditional_packages = self.dnf.comps_wrapper.get_comps_packages(
groups, exclude_groups
)
self.g = Gather(self.dnf, GatherOptions(**kwargs)) self.g = Gather(self.dnf, GatherOptions(**kwargs))
self.g.logger.handlers = [h for h in self.g.logger.handlers self.g.logger.handlers = [
if h.name != 'capture-logs'] h for h in self.g.logger.handlers if h.name != "capture-logs"
]
log_output = cStringIO() log_output = cStringIO()
handler = logging.StreamHandler(log_output) handler = logging.StreamHandler(log_output)
handler.name = 'capture-logs' handler.name = "capture-logs"
handler.setLevel(logging.WARNING) handler.setLevel(logging.WARNING)
self.g.logger.addHandler(handler) self.g.logger.addHandler(handler)
@ -2231,12 +2271,15 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
_, self.broken_deps, _ = PungiWrapper().parse_log(log_output) _, self.broken_deps, _ = PungiWrapper().parse_log(log_output)
return { return {
'debuginfo': convert_dnf_packages(self.g.result_debug_packages, "debuginfo": convert_dnf_packages(
self.g.result_package_flags), self.g.result_debug_packages, self.g.result_package_flags
'srpm': convert_dnf_packages(self.g.result_source_packages, ),
self.g.result_package_flags), "srpm": convert_dnf_packages(
'rpm': convert_dnf_packages(self.g.result_binary_packages, self.g.result_source_packages, self.g.result_package_flags
self.g.result_package_flags), ),
"rpm": convert_dnf_packages(
self.g.result_binary_packages, self.g.result_package_flags
),
} }
def dnf_instance(self, base_arch, exclude=None, lookaside=False, persistdir=None): def dnf_instance(self, base_arch, exclude=None, lookaside=False, persistdir=None):
@ -2262,25 +2305,33 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
for pkg, flags in self.g.result_package_flags.items(): for pkg, flags in self.g.result_package_flags.items():
if nvra == "%s-%s-%s.%s" % (pkg.name, pkg.version, pkg.release, pkg.arch): if nvra == "%s-%s-%s.%s" % (pkg.name, pkg.version, pkg.release, pkg.arch):
self.assertEqual( self.assertEqual(
flags, expected_flags, flags,
"pkg: %s; flags: %s; expected flags: %s" % (nvra, flags, expected_flags)) expected_flags,
"pkg: %s; flags: %s; expected flags: %s"
% (nvra, flags, expected_flags),
)
found = True found = True
if not found: if not found:
flags = set() flags = set()
self.assertEqual( self.assertEqual(
flags, expected_flags, flags,
"pkg: %s; flags: %s; expected flags: %s" % (nvra, flags, expected_flags)) expected_flags,
"pkg: %s; flags: %s; expected flags: %s"
% (nvra, flags, expected_flags),
)
def test_langpacks(self): def test_langpacks(self):
self.get_langpacks = True self.get_langpacks = True
super(DNFDepsolvingTestCase, self).test_langpacks() super(DNFDepsolvingTestCase, self).test_langpacks()
@unittest.skip('DNF code does not support NVR as input') @unittest.skip("DNF code does not support NVR as input")
def test_bash_older(self): def test_bash_older(self):
pass pass
def test_firefox_selfhosting_with_krb5_lookaside(self): def test_firefox_selfhosting_with_krb5_lookaside(self):
super(DNFDepsolvingTestCase, self).test_firefox_selfhosting_with_krb5_lookaside() super(
DNFDepsolvingTestCase, self
).test_firefox_selfhosting_with_krb5_lookaside()
self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside]) self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside]) self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside])
@ -2343,9 +2394,9 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def test_bash_multilib_exclude_debuginfo(self): def test_bash_multilib_exclude_debuginfo(self):
packages = [ packages = [
'dummy-bash.+', "dummy-bash.+",
'-dummy-bash-debuginfo', "-dummy-bash-debuginfo",
'-dummy-bash-debugsource', "-dummy-bash-debugsource",
] ]
pkg_map = self.go(packages, None, greedy="none") pkg_map = self.go(packages, None, greedy="none")
@ -2385,8 +2436,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
"-dummy-bas*", "-dummy-bas*",
"dummy-glibc", "dummy-glibc",
] ]
pkg_map = self.go(packages, None, pkg_map = self.go(packages, None, greedy="none", nodeps=True, fulltree=True)
greedy="none", nodeps=True, fulltree=True)
# neither dummy-bash or dummy-basesystem is pulled in # neither dummy-bash or dummy-basesystem is pulled in
six.assertCountEqual( six.assertCountEqual(
@ -2465,8 +2515,13 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
packages = [ packages = [
"dummy-lvm2-devel", "dummy-lvm2-devel",
] ]
pkg_map = self.go(packages, None, greedy="none", fulltree=False, pkg_map = self.go(
multilib_methods=["devel", "runtime"]) packages,
None,
greedy="none",
fulltree=False,
multilib_methods=["devel", "runtime"],
)
six.assertCountEqual( six.assertCountEqual(
self, self,

View File

@ -16,32 +16,46 @@ class TestWritePungiConfig(helpers.PungiTestCase):
def assertWritten(self, PungiWrapper, **kwargs): def assertWritten(self, PungiWrapper, **kwargs):
wrapper = PungiWrapper.return_value wrapper = PungiWrapper.return_value
self.assertEqual(wrapper.mock_calls, self.assertEqual(wrapper.mock_calls, [mock.call.write_kickstart(**kwargs)])
[mock.call.write_kickstart(**kwargs)])
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper') @mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_correct(self, PungiWrapper): def test_correct(self, PungiWrapper):
pkgs = [('pkg1', None), ('pkg2', 'x86_64')] pkgs = [("pkg1", None), ("pkg2", "x86_64")]
grps = ['grp1'] grps = ["grp1"]
filter = [('pkg3', None), ('pkg4', 'x86_64')] filter = [("pkg3", None), ("pkg4", "x86_64")]
white = mock.Mock() white = mock.Mock()
black = mock.Mock() black = mock.Mock()
prepopulate = mock.Mock() prepopulate = mock.Mock()
fulltree = mock.Mock() fulltree = mock.Mock()
deps.write_pungi_config( deps.write_pungi_config(
self.compose, 'x86_64', self.compose.variants['Server'], self.compose,
pkgs, grps, filter, white, black, "x86_64",
prepopulate=prepopulate, fulltree_excludes=fulltree, self.compose.variants["Server"],
pkgs,
grps,
filter,
white,
black,
prepopulate=prepopulate,
fulltree_excludes=fulltree,
package_sets=self.package_sets, package_sets=self.package_sets,
) )
self.assertWritten(PungiWrapper, packages=['pkg1', 'pkg2.x86_64'], self.assertWritten(
ks_path=self.topdir + '/work/x86_64/pungi/Server.x86_64.conf', PungiWrapper,
lookaside_repos={}, multilib_whitelist=white, multilib_blacklist=black, packages=["pkg1", "pkg2.x86_64"],
groups=['grp1'], prepopulate=prepopulate, ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1", lookaside_repos={},
'comps-repo': self.topdir + '/work/x86_64/comps_repo_Server'}, multilib_whitelist=white,
exclude_packages=['pkg3', 'pkg4.x86_64'], multilib_blacklist=black,
fulltree_excludes=fulltree) groups=["grp1"],
prepopulate=prepopulate,
repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=["pkg3", "pkg4.x86_64"],
fulltree_excludes=fulltree,
)
@mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper") @mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_duplicated_package_name(self, PungiWrapper): def test_duplicated_package_name(self, PungiWrapper):
@ -53,73 +67,114 @@ class TestWritePungiConfig(helpers.PungiTestCase):
prepopulate = mock.Mock() prepopulate = mock.Mock()
fulltree = mock.Mock() fulltree = mock.Mock()
deps.write_pungi_config( deps.write_pungi_config(
self.compose, "x86_64", self.compose.variants["Server"], self.compose,
pkgs, grps, filter, white, black, "x86_64",
prepopulate=prepopulate, fulltree_excludes=fulltree, self.compose.variants["Server"],
pkgs,
grps,
filter,
white,
black,
prepopulate=prepopulate,
fulltree_excludes=fulltree,
package_sets=self.package_sets, package_sets=self.package_sets,
) )
self.assertWritten(PungiWrapper, packages=["pkg1", "pkg1.x86_64"], self.assertWritten(
PungiWrapper,
packages=["pkg1", "pkg1.x86_64"],
ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf", ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
lookaside_repos={}, multilib_whitelist=white, multilib_blacklist=black, lookaside_repos={},
groups=[], prepopulate=prepopulate, multilib_whitelist=white,
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1", multilib_blacklist=black,
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server"}, groups=[],
prepopulate=prepopulate,
repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=["pkg2", "pkg2.x86_64"], exclude_packages=["pkg2", "pkg2.x86_64"],
fulltree_excludes=fulltree) fulltree_excludes=fulltree,
)
@mock.patch('pungi.phases.gather.get_lookaside_repos') @mock.patch("pungi.phases.gather.get_lookaside_repos")
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper') @mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_with_lookaside(self, PungiWrapper, glr): def test_with_lookaside(self, PungiWrapper, glr):
glr.return_value = ['http://example.com/repo'] glr.return_value = ["http://example.com/repo"]
pkgs = [('pkg1', None)] pkgs = [("pkg1", None)]
deps.write_pungi_config( deps.write_pungi_config(
self.compose, 'x86_64', self.compose.variants['Server'], self.compose,
pkgs, [], [], [], [], "x86_64",
self.compose.variants["Server"],
pkgs,
[],
[],
[],
[],
package_sets=self.package_sets, package_sets=self.package_sets,
) )
self.assertWritten(PungiWrapper, packages=['pkg1'], self.assertWritten(
ks_path=self.topdir + '/work/x86_64/pungi/Server.x86_64.conf', PungiWrapper,
lookaside_repos={'lookaside-repo-0': 'http://example.com/repo'}, packages=["pkg1"],
multilib_whitelist=[], multilib_blacklist=[], ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
groups=[], prepopulate=None, lookaside_repos={"lookaside-repo-0": "http://example.com/repo"},
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1", multilib_whitelist=[],
'comps-repo': self.topdir + '/work/x86_64/comps_repo_Server'}, multilib_blacklist=[],
exclude_packages=[], fulltree_excludes=None) groups=[],
self.assertEqual(glr.call_args_list, prepopulate=None,
[mock.call(self.compose, 'x86_64', self.compose.variants['Server'])]) repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=[],
fulltree_excludes=None,
)
self.assertEqual(
glr.call_args_list,
[mock.call(self.compose, "x86_64", self.compose.variants["Server"])],
)
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper') @mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_without_input(self, PungiWrapper): def test_without_input(self, PungiWrapper):
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
deps.write_pungi_config(self.compose, 'x86_64', self.compose.variants['Server'], deps.write_pungi_config(
[], [], [], [], []) self.compose,
"x86_64",
self.compose.variants["Server"],
[],
[],
[],
[],
[],
)
self.assertEqual( self.assertEqual(
str(ctx.exception), str(ctx.exception),
'No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)') "No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)",
)
self.assertEqual(PungiWrapper.return_value.mock_calls, []) self.assertEqual(PungiWrapper.return_value.mock_calls, [])
class TestRaiseOnInvalidSigkeys(helpers.PungiTestCase): class TestRaiseOnInvalidSigkeys(helpers.PungiTestCase):
def test_raise_on_invalid_sigkeys(self): def test_raise_on_invalid_sigkeys(self):
pkgset = { pkgset = {
"global": mock.Mock(), "global": mock.Mock(),
} }
pkgset["global"].invalid_sigkey_rpms = [{'name': 'pkg1'}] pkgset["global"].invalid_sigkey_rpms = [{"name": "pkg1"}]
pkgset["global"].raise_invalid_sigkeys_exception = mock.Mock(side_effect=RuntimeError()) pkgset["global"].raise_invalid_sigkeys_exception = mock.Mock(
side_effect=RuntimeError()
)
result = { result = {
'rpm': [{'path': 'pkg1-1-1.el7'}], "rpm": [{"path": "pkg1-1-1.el7"}],
} }
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
deps.raise_on_invalid_sigkeys('', '', [pkgset], result) deps.raise_on_invalid_sigkeys("", "", [pkgset], result)
class TestCheckDeps(helpers.PungiTestCase): class TestCheckDeps(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestCheckDeps, self).setUp() super(TestCheckDeps, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {}) self.compose = helpers.DummyCompose(self.topdir, {})
self.arch = 'x86_64' self.arch = "x86_64"
self.variant = self.compose.variants['Server'] self.variant = self.compose.variants["Server"]
def test_not_check_deps(self): def test_not_check_deps(self):
self.compose.conf["check_deps"] = False self.compose.conf["check_deps"] = False
@ -127,15 +182,16 @@ class TestCheckDeps(helpers.PungiTestCase):
def test_missing_deps(self): def test_missing_deps(self):
self.compose.conf["check_deps"] = True self.compose.conf["check_deps"] = True
missing_deps = {'foo.noarch': set(['bar = 1.1'])} missing_deps = {"foo.noarch": set(["bar = 1.1"])}
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
deps.check_deps(self.compose, self.arch, self.variant, missing_deps) deps.check_deps(self.compose, self.arch, self.variant, missing_deps)
self.assertEqual(str(ctx.exception), 'Unresolved dependencies detected') self.assertEqual(str(ctx.exception), "Unresolved dependencies detected")
self.assertEqual( self.assertEqual(
self.compose.log_error.call_args_list, self.compose.log_error.call_args_list,
[ [
mock.call( mock.call(
"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']" % (self.variant, self.arch) "Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']"
% (self.variant, self.arch)
) )
] ],
) )

View File

@ -244,6 +244,7 @@ class MockModule(object):
def get_runtime_streams(platform): def get_runtime_streams(platform):
assert platform == "platform" assert platform == "platform"
return [self.platform] return [self.platform]
return [mock.Mock(get_runtime_streams=get_runtime_streams)] return [mock.Mock(get_runtime_streams=get_runtime_streams)]
def get_rpm_artifacts(self): def get_rpm_artifacts(self):
@ -368,9 +369,9 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
self.assertEqual( self.assertEqual(
self.compose.log_debug.call_args_list, self.compose.log_debug.call_args_list,
[ [
mock.call('[BEGIN] Running fus (arch: x86_64, variant: Server)'), mock.call("[BEGIN] Running fus (arch: x86_64, variant: Server)"),
mock.call('[DONE ] Running fus (arch: x86_64, variant: Server)') mock.call("[DONE ] Running fus (arch: x86_64, variant: Server)"),
] ],
) )
def test_with_modules_with_devel(self, run, gc, po, wc): def test_with_modules_with_devel(self, run, gc, po, wc):
@ -481,9 +482,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
"pkg-debuginfo-1.0-2.x86_64": dbg2, "pkg-debuginfo-1.0-2.x86_64": dbg2,
} }
self.phase.debuginfo = { self.phase.debuginfo = {
"x86_64": { "x86_64": {"pkg-debuginfo": [dbg1, dbg2]},
"pkg-debuginfo": [dbg1, dbg2],
},
} }
po.side_effect = [ po.side_effect = [
([("pkg-1.0-1", "x86_64", frozenset())], []), ([("pkg-1.0-1", "x86_64", frozenset())], []),
@ -639,15 +638,11 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
( (
[ [
("pkg-devel-1.0-1", "x86_64", frozenset()), ("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset()) ("foo-1.0-1", "x86_64", frozenset()),
], ],
frozenset()), frozenset(),
(
[
("pkg-devel-1.0-1", "i686", frozenset()),
],
[],
), ),
([("pkg-devel-1.0-1", "i686", frozenset())], []),
] ]
res = self.phase.run_solver( res = self.phase.run_solver(
@ -666,7 +661,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
("pkg-devel-1.0-1", "x86_64", frozenset()), ("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset()), ("foo-1.0-1", "x86_64", frozenset()),
("pkg-devel-1.0-1", "i686", frozenset()), ("pkg-devel-1.0-1", "i686", frozenset()),
] ],
) )
self.assertEqual(res[1], set()) self.assertEqual(res[1], set())
self.assertEqual( self.assertEqual(
@ -761,16 +756,11 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
( (
[ [
("pkg-devel-1.0-1", "x86_64", frozenset()), ("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset()) ("foo-1.0-1", "x86_64", frozenset()),
],
[],
),
(
[
("foo-1.0-1", "i686", frozenset()),
], ],
[], [],
), ),
([("foo-1.0-1", "i686", frozenset())], []),
] ]
res = self.phase.run_solver( res = self.phase.run_solver(
@ -856,7 +846,7 @@ class TestExpandPackages(helpers.PungiTestCase):
nevra_to_pkg["pkg-debuginfo-3:1-2.%s" % debug_arch] = pkg._replace( nevra_to_pkg["pkg-debuginfo-3:1-2.%s" % debug_arch] = pkg._replace(
name="pkg-debuginfo", name="pkg-debuginfo",
arch=debug_arch, arch=debug_arch,
file_path="/tmp/pkg-debuginfo.%s.rpm" % debug_arch file_path="/tmp/pkg-debuginfo.%s.rpm" % debug_arch,
) )
return nevra_to_pkg return nevra_to_pkg

View File

@ -9,7 +9,7 @@ import six
from pungi.phases.gather.methods import method_nodeps as nodeps from pungi.phases.gather.methods import method_nodeps as nodeps
from tests import helpers from tests import helpers
COMPS_FILE = os.path.join(helpers.FIXTURE_DIR, 'comps.xml') COMPS_FILE = os.path.join(helpers.FIXTURE_DIR, "comps.xml")
class TestWritePungiConfig(helpers.PungiTestCase): class TestWritePungiConfig(helpers.PungiTestCase):
@ -19,7 +19,9 @@ class TestWritePungiConfig(helpers.PungiTestCase):
self.compose.paths.work.comps = mock.Mock(return_value=COMPS_FILE) self.compose.paths.work.comps = mock.Mock(return_value=COMPS_FILE)
def test_expand_group(self): def test_expand_group(self):
packages = nodeps.expand_groups(self.compose, 'x86_64', None, ['core', 'text-internet']) packages = nodeps.expand_groups(
self.compose, "x86_64", None, ["core", "text-internet"]
)
six.assertCountEqual( six.assertCountEqual(
self, self,
packages, packages,

File diff suppressed because it is too large Load Diff

View File

@ -12,15 +12,14 @@ from pungi.graph import SimpleAcyclicOrientedGraph
class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase): class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def setUp(self): def setUp(self):
self.g = SimpleAcyclicOrientedGraph() self.g = SimpleAcyclicOrientedGraph()
def test_simple_graph(self): def test_simple_graph(self):
graph_data = ( graph_data = (
('Client', 'Base'), ("Client", "Base"),
('Server', 'Base'), ("Server", "Base"),
('Workstation', 'Base'), ("Workstation", "Base"),
) )
for start, end in graph_data: for start, end in graph_data:
@ -33,13 +32,13 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def test_complex_graph(self): def test_complex_graph(self):
graph_data = ( graph_data = (
('1', '3'), # 1 --> 3 --> 4 --> 5 ... ("1", "3"), # 1 --> 3 --> 4 --> 5 ...
('3', '4'), ("3", "4"),
('4', '5'), ("4", "5"),
('4', '6'), ("4", "6"),
('2', '4'), ("2", "4"),
('7', '6'), ("7", "6"),
('6', '5'), ("6", "5"),
) )
for start, end in graph_data: for start, end in graph_data:
@ -47,13 +46,13 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
spanning_line = self.g.prune_graph() spanning_line = self.g.prune_graph()
# spanning line have to match completely to given graph # spanning line have to match completely to given graph
self.assertEqual(['1', '3', '2', '4', '7', '6', '5'], spanning_line) self.assertEqual(["1", "3", "2", "4", "7", "6", "5"], spanning_line)
def test_cyclic_graph(self): def test_cyclic_graph(self):
graph_data = ( graph_data = (
('1', '2'), ("1", "2"),
('2', '3'), ("2", "3"),
('3', '1'), ("3", "1"),
) )
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
@ -62,28 +61,28 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def test_two_separate_graph_lines(self): def test_two_separate_graph_lines(self):
graph_data = ( graph_data = (
('1', '3'), # 1st graph ("1", "3"), # 1st graph
('3', '2'), # 1st graph ("3", "2"), # 1st graph
('6', '5'), # 2nd graph ("6", "5"), # 2nd graph
) )
for start, end in graph_data: for start, end in graph_data:
self.g.add_edge(start, end) self.g.add_edge(start, end)
spanning_line = self.g.prune_graph() spanning_line = self.g.prune_graph()
spanning_line_str = ''.join(spanning_line) spanning_line_str = "".join(spanning_line)
self.assertEqual(5, len(spanning_line)) self.assertEqual(5, len(spanning_line))
# Particular parts should match. Order of these parts is not crucial. # Particular parts should match. Order of these parts is not crucial.
self.assertTrue( self.assertTrue(
"132" in spanning_line_str and "65" in spanning_line_str, "132" in spanning_line_str and "65" in spanning_line_str,
"Spanning line '%s' does not match to graphs" % spanning_line_str "Spanning line '%s' does not match to graphs" % spanning_line_str,
) )
def alternative_route_in_graph(self): def alternative_route_in_graph(self):
graph_data = ( graph_data = (
('1', '3'), ("1", "3"),
('3', '2'), ("3", "2"),
('1', '2'), ("1", "2"),
) )
for start, end in graph_data: for start, end in graph_data:
@ -91,4 +90,4 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
spanning_line = self.g.prune_graph() spanning_line = self.g.prune_graph()
# spanning line have to match completely to given graph # spanning line have to match completely to given graph
self.assertEqual(['1', '3', '2'], spanning_line) self.assertEqual(["1", "3", "2"], spanning_line)

File diff suppressed because it is too large Load Diff

View File

@ -16,162 +16,221 @@ from tests.helpers import DummyCompose, PungiTestCase
class TestImageChecksumPhase(PungiTestCase): class TestImageChecksumPhase(PungiTestCase):
def test_phase_is_never_skipped(self): def test_phase_is_never_skipped(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
phase = ImageChecksumPhase(compose) phase = ImageChecksumPhase(compose)
self.assertFalse(phase.skip()) self.assertFalse(phase.skip())
def test_config_skip_individual_with_multiple_algorithms(self): def test_config_skip_individual_with_multiple_algorithms(self):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'media_checksums': ['md5', 'sha1'], self.topdir,
'media_checksum_one_file': True {"media_checksums": ["md5", "sha1"], "media_checksum_one_file": True},
}) )
phase = ImageChecksumPhase(compose) phase = ImageChecksumPhase(compose)
with self.assertRaises(ValueError) as ctx: with self.assertRaises(ValueError) as ctx:
phase.validate() phase.validate()
self.assertIn('media_checksum_one_file', str(ctx.exception)) self.assertIn("media_checksum_one_file", str(ctx.exception))
@mock.patch('os.path.exists') @mock.patch("os.path.exists")
@mock.patch('kobo.shortcuts.compute_file_checksums') @mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch('pungi.phases.image_checksum.dump_checksums') @mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_one_file(self, dump_checksums, cc, exists): def test_checksum_one_file(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'media_checksums': ['sha256'], self.topdir,
'media_checksum_one_file': True, {"media_checksums": ["sha256"], "media_checksum_one_file": True},
})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'sha256': 'cafebabe'}
phase.run()
dump_checksums.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/CHECKSUM',
set([('image.iso', 123, 'sha256', 'cafebabe')]))
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['sha256'])
compose.image.add_checksum.assert_called_once_with(None, 'sha256', 'cafebabe')
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
def test_checksum_save_individuals(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['md5', 'sha256'],
})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'}
phase.run()
dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')])),
mock.call(self.topdir + '/compose/Client/i386/iso/MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')]))],
any_order=True
) )
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
def test_checksum_one_file_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['sha256'],
'media_checksum_one_file': True,
'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s'
})
compose.compose_label = 'Alpha-1.0'
phase = ImageChecksumPhase(compose) phase = ImageChecksumPhase(compose)
exists.return_value = True exists.return_value = True
cc.return_value = {'sha256': 'cafebabe'} cc.return_value = {"sha256": "cafebabe"}
phase.run() phase.run()
dump_checksums.assert_called_once_with( dump_checksums.assert_called_once_with(
self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM', self.topdir + "/compose/Client/i386/iso/CHECKSUM",
set([('image.iso', 123, 'sha256', 'cafebabe')])) set([("image.iso", 123, "sha256", "cafebabe")]),
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['sha256']) )
compose.image.add_checksum.assert_called_once_with(None, 'sha256', 'cafebabe') cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["sha256"]
)
compose.image.add_checksum.assert_called_once_with(None, "sha256", "cafebabe")
@mock.patch('os.path.exists') @mock.patch("os.path.exists")
@mock.patch('kobo.shortcuts.compute_file_checksums') @mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch('pungi.phases.image_checksum.dump_checksums') @mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {"media_checksums": ["md5", "sha256"]})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run()
dump_checksums.assert_has_calls(
[
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
)
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_one_file_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(
self.topdir,
{
"media_checksums": ["sha256"],
"media_checksum_one_file": True,
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s",
},
)
compose.compose_label = "Alpha-1.0"
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {"sha256": "cafebabe"}
phase.run()
dump_checksums.assert_called_once_with(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM",
set([("image.iso", 123, "sha256", "cafebabe")]),
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["sha256"]
)
compose.image.add_checksum.assert_called_once_with(None, "sha256", "cafebabe")
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals_custom_name(self, dump_checksums, cc, exists): def test_checksum_save_individuals_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'media_checksums': ['md5', 'sha256'], self.topdir,
'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s' {
}) "media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s",
},
)
phase = ImageChecksumPhase(compose) phase = ImageChecksumPhase(compose)
exists.return_value = True exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'} cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run() phase.run()
dump_checksums.assert_has_calls( dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM', [
set([('image.iso', 123, 'md5', 'cafebabe')])), mock.call(
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM', self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([('image.iso', 123, 'sha256', 'deadbeef')])), set([("image.iso", 123, "md5", "cafebabe")]),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM', ),
set([('image.iso', 123, 'md5', 'cafebabe')])), mock.call(
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM', self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([('image.iso', 123, 'sha256', 'deadbeef')]))], set([("image.iso", 123, "sha256", "deadbeef")]),
any_order=True ),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
) )
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
@mock.patch('os.path.exists') @mock.patch("os.path.exists")
@mock.patch('kobo.shortcuts.compute_file_checksums') @mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch('pungi.phases.image_checksum.dump_checksums') @mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals_custom_name_str_format(self, dump_checksums, cc, exists): def test_checksum_save_individuals_custom_name_str_format(
compose = DummyCompose(self.topdir, { self, dump_checksums, cc, exists
'media_checksums': ['md5', 'sha256'], ):
'media_checksum_base_filename': '{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}' compose = DummyCompose(
}) self.topdir,
{
"media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}",
},
)
phase = ImageChecksumPhase(compose) phase = ImageChecksumPhase(compose)
exists.return_value = True exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'} cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run() phase.run()
dump_checksums.assert_has_calls( dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM', [
set([('image.iso', 123, 'md5', 'cafebabe')])), mock.call(
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM', self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([('image.iso', 123, 'sha256', 'deadbeef')])), set([("image.iso", 123, "md5", "cafebabe")]),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM', ),
set([('image.iso', 123, 'md5', 'cafebabe')])), mock.call(
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM', self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([('image.iso', 123, 'sha256', 'deadbeef')]))], set([("image.iso", 123, "sha256", "deadbeef")]),
any_order=True ),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
) )
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
class TestDumpChecksums(unittest.TestCase): class TestDumpChecksums(unittest.TestCase):
@ -182,16 +241,20 @@ class TestDumpChecksums(unittest.TestCase):
shutil.rmtree(self.tmp_dir) shutil.rmtree(self.tmp_dir)
def test_dump_checksums(self): def test_dump_checksums(self):
dump_checksums(os.path.join(self.tmp_dir, 'CHECKSUM'), dump_checksums(
[('file2.iso', 456, 'md5', 'cafebabe'), os.path.join(self.tmp_dir, "CHECKSUM"),
('file1.iso', 123, 'md5', 'abcdef')]) [
("file2.iso", 456, "md5", "cafebabe"),
("file1.iso", 123, "md5", "abcdef"),
],
)
with open(os.path.join(self.tmp_dir, 'CHECKSUM'), 'r') as f: with open(os.path.join(self.tmp_dir, "CHECKSUM"), "r") as f:
data = f.read().rstrip().split('\n') data = f.read().rstrip().split("\n")
expected = [ expected = [
'# file1.iso: 123 bytes', "# file1.iso: 123 bytes",
'MD5 (file1.iso) = abcdef', "MD5 (file1.iso) = abcdef",
'# file2.iso: 456 bytes', "# file2.iso: 456 bytes",
'MD5 (file2.iso) = cafebabe', "MD5 (file2.iso) = cafebabe",
] ]
self.assertEqual(expected, data) self.assertEqual(expected, data)

View File

@ -14,7 +14,13 @@ import sys
from pungi.module_util import Modulemd from pungi.module_util import Modulemd
from pungi.phases import init from pungi.phases import init
from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom, fake_run_in_threads from tests.helpers import (
DummyCompose,
PungiTestCase,
touch,
mk_boom,
fake_run_in_threads,
)
@mock.patch("pungi.phases.init.run_in_threads", new=fake_run_in_threads) @mock.patch("pungi.phases.init.run_in_threads", new=fake_run_in_threads)
@ -27,7 +33,6 @@ from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom, fake_run_
@mock.patch("pungi.phases.init.write_variant_comps") @mock.patch("pungi.phases.init.write_variant_comps")
@mock.patch("pungi.phases.init.write_prepopulate_file") @mock.patch("pungi.phases.init.write_prepopulate_file")
class TestInitPhase(PungiTestCase): class TestInitPhase(PungiTestCase):
def test_run( def test_run(
self, self,
write_prepopulate, write_prepopulate,
@ -56,23 +61,28 @@ class TestInitPhase(PungiTestCase):
six.assertCountEqual( six.assertCountEqual(
self, self,
create_comps.mock_calls, create_comps.mock_calls,
[mock.call(compose, "x86_64", None), mock.call(compose, "amd64", None), [
mock.call(compose, "x86_64", None),
mock.call(compose, "amd64", None),
mock.call(compose, "x86_64", compose.variants["Server"]), mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]), mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]), mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]), mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"]), mock.call(compose, "amd64", compose.variants["Everything"]),
mock.call(compose, "x86_64", compose.all_variants["Server-optional"])], mock.call(compose, "x86_64", compose.all_variants["Server-optional"]),
],
) )
six.assertCountEqual( six.assertCountEqual(
self, self,
write_variant.mock_calls, write_variant.mock_calls,
[mock.call(compose, "x86_64", compose.variants["Server"]), [
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]), mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]), mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]), mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"]), mock.call(compose, "amd64", compose.variants["Everything"]),
mock.call(compose, "x86_64", compose.all_variants["Server-optional"])], mock.call(compose, "x86_64", compose.all_variants["Server-optional"]),
],
) )
self.assertEqual(write_defaults.call_args_list, []) self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, []) self.assertEqual(validate_defaults.call_args_list, [])
@ -91,8 +101,8 @@ class TestInitPhase(PungiTestCase):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.has_comps = True compose.has_comps = True
compose.has_module_defaults = False compose.has_module_defaults = False
compose.variants['Everything'].groups = [] compose.variants["Everything"].groups = []
compose.variants['Everything'].modules = [] compose.variants["Everything"].modules = []
phase = init.InitPhase(compose) phase = init.InitPhase(compose)
phase.run() phase.run()
@ -109,21 +119,26 @@ class TestInitPhase(PungiTestCase):
six.assertCountEqual( six.assertCountEqual(
self, self,
create_comps.mock_calls, create_comps.mock_calls,
[mock.call(compose, "x86_64", None), mock.call(compose, "amd64", None), [
mock.call(compose, "x86_64", None),
mock.call(compose, "amd64", None),
mock.call(compose, "x86_64", compose.variants["Server"]), mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]), mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]), mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]), mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"])], mock.call(compose, "amd64", compose.variants["Everything"]),
],
) )
six.assertCountEqual( six.assertCountEqual(
self, self,
write_variant.mock_calls, write_variant.mock_calls,
[mock.call(compose, "x86_64", compose.variants["Server"]), [
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]), mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]), mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]), mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"])], mock.call(compose, "amd64", compose.variants["Everything"]),
],
) )
self.assertEqual(write_defaults.call_args_list, []) self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, []) self.assertEqual(validate_defaults.call_args_list, [])
@ -185,96 +200,140 @@ class TestInitPhase(PungiTestCase):
class TestWriteArchComps(PungiTestCase): class TestWriteArchComps(PungiTestCase):
@mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.run')
def test_run(self, run): def test_run(self, run):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
init.write_arch_comps(compose, 'x86_64') init.write_arch_comps(compose, "x86_64")
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['comps_filter', '--arch=x86_64', '--no-cleanup', run.mock_calls,
'--output=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir, [
self.topdir + '/work/global/comps/comps-global.xml'])]) mock.call(
[
"comps_filter",
"--arch=x86_64",
"--no-cleanup",
"--output=%s/work/x86_64/comps/comps-x86_64.xml" % self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
class TestCreateCompsRepo(PungiTestCase): class TestCreateCompsRepo(PungiTestCase):
@mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.run')
def test_run(self, run): def test_run(self, run):
compose = DummyCompose(self.topdir, { compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
'createrepo_checksum': 'sha256',
})
init.create_comps_repo(compose, 'x86_64', None) init.create_comps_repo(compose, "x86_64", None)
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['createrepo_c', self.topdir + '/work/x86_64/comps_repo', run.mock_calls,
'--outputdir=%s/work/x86_64/comps_repo' % self.topdir, [
'--groupfile=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir, mock.call(
'--update', '--no-database', '--checksum=sha256', [
'--unique-md-filenames'], "createrepo_c",
logfile=self.topdir + '/logs/x86_64/comps_repo.x86_64.log', self.topdir + "/work/x86_64/comps_repo",
show_cmd=True)]) "--outputdir=%s/work/x86_64/comps_repo" % self.topdir,
"--groupfile=%s/work/x86_64/comps/comps-x86_64.xml"
% self.topdir,
"--update",
"--no-database",
"--checksum=sha256",
"--unique-md-filenames",
],
logfile=self.topdir + "/logs/x86_64/comps_repo.x86_64.log",
show_cmd=True,
)
],
)
@mock.patch('pungi.phases.init.run') @mock.patch("pungi.phases.init.run")
def test_run_with_variant(self, run): def test_run_with_variant(self, run):
compose = DummyCompose(self.topdir, { compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
'createrepo_checksum': 'sha256',
})
init.create_comps_repo(compose, 'x86_64', compose.variants['Server']) init.create_comps_repo(compose, "x86_64", compose.variants["Server"])
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['createrepo_c', self.topdir + '/work/x86_64/comps_repo_Server', run.mock_calls,
'--outputdir=%s/work/x86_64/comps_repo_Server' % self.topdir, [
'--groupfile=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir, mock.call(
'--update', '--no-database', '--checksum=sha256', [
'--unique-md-filenames'], "createrepo_c",
logfile=self.topdir + '/logs/x86_64/comps_repo-Server.x86_64.log', self.topdir + "/work/x86_64/comps_repo_Server",
show_cmd=True)]) "--outputdir=%s/work/x86_64/comps_repo_Server" % self.topdir,
"--groupfile=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
"--update",
"--no-database",
"--checksum=sha256",
"--unique-md-filenames",
],
logfile=self.topdir + "/logs/x86_64/comps_repo-Server.x86_64.log",
show_cmd=True,
)
],
)
class TestWriteGlobalComps(PungiTestCase): class TestWriteGlobalComps(PungiTestCase):
@mock.patch("pungi.phases.init.get_file_from_scm")
@mock.patch('pungi.phases.init.get_file_from_scm')
def test_run_local_file(self, get_file): def test_run_local_file(self, get_file):
compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'}) compose = DummyCompose(self.topdir, {"comps_file": "some-file.xml"})
def gen_file(src, dest, compose=None): def gen_file(src, dest, compose=None):
self.assertEqual(src, '/home/releng/config/some-file.xml') self.assertEqual(src, "/home/releng/config/some-file.xml")
touch(os.path.join(dest, 'some-file.xml')) touch(os.path.join(dest, "some-file.xml"))
get_file.side_effect = gen_file get_file.side_effect = gen_file
init.write_global_comps(compose) init.write_global_comps(compose)
self.assertTrue(os.path.isfile(self.topdir + '/work/global/comps/comps-global.xml')) self.assertTrue(
os.path.isfile(self.topdir + "/work/global/comps/comps-global.xml")
)
class TestWriteVariantComps(PungiTestCase): class TestWriteVariantComps(PungiTestCase):
@mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.run') @mock.patch("pungi.phases.init.CompsWrapper")
@mock.patch('pungi.phases.init.CompsWrapper')
def test_run(self, CompsWrapper, run): def test_run(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server'] variant = compose.variants["Server"]
comps = CompsWrapper.return_value comps = CompsWrapper.return_value
comps.filter_groups.return_value = [] comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant) init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts', run.mock_calls,
'--keep-empty-group=conflicts-server', [
'--variant=Server', mock.call(
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir, [
self.topdir + '/work/global/comps/comps-global.xml'])]) "comps_filter",
self.assertEqual(CompsWrapper.call_args_list, "--arch=x86_64",
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')]) "--keep-empty-group=conflicts",
self.assertEqual(comps.filter_groups.call_args_list, [mock.call(variant.groups)]) "--keep-empty-group=conflicts-server",
self.assertEqual(comps.filter_environments.mock_calls, "--variant=Server",
[mock.call(variant.environments)]) "--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(
comps.filter_groups.call_args_list, [mock.call(variant.groups)]
)
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()]) self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch("pungi.phases.init.get_lookaside_groups") @mock.patch("pungi.phases.init.get_lookaside_groups")
@ -299,7 +358,8 @@ class TestWriteVariantComps(PungiTestCase):
"--keep-empty-group=conflicts", "--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server", "--keep-empty-group=conflicts-server",
"--variant=Server", "--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml" % self.topdir, "--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml", self.topdir + "/work/global/comps/comps-global.xml",
"--lookaside-group=foo", "--lookaside-group=foo",
"--lookaside-group=bar", "--lookaside-group=bar",
@ -319,81 +379,128 @@ class TestWriteVariantComps(PungiTestCase):
) )
self.assertEqual(comps.write_comps.mock_calls, [mock.call()]) self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run') @mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.CompsWrapper') @mock.patch("pungi.phases.init.CompsWrapper")
def test_run_no_filter_without_groups(self, CompsWrapper, run): def test_run_no_filter_without_groups(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server'] variant = compose.variants["Server"]
variant.groups = [] variant.groups = []
comps = CompsWrapper.return_value comps = CompsWrapper.return_value
comps.filter_groups.return_value = [] comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant) init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts', run.mock_calls,
'--keep-empty-group=conflicts-server', [
'--variant=Server', mock.call(
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir, [
self.topdir + '/work/global/comps/comps-global.xml'])]) "comps_filter",
self.assertEqual(CompsWrapper.call_args_list, "--arch=x86_64",
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')]) "--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(comps.filter_groups.call_args_list, []) self.assertEqual(comps.filter_groups.call_args_list, [])
self.assertEqual(comps.filter_environments.mock_calls, self.assertEqual(
[mock.call(variant.environments)]) comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()]) self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run') @mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.CompsWrapper') @mock.patch("pungi.phases.init.CompsWrapper")
def test_run_filter_for_modular(self, CompsWrapper, run): def test_run_filter_for_modular(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server'] variant = compose.variants["Server"]
variant.groups = [] variant.groups = []
variant.modules = ['testmodule:2.0'] variant.modules = ["testmodule:2.0"]
comps = CompsWrapper.return_value comps = CompsWrapper.return_value
comps.filter_groups.return_value = [] comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant) init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts', run.mock_calls,
'--keep-empty-group=conflicts-server', [
'--variant=Server', mock.call(
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir, [
self.topdir + '/work/global/comps/comps-global.xml'])]) "comps_filter",
self.assertEqual(CompsWrapper.call_args_list, "--arch=x86_64",
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')]) "--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(comps.filter_groups.call_args_list, [mock.call([])]) self.assertEqual(comps.filter_groups.call_args_list, [mock.call([])])
self.assertEqual(comps.filter_environments.mock_calls, self.assertEqual(
[mock.call(variant.environments)]) comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()]) self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run') @mock.patch("pungi.phases.init.run")
@mock.patch('pungi.phases.init.CompsWrapper') @mock.patch("pungi.phases.init.CompsWrapper")
def test_run_report_unmatched(self, CompsWrapper, run): def test_run_report_unmatched(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server'] variant = compose.variants["Server"]
comps = CompsWrapper.return_value comps = CompsWrapper.return_value
comps.filter_groups.return_value = ['foo', 'bar'] comps.filter_groups.return_value = ["foo", "bar"]
init.write_variant_comps(compose, 'x86_64', variant) init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls, self.assertEqual(
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts', run.mock_calls,
'--keep-empty-group=conflicts-server', [
'--variant=Server', mock.call(
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir, [
self.topdir + '/work/global/comps/comps-global.xml'])]) "comps_filter",
self.assertEqual(CompsWrapper.call_args_list, "--arch=x86_64",
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')]) "--keep-empty-group=conflicts",
self.assertEqual(comps.filter_groups.call_args_list, [mock.call(variant.groups)]) "--keep-empty-group=conflicts-server",
self.assertEqual(comps.filter_environments.mock_calls, "--variant=Server",
[mock.call(variant.environments)]) "--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(
comps.filter_groups.call_args_list, [mock.call(variant.groups)]
)
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()]) self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
self.assertEqual( self.assertEqual(
compose.log_warning.call_args_list, compose.log_warning.call_args_list,
[mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'foo')), [
mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'bar'))]) mock.call(init.UNMATCHED_GROUP_MSG % ("Server", "x86_64", "foo")),
mock.call(init.UNMATCHED_GROUP_MSG % ("Server", "x86_64", "bar")),
],
)
class TestGetLookasideGroups(PungiTestCase): class TestGetLookasideGroups(PungiTestCase):
@ -426,7 +533,6 @@ class TestGetLookasideGroups(PungiTestCase):
@mock.patch("shutil.copytree") @mock.patch("shutil.copytree")
@mock.patch("pungi.phases.init.get_dir_from_scm") @mock.patch("pungi.phases.init.get_dir_from_scm")
class TestWriteModuleDefaults(PungiTestCase): class TestWriteModuleDefaults(PungiTestCase):
def test_clone_git(self, gdfs, ct): def test_clone_git(self, gdfs, ct):
conf = {"scm": "git", "repo": "https://pagure.io/pungi.git", "dir": "."} conf = {"scm": "git", "repo": "https://pagure.io/pungi.git", "dir": "."}
compose = DummyCompose(self.topdir, {"module_defaults_dir": conf}) compose = DummyCompose(self.topdir, {"module_defaults_dir": conf})
@ -497,7 +603,6 @@ class TestWriteModuleDefaults(PungiTestCase):
@unittest.skipUnless(Modulemd, "Skipped test, no module support.") @unittest.skipUnless(Modulemd, "Skipped test, no module support.")
class TestValidateModuleDefaults(PungiTestCase): class TestValidateModuleDefaults(PungiTestCase):
def _write_defaults(self, defs): def _write_defaults(self, defs):
for mod_name, streams in defs.items(): for mod_name, streams in defs.items():
for stream in streams: for stream in streams:

View File

@ -4,6 +4,7 @@ import itertools
import mock import mock
import os import os
import six import six
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -11,13 +12,13 @@ except ImportError:
from pungi.wrappers import iso from pungi.wrappers import iso
CORRECT_OUTPUT = '''dummy.iso: 31ff3e405e26ad01c63b62f6b11d30f6 CORRECT_OUTPUT = """dummy.iso: 31ff3e405e26ad01c63b62f6b11d30f6
Fragment sums: 6eb92e7bda221d7fe5f19b4d21468c9bf261d84c96d145d96c76444b9cbc Fragment sums: 6eb92e7bda221d7fe5f19b4d21468c9bf261d84c96d145d96c76444b9cbc
Fragment count: 20 Fragment count: 20
Supported ISO: no Supported ISO: no
''' """
INCORRECT_OUTPUT = '''This should never happen: File not found''' INCORRECT_OUTPUT = """This should never happen: File not found"""
# Cached to use in tests that mock os.listdir # Cached to use in tests that mock os.listdir
orig_listdir = os.listdir orig_listdir = os.listdir
@ -35,40 +36,54 @@ def fake_listdir(pattern, result=None, exc=None):
raise exc raise exc
return result return result
return orig_listdir(path) return orig_listdir(path)
return worker return worker
class TestIsoUtils(unittest.TestCase): class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.wrappers.iso.run")
@mock.patch('pungi.wrappers.iso.run')
def test_get_implanted_md5_correct(self, mock_run): def test_get_implanted_md5_correct(self, mock_run):
mock_run.return_value = (0, CORRECT_OUTPUT) mock_run.return_value = (0, CORRECT_OUTPUT)
logger = mock.Mock() logger = mock.Mock()
self.assertEqual(iso.get_implanted_md5('dummy.iso', logger=logger), self.assertEqual(
'31ff3e405e26ad01c63b62f6b11d30f6') iso.get_implanted_md5("dummy.iso", logger=logger),
self.assertEqual(mock_run.call_args_list, "31ff3e405e26ad01c63b62f6b11d30f6",
[mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'], )
universal_newlines=True)]) self.assertEqual(
mock_run.call_args_list,
[
mock.call(
["/usr/bin/checkisomd5", "--md5sumonly", "dummy.iso"],
universal_newlines=True,
)
],
)
self.assertEqual(logger.mock_calls, []) self.assertEqual(logger.mock_calls, [])
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_get_implanted_md5_incorrect(self, mock_run): def test_get_implanted_md5_incorrect(self, mock_run):
mock_run.return_value = (0, INCORRECT_OUTPUT) mock_run.return_value = (0, INCORRECT_OUTPUT)
logger = mock.Mock() logger = mock.Mock()
self.assertEqual(iso.get_implanted_md5('dummy.iso', logger=logger), None) self.assertEqual(iso.get_implanted_md5("dummy.iso", logger=logger), None)
self.assertEqual(mock_run.call_args_list, self.assertEqual(
[mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'], mock_run.call_args_list,
universal_newlines=True)]) [
mock.call(
["/usr/bin/checkisomd5", "--md5sumonly", "dummy.iso"],
universal_newlines=True,
)
],
)
self.assertTrue(len(logger.mock_calls) > 0) self.assertTrue(len(logger.mock_calls) > 0)
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_mount_iso(self, mock_run, mock_unmount): def test_mount_iso(self, mock_run, mock_unmount):
# first tuple is return value for command 'which guestmount' # first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('1' - guestmount is not available) # value determines type of the mount/unmount command ('1' - guestmount is not available)
# for approach as a root, pair commands mount-umount are used # for approach as a root, pair commands mount-umount are used
mock_run.side_effect = [(1, ''), (0, '')] mock_run.side_effect = [(1, ""), (0, "")]
with iso.mount('dummy') as temp_dir: with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2) self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1]) mount_call_str = str(mock_run.call_args_list[1])
@ -80,14 +95,14 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree") @mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", ["root"])) @mock.patch("os.listdir", new=fake_listdir("guestfs", ["root"]))
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_guestmount(self, mock_run, mock_unmount, mock_rmtree): def test_guestmount(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount' # first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available) # value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used # for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')] mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount('dummy') as temp_dir: with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2) self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1]) mount_call_str = str(mock_run.call_args_list[1])
@ -99,14 +114,14 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree") @mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", [])) @mock.patch("os.listdir", new=fake_listdir("guestfs", []))
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_guestmount_cleans_up_cache(self, mock_run, mock_unmount, mock_rmtree): def test_guestmount_cleans_up_cache(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount' # first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available) # value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used # for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')] mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount('dummy') as temp_dir: with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2) self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1]) mount_call_str = str(mock_run.call_args_list[1])
@ -118,14 +133,16 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree") @mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", OSError("No such file"))) @mock.patch("os.listdir", new=fake_listdir("guestfs", OSError("No such file")))
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_guestmount_handles_missing_cache(self, mock_run, mock_unmount, mock_rmtree): def test_guestmount_handles_missing_cache(
self, mock_run, mock_unmount, mock_rmtree
):
# first tuple is return value for command 'which guestmount' # first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available) # value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used # for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')] mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount('dummy') as temp_dir: with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2) self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1]) mount_call_str = str(mock_run.call_args_list[1])
@ -135,12 +152,12 @@ class TestIsoUtils(unittest.TestCase):
self.assertTrue(unmount_call_str.startswith("call(['fusermount'")) self.assertTrue(unmount_call_str.startswith("call(['fusermount'"))
self.assertFalse(os.path.isdir(temp_dir)) self.assertFalse(os.path.isdir(temp_dir))
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_mount_iso_always_unmounts(self, mock_run, mock_unmount): def test_mount_iso_always_unmounts(self, mock_run, mock_unmount):
mock_run.side_effect = [(1, ''), (0, '')] mock_run.side_effect = [(1, ""), (0, "")]
try: try:
with iso.mount('dummy') as temp_dir: with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
raise RuntimeError() raise RuntimeError()
except RuntimeError: except RuntimeError:
@ -149,13 +166,13 @@ class TestIsoUtils(unittest.TestCase):
self.assertEqual(len(mock_unmount.call_args_list), 1) self.assertEqual(len(mock_unmount.call_args_list), 1)
self.assertFalse(os.path.isdir(temp_dir)) self.assertFalse(os.path.isdir(temp_dir))
@mock.patch('pungi.util.run_unmount_cmd') @mock.patch("pungi.util.run_unmount_cmd")
@mock.patch('pungi.wrappers.iso.run') @mock.patch("pungi.wrappers.iso.run")
def test_mount_iso_raises_on_error(self, mock_run, mock_unmount): def test_mount_iso_raises_on_error(self, mock_run, mock_unmount):
log = mock.Mock() log = mock.Mock()
mock_run.side_effect = [(1, ''), (1, 'Boom')] mock_run.side_effect = [(1, ""), (1, "Boom")]
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
with iso.mount('dummy', logger=log) as temp_dir: with iso.mount("dummy", logger=log) as temp_dir:
self.assertTrue(os.path.isdir(temp_dir)) self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2) self.assertEqual(len(mock_run.call_args_list), 2)
self.assertEqual(len(mock_unmount.call_args_list), 0) self.assertEqual(len(mock_unmount.call_args_list), 0)
@ -166,29 +183,28 @@ class TestCmpGraftPoints(unittest.TestCase):
def assertSorted(self, *args): def assertSorted(self, *args):
"""Tests that all permutations of arguments yield the same sorted results.""" """Tests that all permutations of arguments yield the same sorted results."""
for perm in itertools.permutations(args): for perm in itertools.permutations(args):
self.assertEqual(sorted(perm, key=iso.graft_point_sort_key), self.assertEqual(sorted(perm, key=iso.graft_point_sort_key), list(args))
list(args))
def test_eq(self): def test_eq(self):
self.assertSorted('pkgs/foo.rpm', 'pkgs/foo.rpm') self.assertSorted("pkgs/foo.rpm", "pkgs/foo.rpm")
def test_rpms_sorted_alphabetically(self): def test_rpms_sorted_alphabetically(self):
self.assertSorted('pkgs/bar.rpm', 'pkgs/foo.rpm') self.assertSorted("pkgs/bar.rpm", "pkgs/foo.rpm")
def test_images_sorted_alphabetically(self): def test_images_sorted_alphabetically(self):
self.assertSorted('aaa.img', 'images/foo', 'isolinux/foo') self.assertSorted("aaa.img", "images/foo", "isolinux/foo")
def test_other_files_sorted_alphabetically(self): def test_other_files_sorted_alphabetically(self):
self.assertSorted('bar.txt', 'foo.txt') self.assertSorted("bar.txt", "foo.txt")
def test_rpms_after_images(self): def test_rpms_after_images(self):
self.assertSorted('foo.ins', 'bar.rpm') self.assertSorted("foo.ins", "bar.rpm")
def test_other_after_images(self): def test_other_after_images(self):
self.assertSorted('EFI/anything', 'zzz.txt') self.assertSorted("EFI/anything", "zzz.txt")
def test_rpm_after_other(self): def test_rpm_after_other(self):
self.assertSorted('bbb.txt', 'aaa.rpm') self.assertSorted("bbb.txt", "aaa.rpm")
def test_all_kinds(self): def test_all_kinds(self):
self.assertSorted('etc/file', 'ppc/file', 'c.txt', 'd.txt', 'a.rpm', 'b.rpm') self.assertSorted("etc/file", "ppc/file", "c.txt", "d.txt", "a.rpm", "b.rpm")

File diff suppressed because it is too large Load Diff

View File

@ -67,7 +67,6 @@ class TestLinkerBase(helpers.PungiTestCase):
class TestLinkerSymlink(TestLinkerBase): class TestLinkerSymlink(TestLinkerBase):
def test_symlink(self): def test_symlink(self):
path_dst = os.path.join(self.topdir, "symlink") path_dst = os.path.join(self.topdir, "symlink")
@ -81,7 +80,9 @@ class TestLinkerSymlink(TestLinkerBase):
self.linker.symlink(self.path_src, path_dst) self.linker.symlink(self.path_src, path_dst)
# linking existing file with different target must fail # linking existing file with different target must fail
self.assertRaises(OSError, self.linker.symlink, self.path_src, path_dst, relative=False) self.assertRaises(
OSError, self.linker.symlink, self.path_src, path_dst, relative=False
)
def test_symlink_different_type(self): def test_symlink_different_type(self):
# try to symlink 'symlink' -> 'another-file' ('symlink' already exists # try to symlink 'symlink' -> 'another-file' ('symlink' already exists
@ -111,7 +112,6 @@ class TestLinkerSymlink(TestLinkerBase):
class TestLinkerHardlink(TestLinkerBase): class TestLinkerHardlink(TestLinkerBase):
def test_hardlink(self): def test_hardlink(self):
path_dst = os.path.join(self.topdir, "hardlink") path_dst = os.path.join(self.topdir, "hardlink")
@ -146,7 +146,7 @@ class TestLinkerCopy(TestLinkerBase):
def test_copy_to_existing_file_with_different_content(self): def test_copy_to_existing_file_with_different_content(self):
path_dst = os.path.join(self.topdir, "b") path_dst = os.path.join(self.topdir, "b")
helpers.touch(path_dst, 'xxx') helpers.touch(path_dst, "xxx")
self.assertRaises(Exception, self.linker.copy, self.path_src, path_dst) self.assertRaises(Exception, self.linker.copy, self.path_src, path_dst)
def test_copy_to_directory(self): def test_copy_to_directory(self):
@ -260,27 +260,37 @@ class TestLinkerLink(TestLinkerBase):
self.assertEqual(len(self.logger.mock_calls), 1) self.assertEqual(len(self.logger.mock_calls), 1)
def test_link_file_to_existing_destination(self): def test_link_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link, self.assertRaises(
self.file1, self.file2, link_type="hardlink") OSError, self.linker.link, self.file1, self.file2, link_type="hardlink"
)
def test_symlink_file_to_existing_destination(self): def test_symlink_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link, self.assertRaises(
self.file1, self.file2, link_type="symlink") OSError, self.linker.link, self.file1, self.file2, link_type="symlink"
)
def test_copy_file_to_existing_destination(self): def test_copy_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link, self.assertRaises(
self.file1, self.file2, link_type="copy") OSError, self.linker.link, self.file1, self.file2, link_type="copy"
)
def test_hardlink_or_copy_file_to_existing_destination(self): def test_hardlink_or_copy_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link, self.assertRaises(
self.file1, self.file2, link_type="hardlink-or-copy") OSError,
self.linker.link,
self.file1,
self.file2,
link_type="hardlink-or-copy",
)
def test_link_dir_hardlink(self): def test_link_dir_hardlink(self):
self.linker.link(self.src_dir, self.dst_dir, link_type="hardlink") self.linker.link(self.src_dir, self.dst_dir, link_type="hardlink")
self.assertTrue(os.path.isfile(self.dst_file1)) self.assertTrue(os.path.isfile(self.dst_file1))
self.assertTrue(self.same_inode(self.file1, self.dst_file1)) self.assertTrue(self.same_inode(self.file1, self.dst_file1))
self.assertTrue(self.same_inode(self.file3, self.dst_file3)) self.assertTrue(self.same_inode(self.file3, self.dst_file3))
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3)) self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
# always preserve symlinks # always preserve symlinks
self.assertEqual(os.readlink(self.dst_symlink1), "../file1") self.assertEqual(os.readlink(self.dst_symlink1), "../file1")
@ -292,7 +302,9 @@ class TestLinkerLink(TestLinkerBase):
self.assertTrue(os.path.isfile(self.dst_file1)) self.assertTrue(os.path.isfile(self.dst_file1))
self.assertFalse(self.same_inode(self.file1, self.dst_file1)) self.assertFalse(self.same_inode(self.file1, self.dst_file1))
self.assertFalse(self.same_inode(self.file3, self.dst_file3)) self.assertFalse(self.same_inode(self.file3, self.dst_file3))
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3)) self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
# always preserve symlinks # always preserve symlinks
self.assertEqual(os.readlink(self.dst_symlink1), "../file1") self.assertEqual(os.readlink(self.dst_symlink1), "../file1")
@ -323,7 +335,9 @@ class TestLinkerLink(TestLinkerBase):
self.assertTrue(os.path.isfile(self.dst_file1)) self.assertTrue(os.path.isfile(self.dst_file1))
self.assertTrue(os.path.islink(self.dst_file1)) self.assertTrue(os.path.islink(self.dst_file1))
self.assertEqual(os.readlink(self.dst_file1), self.file1) self.assertEqual(os.readlink(self.dst_file1), self.file1)
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3)) self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
self.assertTrue(os.path.isdir(os.path.dirname(self.file3))) self.assertTrue(os.path.isdir(os.path.dirname(self.file3)))
# always preserve symlinks # always preserve symlinks

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -14,56 +14,79 @@ from pungi.wrappers.lorax import LoraxWrapper
class LoraxWrapperTest(unittest.TestCase): class LoraxWrapperTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.lorax = LoraxWrapper() self.lorax = LoraxWrapper()
def test_get_command_with_minimal_arguments(self): def test_get_command_with_minimal_arguments(self):
cmd = self.lorax.get_lorax_cmd("product", "version", "release", cmd = self.lorax.get_lorax_cmd(
"/mnt/repo_baseurl", "/mnt/output_dir") "product", "version", "release", "/mnt/repo_baseurl", "/mnt/output_dir"
)
self.assertEqual(cmd[0], 'lorax') self.assertEqual(cmd[0], "lorax")
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[1:], cmd[1:],
["--product=product", [
"--product=product",
"--version=version", "--version=version",
"--release=release", "--release=release",
"--source=file:///mnt/repo_baseurl", "--source=file:///mnt/repo_baseurl",
"/mnt/output_dir"], "/mnt/output_dir",
],
) )
def test_get_command_with_all_arguments(self): def test_get_command_with_all_arguments(self):
cmd = self.lorax.get_lorax_cmd("product", "version", "release", cmd = self.lorax.get_lorax_cmd(
"/mnt/repo_baseurl", "/mnt/output_dir", "product",
variant="Server", bugurl="http://example.com/", "version",
nomacboot=True, noupgrade=True, is_final=True, "release",
buildarch='x86_64', volid='VOLUME_ID', "/mnt/repo_baseurl",
buildinstallpackages=['bash', 'vim'], "/mnt/output_dir",
add_template=['t1', 't2'], variant="Server",
add_arch_template=['ta1', 'ta2'], bugurl="http://example.com/",
add_template_var=['v1', 'v2'], nomacboot=True,
add_arch_template_var=['va1', 'va2'], noupgrade=True,
is_final=True,
buildarch="x86_64",
volid="VOLUME_ID",
buildinstallpackages=["bash", "vim"],
add_template=["t1", "t2"],
add_arch_template=["ta1", "ta2"],
add_template_var=["v1", "v2"],
add_arch_template_var=["va1", "va2"],
log_dir="/tmp", log_dir="/tmp",
dracut_args=["--foo", "bar"]) dracut_args=["--foo", "bar"],
)
self.assertEqual(cmd[0], 'lorax') self.assertEqual(cmd[0], "lorax")
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[1:], cmd[1:],
["--product=product", "--version=version", [
"--release=release", "--variant=Server", "--product=product",
"--version=version",
"--release=release",
"--variant=Server",
"--source=file:///mnt/repo_baseurl", "--source=file:///mnt/repo_baseurl",
"--bugurl=http://example.com/", "--bugurl=http://example.com/",
"--buildarch=x86_64", "--volid=VOLUME_ID", "--buildarch=x86_64",
"--nomacboot", "--noupgrade", "--isfinal", "--volid=VOLUME_ID",
"--installpkgs=bash", "--installpkgs=vim", "--nomacboot",
"--add-template=t1", "--add-template=t2", "--noupgrade",
"--add-arch-template=ta1", "--add-arch-template=ta2", "--isfinal",
"--add-template-var=v1", "--add-template-var=v2", "--installpkgs=bash",
"--add-arch-template-var=va1", "--add-arch-template-var=va2", "--installpkgs=vim",
"--add-template=t1",
"--add-template=t2",
"--add-arch-template=ta1",
"--add-arch-template=ta2",
"--add-template-var=v1",
"--add-template-var=v2",
"--add-arch-template-var=va1",
"--add-arch-template-var=va2",
"--logfile=/tmp/lorax.log", "--logfile=/tmp/lorax.log",
"--dracut-arg=--foo", "--dracut-arg=--foo",
"--dracut-arg=bar", "--dracut-arg=bar",
"/mnt/output_dir"], "/mnt/output_dir",
],
) )

View File

@ -12,26 +12,25 @@ from pungi import media_split
class ConvertMediaSizeTestCase(unittest.TestCase): class ConvertMediaSizeTestCase(unittest.TestCase):
def test_size_parser_correct_number_as_int(self): def test_size_parser_correct_number_as_int(self):
self.assertEqual(media_split.convert_media_size(123), 123) self.assertEqual(media_split.convert_media_size(123), 123)
def test_size_parser_correct_number_as_str(self): def test_size_parser_correct_number_as_str(self):
self.assertEqual(media_split.convert_media_size('123'), 123) self.assertEqual(media_split.convert_media_size("123"), 123)
def test_size_parser_with_unit_b(self): def test_size_parser_with_unit_b(self):
self.assertEqual(media_split.convert_media_size('123b'), 123) self.assertEqual(media_split.convert_media_size("123b"), 123)
def test_size_parser_with_unit_k(self): def test_size_parser_with_unit_k(self):
self.assertEqual(media_split.convert_media_size('123k'), 123 * 1024) self.assertEqual(media_split.convert_media_size("123k"), 123 * 1024)
def test_size_parser_with_unit_M(self): def test_size_parser_with_unit_M(self):
self.assertEqual(media_split.convert_media_size('123M'), self.assertEqual(media_split.convert_media_size("123M"), 123 * 1024 * 1024)
123 * 1024 * 1024)
def test_size_parser_with_unit_G(self): def test_size_parser_with_unit_G(self):
self.assertEqual(media_split.convert_media_size('123G'), self.assertEqual(
123 * 1024 * 1024 * 1024) media_split.convert_media_size("123G"), 123 * 1024 * 1024 * 1024
)
def test_size_parser_with_negative_number(self): def test_size_parser_with_negative_number(self):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
@ -39,11 +38,10 @@ class ConvertMediaSizeTestCase(unittest.TestCase):
def test_size_parser_with_unknown_unit(self): def test_size_parser_with_unknown_unit(self):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
media_split.convert_media_size('123X') media_split.convert_media_size("123X")
class ConvertFileSizeTestCase(unittest.TestCase): class ConvertFileSizeTestCase(unittest.TestCase):
def test_round_up(self): def test_round_up(self):
self.assertEqual(media_split.convert_file_size(123, 2048), 2048) self.assertEqual(media_split.convert_file_size(123, 2048), 2048)
@ -56,71 +54,78 @@ def bl(s):
class MediaSplitterTestCase(unittest.TestCase): class MediaSplitterTestCase(unittest.TestCase):
def setUp(self): def setUp(self):
self.compose = mock.Mock() self.compose = mock.Mock()
def test_sum_size(self): def test_sum_size(self):
ms = media_split.MediaSplitter(bl(100)) ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20)) ms.add_file("first", bl(20))
ms.add_file('second', bl(30)) ms.add_file("second", bl(30))
ms.add_file('third', 10) ms.add_file("third", 10)
self.assertEqual(ms.total_size, bl(50) + 10) self.assertEqual(ms.total_size, bl(50) + 10)
self.assertEqual(ms.total_size_in_blocks, bl(51)) self.assertEqual(ms.total_size_in_blocks, bl(51))
def test_add_same_file_twice(self): def test_add_same_file_twice(self):
ms = media_split.MediaSplitter(bl(100)) ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20)) ms.add_file("first", bl(20))
ms.add_file('first', bl(20)) ms.add_file("first", bl(20))
self.assertEqual(ms.total_size, bl(20)) self.assertEqual(ms.total_size, bl(20))
def test_add_same_file_twice_with_different_size(self): def test_add_same_file_twice_with_different_size(self):
ms = media_split.MediaSplitter(bl(100)) ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20)) ms.add_file("first", bl(20))
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
ms.add_file('first', bl(30)) ms.add_file("first", bl(30))
def test_add_too_big_file(self): def test_add_too_big_file(self):
ms = media_split.MediaSplitter(bl(100)) ms = media_split.MediaSplitter(bl(100))
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
ms.add_file('too-big', bl(300)) ms.add_file("too-big", bl(300))
def test_fit_on_one(self): def test_fit_on_one(self):
ms = media_split.MediaSplitter(bl(100), compose=self.compose) ms = media_split.MediaSplitter(bl(100), compose=self.compose)
ms.add_file('first', bl(20)) ms.add_file("first", bl(20))
ms.add_file('second', bl(30)) ms.add_file("second", bl(30))
self.assertEqual(ms.split(), self.assertEqual(ms.split(), [{"files": ["first", "second"], "size": bl(50)}])
[{'files': ['first', 'second'], 'size': bl(50)}])
def test_split_on_two_discs(self): def test_split_on_two_discs(self):
ms = media_split.MediaSplitter(bl(100), compose=self.compose) ms = media_split.MediaSplitter(bl(100), compose=self.compose)
ms.add_file('first', bl(25)) ms.add_file("first", bl(25))
ms.add_file('second', bl(40)) ms.add_file("second", bl(40))
ms.add_file('third', bl(80)) ms.add_file("third", bl(80))
self.assertEqual(ms.split(), self.assertEqual(
[{'files': ['first', 'second'], 'size': bl(65)}, ms.split(),
{'files': ['third'], 'size': bl(80)}]) [
{"files": ["first", "second"], "size": bl(65)},
{"files": ["third"], "size": bl(80)},
],
)
def test_split_with_sticky_file(self): def test_split_with_sticky_file(self):
ms = media_split.MediaSplitter(bl(100)) ms = media_split.MediaSplitter(bl(100))
ms.add_file('sticky', bl(15), sticky=True) ms.add_file("sticky", bl(15), sticky=True)
ms.add_file('first', bl(25)) ms.add_file("first", bl(25))
ms.add_file('second', bl(40)) ms.add_file("second", bl(40))
ms.add_file('third', bl(80)) ms.add_file("third", bl(80))
self.assertEqual(ms.split(), self.assertEqual(
[{'files': ['sticky', 'first', 'second'], 'size': bl(80)}, ms.split(),
{'files': ['sticky', 'third'], 'size': bl(95)}]) [
{"files": ["sticky", "first", "second"], "size": bl(80)},
{"files": ["sticky", "third"], "size": bl(95)},
],
)
def test_split_unlimited_media(self): def test_split_unlimited_media(self):
ms = media_split.MediaSplitter(None, compose=self.compose) ms = media_split.MediaSplitter(None, compose=self.compose)
ms.add_file('first', bl(25)) ms.add_file("first", bl(25))
ms.add_file('second', bl(40)) ms.add_file("second", bl(40))
ms.add_file('third', bl(80)) ms.add_file("third", bl(80))
self.assertEqual(ms.split(), self.assertEqual(
[{'files': ['first', 'second', 'third'], 'size': bl(145)}]) ms.split(), [{"files": ["first", "second", "third"], "size": bl(145)}]
)

View File

@ -11,119 +11,127 @@ from pungi.compose_metadata import discinfo
class DiscInfoTestCase(helpers.PungiTestCase): class DiscInfoTestCase(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(DiscInfoTestCase, self).setUp() super(DiscInfoTestCase, self).setUp()
os.environ['SOURCE_DATE_EPOCH'] = '101010101' os.environ["SOURCE_DATE_EPOCH"] = "101010101"
self.path = os.path.join(self.topdir, 'compose/Server/x86_64/os/.discinfo') self.path = os.path.join(self.topdir, "compose/Server/x86_64/os/.discinfo")
def test_write_discinfo_variant(self): def test_write_discinfo_variant(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir, {"release_name": "Test", "release_version": "1.0"}
'release_version': '1.0', )
})
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server']) metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f: with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'), self.assertEqual(
['101010101', f.read().strip().split("\n"), ["101010101", "Test 1.0", "x86_64", "ALL"]
'Test 1.0', )
'x86_64',
'ALL'])
self.assertEqual(discinfo.read_discinfo(self.path), self.assertEqual(
{'timestamp': '101010101', discinfo.read_discinfo(self.path),
'description': 'Test 1.0', {
'disc_numbers': ['ALL'], "timestamp": "101010101",
'arch': 'x86_64'}) "description": "Test 1.0",
"disc_numbers": ["ALL"],
"arch": "x86_64",
},
)
def test_write_discinfo_custom_description(self): def test_write_discinfo_custom_description(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir,
'release_version': '1.0', {
'release_discinfo_description': 'Fuzzy %(variant_name)s.%(arch)s', "release_name": "Test",
}) "release_version": "1.0",
compose.variants['Server'].name = 'Server' "release_discinfo_description": "Fuzzy %(variant_name)s.%(arch)s",
},
)
compose.variants["Server"].name = "Server"
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server']) metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f: with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'), self.assertEqual(
['101010101', f.read().strip().split("\n"),
'Fuzzy Server.x86_64', ["101010101", "Fuzzy Server.x86_64", "x86_64", "ALL"],
'x86_64', )
'ALL'])
def test_write_discinfo_layered_product(self): def test_write_discinfo_layered_product(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir,
'release_version': '1.0', {
'base_product_name': 'Base', "release_name": "Test",
'base_product_version': 42, "release_version": "1.0",
}) "base_product_name": "Base",
"base_product_version": 42,
},
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server']) metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f: with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'), self.assertEqual(
['101010101', f.read().strip().split("\n"),
'Test 1.0 for Base 42', ["101010101", "Test 1.0 for Base 42", "x86_64", "ALL"],
'x86_64', )
'ALL'])
def test_write_discinfo_integrated_layered_product(self): def test_write_discinfo_integrated_layered_product(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir, {"release_name": "Test", "release_version": "1.0"}
'release_version': '1.0', )
}) compose.variants["ILP"] = mock.Mock(
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'], uid="Server",
type='layered-product', is_empty=False, arches=["x86_64"],
release_name='Integrated', type="layered-product",
release_version='2.1', is_empty=False,
parent=compose.variants['Server']) release_name="Integrated",
release_version="2.1",
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP']) metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
with open(self.path) as f: with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'), self.assertEqual(
['101010101', f.read().strip().split("\n"),
'Integrated 2.1 for Test 1', ["101010101", "Integrated 2.1 for Test 1", "x86_64", "ALL"],
'x86_64', )
'ALL'])
def test_addons_dont_have_discinfo(self): def test_addons_dont_have_discinfo(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir, {"release_name": "Test", "release_version": "1.0"}
'release_version': '1.0', )
}) compose.variants["ILP"] = mock.Mock(
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'], uid="Server",
type='addon', is_empty=False, arches=["x86_64"],
parent=compose.variants['Server']) type="addon",
is_empty=False,
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP']) metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
self.assertFalse(os.path.isfile(self.path)) self.assertFalse(os.path.isfile(self.path))
class MediaRepoTestCase(helpers.PungiTestCase): class MediaRepoTestCase(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(MediaRepoTestCase, self).setUp() super(MediaRepoTestCase, self).setUp()
self.path = os.path.join(self.topdir, 'compose/Server/x86_64/os/media.repo') self.path = os.path.join(self.topdir, "compose/Server/x86_64/os/media.repo")
def test_write_media_repo(self): def test_write_media_repo(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir, {"release_name": "Test", "release_version": "1.0"}
'release_version': '1.0', )
})
metadata.write_media_repo(compose, 'x86_64', compose.variants['Server'], metadata.write_media_repo(
timestamp=123456) compose, "x86_64", compose.variants["Server"], timestamp=123456
)
with open(self.path) as f: with open(self.path) as f:
lines = f.read().strip().split('\n') lines = f.read().strip().split("\n")
self.assertEqual(lines[0], '[InstallMedia]') self.assertEqual(lines[0], "[InstallMedia]")
six.assertCountEqual( six.assertCountEqual(
self, self,
lines[1:], lines[1:],
@ -137,15 +145,18 @@ class MediaRepoTestCase(helpers.PungiTestCase):
) )
def test_addons_dont_have_media_repo(self): def test_addons_dont_have_media_repo(self):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'release_name': 'Test', self.topdir, {"release_name": "Test", "release_version": "1.0"}
'release_version': '1.0', )
}) compose.variants["ILP"] = mock.Mock(
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'], uid="Server",
type='addon', is_empty=False, arches=["x86_64"],
parent=compose.variants['Server']) type="addon",
is_empty=False,
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP']) metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
self.assertFalse(os.path.isfile(self.path)) self.assertFalse(os.path.isfile(self.path))
@ -155,7 +166,6 @@ BAR_MD5 = {"md5": "37b51d194a7513e45b56f6524f2d51f2"}
class TestPopulateExtraFiles(helpers.PungiTestCase): class TestPopulateExtraFiles(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestPopulateExtraFiles, self).setUp() super(TestPopulateExtraFiles, self).setUp()
self.variant = mock.Mock(uid="Server") self.variant = mock.Mock(uid="Server")
@ -185,12 +195,8 @@ class TestPopulateExtraFiles(helpers.PungiTestCase):
self, self,
self.metadata.mock_calls, self.metadata.mock_calls,
[ [
mock.call.add( mock.call.add("Server", "x86_64", "Server/x86_64/os/foo", 3, FOO_MD5),
"Server", "x86_64", "Server/x86_64/os/foo", 3, FOO_MD5 mock.call.add("Server", "x86_64", "Server/x86_64/os/bar", 3, BAR_MD5),
),
mock.call.add(
"Server", "x86_64", "Server/x86_64/os/bar", 3, BAR_MD5
),
mock.call.dump_for_tree( mock.call.dump_for_tree(
mock.ANY, "Server", "x86_64", "Server/x86_64/os/" mock.ANY, "Server", "x86_64", "Server/x86_64/os/"
), ),

View File

@ -5,6 +5,7 @@ import json
import mock import mock
import os import os
import sys import sys
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -18,124 +19,131 @@ mock_datetime.utcnow.return_value = datetime(2017, 6, 28, 9, 34)
mock_datetime.side_effect = lambda *args, **kwargs: datetime(*args, **kwargs) mock_datetime.side_effect = lambda *args, **kwargs: datetime(*args, **kwargs)
@mock.patch('pungi.util.makedirs') @mock.patch("pungi.util.makedirs")
@mock.patch('pungi.notifier.datetime', new=mock_datetime) @mock.patch("pungi.notifier.datetime", new=mock_datetime)
class TestNotifier(unittest.TestCase): class TestNotifier(unittest.TestCase):
def setUp(self): def setUp(self):
super(TestNotifier, self).setUp() super(TestNotifier, self).setUp()
self.logfile = '/logs/notifications/notification-2017-06-28_09-34-00.log' self.logfile = "/logs/notifications/notification-2017-06-28_09-34-00.log"
self.compose = mock.Mock( self.compose = mock.Mock(
compose_id='COMPOSE_ID', compose_id="COMPOSE_ID",
compose_date='20171031', compose_date="20171031",
compose_respin=1, compose_respin=1,
compose_label='Updates-20171031.1021', compose_label="Updates-20171031.1021",
compose_type='production', compose_type="production",
log_warning=mock.Mock(), log_warning=mock.Mock(),
conf={ conf={
'release_name': 'Layer', "release_name": "Layer",
'release_short': 'L', "release_short": "L",
'release_version': '27', "release_version": "27",
'release_type': 'updates', "release_type": "updates",
'release_is_layered': True, "release_is_layered": True,
'base_product_name': 'Base', "base_product_name": "Base",
'base_product_short': 'B', "base_product_short": "B",
'base_product_version': '1', "base_product_version": "1",
'base_product_type': 'ga', "base_product_type": "ga",
}, },
paths=mock.Mock( paths=mock.Mock(
compose=mock.Mock( compose=mock.Mock(topdir=mock.Mock(return_value="/a/b")),
topdir=mock.Mock(return_value='/a/b') log=mock.Mock(topdir=mock.Mock(return_value="/logs")),
), ),
log=mock.Mock(
topdir=mock.Mock(return_value='/logs')
) )
) self.data = {"foo": "bar", "baz": "quux"}
)
self.data = {'foo': 'bar', 'baz': 'quux'}
def _call(self, script, cmd, **kwargs): def _call(self, script, cmd, **kwargs):
data = self.data.copy() data = self.data.copy()
data['compose_id'] = 'COMPOSE_ID' data["compose_id"] = "COMPOSE_ID"
data['location'] = '/a/b' data["location"] = "/a/b"
data['compose_date'] = '20171031' data["compose_date"] = "20171031"
data['compose_type'] = 'production' data["compose_type"] = "production"
data['compose_respin'] = 1 data["compose_respin"] = 1
data['compose_label'] = 'Updates-20171031.1021' data["compose_label"] = "Updates-20171031.1021"
data['release_short'] = 'L' data["release_short"] = "L"
data['release_name'] = 'Layer' data["release_name"] = "Layer"
data['release_version'] = '27' data["release_version"] = "27"
data['release_type'] = 'updates' data["release_type"] = "updates"
data['release_is_layered'] = True data["release_is_layered"] = True
data['base_product_name'] = 'Base' data["base_product_name"] = "Base"
data['base_product_version'] = '1' data["base_product_version"] = "1"
data['base_product_short'] = 'B' data["base_product_short"] = "B"
data['base_product_type'] = 'ga' data["base_product_type"] = "ga"
data.update(kwargs) data.update(kwargs)
return mock.call((script, cmd), return mock.call(
(script, cmd),
stdin_data=json.dumps(data), stdin_data=json.dumps(data),
can_fail=True, return_stdout=False, can_fail=True,
return_stdout=False,
workdir=self.compose.paths.compose.topdir.return_value, workdir=self.compose.paths.compose.topdir.return_value,
universal_newlines=True, show_cmd=True, logfile=self.logfile) universal_newlines=True,
show_cmd=True,
logfile=self.logfile,
)
@mock.patch('pungi.util.translate_path') @mock.patch("pungi.util.translate_path")
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_invokes_script(self, run, translate_path, makedirs): def test_invokes_script(self, run, translate_path, makedirs):
run.return_value = (0, None) run.return_value = (0, None)
translate_path.side_effect = lambda compose, x: x translate_path.side_effect = lambda compose, x: x
n = PungiNotifier(['run-notify']) n = PungiNotifier(["run-notify"])
n.compose = self.compose n.compose = self.compose
n.send('cmd', **self.data) n.send("cmd", **self.data)
makedirs.assert_called_once_with('/logs/notifications') makedirs.assert_called_once_with("/logs/notifications")
self.assertEqual(run.call_args_list, [self._call('run-notify', 'cmd')]) self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
@mock.patch('pungi.util.translate_path') @mock.patch("pungi.util.translate_path")
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_invokes_multiple_scripts(self, run, translate_path, makedirs): def test_invokes_multiple_scripts(self, run, translate_path, makedirs):
run.return_value = (0, None) run.return_value = (0, None)
translate_path.side_effect = lambda compose, x: x translate_path.side_effect = lambda compose, x: x
n = PungiNotifier(['run-notify', 'ping-user']) n = PungiNotifier(["run-notify", "ping-user"])
n.compose = self.compose n.compose = self.compose
n.send('cmd', **self.data) n.send("cmd", **self.data)
self.assertEqual( self.assertEqual(
sorted(run.call_args_list), sorted(run.call_args_list),
sorted([self._call('run-notify', 'cmd'), sorted([self._call("run-notify", "cmd"), self._call("ping-user", "cmd")]),
self._call('ping-user', 'cmd')])) )
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_translates_path(self, run, makedirs): def test_translates_path(self, run, makedirs):
self.compose.paths.compose.topdir.return_value = '/root/a/b' self.compose.paths.compose.topdir.return_value = "/root/a/b"
self.compose.conf["translate_paths"] = [("/root/", "http://example.com/compose/")] self.compose.conf["translate_paths"] = [
("/root/", "http://example.com/compose/")
]
run.return_value = (0, None) run.return_value = (0, None)
n = PungiNotifier(['run-notify']) n = PungiNotifier(["run-notify"])
n.compose = self.compose n.compose = self.compose
n.send('cmd', **self.data) n.send("cmd", **self.data)
self.assertEqual( self.assertEqual(
run.call_args_list, run.call_args_list,
[self._call('run-notify', 'cmd', location='http://example.com/compose/a/b')]) [
self._call(
"run-notify", "cmd", location="http://example.com/compose/a/b"
)
],
)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_does_not_run_without_config(self, run, makedirs): def test_does_not_run_without_config(self, run, makedirs):
n = PungiNotifier(None) n = PungiNotifier(None)
n.send('cmd', foo='bar', baz='quux') n.send("cmd", foo="bar", baz="quux")
self.assertFalse(run.called) self.assertFalse(run.called)
@mock.patch('pungi.util.translate_path') @mock.patch("pungi.util.translate_path")
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_logs_warning_on_failure(self, run, translate_path, makedirs): def test_logs_warning_on_failure(self, run, translate_path, makedirs):
translate_path.side_effect = lambda compose, x: x translate_path.side_effect = lambda compose, x: x
run.return_value = (1, None) run.return_value = (1, None)
n = PungiNotifier(['run-notify']) n = PungiNotifier(["run-notify"])
n.compose = self.compose n.compose = self.compose
n.send('cmd', **self.data) n.send("cmd", **self.data)
self.assertEqual(run.call_args_list, [self._call('run-notify', 'cmd')]) self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
self.assertTrue(self.compose.log_warning.called) self.assertTrue(self.compose.log_warning.called)

View File

@ -730,9 +730,7 @@ class TestPrepareComposeDir(PungiTestCase):
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs"))) self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts"))) self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global"))) self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global")))
self.assertFileContent( self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
os.path.join(self.topdir, "STATUS"), "STARTED"
)
def test_restarting_compose(self, gtd): def test_restarting_compose(self, gtd):
args = mock.Mock(name="args", spec=["label", "compose_path"]) args = mock.Mock(name="args", spec=["label", "compose_path"])
@ -914,7 +912,7 @@ class TestSendNotification(BaseTestCase):
self.assertEqual(len(notif.mock_calls), 2) self.assertEqual(len(notif.mock_calls), 2)
self.assertEqual(notif.mock_calls[0], mock.call(["handler"])) self.assertEqual(notif.mock_calls[0], mock.call(["handler"]))
_, args, kwargs = notif.mock_calls[1] _, args, kwargs = notif.mock_calls[1]
self.assertEqual(args, ("status-change", )) self.assertEqual(args, ("status-change",))
self.assertEqual( self.assertEqual(
kwargs, kwargs,
{ {

View File

@ -13,13 +13,10 @@ from pungi.phases import osbs
class OSBSPhaseTest(helpers.PungiTestCase): class OSBSPhaseTest(helpers.PungiTestCase):
@mock.patch("pungi.phases.osbs.ThreadPool")
@mock.patch('pungi.phases.osbs.ThreadPool')
def test_run(self, ThreadPool): def test_run(self, ThreadPool):
cfg = helpers.IterableMock() cfg = helpers.IterableMock()
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": cfg}})
'osbs': {'^Everything$': cfg}
})
pool = ThreadPool.return_value pool = ThreadPool.return_value
@ -27,10 +24,12 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.run() phase.run()
self.assertEqual(len(pool.add.call_args_list), 1) self.assertEqual(len(pool.add.call_args_list), 1)
self.assertEqual(pool.queue_put.call_args_list, self.assertEqual(
[mock.call((compose, compose.variants['Everything'], cfg))]) pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], cfg))],
)
@mock.patch('pungi.phases.osbs.ThreadPool') @mock.patch("pungi.phases.osbs.ThreadPool")
def test_skip_without_config(self, ThreadPool): def test_skip_without_config(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {}) compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None compose.just_phases = None
@ -38,11 +37,9 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase = osbs.OSBSPhase(compose) phase = osbs.OSBSPhase(compose)
self.assertTrue(phase.skip()) self.assertTrue(phase.skip())
@mock.patch('pungi.phases.osbs.ThreadPool') @mock.patch("pungi.phases.osbs.ThreadPool")
def test_dump_metadata(self, ThreadPool): def test_dump_metadata(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
'osbs': {'^Everything$': {}}
})
compose.just_phases = None compose.just_phases = None
compose.skip_phases = [] compose.skip_phases = []
compose.notifier = mock.Mock() compose.notifier = mock.Mock()
@ -52,11 +49,11 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.pool.metadata = METADATA phase.pool.metadata = METADATA
phase.dump_metadata() phase.dump_metadata()
with open(self.topdir + '/compose/metadata/osbs.json') as f: with open(self.topdir + "/compose/metadata/osbs.json") as f:
data = json.load(f) data = json.load(f)
self.assertEqual(data, METADATA) self.assertEqual(data, METADATA)
@mock.patch('pungi.phases.osbs.ThreadPool') @mock.patch("pungi.phases.osbs.ThreadPool")
def test_dump_metadata_after_skip(self, ThreadPool): def test_dump_metadata_after_skip(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {}) compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None compose.just_phases = None
@ -66,13 +63,11 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.stop() phase.stop()
phase.dump_metadata() phase.dump_metadata()
self.assertFalse(os.path.isfile(self.topdir + '/compose/metadata/osbs.json')) self.assertFalse(os.path.isfile(self.topdir + "/compose/metadata/osbs.json"))
@mock.patch("pungi.phases.osbs.ThreadPool") @mock.patch("pungi.phases.osbs.ThreadPool")
def test_request_push(self, ThreadPool): def test_request_push(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
"osbs": {"^Everything$": {}}
})
compose.just_phases = None compose.just_phases = None
compose.skip_phases = [] compose.skip_phases = []
compose.notifier = mock.Mock() compose.notifier = mock.Mock()
@ -87,112 +82,124 @@ class OSBSPhaseTest(helpers.PungiTestCase):
self.assertEqual(data, phase.pool.registries) self.assertEqual(data, phase.pool.registries)
self.assertEqual( self.assertEqual(
compose.notifier.call_args_list, compose.notifier.call_args_list, [],
[],
) )
TASK_RESULT = { TASK_RESULT = {
'koji_builds': ['54321'], "koji_builds": ["54321"],
'repositories': [ "repositories": [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632', "registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
] ],
} }
BUILD_INFO = { BUILD_INFO = {
'completion_time': '2016-06-17 18:25:30', "completion_time": "2016-06-17 18:25:30",
'completion_ts': 1466187930.0, "completion_ts": 1466187930.0,
'creation_event_id': 13227702, "creation_event_id": 13227702,
'creation_time': '2016-06-17 18:25:57.611172', "creation_time": "2016-06-17 18:25:57.611172",
'creation_ts': 1466187957.61117, "creation_ts": 1466187957.61117,
'epoch': None, "epoch": None,
'extra': {'container_koji_task_id': '12345', 'image': {}}, "extra": {"container_koji_task_id": "12345", "image": {}},
'id': 54321, "id": 54321,
'name': 'my-name', "name": "my-name",
'nvr': 'my-name-1.0-1', "nvr": "my-name-1.0-1",
'owner_id': 3436, "owner_id": 3436,
'owner_name': 'osbs', "owner_name": "osbs",
'package_id': 50072, "package_id": 50072,
'package_name': 'my-name', "package_name": "my-name",
'release': '1', "release": "1",
'source': 'git://example.com/repo?#BEEFCAFE', "source": "git://example.com/repo?#BEEFCAFE",
'start_time': '2016-06-17 18:16:37', "start_time": "2016-06-17 18:16:37",
'start_ts': 1466187397.0, "start_ts": 1466187397.0,
'state': 1, "state": 1,
'task_id': None, "task_id": None,
'version': '1.0', "version": "1.0",
'volume_id': 0, "volume_id": 0,
'volume_name': 'DEFAULT' "volume_name": "DEFAULT",
} }
ARCHIVES = [ ARCHIVES = [
{'build_id': 54321, {
'buildroot_id': 2955357, "build_id": 54321,
'checksum': 'a2922842dc80873ac782da048c54f6cc', "buildroot_id": 2955357,
'checksum_type': 0, "checksum": "a2922842dc80873ac782da048c54f6cc",
'extra': { "checksum_type": 0,
'docker': { "extra": {
'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7', "docker": {
'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e', "id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']}, "parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
'image': {'arch': 'x86_64'}}, "repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
'filename': 'docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz', },
'id': 1436049, "image": {"arch": "x86_64"},
'metadata_only': False, },
'size': 174038795, "filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",
'type_description': 'Tar file', "id": 1436049,
'type_extensions': 'tar tar.gz tar.bz2 tar.xz', "metadata_only": False,
'type_id': 4, "size": 174038795,
'type_name': 'tar'} "type_description": "Tar file",
"type_extensions": "tar tar.gz tar.bz2 tar.xz",
"type_id": 4,
"type_name": "tar",
}
] ]
METADATA = { METADATA = {
'Server': {'x86_64': [{ "Server": {
'name': 'my-name', "x86_64": [
'version': '1.0', {
'release': '1', "name": "my-name",
'nvr': 'my-name-1.0-1', "version": "1.0",
'creation_time': BUILD_INFO['creation_time'], "release": "1",
'filename': ARCHIVES[0]['filename'], "nvr": "my-name-1.0-1",
'size': ARCHIVES[0]['size'], "creation_time": BUILD_INFO["creation_time"],
'docker': { "filename": ARCHIVES[0]["filename"],
'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7', "size": ARCHIVES[0]["size"],
'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e', "docker": {
'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']}, "id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
'image': {'arch': 'x86_64'}, "parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
'checksum': ARCHIVES[0]['checksum'], "repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
}]} },
"image": {"arch": "x86_64"},
"checksum": ARCHIVES[0]["checksum"],
}
]
}
} }
SCRATCH_TASK_RESULT = { SCRATCH_TASK_RESULT = {
'koji_builds': [], "koji_builds": [],
'repositories': [ "repositories": [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632', "registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
] ],
} }
SCRATCH_METADATA = { SCRATCH_METADATA = {
"Server": {'scratch': [{ "Server": {
"scratch": [
{
"koji_task": 12345, "koji_task": 12345,
"repositories": [ "repositories": [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632', "registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
],
}
] ]
}]} }
} }
class OSBSThreadTest(helpers.PungiTestCase): class OSBSThreadTest(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(OSBSThreadTest, self).setUp() super(OSBSThreadTest, self).setUp()
self.pool = mock.Mock(metadata={}, registries={}) self.pool = mock.Mock(metadata={}, registries={})
self.t = osbs.OSBSThread(self.pool) self.t = osbs.OSBSThread(self.pool)
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(
'koji_profile': 'koji', self.topdir,
'translate_paths': [ {
(self.topdir, 'http://root'), "koji_profile": "koji",
] "translate_paths": [(self.topdir, "http://root")],
}) },
)
def _setupMock(self, KojiWrapper, scratch=False): def _setupMock(self, KojiWrapper, scratch=False):
self.wrapper = KojiWrapper.return_value self.wrapper = KojiWrapper.return_value
@ -203,7 +210,10 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.wrapper.koji_proxy.getTaskResult.return_value = TASK_RESULT self.wrapper.koji_proxy.getTaskResult.return_value = TASK_RESULT
self.wrapper.koji_proxy.getBuild.return_value = BUILD_INFO self.wrapper.koji_proxy.getBuild.return_value = BUILD_INFO
self.wrapper.koji_proxy.listArchives.return_value = ARCHIVES self.wrapper.koji_proxy.listArchives.return_value = ARCHIVES
self.wrapper.koji_proxy.getLatestBuilds.return_value = [mock.Mock(), mock.Mock()] self.wrapper.koji_proxy.getLatestBuilds.return_value = [
mock.Mock(),
mock.Mock(),
]
self.wrapper.koji_proxy.getNextRelease.return_value = 3 self.wrapper.koji_proxy.getNextRelease.return_value = 3
self.wrapper.watch_task.return_value = 0 self.wrapper.watch_task.return_value = 0
@ -211,149 +221,168 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.maxDiff = None self.maxDiff = None
if scratch: if scratch:
metadata = copy.deepcopy(SCRATCH_METADATA) metadata = copy.deepcopy(SCRATCH_METADATA)
metadata['Server']['scratch'][0]['compose_id'] = self.compose.compose_id metadata["Server"]["scratch"][0]["compose_id"] = self.compose.compose_id
metadata['Server']['scratch'][0]['koji_task'] = 12345 metadata["Server"]["scratch"][0]["koji_task"] = 12345
else: else:
metadata = copy.deepcopy(METADATA) metadata = copy.deepcopy(METADATA)
metadata['Server']['x86_64'][0]['compose_id'] = self.compose.compose_id metadata["Server"]["x86_64"][0]["compose_id"] = self.compose.compose_id
metadata['Server']['x86_64'][0]['koji_task'] = 12345 metadata["Server"]["x86_64"][0]["koji_task"] = 12345
self.assertEqual(self.pool.metadata, metadata) self.assertEqual(self.pool.metadata, metadata)
def _assertCorrectCalls(self, opts, setupCalls=None, scratch=False): def _assertCorrectCalls(self, opts, setupCalls=None, scratch=False):
setupCalls = setupCalls or [] setupCalls = setupCalls or []
options = {'yum_repourls': ['http://root/work/global/tmp-Server/compose-rpms-Server-1.repo']} options = {
"yum_repourls": [
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo"
]
}
if scratch: if scratch:
options['scratch'] = True options["scratch"] = True
options.update(opts) options.update(opts)
expect_calls = [mock.call.login()] + setupCalls expect_calls = [mock.call.login()] + setupCalls
expect_calls.extend([ expect_calls.extend(
[
mock.call.koji_proxy.buildContainer( mock.call.koji_proxy.buildContainer(
'git://example.com/repo?#BEEFCAFE', "git://example.com/repo?#BEEFCAFE",
'f24-docker-candidate', "f24-docker-candidate",
options, options,
priority=None), priority=None,
),
mock.call.watch_task( mock.call.watch_task(
12345, self.topdir + '/logs/global/osbs/Server-1-watch-task.log'), 12345, self.topdir + "/logs/global/osbs/Server-1-watch-task.log"
mock.call.koji_proxy.getTaskResult(12345)]) ),
mock.call.koji_proxy.getTaskResult(12345),
]
)
if not scratch: if not scratch:
expect_calls.extend([mock.call.koji_proxy.getBuild(54321), expect_calls.extend(
mock.call.koji_proxy.listArchives(54321)]) [
mock.call.koji_proxy.getBuild(54321),
mock.call.koji_proxy.listArchives(54321),
]
)
self.assertEqual(self.wrapper.mock_calls, expect_calls) self.assertEqual(self.wrapper.mock_calls, expect_calls)
def _assertRepoFile(self, variants=None, gpgkey=None): def _assertRepoFile(self, variants=None, gpgkey=None):
variants = variants or ['Server'] variants = variants or ["Server"]
for variant in variants: for variant in variants:
with open(self.topdir + '/work/global/tmp-%s/compose-rpms-%s-1.repo' % (variant, variant)) as f: with open(
lines = f.read().split('\n') self.topdir
self.assertIn('baseurl=http://root/compose/%s/$basearch/os' % variant, lines) + "/work/global/tmp-%s/compose-rpms-%s-1.repo" % (variant, variant)
) as f:
lines = f.read().split("\n")
self.assertIn(
"baseurl=http://root/compose/%s/$basearch/os" % variant, lines
)
if gpgkey: if gpgkey:
self.assertIn('gpgcheck=1', lines) self.assertIn("gpgcheck=1", lines)
self.assertIn('gpgkey=%s' % gpgkey, lines) self.assertIn("gpgkey=%s" % gpgkey, lines)
def _assertConfigCorrect(self, cfg): def _assertConfigCorrect(self, cfg):
config = copy.deepcopy(self.compose.conf) config = copy.deepcopy(self.compose.conf)
config['osbs'] = { config["osbs"] = {"^Server$": cfg}
'^Server$': cfg
}
self.assertEqual(([], []), checks.validate(config, offline=True)) self.assertEqual(([], []), checks.validate(config, offline=True))
def _assertConfigMissing(self, cfg, key): def _assertConfigMissing(self, cfg, key):
config = copy.deepcopy(self.compose.conf) config = copy.deepcopy(self.compose.conf)
config['osbs'] = { config["osbs"] = {"^Server$": cfg}
'^Server$': cfg
}
errors, warnings = checks.validate(config, offline=True) errors, warnings = checks.validate(config, offline=True)
self.assertIn( self.assertIn(
"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas" % cfg, "Failed validation in osbs.^Server$: %r is not valid under any of the given schemas"
% cfg,
errors, errors,
) )
self.assertIn(" Possible reason: %r is a required property" % key, errors) self.assertIn(" Possible reason: %r is a required property" % key, errors)
self.assertEqual([], warnings) self.assertEqual([], warnings)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_minimal_run(self, KojiWrapper): def test_minimal_run(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
} }
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'}) self._assertCorrectCalls({"git_branch": "f24-docker"})
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile() self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_failable(self, KojiWrapper): def test_run_failable(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'failable': ['*'] "failable": ["*"],
} }
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'}) self._assertCorrectCalls({"git_branch": "f24-docker"})
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile() self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_more_args(self, KojiWrapper): def test_run_with_more_args(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
} }
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'name': 'my-name', 'version': '1.0', 'git_branch': 'f24-docker'}) self._assertCorrectCalls(
{"name": "my-name", "version": "1.0", "git_branch": "f24-docker"}
)
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile() self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos(self, KojiWrapper): def test_run_with_extra_repos(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
"repo": ["Everything", "http://pkgs.example.com/my.repo", "/extra/repo"], "repo": ["Everything", "http://pkgs.example.com/my.repo", "/extra/repo"],
} }
self.compose.conf["translate_paths"].append(("/extra", "http://example.com")) self.compose.conf["translate_paths"].append(("/extra", "http://example.com"))
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
options = { options = {
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'yum_repourls': [ "yum_repourls": [
'http://root/work/global/tmp-Server/compose-rpms-Server-1.repo', "http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
'http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo', "http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
'http://pkgs.example.com/my.repo', "http://pkgs.example.com/my.repo",
"http://root/work/global/tmp/compose-rpms-local-1.repo", "http://root/work/global/tmp/compose-rpms-local-1.repo",
] ],
} }
self._assertCorrectCalls(options) self._assertCorrectCalls(options)
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile(['Server', 'Everything']) self._assertRepoFile(["Server", "Everything"])
with open(os.path.join(self.topdir, "work/global/tmp/compose-rpms-local-1.repo")) as f: with open(
os.path.join(self.topdir, "work/global/tmp/compose-rpms-local-1.repo")
) as f:
self.assertIn("baseurl=http://example.com/repo\n", f) self.assertIn("baseurl=http://example.com/repo\n", f)
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper") @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
@ -380,7 +409,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo", "http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo", "http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo", "http://pkgs.example.com/my.repo",
] ],
} }
self._assertCorrectCalls(options) self._assertCorrectCalls(options)
self._assertCorrectMetadata() self._assertCorrectMetadata()
@ -411,146 +440,148 @@ class OSBSThreadTest(helpers.PungiTestCase):
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo", "http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo", "http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo", "http://pkgs.example.com/my.repo",
] ],
} }
self._assertCorrectCalls(options) self._assertCorrectCalls(options)
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile(["Server", "Everything"]) self._assertRepoFile(["Server", "Everything"])
self.assertEqual(self.t.pool.registries, {"my-name-1.0-1": [{"foo": "bar"}]}) self.assertEqual(self.t.pool.registries, {"my-name-1.0-1": [{"foo": "bar"}]})
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos_in_list(self, KojiWrapper): def test_run_with_extra_repos_in_list(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
'repo': ['Everything', 'Client', 'http://pkgs.example.com/my.repo'], "repo": ["Everything", "Client", "http://pkgs.example.com/my.repo"],
} }
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
options = { options = {
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'yum_repourls': [ "yum_repourls": [
'http://root/work/global/tmp-Server/compose-rpms-Server-1.repo', "http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
'http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo', "http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
'http://root/work/global/tmp-Client/compose-rpms-Client-1.repo', "http://root/work/global/tmp-Client/compose-rpms-Client-1.repo",
'http://pkgs.example.com/my.repo', "http://pkgs.example.com/my.repo",
] ],
} }
self._assertCorrectCalls(options) self._assertCorrectCalls(options)
self._assertCorrectMetadata() self._assertCorrectMetadata()
self._assertRepoFile(['Server', 'Everything', 'Client']) self._assertRepoFile(["Server", "Everything", "Client"])
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_gpgkey_enabled(self, KojiWrapper): def test_run_with_gpgkey_enabled(self, KojiWrapper):
gpgkey = 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release' gpgkey = "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release"
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
'repo': ['Everything', 'Client', 'http://pkgs.example.com/my.repo'], "repo": ["Everything", "Client", "http://pkgs.example.com/my.repo"],
'gpgkey': gpgkey, "gpgkey": gpgkey,
} }
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertRepoFile(['Server', 'Everything', 'Client'], gpgkey=gpgkey) self._assertRepoFile(["Server", "Everything", "Client"], gpgkey=gpgkey)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos_missing_variant(self, KojiWrapper): def test_run_with_extra_repos_missing_variant(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
'version': '1.0', "version": "1.0",
'repo': 'Gold', "repo": "Gold",
} }
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self.assertIn('no variant Gold', str(ctx.exception)) self.assertIn("no variant Gold", str(ctx.exception))
def test_run_with_missing_url(self): def test_run_with_missing_url(self):
cfg = { cfg = {
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
} }
self._assertConfigMissing(cfg, 'url') self._assertConfigMissing(cfg, "url")
def test_run_with_missing_target(self): def test_run_with_missing_target(self):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'name': 'my-name', "name": "my-name",
} }
self._assertConfigMissing(cfg, 'target') self._assertConfigMissing(cfg, "target")
def test_run_with_missing_git_branch(self): def test_run_with_missing_git_branch(self):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
} }
self._assertConfigMissing(cfg, 'git_branch') self._assertConfigMissing(cfg, "git_branch")
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_failing_task(self, KojiWrapper): def test_failing_task(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'fedora-24-docker-candidate', "target": "fedora-24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
} }
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self.wrapper.watch_task.return_value = 1 self.wrapper.watch_task.return_value = 1
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self.assertRegexpMatches(str(ctx.exception), r"task 12345 failed: see .+ for details") self.assertRegexpMatches(
str(ctx.exception), r"task 12345 failed: see .+ for details"
)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_failing_task_with_failable(self, KojiWrapper): def test_failing_task_with_failable(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'fedora-24-docker-candidate', "target": "fedora-24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'failable': ['*'] "failable": ["*"],
} }
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper) self._setupMock(KojiWrapper)
self.wrapper.watch_task.return_value = 1 self.wrapper.watch_task.return_value = 1
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper') @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_scratch_metadata(self, KojiWrapper): def test_scratch_metadata(self, KojiWrapper):
cfg = { cfg = {
'url': 'git://example.com/repo?#BEEFCAFE', "url": "git://example.com/repo?#BEEFCAFE",
'target': 'f24-docker-candidate', "target": "f24-docker-candidate",
'git_branch': 'f24-docker', "git_branch": "f24-docker",
'scratch': True, "scratch": True,
} }
self._setupMock(KojiWrapper, scratch=True) self._setupMock(KojiWrapper, scratch=True)
self._assertConfigCorrect(cfg) self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1) self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'}, scratch=True) self._assertCorrectCalls({"git_branch": "f24-docker"}, scratch=True)
self._assertCorrectMetadata(scratch=True) self._assertCorrectMetadata(scratch=True)
self._assertRepoFile() self._assertRepoFile()

File diff suppressed because it is too large Load Diff

View File

@ -12,19 +12,17 @@ from pungi.phases import ostree
class OSTreePhaseTest(helpers.PungiTestCase): class OSTreePhaseTest(helpers.PungiTestCase):
@mock.patch("pungi.phases.ostree.ThreadPool")
@mock.patch('pungi.phases.ostree.ThreadPool')
def test_run(self, ThreadPool): def test_run(self, ThreadPool):
cfg = helpers.IterableMock() cfg = helpers.IterableMock()
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'ostree': [ self.topdir,
('^Everything$', {'x86_64': cfg}) {
], "ostree": [("^Everything$", {"x86_64": cfg})],
'runroot': True, "runroot": True,
"translate_paths": [ "translate_paths": [(self.topdir, "http://example.com")],
(self.topdir, "http://example.com") },
], )
})
pool = ThreadPool.return_value pool = ThreadPool.return_value
@ -39,10 +37,12 @@ class OSTreePhaseTest(helpers.PungiTestCase):
"http://example.com/work/$basearch/repo/p2", "http://example.com/work/$basearch/repo/p2",
], ],
) )
self.assertEqual(pool.queue_put.call_args_list, self.assertEqual(
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))]) pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], "x86_64", cfg))],
)
@mock.patch('pungi.phases.ostree.ThreadPool') @mock.patch("pungi.phases.ostree.ThreadPool")
def test_skip_without_config(self, ThreadPool): def test_skip_without_config(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {}) compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None compose.just_phases = None
@ -50,14 +50,10 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase = ostree.OSTreePhase(compose) phase = ostree.OSTreePhase(compose)
self.assertTrue(phase.skip()) self.assertTrue(phase.skip())
@mock.patch('pungi.phases.ostree.ThreadPool') @mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config(self, ThreadPool): def test_run_with_simple_config(self, ThreadPool):
cfg = helpers.IterableMock(get=lambda x, y: None) cfg = helpers.IterableMock(get=lambda x, y: None)
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"ostree": {"^Everything$": cfg}})
'ostree': {
'^Everything$': cfg
}
})
pool = ThreadPool.return_value pool = ThreadPool.return_value
@ -65,18 +61,18 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run() phase.run()
self.assertEqual(len(pool.add.call_args_list), 2) self.assertEqual(len(pool.add.call_args_list), 2)
self.assertEqual(pool.queue_put.call_args_list, self.assertEqual(
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg)), pool.queue_put.call_args_list,
mock.call((compose, compose.variants['Everything'], 'amd64', cfg))]) [
mock.call((compose, compose.variants["Everything"], "x86_64", cfg)),
mock.call((compose, compose.variants["Everything"], "amd64", cfg)),
],
)
@mock.patch('pungi.phases.ostree.ThreadPool') @mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config_limit_arches(self, ThreadPool): def test_run_with_simple_config_limit_arches(self, ThreadPool):
cfg = helpers.IterableMock(get=lambda x, y: ['x86_64']) cfg = helpers.IterableMock(get=lambda x, y: ["x86_64"])
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"ostree": {"^Everything$": cfg}})
'ostree': {
'^Everything$': cfg
}
})
pool = ThreadPool.return_value pool = ThreadPool.return_value
@ -84,18 +80,18 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run() phase.run()
self.assertEqual(len(pool.add.call_args_list), 1) self.assertEqual(len(pool.add.call_args_list), 1)
self.assertEqual(pool.queue_put.call_args_list, self.assertEqual(
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))]) pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], "x86_64", cfg))],
)
@mock.patch('pungi.phases.ostree.ThreadPool') @mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config_limit_arches_two_blocks(self, ThreadPool): def test_run_with_simple_config_limit_arches_two_blocks(self, ThreadPool):
cfg1 = helpers.IterableMock(get=lambda x, y: ['x86_64']) cfg1 = helpers.IterableMock(get=lambda x, y: ["x86_64"])
cfg2 = helpers.IterableMock(get=lambda x, y: ['s390x']) cfg2 = helpers.IterableMock(get=lambda x, y: ["s390x"])
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(
'ostree': { self.topdir, {"ostree": {"^Everything$": [cfg1, cfg2]}}
'^Everything$': [cfg1, cfg2], )
}
})
pool = ThreadPool.return_value pool = ThreadPool.return_value
@ -103,219 +99,319 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run() phase.run()
self.assertEqual(len(pool.add.call_args_list), 2) self.assertEqual(len(pool.add.call_args_list), 2)
self.assertEqual(pool.queue_put.call_args_list, self.assertEqual(
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg1)), pool.queue_put.call_args_list,
mock.call((compose, compose.variants['Everything'], 's390x', cfg2))]) [
mock.call((compose, compose.variants["Everything"], "x86_64", cfg1)),
mock.call((compose, compose.variants["Everything"], "s390x", cfg2)),
],
)
class OSTreeThreadTest(helpers.PungiTestCase): class OSTreeThreadTest(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(OSTreeThreadTest, self).setUp() super(OSTreeThreadTest, self).setUp()
self.repo = os.path.join(self.topdir, 'place/for/atomic') self.repo = os.path.join(self.topdir, "place/for/atomic")
os.makedirs(os.path.join(self.repo, 'refs', 'heads')) os.makedirs(os.path.join(self.repo, "refs", "heads"))
self.cfg = { self.cfg = {
'repo': 'Everything', "repo": "Everything",
'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git', "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
'config_branch': 'f24', "config_branch": "f24",
'treefile': 'fedora-atomic-docker-host.json', "treefile": "fedora-atomic-docker-host.json",
'ostree_repo': self.repo, "ostree_repo": self.repo,
} }
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(
'koji_profile': 'koji', self.topdir,
'runroot_tag': 'rrt', {
'translate_paths': [ "koji_profile": "koji",
(self.topdir, 'http://example.com') "runroot_tag": "rrt",
], "translate_paths": [(self.topdir, "http://example.com")],
}) },
)
self.pool = mock.Mock() self.pool = mock.Mock()
def _dummy_config_repo(self, scm_dict, target, compose=None): def _dummy_config_repo(self, scm_dict, target, compose=None):
os.makedirs(target) os.makedirs(target)
helpers.touch(os.path.join(target, 'fedora-atomic-docker-host.json'), helpers.touch(
json.dumps({'ref': 'fedora-atomic/25/x86_64', os.path.join(target, "fedora-atomic-docker-host.json"),
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']})) json.dumps(
helpers.touch(os.path.join(target, 'fedora-rawhide.repo'), {
'[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall') "ref": "fedora-atomic/25/x86_64",
helpers.touch(os.path.join(target, 'fedora-24.repo'), "repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
'[fedora-24]\nmetalink=who-is-the-fairest-of-them-all') }
helpers.touch(os.path.join(target, 'fedora-23.repo'), ),
'[fedora-23]\nbaseurl=why-not-zoidberg?') )
helpers.touch(
os.path.join(target, "fedora-rawhide.repo"),
"[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall",
)
helpers.touch(
os.path.join(target, "fedora-24.repo"),
"[fedora-24]\nmetalink=who-is-the-fairest-of-them-all",
)
helpers.touch(
os.path.join(target, "fedora-23.repo"),
"[fedora-23]\nbaseurl=why-not-zoidberg?",
)
def _mock_runroot(self, retcode, writefiles=None): def _mock_runroot(self, retcode, writefiles=None):
"""Pretend to run a task in runroot, creating a log file with given line """Pretend to run a task in runroot, creating a log file with given line
Also allows for writing other files of requested""" Also allows for writing other files of requested"""
def fake_runroot(self, log_file, **kwargs): def fake_runroot(self, log_file, **kwargs):
if writefiles: if writefiles:
logdir = os.path.dirname(log_file) logdir = os.path.dirname(log_file)
for filename in writefiles: for filename in writefiles:
helpers.touch(os.path.join(logdir, filename), helpers.touch(
'\n'.join(writefiles[filename])) os.path.join(logdir, filename), "\n".join(writefiles[filename])
)
helpers.touch(os.path.join(logdir, filename + ".stamp")) helpers.touch(os.path.join(logdir, filename + ".stamp"))
return {'task_id': 1234, 'retcode': retcode, 'output': 'Foo bar\n'} return {"task_id": 1234, "retcode": retcode, "output": "Foo bar\n"}
return fake_runroot return fake_runroot
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_extra_config_content(self, KojiWrapper, get_dir_from_scm): def test_extra_config_content(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.conf['runroot_weights'] = {'ostree': 123} self.compose.conf["runroot_weights"] = {"ostree": 123}
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(0) koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
extra_config_file = os.path.join(self.topdir, 'work/ostree-1/extra_config.json') extra_config_file = os.path.join(self.topdir, "work/ostree-1/extra_config.json")
self.assertFalse(os.path.isfile(extra_config_file)) self.assertFalse(os.path.isfile(extra_config_file))
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertTrue(os.path.isfile(extra_config_file)) self.assertTrue(os.path.isfile(extra_config_file))
with open(extra_config_file, 'r') as f: with open(extra_config_file, "r") as f:
extraconf_content = json.load(f) extraconf_content = json.load(f)
proper_extraconf_content = { proper_extraconf_content = {
"repo": [ "repo": [
{"name": "http:__example.com_repo_1", {
"baseurl": "http://example.com/repo/1"}, "name": "http:__example.com_repo_1",
{"name": "http:__example.com_work__basearch_comps_repo_Everything", "baseurl": "http://example.com/repo/1",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything"} },
{
"name": "http:__example.com_work__basearch_comps_repo_Everything",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything",
},
] ]
} }
self.assertEqual(proper_extraconf_content, extraconf_content) self.assertEqual(proper_extraconf_content, extraconf_content)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run(self, KojiWrapper, get_dir_from_scm): def test_run(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.conf['runroot_weights'] = {'ostree': 123} self.compose.conf["runroot_weights"] = {"ostree": 123}
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(0) koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list, self.assertEqual(
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git', get_dir_from_scm.call_args_list,
'branch': 'f24', 'dir': '.'}, [
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)]) mock.call(
self.assertEqual(koji.get_runroot_cmd.call_args_list, {
[mock.call('rrt', 'x86_64', "scm": "git",
['pungi-make-ostree', "repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
'tree', "branch": "f24",
'--repo=%s' % self.repo, "dir": ".",
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir, },
'--treefile=%s/fedora-atomic-docker-host.json' % ( self.topdir + "/work/ostree-1/config_repo",
self.topdir + '/work/ostree-1/config_repo'), compose=self.compose,
'--extra-config=%s/extra_config.json' % (self.topdir + '/work/ostree-1')], )
channel=None, mounts=[self.topdir, self.repo], ],
packages=['pungi', 'ostree', 'rpm-ostree'], )
use_shell=True, new_chroot=True, weight=123)]) self.assertEqual(
self.assertEqual(koji.run_runroot_cmd.call_args_list, koji.get_runroot_cmd.call_args_list,
[mock.call(koji.get_runroot_cmd.return_value, [
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')]) mock.call(
"rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--extra-config=%s/extra_config.json"
% (self.topdir + "/work/ostree-1"),
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=123,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
self.assertTrue(os.path.isfile(os.path.join(self.topdir, 'work/ostree-1/extra_config.json'))) self.assertTrue(
os.path.isfile(os.path.join(self.topdir, "work/ostree-1/extra_config.json"))
)
self.assertTrue(os.path.isdir(self.repo)) self.assertTrue(os.path.isdir(self.repo))
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_fail(self, KojiWrapper, get_dir_from_scm): def test_run_fail(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg['failable'] = ['*'] self.cfg["failable"] = ["*"]
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(1) koji.run_runroot_cmd.side_effect = self._mock_runroot(1)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.compose._logger.error.assert_has_calls([ self.compose._logger.error.assert_has_calls(
mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'), [
mock.call('Runroot task failed: 1234. See %s for more details.' mock.call(
% (self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')) "[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
]) ),
mock.call(
"Runroot task failed: 1234. See %s for more details."
% (self.topdir + "/logs/x86_64/Everything/ostree-1/runroot.log")
),
]
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_handle_exception(self, KojiWrapper, get_dir_from_scm): def test_run_handle_exception(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg['failable'] = ['*'] self.cfg["failable"] = ["*"]
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = helpers.boom koji.run_runroot_cmd.side_effect = helpers.boom
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.compose._logger.error.assert_has_calls([ self.compose._logger.error.assert_has_calls(
mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'), [
mock.call('BOOM') mock.call(
]) "[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
),
mock.call("BOOM"),
]
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message(self, KojiWrapper, get_dir_from_scm): def test_run_send_message(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.notifier = mock.Mock() self.compose.notifier = mock.Mock()
self.compose.conf['translate_paths'] = [(self.topdir, 'http://example.com/')] self.compose.conf["translate_paths"] = [(self.topdir, "http://example.com/")]
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot( koji.run_runroot_cmd.side_effect = self._mock_runroot(
0, 0,
{'commitid.log': 'fca3465861a', {
'create-ostree-repo.log': "commitid.log": "fca3465861a",
['Doing work', 'fedora-atomic/25/x86_64 -> fca3465861a']}) "create-ostree-repo.log": [
"Doing work",
"fedora-atomic/25/x86_64 -> fca3465861a",
],
},
)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls, self.assertEqual(
[mock.call('ostree', self.compose.notifier.send.mock_calls,
variant='Everything', [
arch='x86_64', mock.call(
ref='fedora-atomic/25/x86_64', "ostree",
commitid='fca3465861a', variant="Everything",
repo_path='http://example.com/place/for/atomic', arch="x86_64",
local_repo_path=self.repo)]) ref="fedora-atomic/25/x86_64",
commitid="fca3465861a",
repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message_custom_ref(self, KojiWrapper, get_dir_from_scm): def test_run_send_message_custom_ref(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg["ostree_ref"] = "my/${basearch}" self.cfg["ostree_ref"] = "my/${basearch}"
self.compose.notifier = mock.Mock() self.compose.notifier = mock.Mock()
self.compose.conf['translate_paths'] = [(self.topdir, 'http://example.com/')] self.compose.conf["translate_paths"] = [(self.topdir, "http://example.com/")]
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot( koji.run_runroot_cmd.side_effect = self._mock_runroot(
0, 0,
{'commitid.log': 'fca3465861a', {
'create-ostree-repo.log': "commitid.log": "fca3465861a",
['Doing work', 'fedora-atomic/25/x86_64 -> fca3465861a']}) "create-ostree-repo.log": [
"Doing work",
"fedora-atomic/25/x86_64 -> fca3465861a",
],
},
)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls, self.assertEqual(
[mock.call('ostree', self.compose.notifier.send.mock_calls,
variant='Everything', [
arch='x86_64', mock.call(
ref='my/x86_64', "ostree",
commitid='fca3465861a', variant="Everything",
repo_path='http://example.com/place/for/atomic', arch="x86_64",
local_repo_path=self.repo)]) ref="my/x86_64",
commitid="fca3465861a",
repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message_without_commit_id(self, KojiWrapper, get_dir_from_scm): def test_run_send_message_without_commit_id(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -323,23 +419,31 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji = KojiWrapper.return_value koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot( koji.run_runroot_cmd.side_effect = self._mock_runroot(
0, 0, {"create-ostree-repo.log": ["Doing work", "Weird output"]}
{'create-ostree-repo.log': ['Doing work', 'Weird output']}) )
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls, self.assertEqual(
[mock.call('ostree', self.compose.notifier.send.mock_calls,
variant='Everything', [
arch='x86_64', mock.call(
ref='fedora-atomic/25/x86_64', "ostree",
variant="Everything",
arch="x86_64",
ref="fedora-atomic/25/x86_64",
commitid=None, commitid=None,
repo_path='http://example.com/place/for/atomic', repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo)]) local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_no_message_on_failure(self, KojiWrapper, get_dir_from_scm): def test_run_send_no_message_on_failure(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -349,15 +453,18 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji.run_runroot_cmd.side_effect = self._mock_runroot(1) koji.run_runroot_cmd.side_effect = self._mock_runroot(1)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
self.assertRaises(RuntimeError, t.process, self.assertRaises(
(self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), RuntimeError,
1) t.process,
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg),
1,
)
self.assertEqual(self.compose.notifier.send.mock_calls, []) self.assertEqual(self.compose.notifier.send.mock_calls, [])
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_update_summary(self, KojiWrapper, get_dir_from_scm): def test_run_with_update_summary(self, KojiWrapper, get_dir_from_scm):
self.cfg['update_summary'] = True self.cfg["update_summary"] = True
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -366,33 +473,66 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list, self.assertEqual(
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git', get_dir_from_scm.call_args_list,
'branch': 'f24', 'dir': '.'}, [
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)]) mock.call(
self.assertEqual(koji.get_runroot_cmd.call_args_list, {
[mock.call('rrt', 'x86_64', "scm": "git",
['pungi-make-ostree', "repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
'tree', "branch": "f24",
'--repo=%s' % self.repo, "dir": ".",
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir, },
'--treefile=%s/fedora-atomic-docker-host.json' % ( self.topdir + "/work/ostree-1/config_repo",
self.topdir + '/work/ostree-1/config_repo'), compose=self.compose,
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir, )
'--update-summary'], ],
channel=None, mounts=[self.topdir, self.repo], )
packages=['pungi', 'ostree', 'rpm-ostree'], self.assertEqual(
use_shell=True, new_chroot=True, weight=None)]) koji.get_runroot_cmd.call_args_list,
self.assertEqual(koji.run_runroot_cmd.call_args_list, [
[mock.call(koji.get_runroot_cmd.return_value, mock.call(
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')]) "rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
"--update-summary",
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_versioning_metadata(self, KojiWrapper, get_dir_from_scm): def test_run_with_versioning_metadata(self, KojiWrapper, get_dir_from_scm):
self.cfg['version'] = '24' self.cfg["version"] = "24"
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -401,33 +541,68 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list, self.assertEqual(
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git', get_dir_from_scm.call_args_list,
'branch': 'f24', 'dir': '.'}, [
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)]) mock.call(
self.assertEqual(koji.get_runroot_cmd.call_args_list, {
[mock.call('rrt', 'x86_64', "scm": "git",
['pungi-make-ostree', "repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
'tree', "branch": "f24",
'--repo=%s' % self.repo, "dir": ".",
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir, },
'--treefile=%s/fedora-atomic-docker-host.json' % ( self.topdir + "/work/ostree-1/config_repo",
self.topdir + '/work/ostree-1/config_repo'), compose=self.compose,
'--version=24', )
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir], ],
channel=None, mounts=[self.topdir, self.repo], )
packages=['pungi', 'ostree', 'rpm-ostree'], self.assertEqual(
use_shell=True, new_chroot=True, weight=None)]) koji.get_runroot_cmd.call_args_list,
self.assertEqual(koji.run_runroot_cmd.call_args_list, [
[mock.call(koji.get_runroot_cmd.return_value, mock.call(
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')]) "rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--version=24",
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_generated_versioning_metadata(self, KojiWrapper, get_dir_from_scm): def test_run_with_generated_versioning_metadata(
self.cfg['version'] = '!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN' self, KojiWrapper, get_dir_from_scm
):
self.cfg["version"] = "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN"
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -436,31 +611,64 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1) t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list, self.assertEqual(
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git', get_dir_from_scm.call_args_list,
'branch': 'f24', 'dir': '.'}, [
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)]) mock.call(
self.assertEqual(koji.get_runroot_cmd.call_args_list, {
[mock.call('rrt', 'x86_64', "scm": "git",
['pungi-make-ostree', "repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
'tree', "branch": "f24",
'--repo=%s' % self.repo, "dir": ".",
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir, },
'--treefile=%s/fedora-atomic-docker-host.json' % ( self.topdir + "/work/ostree-1/config_repo",
self.topdir + '/work/ostree-1/config_repo'), compose=self.compose,
'--version=25.20151203.t.0', )
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir], ],
channel=None, mounts=[self.topdir, self.repo], )
packages=['pungi', 'ostree', 'rpm-ostree'], self.assertEqual(
use_shell=True, new_chroot=True, weight=None)]) koji.get_runroot_cmd.call_args_list,
self.assertEqual(koji.run_runroot_cmd.call_args_list, [
[mock.call(koji.get_runroot_cmd.return_value, mock.call(
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')]) "rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--version=25.20151203.t.0",
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm') @mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_write_extra_config_file(self, KojiWrapper, get_dir_from_scm): def test_write_extra_config_file(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo get_dir_from_scm.side_effect = self._dummy_config_repo
@ -468,38 +676,44 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji.run_runroot_cmd.side_effect = self._mock_runroot(0) koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
cfg = { cfg = {
'repo': [ # Variant type repos will not be included into extra_config. This part of the config is deprecated "repo": [ # Variant type repos will not be included into extra_config. This part of the config is deprecated
'Everything', # do not include "Everything", # do not include
{ {
'name': 'repo_a', "name": "repo_a",
'baseurl': 'http://url/to/repo/a', "baseurl": "http://url/to/repo/a",
'exclude': 'systemd-container' "exclude": "systemd-container",
}, },
{ # do not include { # do not include
'name': 'Server', "name": "Server",
'baseurl': 'Server', "baseurl": "Server",
'exclude': 'systemd-container' "exclude": "systemd-container",
} },
], ],
'keep_original_sources': True, "keep_original_sources": True,
'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git', "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
'config_branch': 'f24', "config_branch": "f24",
'treefile': 'fedora-atomic-docker-host.json', "treefile": "fedora-atomic-docker-host.json",
'ostree_repo': self.repo "ostree_repo": self.repo,
} }
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"]) t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', cfg), 1) t.process((self.compose, self.compose.variants["Everything"], "x86_64", cfg), 1)
extra_config_file = os.path.join(self.topdir, 'work/ostree-1/extra_config.json') extra_config_file = os.path.join(self.topdir, "work/ostree-1/extra_config.json")
self.assertTrue(os.path.isfile(extra_config_file)) self.assertTrue(os.path.isfile(extra_config_file))
with open(extra_config_file, 'r') as extra_config_fd: with open(extra_config_file, "r") as extra_config_fd:
extra_config = json.load(extra_config_fd) extra_config = json.load(extra_config_fd)
self.assertTrue(extra_config.get('keep_original_sources', False)) self.assertTrue(extra_config.get("keep_original_sources", False))
# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository # should equal to number of valid repositories in cfg['repo'] + default repository + comps repository
self.assertEqual(len(extra_config.get('repo', [])), 3) self.assertEqual(len(extra_config.get("repo", [])), 3)
self.assertEqual(extra_config.get('repo').pop()['baseurl'], self.assertEqual(
'http://example.com/work/$basearch/comps_repo_Everything') extra_config.get("repo").pop()["baseurl"],
self.assertEqual(extra_config.get("repo").pop()["baseurl"], "http://example.com/repo/1") "http://example.com/work/$basearch/comps_repo_Everything",
self.assertEqual(extra_config.get('repo').pop()['baseurl'], 'http://url/to/repo/a') )
self.assertEqual(
extra_config.get("repo").pop()["baseurl"], "http://example.com/repo/1"
)
self.assertEqual(
extra_config.get("repo").pop()["baseurl"], "http://url/to/repo/a"
)

View File

@ -14,24 +14,41 @@ from pungi import ostree
class OstreeTreeScriptTest(helpers.PungiTestCase): class OstreeTreeScriptTest(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(OstreeTreeScriptTest, self).setUp() super(OstreeTreeScriptTest, self).setUp()
self.repo = os.path.join(self.topdir, "atomic") self.repo = os.path.join(self.topdir, "atomic")
def _make_dummy_config_dir(self, path): def _make_dummy_config_dir(self, path):
helpers.touch(os.path.join(path, 'fedora-atomic-docker-host.json'), helpers.touch(
json.dumps({'ref': 'fedora-atomic/25/x86_64', os.path.join(path, "fedora-atomic-docker-host.json"),
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']})) json.dumps(
helpers.touch(os.path.join(path, 'fedora-atomic-docker-host.yaml'), {
yaml.dump({'ref': 'fedora-atomic/25/x86_64', "ref": "fedora-atomic/25/x86_64",
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']})) "repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
helpers.touch(os.path.join(path, 'fedora-rawhide.repo'), }
'[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall') ),
helpers.touch(os.path.join(path, 'fedora-24.repo'), )
'[fedora-24]\nmetalink=who-is-the-fairest-of-them-all') helpers.touch(
helpers.touch(os.path.join(path, 'fedora-23.repo'), os.path.join(path, "fedora-atomic-docker-host.yaml"),
'[fedora-23]\nbaseurl=why-not-zoidberg?') yaml.dump(
{
"ref": "fedora-atomic/25/x86_64",
"repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
}
),
)
helpers.touch(
os.path.join(path, "fedora-rawhide.repo"),
"[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall",
)
helpers.touch(
os.path.join(path, "fedora-24.repo"),
"[fedora-24]\nmetalink=who-is-the-fairest-of-them-all",
)
helpers.touch(
os.path.join(path, "fedora-23.repo"),
"[fedora-23]\nbaseurl=why-not-zoidberg?",
)
def assertCorrectCall(self, mock_run, extra_calls=[], extra_args=[]): def assertCorrectCall(self, mock_run, extra_calls=[], extra_args=[]):
six.assertCountEqual( six.assertCountEqual(
@ -44,65 +61,76 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
"compose", "compose",
"tree", "tree",
"--repo=%s" % self.repo, "--repo=%s" % self.repo,
"--write-commitid-to=%s" % (self.topdir + "/logs/Atomic/commitid.log"), "--write-commitid-to=%s"
"--touch-if-changed=%s.stamp" % (self.topdir + "/logs/Atomic/commitid.log"), % (self.topdir + "/logs/Atomic/commitid.log"),
] + extra_args + [ "--touch-if-changed=%s.stamp"
self.topdir + "/fedora-atomic-docker-host.json" % (self.topdir + "/logs/Atomic/commitid.log"),
], ]
+ extra_args
+ [self.topdir + "/fedora-atomic-docker-host.json"],
logfile=self.topdir + "/logs/Atomic/create-ostree-repo.log", logfile=self.topdir + "/logs/Atomic/create-ostree-repo.log",
show_cmd=True, show_cmd=True,
stdout=True, stdout=True,
universal_newlines=True, universal_newlines=True,
) )
] + extra_calls ]
+ extra_calls,
) )
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_full_run(self, run): def test_full_run(self, run):
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
]) "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run) self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_on_existing_empty_dir(self, run): def test_run_on_existing_empty_dir(self, run):
os.mkdir(self.repo) os.mkdir(self.repo)
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
]) "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run) self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_on_initialized_repo(self, run): def test_run_on_initialized_repo(self, run):
helpers.touch(os.path.join(self.repo, 'initialized')) helpers.touch(os.path.join(self.repo, "initialized"))
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
]) "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run) self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_update_summary(self, run): def test_update_summary(self, run):
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--update-summary', "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]) "--update-summary",
]
)
self.assertCorrectCall( self.assertCorrectCall(
run, run,
@ -114,133 +142,137 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
stdout=True, stdout=True,
universal_newlines=True, universal_newlines=True,
) )
],
)
@mock.patch("kobo.shortcuts.run")
def test_versioning_metadata(self, run):
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
"--version=24",
] ]
) )
@mock.patch('kobo.shortcuts.run')
def test_versioning_metadata(self, run):
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
'--version=24',
])
self.assertCorrectCall(run, extra_args=["--add-metadata-string=version=24"]) self.assertCorrectCall(run, extra_args=["--add-metadata-string=version=24"])
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_ostree_ref(self, run): def test_ostree_ref(self, run):
self._make_dummy_config_dir(self.topdir) self._make_dummy_config_dir(self.topdir)
treefile = os.path.join(self.topdir, 'fedora-atomic-docker-host.json') treefile = os.path.join(self.topdir, "fedora-atomic-docker-host.json")
with open(treefile, 'r') as f: with open(treefile, "r") as f:
treefile_content = json.load(f) treefile_content = json.load(f)
original_repos = treefile_content['repos'] original_repos = treefile_content["repos"]
original_ref = treefile_content['ref'] original_ref = treefile_content["ref"]
replacing_ref = original_ref + '-changed' replacing_ref = original_ref + "-changed"
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s' % treefile, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--ostree-ref=%s' % replacing_ref, "--treefile=%s" % treefile,
]) "--ostree-ref=%s" % replacing_ref,
]
)
with open(treefile, 'r') as f: with open(treefile, "r") as f:
treefile_content = json.load(f) treefile_content = json.load(f)
new_repos = treefile_content['repos'] new_repos = treefile_content["repos"]
new_ref = treefile_content['ref'] new_ref = treefile_content["ref"]
# ref value in treefile should be overrided with new ref # ref value in treefile should be overrided with new ref
self.assertEqual(replacing_ref, new_ref) self.assertEqual(replacing_ref, new_ref)
# repos should stay unchanged # repos should stay unchanged
self.assertEqual(original_repos, new_repos) self.assertEqual(original_repos, new_repos)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_with_yaml_file(self, run): def test_run_with_yaml_file(self, run):
self._make_dummy_config_dir(self.topdir) self._make_dummy_config_dir(self.topdir)
treefile = os.path.join(self.topdir, 'fedora-atomic-docker-host.yaml') treefile = os.path.join(self.topdir, "fedora-atomic-docker-host.yaml")
with open(treefile, 'r') as f: with open(treefile, "r") as f:
# Read initial content from YAML file # Read initial content from YAML file
treefile_content = yaml.safe_load(f) treefile_content = yaml.safe_load(f)
original_repos = treefile_content['repos'] original_repos = treefile_content["repos"]
original_ref = treefile_content['ref'] original_ref = treefile_content["ref"]
replacing_ref = original_ref + '-changed' replacing_ref = original_ref + "-changed"
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s' % treefile, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--ostree-ref=%s' % replacing_ref, "--treefile=%s" % treefile,
]) "--ostree-ref=%s" % replacing_ref,
]
)
with open(treefile.replace(".yaml", ".json"), 'r') as f: with open(treefile.replace(".yaml", ".json"), "r") as f:
# There is now a tweaked JSON file # There is now a tweaked JSON file
treefile_content = json.load(f) treefile_content = json.load(f)
new_repos = treefile_content['repos'] new_repos = treefile_content["repos"]
new_ref = treefile_content['ref'] new_ref = treefile_content["ref"]
# ref value in treefile should be overrided with new ref # ref value in treefile should be overrided with new ref
self.assertEqual(replacing_ref, new_ref) self.assertEqual(replacing_ref, new_ref)
# repos should stay unchanged # repos should stay unchanged
self.assertEqual(original_repos, new_repos) self.assertEqual(original_repos, new_repos)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_force_new_commit(self, run): def test_force_new_commit(self, run):
helpers.touch(os.path.join(self.repo, 'initialized')) helpers.touch(os.path.join(self.repo, "initialized"))
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--force-new-commit', "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]) "--force-new-commit",
]
)
self.assertCorrectCall(run, extra_args=["--force-nocache"]) self.assertCorrectCall(run, extra_args=["--force-nocache"])
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_extra_config_with_extra_repos(self, run): def test_extra_config_with_extra_repos(self, run):
configdir = os.path.join(self.topdir, 'config') configdir = os.path.join(self.topdir, "config")
self._make_dummy_config_dir(configdir) self._make_dummy_config_dir(configdir)
treefile = os.path.join(configdir, 'fedora-atomic-docker-host.json') treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
extra_config_file = os.path.join(self.topdir, 'extra_config.json') extra_config_file = os.path.join(self.topdir, "extra_config.json")
extra_config = { extra_config = {
"repo": [ "repo": [
{ {"name": "server", "baseurl": "http://www.example.com/Server/repo"},
"name": "server",
"baseurl": "http://www.example.com/Server/repo",
},
{ {
"name": "optional", "name": "optional",
"baseurl": "http://example.com/repo/x86_64/optional", "baseurl": "http://example.com/repo/x86_64/optional",
"exclude": "systemd-container", "exclude": "systemd-container",
"gpgcheck": False "gpgcheck": False,
}, },
{ {"name": "extra", "baseurl": "http://example.com/repo/x86_64/extra"},
"name": "extra",
"baseurl": "http://example.com/repo/x86_64/extra",
}
] ]
} }
helpers.touch(extra_config_file, json.dumps(extra_config)) helpers.touch(extra_config_file, json.dumps(extra_config))
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s' % treefile, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--extra-config=%s' % extra_config_file, "--treefile=%s" % treefile,
]) "--extra-config=%s" % extra_config_file,
]
)
pungi_repo = os.path.join(configdir, "pungi.repo") pungi_repo = os.path.join(configdir, "pungi.repo")
self.assertTrue(os.path.isfile(pungi_repo)) self.assertTrue(os.path.isfile(pungi_repo))
with open(pungi_repo, 'r') as f: with open(pungi_repo, "r") as f:
content = f.read().strip() content = f.read().strip()
result_template = ( result_template = (
"[repo-0]", "[repo-0]",
@ -257,57 +289,59 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
"baseurl=http://www.example.com/Server/repo", "baseurl=http://www.example.com/Server/repo",
"gpgcheck=0", "gpgcheck=0",
) )
result = '\n'.join(result_template).strip() result = "\n".join(result_template).strip()
self.assertEqual(content, result) self.assertEqual(content, result)
treeconf = json.load(open(treefile, 'r')) treeconf = json.load(open(treefile, "r"))
repos = treeconf['repos'] repos = treeconf["repos"]
self.assertEqual(len(repos), 3) self.assertEqual(len(repos), 3)
for name in ("repo-0", "repo-1", "repo-2"): for name in ("repo-0", "repo-1", "repo-2"):
self.assertIn(name, repos) self.assertIn(name, repos)
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_extra_config_with_keep_original_sources(self, run): def test_extra_config_with_keep_original_sources(self, run):
configdir = os.path.join(self.topdir, 'config') configdir = os.path.join(self.topdir, "config")
self._make_dummy_config_dir(configdir) self._make_dummy_config_dir(configdir)
treefile = os.path.join(configdir, 'fedora-atomic-docker-host.json') treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
extra_config_file = os.path.join(self.topdir, 'extra_config.json') extra_config_file = os.path.join(self.topdir, "extra_config.json")
extra_config = { extra_config = {
"repo": [ "repo": [
{ {"name": "server", "baseurl": "http://www.example.com/Server/repo"},
"name": "server",
"baseurl": "http://www.example.com/Server/repo",
},
{ {
"name": "optional", "name": "optional",
"baseurl": "http://example.com/repo/x86_64/optional", "baseurl": "http://example.com/repo/x86_64/optional",
"exclude": "systemd-container", "exclude": "systemd-container",
"gpgcheck": False "gpgcheck": False,
}, },
{ {"name": "extra", "baseurl": "http://example.com/repo/x86_64/extra"},
"name": "extra",
"baseurl": "http://example.com/repo/x86_64/extra",
}
], ],
"keep_original_sources": True "keep_original_sources": True,
} }
helpers.touch(extra_config_file, json.dumps(extra_config)) helpers.touch(extra_config_file, json.dumps(extra_config))
ostree.main([ ostree.main(
'tree', [
'--repo=%s' % self.repo, "tree",
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'), "--repo=%s" % self.repo,
'--treefile=%s' % treefile, "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
'--extra-config=%s' % extra_config_file, "--treefile=%s" % treefile,
]) "--extra-config=%s" % extra_config_file,
]
)
treeconf = json.load(open(treefile, 'r')) treeconf = json.load(open(treefile, "r"))
repos = treeconf['repos'] repos = treeconf["repos"]
self.assertEqual(len(repos), 6) self.assertEqual(len(repos), 6)
for name in ['fedora-rawhide', 'fedora-24', 'fedora-23', for name in [
'repo-0', 'repo-1', 'repo-2']: "fedora-rawhide",
"fedora-24",
"fedora-23",
"repo-0",
"repo-1",
"repo-2",
]:
self.assertIn(name, repos) self.assertIn(name, repos)
@ -317,30 +351,32 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
self.product = "dummyproduct" self.product = "dummyproduct"
self.version = "1.0" self.version = "1.0"
self.release = "20160101.t.0" self.release = "20160101.t.0"
self.output = os.path.join(self.topdir, 'output') self.output = os.path.join(self.topdir, "output")
self.logdir = os.path.join(self.topdir, 'logs') self.logdir = os.path.join(self.topdir, "logs")
self.volid = '%s-%s' % (self.product, self.version) self.volid = "%s-%s" % (self.product, self.version)
self.variant = 'dummy' self.variant = "dummy"
self.rootfs_size = None self.rootfs_size = None
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_with_args(self, run): def test_run_with_args(self, run):
args = ['installer', args = [
'--product=%s' % self.product, "installer",
'--version=%s' % self.version, "--product=%s" % self.product,
'--release=%s' % self.release, "--version=%s" % self.version,
'--output=%s' % self.output, "--release=%s" % self.release,
'--variant=%s' % self.variant, "--output=%s" % self.output,
'--rootfs-size=%s' % self.rootfs_size, "--variant=%s" % self.variant,
'--nomacboot', "--rootfs-size=%s" % self.rootfs_size,
'--isfinal'] "--nomacboot",
args.append('--source=%s' % 'http://www.example.com/dummy/repo') "--isfinal",
args.append('--installpkgs=dummy-foo') ]
args.append('--installpkgs=dummy-bar') args.append("--source=%s" % "http://www.example.com/dummy/repo")
args.append('--add-template=/path/to/lorax.tmpl') args.append("--installpkgs=dummy-foo")
args.append('--add-template-var=ostree_osname=dummy') args.append("--installpkgs=dummy-bar")
args.append('--add-arch-template=/path/to/lorax-embed.tmpl') args.append("--add-template=/path/to/lorax.tmpl")
args.append('--add-arch-template-var=ostree_repo=http://www.example.com/ostree') args.append("--add-template-var=ostree_osname=dummy")
args.append("--add-arch-template=/path/to/lorax-embed.tmpl")
args.append("--add-arch-template-var=ostree_repo=http://www.example.com/ostree")
ostree.main(args) ostree.main(args)
self.maxDiff = None self.maxDiff = None
six.assertCountEqual( six.assertCountEqual(
@ -370,29 +406,41 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
], ],
) )
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_run_with_extra_config_file(self, run): def test_run_with_extra_config_file(self, run):
extra_config_file = os.path.join(self.topdir, 'extra_config.json') extra_config_file = os.path.join(self.topdir, "extra_config.json")
helpers.touch(extra_config_file, helpers.touch(
json.dumps({'repo': 'http://www.example.com/another/repo', extra_config_file,
'installpkgs': ['dummy-foo', 'dummy-bar'], json.dumps(
'add_template': ['/path/to/lorax.tmpl'], {
'add_template_var': ['ostree_osname=dummy-atomic', "repo": "http://www.example.com/another/repo",
'ostree_ref=dummy/x86_64/docker'], "installpkgs": ["dummy-foo", "dummy-bar"],
'add_arch_template': ['/path/to/lorax-embed.tmpl'], "add_template": ["/path/to/lorax.tmpl"],
'add_arch_template_var': ['ostree_osname=dummy-atomic', "add_template_var": [
'ostree_repo=http://www.example.com/ostree']})) "ostree_osname=dummy-atomic",
args = ['installer', "ostree_ref=dummy/x86_64/docker",
'--product=%s' % self.product, ],
'--version=%s' % self.version, "add_arch_template": ["/path/to/lorax-embed.tmpl"],
'--release=%s' % self.release, "add_arch_template_var": [
'--output=%s' % self.output, "ostree_osname=dummy-atomic",
'--variant=%s' % self.variant, "ostree_repo=http://www.example.com/ostree",
'--rootfs-size=%s' % self.rootfs_size, ],
'--nomacboot', }
'--isfinal'] ),
args.append('--source=%s' % 'http://www.example.com/dummy/repo') )
args.append('--extra-config=%s' % extra_config_file) args = [
"installer",
"--product=%s" % self.product,
"--version=%s" % self.version,
"--release=%s" % self.release,
"--output=%s" % self.output,
"--variant=%s" % self.variant,
"--rootfs-size=%s" % self.rootfs_size,
"--nomacboot",
"--isfinal",
]
args.append("--source=%s" % "http://www.example.com/dummy/repo")
args.append("--extra-config=%s" % extra_config_file)
ostree.main(args) ostree.main(args)
self.maxDiff = None self.maxDiff = None
six.assertCountEqual( six.assertCountEqual(

View File

@ -3,6 +3,7 @@
import mock import mock
import os import os
import sys import sys
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -17,39 +18,39 @@ class TestUnifiedIsos(unittest.TestCase):
class TestGetLoraxDir(unittest.TestCase): class TestGetLoraxDir(unittest.TestCase):
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_success(self, mock_run): def test_success(self, mock_run):
mock_run.return_value = (0, 'hello') mock_run.return_value = (0, "hello")
self.assertEqual(patch_iso.get_lorax_dir(None), 'hello') self.assertEqual(patch_iso.get_lorax_dir(None), "hello")
self.assertEqual(1, len(mock_run.call_args_list)) self.assertEqual(1, len(mock_run.call_args_list))
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_crash(self, mock_run): def test_crash(self, mock_run):
mock_run.side_effect = boom mock_run.side_effect = boom
self.assertEqual(patch_iso.get_lorax_dir('hello'), 'hello') self.assertEqual(patch_iso.get_lorax_dir("hello"), "hello")
self.assertEqual(1, len(mock_run.call_args_list)) self.assertEqual(1, len(mock_run.call_args_list))
class TestSh(unittest.TestCase): class TestSh(unittest.TestCase):
@mock.patch('kobo.shortcuts.run') @mock.patch("kobo.shortcuts.run")
def test_cmd(self, mock_run): def test_cmd(self, mock_run):
mock_run.return_value = (0, 'ok') mock_run.return_value = (0, "ok")
log = mock.Mock() log = mock.Mock()
patch_iso.sh(log, ['ls'], foo='bar') patch_iso.sh(log, ["ls"], foo="bar")
self.assertEqual(mock_run.call_args_list, self.assertEqual(
[mock.call(['ls'], foo='bar', universal_newlines=True)]) mock_run.call_args_list,
self.assertEqual(log.info.call_args_list, [mock.call(["ls"], foo="bar", universal_newlines=True)],
[mock.call('Running: %s', 'ls')]) )
self.assertEqual(log.debug.call_args_list, self.assertEqual(log.info.call_args_list, [mock.call("Running: %s", "ls")])
[mock.call('%s', 'ok')]) self.assertEqual(log.debug.call_args_list, [mock.call("%s", "ok")])
class TestAsBool(unittest.TestCase): class TestAsBool(unittest.TestCase):
def test_true(self): def test_true(self):
self.assertTrue(patch_iso.as_bool('true')) self.assertTrue(patch_iso.as_bool("true"))
def test_false(self): def test_false(self):
self.assertFalse(patch_iso.as_bool('false')) self.assertFalse(patch_iso.as_bool("false"))
def test_anything_else(self): def test_anything_else(self):
obj = mock.Mock() obj = mock.Mock()
@ -61,171 +62,206 @@ class EqualsAny(object):
return True return True
def __repr__(self): def __repr__(self):
return u'ANYTHING' return u"ANYTHING"
ANYTHING = EqualsAny() ANYTHING = EqualsAny()
class TestPatchingIso(unittest.TestCase):
    """Tests for patch_iso.run.

    Each test mocks out ``iso``, ``sh`` and ``util.copy_all`` and checks
    the exact sequence of commands that would be executed.
    """

    @mock.patch("pungi_utils.patch_iso.util.copy_all")
    @mock.patch("pungi_utils.patch_iso.iso")
    @mock.patch("pungi_utils.patch_iso.sh")
    def test_whole(self, sh, iso, copy_all):
        iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"

        def _create_files(src, dest):
            touch(os.path.join(dest, "dir", "file.txt"), "Hello")

        copy_all.side_effect = _create_files

        log = mock.Mock(name="logger")
        opts = mock.Mock(
            target="test.iso",
            source="source.iso",
            force_arch=None,
            volume_id="FOOBAR",
            dirs=[],
        )
        patch_iso.run(log, opts)

        # Explicit volume_id means no arch detection: boot_args/volid are
        # taken straight from the options.
        self.assertEqual(
            iso.get_mkisofs_cmd.call_args_list,
            [
                mock.call(
                    os.path.abspath(opts.target),
                    None,
                    boot_args=None,
                    exclude=["./lost+found"],
                    graft_points=ANYTHING,
                    input_charset=None,
                    volid="FOOBAR",
                )
            ],
        )
        self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
        self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
        self.assertEqual(
            sh.call_args_list,
            [
                mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
                mock.call(log, iso.get_implantisomd5_cmd.return_value),
            ],
        )

    @mock.patch("pungi_utils.patch_iso.util.copy_all")
    @mock.patch("pungi_utils.patch_iso.iso")
    @mock.patch("pungi_utils.patch_iso.sh")
    def test_detect_arch_discinfo(self, sh, iso, copy_all):
        iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"

        def _create_files(src, dest):
            touch(os.path.join(dest, "dir", "file.txt"), "Hello")
            touch(
                os.path.join(dest, ".discinfo"),
                "1487578537.111417\nDummy Product 1.0\nppc64\n1",
            )

        copy_all.side_effect = _create_files

        log = mock.Mock(name="logger")
        opts = mock.Mock(
            target="test.iso",
            source="source.iso",
            force_arch=None,
            volume_id=None,
            dirs=[],
        )
        patch_iso.run(log, opts)

        self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
        # Arch comes from .discinfo, so boot options and volume id are
        # computed by the iso helpers.
        self.assertEqual(
            iso.get_mkisofs_cmd.call_args_list,
            [
                mock.call(
                    os.path.abspath(opts.target),
                    None,
                    boot_args=iso.get_boot_options.return_value,
                    exclude=["./lost+found"],
                    graft_points=ANYTHING,
                    input_charset=None,
                    volid=iso.get_volume_id.return_value,
                )
            ],
        )
        self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
        self.assertEqual(
            sh.call_args_list,
            [
                mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
                mock.call(log, iso.get_implantisomd5_cmd.return_value),
            ],
        )

    @mock.patch("pungi_utils.patch_iso.util.copy_all")
    @mock.patch("pungi_utils.patch_iso.iso")
    @mock.patch("pungi_utils.patch_iso.sh")
    def test_run_isohybrid(self, sh, iso, copy_all):
        iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"

        def _create_files(src, dest):
            touch(os.path.join(dest, "dir", "file.txt"), "Hello")
            copy_fixture(
                "DP-1.0-20161013.t.4/compose/Server/x86_64/os/.treeinfo",
                os.path.join(dest, ".treeinfo"),
            )

        copy_all.side_effect = _create_files

        log = mock.Mock(name="logger")
        opts = mock.Mock(
            target="test.iso",
            source="source.iso",
            force_arch=None,
            volume_id=None,
            dirs=[],
        )
        patch_iso.run(log, opts)

        self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
        self.assertEqual(
            iso.get_mkisofs_cmd.call_args_list,
            [
                mock.call(
                    os.path.abspath(opts.target),
                    None,
                    boot_args=iso.get_boot_options.return_value,
                    exclude=["./lost+found"],
                    graft_points=ANYTHING,
                    input_charset="utf-8",
                    volid=iso.get_volume_id.return_value,
                )
            ],
        )
        self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
        # x86_64 tree detected via .treeinfo, so isohybrid runs between
        # mkisofs and md5 implanting.
        self.assertEqual(
            sh.call_args_list,
            [
                mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
                mock.call(log, iso.get_isohybrid_cmd.return_value),
                mock.call(log, iso.get_implantisomd5_cmd.return_value),
            ],
        )

    @mock.patch("pungi_utils.patch_iso.tweak_configs")
    @mock.patch("pungi_utils.patch_iso.util.copy_all")
    @mock.patch("pungi_utils.patch_iso.iso")
    @mock.patch("pungi_utils.patch_iso.sh")
    def test_add_ks_cfg(self, sh, iso, copy_all, tweak_configs):
        iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"
        iso.get_graft_points.return_value = {
            "ks.cfg": "path/to/ks.cfg",
        }

        def _create_files(src, dest):
            touch(os.path.join(dest, "dir", "file.txt"), "Hello")

        copy_all.side_effect = _create_files

        log = mock.Mock(name="logger")
        opts = mock.Mock(
            target="test.iso",
            source="source.iso",
            force_arch="s390",
            volume_id="foobar",
            dirs=[],
        )
        patch_iso.run(log, opts)

        self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
        self.assertEqual(
            iso.get_mkisofs_cmd.call_args_list,
            [
                mock.call(
                    os.path.abspath(opts.target),
                    None,
                    boot_args=iso.get_boot_options.return_value,
                    exclude=["./lost+found"],
                    graft_points=ANYTHING,
                    input_charset="utf-8",
                    volid="foobar",
                )
            ],
        )
        # A grafted ks.cfg triggers boot-config tweaking with the new volid.
        self.assertEqual(
            tweak_configs.call_args_list,
            [mock.call(ANYTHING, "foobar", "path/to/ks.cfg", logger=log)],
        )
        self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
        self.assertEqual(
            sh.call_args_list,
            [
                mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
                mock.call(log, iso.get_implantisomd5_cmd.return_value),
            ],
        )

View File

@ -32,7 +32,6 @@ class TestHeadTailSplit(unittest.TestCase):
class TestPathMatch(unittest.TestCase): class TestPathMatch(unittest.TestCase):
def setUp(self): def setUp(self):
self.pm = PathMatch() self.pm = PathMatch()
@ -56,7 +55,9 @@ class TestPathMatch(unittest.TestCase):
self.pm["/*/*"] = "/star/star1" self.pm["/*/*"] = "/star/star1"
self.assertEqual(list(self.pm._patterns.keys()), ["*"]) self.assertEqual(list(self.pm._patterns.keys()), ["*"])
self.assertEqual(list(self.pm._patterns["*"]._final_patterns.keys()), ["*"]) self.assertEqual(list(self.pm._patterns["*"]._final_patterns.keys()), ["*"])
self.assertEqual(self.pm._patterns["*"]._final_patterns["*"]._values, ["/star/star1"]) self.assertEqual(
self.pm._patterns["*"]._final_patterns["*"]._values, ["/star/star1"]
)
self.assertEqual(sorted(self.pm["/lib/asd"]), ["/star/star1"]) self.assertEqual(sorted(self.pm["/lib/asd"]), ["/star/star1"])
self.pm["/*"] = "/star2" self.pm["/*"] = "/star2"

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import mock import mock
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -78,7 +79,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start() weaver_phase.start()
weaver_phase.stop() weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception)) self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1) self.assertFinalized(self.p1)
self.assertInterrupted(self.p2) self.assertInterrupted(self.p2)
self.assertMissed(self.p3) self.assertMissed(self.p3)
@ -92,7 +93,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start() weaver_phase.start()
weaver_phase.stop() weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception)) self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1) self.assertFinalized(self.p1)
self.assertInterrupted(self.p2) self.assertInterrupted(self.p2)
self.assertFinalized(self.p3) self.assertFinalized(self.p3)
@ -107,7 +108,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start() weaver_phase.start()
weaver_phase.stop() weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception)) self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1) self.assertFinalized(self.p1)
self.assertInterrupted(self.p2) self.assertInterrupted(self.p2)
self.assertMissed(self.p3) self.assertMissed(self.p3)
@ -125,7 +126,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start() weaver_phase.start()
weaver_phase.stop() weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception)) self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1) self.assertFinalized(self.p1)
self.assertInterrupted(self.p2) self.assertInterrupted(self.p2)
self.assertMissed(self.p3) self.assertMissed(self.p3)

View File

@ -4,6 +4,7 @@ import mock
import os import os
import six import six
import sys import sys
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -25,24 +26,24 @@ class MockPathInfo(object):
return self.topdir return self.topdir
def get_filename(self, rpm_info): def get_filename(self, rpm_info):
return '{name}@{version}@{release}@{arch}'.format(**rpm_info) return "{name}@{version}@{release}@{arch}".format(**rpm_info)
def signed(self, rpm_info, sigkey): def signed(self, rpm_info, sigkey):
return os.path.join('signed', sigkey, self.get_filename(rpm_info)) return os.path.join("signed", sigkey, self.get_filename(rpm_info))
def rpm(self, rpm_info): def rpm(self, rpm_info):
return os.path.join('rpms', self.get_filename(rpm_info)) return os.path.join("rpms", self.get_filename(rpm_info))
class MockFile(object):
    """Fake RPM object parsed from a path produced by MockPathInfo."""

    def __init__(self, path):
        if path.startswith("/tmp"):
            # Drop /tmp/something/ from path
            path = path.split("/", 3)[-1]
        self.file_path = path
        self.file_name = os.path.basename(path)
        # File names encode NVRA as name@version@release@arch.
        self.name, self.version, self.release, self.arch = self.file_name.split("@")
        self.sourcerpm = "{0.name}-{0.version}-{0.release}.{0.arch}".format(self)
        self.exclusivearch = []
        self.excludearch = []
@ -78,6 +79,7 @@ class MockFileCache(dict):
"""Mock for kobo.pkgset.FileCache. """Mock for kobo.pkgset.FileCache.
It gets data from filename and does not touch filesystem. It gets data from filename and does not touch filesystem.
""" """
def __init__(self, _wrapper): def __init__(self, _wrapper):
super(MockFileCache, self).__init__() super(MockFileCache, self).__init__()
self.file_cache = self self.file_cache = self
@ -93,6 +95,7 @@ class FakePool(object):
It implements the same interface, but uses only the last added worker to It implements the same interface, but uses only the last added worker to
process all tasks sequentially. process all tasks sequentially.
""" """
def __init__(self, package_set, logger=None): def __init__(self, package_set, logger=None):
self.queue = [] self.queue = []
self.worker = None self.worker = None
@ -128,13 +131,12 @@ class PkgsetCompareMixin(object):
self.assertEqual({}, actual) self.assertEqual({}, actual)
@mock.patch('pungi.phases.pkgset.pkgsets.ReaderPool', new=FakePool) @mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache) @mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase): class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestKojiPkgset, self).setUp() super(TestKojiPkgset, self).setUp()
with open(os.path.join(helpers.FIXTURE_DIR, 'tagged-rpms.json')) as f: with open(os.path.join(helpers.FIXTURE_DIR, "tagged-rpms.json")) as f:
self.tagged_rpms = json.load(f) self.tagged_rpms = json.load(f)
self.path_info = MockPathInfo(self.topdir) self.path_info = MockPathInfo(self.topdir)
@ -152,163 +154,208 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
self.assertIn(k, actual) self.assertIn(k, actual)
v2 = actual.pop(k) v2 = actual.pop(k)
six.assertCountEqual(self, v1, v2) six.assertCountEqual(self, v1, v2)
self.assertEqual({}, actual, msg='Some architectures were missing') self.assertEqual({}, actual, msg="Some architectures were missing")
def test_all_arches(self): def test_all_arches(self):
self._touch_files([ self._touch_files(
'rpms/pungi@4.1.3@3.fc25@noarch', [
'rpms/pungi@4.1.3@3.fc25@src', "rpms/pungi@4.1.3@3.fc25@noarch",
'rpms/bash@4.3.42@4.fc24@i686', "rpms/pungi@4.1.3@3.fc25@src",
'rpms/bash@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@i686",
'rpms/bash@4.3.42@4.fc24@src', "rpms/bash@4.3.42@4.fc24@x86_64",
'rpms/bash-debuginfo@4.3.42@4.fc24@i686', "rpms/bash@4.3.42@4.fc24@src",
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash-debuginfo@4.3.42@4.fc24@i686",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
pkgset = pkgsets.KojiPackageSet("pkgset", self.koji_wrapper, [None]) pkgset = pkgsets.KojiPackageSet("pkgset", self.koji_wrapper, [None])
result = pkgset.populate('f25') result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'src': ['rpms/pungi@4.1.3@3.fc25@src', result,
'rpms/bash@4.3.42@4.fc24@src'], {
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'], "src": ["rpms/pungi@4.1.3@3.fc25@src", "rpms/bash@4.3.42@4.fc24@src"],
'i686': ['rpms/bash@4.3.42@4.fc24@i686', "noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
'rpms/bash-debuginfo@4.3.42@4.fc24@i686'], "i686": [
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@i686",
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64']}) "rpms/bash-debuginfo@4.3.42@4.fc24@i686",
],
"x86_64": [
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
],
},
)
def test_only_one_arch(self): def test_only_one_arch(self):
self._touch_files([ self._touch_files(
'rpms/bash@4.3.42@4.fc24@x86_64', [
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@x86_64",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], arches=['x86_64']
) )
result = pkgset.populate('f25') pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', result,
'rpms/bash@4.3.42@4.fc24@x86_64']}) {
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_find_signed_with_preference(self): def test_find_signed_with_preference(self):
self._touch_files([ self._touch_files(
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64', [
'signed/deadbeef/bash@4.3.42@4.fc24@x86_64', "signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
'signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64', "signed/deadbeef/bash@4.3.42@4.fc24@x86_64",
]) "signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', 'deadbeef'], arches=['x86_64']
) )
result = pkgset.populate('f25') pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ["cafebabe", "deadbeef"], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'x86_64': ['signed/cafebabe/bash@4.3.42@4.fc24@x86_64', result,
'signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64']}) {
"x86_64": [
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
"signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
},
)
def test_find_signed_fallback_unsigned(self): def test_find_signed_fallback_unsigned(self):
self._touch_files([ self._touch_files(
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64', [
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', None], arches=['x86_64']
) )
result = pkgset.populate('f25') pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', result,
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64']}) {
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_can_not_find_signed_package(self): def test_can_not_find_signed_package(self):
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe'], arches=['x86_64'] "pkgset", self.koji_wrapper, ["cafebabe"], arches=["x86_64"]
) )
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
pkgset.populate('f25') pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
figure = re.compile( figure = re.compile(
r'^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$', r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
re.DOTALL) re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure) self.assertRegexpMatches(str(ctx.exception), figure)
def test_can_not_find_signed_package_allow_invalid_sigkeys(self): def test_can_not_find_signed_package_allow_invalid_sigkeys(self):
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", "pkgset",
self.koji_wrapper, self.koji_wrapper,
['cafebabe'], ["cafebabe"],
arches=['x86_64'], arches=["x86_64"],
allow_invalid_sigkeys=True, allow_invalid_sigkeys=True,
) )
pkgset.populate('f25') pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms) pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)
figure = re.compile( figure = re.compile(
r'^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$', r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
re.DOTALL) re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure) self.assertRegexpMatches(str(ctx.exception), figure)
def test_can_not_find_any_package(self): def test_can_not_find_any_package(self):
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', None], arches=['x86_64'] "pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
) )
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
pkgset.populate('f25') pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertRegexpMatches( self.assertRegexpMatches(
str(ctx.exception), str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+') r"^RPM\(s\) not found for sigs: .+Check log for details.+",
)
def test_packages_attribute(self): def test_packages_attribute(self):
self._touch_files([ self._touch_files(
'rpms/pungi@4.1.3@3.fc25@noarch', [
'rpms/pungi@4.1.3@3.fc25@src', "rpms/pungi@4.1.3@3.fc25@noarch",
'rpms/bash@4.3.42@4.fc24@i686', "rpms/pungi@4.1.3@3.fc25@src",
'rpms/bash@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@i686",
'rpms/bash@4.3.42@4.fc24@src', "rpms/bash@4.3.42@4.fc24@x86_64",
'rpms/bash-debuginfo@4.3.42@4.fc24@i686', "rpms/bash@4.3.42@4.fc24@src",
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash-debuginfo@4.3.42@4.fc24@i686",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", "pkgset",
@ -318,56 +365,75 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
populate_only_packages=True, populate_only_packages=True,
) )
result = pkgset.populate('f25') result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'src': ['rpms/bash@4.3.42@4.fc24@src'], result,
'i686': ['rpms/bash@4.3.42@4.fc24@i686'], {
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64']}) "src": ["rpms/bash@4.3.42@4.fc24@src"],
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"x86_64": ["rpms/bash@4.3.42@4.fc24@x86_64"],
},
)
def test_get_latest_rpms_cache(self): def test_get_latest_rpms_cache(self):
self._touch_files([ self._touch_files(
'rpms/bash@4.3.42@4.fc24@x86_64', [
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@x86_64",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
cache_region = make_region().configure("dogpile.cache.memory") cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", "pkgset",
self.koji_wrapper, self.koji_wrapper,
[None], [None],
arches=['x86_64'], arches=["x86_64"],
cache_region=cache_region, cache_region=cache_region,
) )
# Try calling the populate twice, but expect just single listTaggedRPMs # Try calling the populate twice, but expect just single listTaggedRPMs
# call - that means the caching worked. # call - that means the caching worked.
for i in range(2): for i in range(2):
result = pkgset.populate('f25') result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [
mock.call.listTaggedRPMS(
"f25", event=None, inherit=True, latest=True
)
],
)
self.assertPkgsetEqual( self.assertPkgsetEqual(
result, result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', {
'rpms/bash@4.3.42@4.fc24@x86_64']}) "x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_get_latest_rpms_cache_different_id(self): def test_get_latest_rpms_cache_different_id(self):
self._touch_files([ self._touch_files(
'rpms/bash@4.3.42@4.fc24@x86_64', [
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@x86_64",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
cache_region = make_region().configure("dogpile.cache.memory") cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", "pkgset",
self.koji_wrapper, self.koji_wrapper,
[None], [None],
arches=['x86_64'], arches=["x86_64"],
cache_region=cache_region, cache_region=cache_region,
) )
@ -376,185 +442,215 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
expected_calls = [] expected_calls = []
for i in range(2): for i in range(2):
expected_calls.append( expected_calls.append(
mock.call.listTaggedRPMS('f25', event=i, inherit=True, latest=True)) mock.call.listTaggedRPMS("f25", event=i, inherit=True, latest=True)
result = pkgset.populate('f25', event={"id": i}) )
self.assertEqual( result = pkgset.populate("f25", event={"id": i})
self.koji_wrapper.koji_proxy.mock_calls, self.assertEqual(self.koji_wrapper.koji_proxy.mock_calls, expected_calls)
expected_calls)
self.assertPkgsetEqual( self.assertPkgsetEqual(
result, result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', {
'rpms/bash@4.3.42@4.fc24@x86_64']}) "x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_extra_builds_attribute(self): def test_extra_builds_attribute(self):
self._touch_files([ self._touch_files(
'rpms/pungi@4.1.3@3.fc25@noarch', [
'rpms/pungi@4.1.3@3.fc25@src', "rpms/pungi@4.1.3@3.fc25@noarch",
'rpms/bash@4.3.42@4.fc24@i686', "rpms/pungi@4.1.3@3.fc25@src",
'rpms/bash@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@i686",
'rpms/bash@4.3.42@4.fc24@src', "rpms/bash@4.3.42@4.fc24@x86_64",
'rpms/bash-debuginfo@4.3.42@4.fc24@i686', "rpms/bash@4.3.42@4.fc24@src",
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64', "rpms/bash-debuginfo@4.3.42@4.fc24@i686",
]) "rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
# Return "pungi" RPMs and builds using "get_latest_rpms" which gets # Return "pungi" RPMs and builds using "get_latest_rpms" which gets
# them from Koji multiCall. # them from Koji multiCall.
extra_rpms = [rpm for rpm in self.tagged_rpms[0] extra_rpms = [rpm for rpm in self.tagged_rpms[0] if rpm["name"] == "pungi"]
if rpm["name"] == "pungi"] extra_builds = [
extra_builds = [build for build in self.tagged_rpms[1] build for build in self.tagged_rpms[1] if build["package_name"] == "pungi"
if build["package_name"] == "pungi"] ]
self.koji_wrapper.retrying_multicall_map.side_effect = [ self.koji_wrapper.retrying_multicall_map.side_effect = [
extra_builds, [extra_rpms]] extra_builds,
[extra_rpms],
]
# Do not return "pungi" RPMs and builds using the listTaggedRPMs, so # Do not return "pungi" RPMs and builds using the listTaggedRPMs, so
# we can be sure "pungi" gets into compose using the `extra_builds`. # we can be sure "pungi" gets into compose using the `extra_builds`.
self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = [ self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = [
[rpm for rpm in self.tagged_rpms[0] if rpm["name"] != "pungi"], [rpm for rpm in self.tagged_rpms[0] if rpm["name"] != "pungi"],
[b for b in self.tagged_rpms[1] if b["package_name"] != "pungi"]] [b for b in self.tagged_rpms[1] if b["package_name"] != "pungi"],
]
pkgset = pkgsets.KojiPackageSet( pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"] "pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"]
) )
result = pkgset.populate('f25') result = pkgset.populate("f25")
self.assertEqual( self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls, self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)]) [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result, self.assertPkgsetEqual(
{'src': ['rpms/pungi@4.1.3@3.fc25@src', result,
'rpms/bash@4.3.42@4.fc24@src'], {
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'], "src": ["rpms/pungi@4.1.3@3.fc25@src", "rpms/bash@4.3.42@4.fc24@src"],
'i686': ['rpms/bash@4.3.42@4.fc24@i686', "noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
'rpms/bash-debuginfo@4.3.42@4.fc24@i686'], "i686": [
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64', "rpms/bash@4.3.42@4.fc24@i686",
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64']}) "rpms/bash-debuginfo@4.3.42@4.fc24@i686",
],
"x86_64": [
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
],
},
)
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache) @mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase): class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):
def test_merge_in_another_arch(self): def test_merge_in_another_arch(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@noarch', 'rpms/pungi@4.1.3@3.fc25@src']: for name in ["rpms/pungi@4.1.3@3.fc25@noarch", "rpms/pungi@4.1.3@3.fc25@src"]:
pkg = first.file_cache.add(name) pkg = first.file_cache.add(name)
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
for name in ['rpms/bash@4.3.42@4.fc24@i686']: for name in ["rpms/bash@4.3.42@4.fc24@i686"]:
pkg = second.file_cache.add(name) pkg = second.file_cache.add(name)
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686']) first.merge(second, "i386", ["i686"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'src': ['rpms/pungi@4.1.3@3.fc25@src'], first.rpms_by_arch,
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'], {
'i686': ['rpms/bash@4.3.42@4.fc24@i686']}) "src": ["rpms/pungi@4.1.3@3.fc25@src"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
},
)
def test_merge_includes_noarch_with_different_exclude_arch(self): def test_merge_includes_noarch_with_different_exclude_arch(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch') pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.excludearch = ['x86_64'] pkg.excludearch = ["x86_64"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch']) first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'], first.rpms_by_arch,
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch']}) {
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
},
)
def test_merge_excludes_noarch_exclude_arch(self): def test_merge_excludes_noarch_exclude_arch(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch') pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.excludearch = ['i686'] pkg.excludearch = ["i686"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch']) first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'], first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
'noarch': []}) )
def test_merge_excludes_noarch_exclusive_arch(self): def test_merge_excludes_noarch_exclusive_arch(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch') pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.exclusivearch = ['x86_64'] pkg.exclusivearch = ["x86_64"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch']) first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'], first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
'noarch': []}) )
def test_merge_includes_noarch_with_same_exclusive_arch(self): def test_merge_includes_noarch_with_same_exclusive_arch(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch') pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.exclusivearch = ['i686'] pkg.exclusivearch = ["i686"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch']) first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'], first.rpms_by_arch,
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch']}) {
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
},
)
def test_merge_skips_package_in_cache(self): def test_merge_skips_package_in_cache(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = second.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686']) first.merge(second, "i386", ["i686"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686']}) first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"]}
)
def test_merge_skips_src_without_binary(self): def test_merge_skips_src_without_binary(self):
first = pkgsets.PackageSetBase("first", [None]) first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None]) second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686') pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@src') pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@src")
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'src']) first.merge(second, "i386", ["i686", "src"])
self.assertPkgsetEqual(first.rpms_by_arch, self.assertPkgsetEqual(
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'], first.rpms_by_arch,
'src': [], {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "src": [], "nosrc": []},
'nosrc': []}) )
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache) @mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestSaveFileList(unittest.TestCase): class TestSaveFileList(unittest.TestCase):
def setUp(self): def setUp(self):
fd, self.tmpfile = tempfile.mkstemp() fd, self.tmpfile = tempfile.mkstemp()
@ -565,30 +661,37 @@ class TestSaveFileList(unittest.TestCase):
def test_save_arches_alphabetically(self): def test_save_arches_alphabetically(self):
pkgset = pkgsets.PackageSetBase("pkgset", [None]) pkgset = pkgsets.PackageSetBase("pkgset", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@x86_64', for name in [
'rpms/pungi@4.1.3@3.fc25@src', "rpms/pungi@4.1.3@3.fc25@x86_64",
'rpms/pungi@4.1.3@3.fc25@ppc64']: "rpms/pungi@4.1.3@3.fc25@src",
"rpms/pungi@4.1.3@3.fc25@ppc64",
]:
pkg = pkgset.file_cache.add(name) pkg = pkgset.file_cache.add(name)
pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkgset.save_file_list(self.tmpfile) pkgset.save_file_list(self.tmpfile)
with open(self.tmpfile) as f: with open(self.tmpfile) as f:
rpms = f.read().strip().split('\n') rpms = f.read().strip().split("\n")
self.assertEqual(rpms, ['rpms/pungi@4.1.3@3.fc25@ppc64', self.assertEqual(
'rpms/pungi@4.1.3@3.fc25@src', rpms,
'rpms/pungi@4.1.3@3.fc25@x86_64']) [
"rpms/pungi@4.1.3@3.fc25@ppc64",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/pungi@4.1.3@3.fc25@x86_64",
],
)
def test_save_strip_prefix(self): def test_save_strip_prefix(self):
pkgset = pkgsets.PackageSetBase("pkgset", [None]) pkgset = pkgsets.PackageSetBase("pkgset", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@noarch', 'rpms/pungi@4.1.3@3.fc25@src']: for name in ["rpms/pungi@4.1.3@3.fc25@noarch", "rpms/pungi@4.1.3@3.fc25@src"]:
pkg = pkgset.file_cache.add(name) pkg = pkgset.file_cache.add(name)
pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg) pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkgset.save_file_list(self.tmpfile, remove_path_prefix='rpms/') pkgset.save_file_list(self.tmpfile, remove_path_prefix="rpms/")
with open(self.tmpfile) as f: with open(self.tmpfile) as f:
rpms = f.read().strip().split('\n') rpms = f.read().strip().split("\n")
six.assertCountEqual( six.assertCountEqual(
self, rpms, ["pungi@4.1.3@3.fc25@noarch", "pungi@4.1.3@3.fc25@src"] self, rpms, ["pungi@4.1.3@3.fc25@noarch", "pungi@4.1.3@3.fc25@src"]
) )

View File

@ -6,6 +6,7 @@ import os
import re import re
import six import six
import sys import sys
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -15,29 +16,26 @@ from pungi.phases.pkgset.sources import source_koji
from tests import helpers from tests import helpers
from pungi.module_util import Modulemd from pungi.module_util import Modulemd
EVENT_INFO = {'id': 15681980, 'ts': 1460956382.81936} EVENT_INFO = {"id": 15681980, "ts": 1460956382.81936}
TAG_INFO = { TAG_INFO = {
"maven_support": False, "maven_support": False,
"locked": False, "locked": False,
"name": "f25", "name": "f25",
"extra": { "extra": {"mock.package_manager": "dnf"},
"mock.package_manager": "dnf"
},
"perm": None, "perm": None,
"id": 335, "id": 335,
"arches": None, "arches": None,
"maven_include_all": None, "maven_include_all": None,
"perm_id": None "perm_id": None,
} }
class TestGetKojiEvent(helpers.PungiTestCase): class TestGetKojiEvent(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestGetKojiEvent, self).setUp() super(TestGetKojiEvent, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {}) self.compose = helpers.DummyCompose(self.topdir, {})
self.event_file = self.topdir + '/work/global/koji-event' self.event_file = self.topdir + "/work/global/koji-event"
def test_use_preconfigured_event(self): def test_use_preconfigured_event(self):
koji_wrapper = mock.Mock() koji_wrapper = mock.Mock()
@ -49,9 +47,8 @@ class TestGetKojiEvent(helpers.PungiTestCase):
self.assertEqual(event, EVENT_INFO) self.assertEqual(event, EVENT_INFO)
six.assertCountEqual( six.assertCountEqual(
self, self, koji_wrapper.mock_calls, [mock.call.koji_proxy.getEvent(123456)]
koji_wrapper.mock_calls, )
[mock.call.koji_proxy.getEvent(123456)])
with open(self.event_file) as f: with open(self.event_file) as f:
self.assertEqual(json.load(f), EVENT_INFO) self.assertEqual(json.load(f), EVENT_INFO)
@ -65,9 +62,8 @@ class TestGetKojiEvent(helpers.PungiTestCase):
self.assertEqual(event, EVENT_INFO) self.assertEqual(event, EVENT_INFO)
six.assertCountEqual( six.assertCountEqual(
self, self, koji_wrapper.mock_calls, [mock.call.koji_proxy.getLastEvent()]
koji_wrapper.mock_calls, )
[mock.call.koji_proxy.getLastEvent()])
with open(self.event_file) as f: with open(self.event_file) as f:
self.assertEqual(json.load(f), EVENT_INFO) self.assertEqual(json.load(f), EVENT_INFO)
@ -75,16 +71,19 @@ class TestGetKojiEvent(helpers.PungiTestCase):
class TestPopulateGlobalPkgset(helpers.PungiTestCase): class TestPopulateGlobalPkgset(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestPopulateGlobalPkgset, self).setUp() super(TestPopulateGlobalPkgset, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(
'pkgset_koji_tag': 'f25', self.topdir, {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"]}
'sigkeys': ["foo", "bar"], )
})
self.koji_wrapper = mock.Mock() self.koji_wrapper = mock.Mock()
self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle') self.pkgset_path = os.path.join(
self.koji_module_path = os.path.join(self.topdir, 'work', 'global', 'koji-module-Server.yaml') self.topdir, "work", "global", "pkgset_global.pickle"
)
self.koji_module_path = os.path.join(
self.topdir, "work", "global", "koji-module-Server.yaml"
)
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create") @mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet') @mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet")
def test_populate(self, KojiPackageSet, materialize): def test_populate(self, KojiPackageSet, materialize):
materialize.side_effect = self.mock_materialize materialize.side_effect = self.mock_materialize
@ -106,14 +105,12 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
return pkgset return pkgset
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create") @mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet') @mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet")
def test_populate_with_multiple_koji_tags( def test_populate_with_multiple_koji_tags(self, KojiPackageSet, materialize):
self, KojiPackageSet, materialize self.compose = helpers.DummyCompose(
): self.topdir,
self.compose = helpers.DummyCompose(self.topdir, { {"pkgset_koji_tag": ["f25", "f25-extra"], "sigkeys": ["foo", "bar"]},
'pkgset_koji_tag': ['f25', 'f25-extra'], )
'sigkeys': ["foo", "bar"],
})
materialize.side_effect = self.mock_materialize materialize.side_effect = self.mock_materialize
@ -123,7 +120,9 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
self.assertEqual(len(pkgsets), 2) self.assertEqual(len(pkgsets), 2)
init_calls = KojiPackageSet.call_args_list init_calls = KojiPackageSet.call_args_list
six.assertCountEqual(self, [call[0][0] for call in init_calls], ["f25", "f25-extra"]) six.assertCountEqual(
self, [call[0][0] for call in init_calls], ["f25", "f25-extra"]
)
six.assertCountEqual( six.assertCountEqual(
self, [call[0][1] for call in init_calls], [self.koji_wrapper] * 2 self, [call[0][1] for call in init_calls], [self.koji_wrapper] * 2
) )
@ -143,22 +142,24 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
) )
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create") @mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate') @mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet.populate")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list') @mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list")
def test_populate_packages_to_gather(self, save_file_list, popuplate, materialize): def test_populate_packages_to_gather(self, save_file_list, popuplate, materialize):
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(
'gather_method': 'nodeps', self.topdir,
'pkgset_koji_tag': 'f25', {
'sigkeys': ["foo", "bar"], "gather_method": "nodeps",
'additional_packages': [ "pkgset_koji_tag": "f25",
('.*', {'*': ['pkg', 'foo.x86_64']}), "sigkeys": ["foo", "bar"],
] "additional_packages": [(".*", {"*": ["pkg", "foo.x86_64"]})],
}) },
)
materialize.side_effect = self.mock_materialize materialize.side_effect = self.mock_materialize
pkgsets = source_koji.populate_global_pkgset( pkgsets = source_koji.populate_global_pkgset(
self.compose, self.koji_wrapper, '/prefix', 123456) self.compose, self.koji_wrapper, "/prefix", 123456
)
self.assertEqual(len(pkgsets), 1) self.assertEqual(len(pkgsets), 1)
six.assertCountEqual(self, pkgsets[0].packages, ["pkg", "foo"]) six.assertCountEqual(self, pkgsets[0].packages, ["pkg", "foo"])
@ -166,57 +167,55 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
class TestGetPackageSetFromKoji(helpers.PungiTestCase): class TestGetPackageSetFromKoji(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestGetPackageSetFromKoji, self).setUp() super(TestGetPackageSetFromKoji, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(self.topdir, {"pkgset_koji_tag": "f25"})
'pkgset_koji_tag': 'f25',
})
self.compose.koji_event = None self.compose.koji_event = None
self.koji_wrapper = mock.Mock() self.koji_wrapper = mock.Mock()
self.koji_wrapper.koji_proxy.getLastEvent.return_value = EVENT_INFO self.koji_wrapper.koji_proxy.getLastEvent.return_value = EVENT_INFO
self.koji_wrapper.koji_proxy.getTag.return_value = TAG_INFO self.koji_wrapper.koji_proxy.getTag.return_value = TAG_INFO
@mock.patch('pungi.phases.pkgset.sources.source_koji.populate_global_pkgset') @mock.patch("pungi.phases.pkgset.sources.source_koji.populate_global_pkgset")
def test_get_package_sets(self, pgp): def test_get_package_sets(self, pgp):
pkgsets = source_koji.get_pkgset_from_koji( pkgsets = source_koji.get_pkgset_from_koji(
self.compose, self.koji_wrapper, "/prefix" self.compose, self.koji_wrapper, "/prefix"
) )
six.assertCountEqual( six.assertCountEqual(
self, self, self.koji_wrapper.koji_proxy.mock_calls, [mock.call.getLastEvent()]
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.getLastEvent()]
) )
self.assertEqual(pkgsets, pgp.return_value) self.assertEqual(pkgsets, pgp.return_value)
self.assertEqual( self.assertEqual(
pgp.call_args_list, pgp.call_args_list,
[mock.call(self.compose, self.koji_wrapper, '/prefix', EVENT_INFO)], [mock.call(self.compose, self.koji_wrapper, "/prefix", EVENT_INFO)],
) )
def test_get_koji_modules(self): def test_get_koji_modules(self):
mock_build_ids = [{'id': 1065873, 'name': 'testmodule2-master_dash-20180406051653.96c371af'}] mock_build_ids = [
{"id": 1065873, "name": "testmodule2-master_dash-20180406051653.96c371af"}
]
mock_extra = { mock_extra = {
'typeinfo': { "typeinfo": {
'module': { "module": {
'content_koji_tag': 'module-b62270b82443edde', "content_koji_tag": "module-b62270b82443edde",
'modulemd_str': mock.Mock(), "modulemd_str": mock.Mock(),
'name': 'testmodule2', "name": "testmodule2",
'stream': 'master', "stream": "master",
'version': '20180406051653', "version": "20180406051653",
'context': '96c371af', "context": "96c371af",
} }
} }
} }
mock_build_md = [ mock_build_md = [
{ {
'id': 1065873, "id": 1065873,
'epoch': None, "epoch": None,
'extra': mock_extra, "extra": mock_extra,
'name': 'testmodule2', "name": "testmodule2",
'nvr': 'testmodule2-master_dash-20180406051653.2e6f5e0a', "nvr": "testmodule2-master_dash-20180406051653.2e6f5e0a",
'release': '20180406051653.2e6f5e0a', "release": "20180406051653.2e6f5e0a",
'state': 1, "state": 1,
'version': 'master_dash', "version": "master_dash",
'completion_ts': 1433473124.0, "completion_ts": 1433473124.0,
} }
] ]
@ -239,9 +238,12 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.assertIn("tag", module) self.assertIn("tag", module)
expected_query = "testmodule2-master_dash-20180406051653.96c371af" expected_query = "testmodule2-master_dash-20180406051653.96c371af"
self.koji_wrapper.koji_proxy.search.assert_called_once_with(expected_query, "build", self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"glob") expected_query, "build", "glob"
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"]) )
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
def test_get_koji_modules_filter_by_event(self): def test_get_koji_modules_filter_by_event(self):
mock_build_ids = [ mock_build_ids = [
@ -251,7 +253,8 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
"typeinfo": { "typeinfo": {
"module": { "module": {
"content_koji_tag": "module-b62270b82443edde", "content_koji_tag": "module-b62270b82443edde",
"modulemd_str": mock.Mock()} "modulemd_str": mock.Mock(),
}
} }
} }
mock_build_md = [ mock_build_md = [
@ -282,64 +285,66 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.search.assert_called_once_with( self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"testmodule2-master_dash-*", "build", "glob" "testmodule2-master_dash-*", "build", "glob"
) )
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"]) self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
self.koji_wrapper.koji_proxy.listArchives.assert_not_called() self.koji_wrapper.koji_proxy.listArchives.assert_not_called()
self.koji_wrapper.koji_proxy.listRPMs.assert_not_called() self.koji_wrapper.koji_proxy.listRPMs.assert_not_called()
def test_get_koji_modules_no_version(self): def test_get_koji_modules_no_version(self):
mock_build_ids = [ mock_build_ids = [
{'id': 1065873, 'name': 'testmodule2-master-20180406051653.2e6f5e0a'}, {"id": 1065873, "name": "testmodule2-master-20180406051653.2e6f5e0a"},
{'id': 1065874, 'name': 'testmodule2-master-20180406051653.96c371af'} {"id": 1065874, "name": "testmodule2-master-20180406051653.96c371af"},
] ]
mock_extra = [ mock_extra = [
{ {
'typeinfo': { "typeinfo": {
'module': { "module": {
'content_koji_tag': 'module-b62270b82443edde', "content_koji_tag": "module-b62270b82443edde",
'modulemd_str': mock.Mock(), "modulemd_str": mock.Mock(),
'name': 'testmodule2', "name": "testmodule2",
'stream': 'master', "stream": "master",
'version': '20180406051653', "version": "20180406051653",
'context': '2e6f5e0a', "context": "2e6f5e0a",
} }
} }
}, },
{ {
'typeinfo': { "typeinfo": {
'module': { "module": {
'content_koji_tag': 'module-52e40b9cdd3c0f7d', "content_koji_tag": "module-52e40b9cdd3c0f7d",
'modulemd_str': mock.Mock(), "modulemd_str": mock.Mock(),
'name': 'testmodule2', "name": "testmodule2",
'stream': 'master', "stream": "master",
'version': '20180406051653', "version": "20180406051653",
'context': '96c371af', "context": "96c371af",
}
} }
} }
},
] ]
mock_build_md = [ mock_build_md = [
{ {
'id': 1065873, "id": 1065873,
'epoch': None, "epoch": None,
'extra': mock_extra[0], "extra": mock_extra[0],
'name': 'testmodule2', "name": "testmodule2",
'nvr': 'testmodule2-master-20180406051653.2e6f5e0a', "nvr": "testmodule2-master-20180406051653.2e6f5e0a",
'release': '20180406051653.2e6f5e0a', "release": "20180406051653.2e6f5e0a",
'state': 1, "state": 1,
'version': 'master', "version": "master",
'completion_ts': 1433473124.0, "completion_ts": 1433473124.0,
}, },
{ {
'id': 1065874, "id": 1065874,
'epoch': None, "epoch": None,
'extra': mock_extra[1], "extra": mock_extra[1],
'name': 'testmodule2', "name": "testmodule2",
'nvr': 'testmodule2-master-20180406051653.96c371af', "nvr": "testmodule2-master-20180406051653.96c371af",
'release': '20180406051653.96c371af', "release": "20180406051653.96c371af",
'state': 1, "state": 1,
'version': 'master', "version": "master",
'completion_ts': 1433473124.0, "completion_ts": 1433473124.0,
} },
] ]
self.koji_wrapper.koji_proxy.search.return_value = mock_build_ids self.koji_wrapper.koji_proxy.search.return_value = mock_build_ids
@ -362,10 +367,14 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.assertIn("module_context", module) self.assertIn("module_context", module)
expected_query = "testmodule2-master-*" expected_query = "testmodule2-master-*"
self.koji_wrapper.koji_proxy.search.assert_called_once_with(expected_query, "build", self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"glob") expected_query, "build", "glob"
)
expected_calls = [mock.call(mock_build_ids[0]["id"]), mock.call(mock_build_ids[1]["id"])] expected_calls = [
mock.call(mock_build_ids[0]["id"]),
mock.call(mock_build_ids[1]["id"]),
]
self.koji_wrapper.koji_proxy.getBuild.mock_calls == expected_calls self.koji_wrapper.koji_proxy.getBuild.mock_calls == expected_calls
def test_get_koji_modules_ignore_deleted(self): def test_get_koji_modules_ignore_deleted(self):
@ -416,31 +425,29 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.search.assert_called_once_with( self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"testmodule2-master_dash-*", "build", "glob" "testmodule2-master_dash-*", "build", "glob"
) )
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"]) self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
self.koji_wrapper.koji_proxy.listArchives.assert_not_called() self.koji_wrapper.koji_proxy.listArchives.assert_not_called()
self.koji_wrapper.koji_proxy.listRPMs.assert_not_called() self.koji_wrapper.koji_proxy.listRPMs.assert_not_called()
class TestSourceKoji(helpers.PungiTestCase): class TestSourceKoji(helpers.PungiTestCase):
@mock.patch("pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji")
@mock.patch('pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji') @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
def test_run(self, KojiWrapper, gpfk): def test_run(self, KojiWrapper, gpfk):
compose = helpers.DummyCompose(self.topdir, { compose = helpers.DummyCompose(self.topdir, {"koji_profile": "koji"})
'koji_profile': 'koji' KojiWrapper.return_value.koji_module.config.topdir = "/prefix"
})
KojiWrapper.return_value.koji_module.config.topdir = '/prefix'
phase = source_koji.PkgsetSourceKoji(compose) phase = source_koji.PkgsetSourceKoji(compose)
pkgsets, path_prefix = phase() pkgsets, path_prefix = phase()
self.assertEqual(pkgsets, gpfk.return_value) self.assertEqual(pkgsets, gpfk.return_value)
self.assertEqual(path_prefix, '/prefix/') self.assertEqual(path_prefix, "/prefix/")
self.assertEqual(KojiWrapper.mock_calls, [mock.call('koji')]) self.assertEqual(KojiWrapper.mock_calls, [mock.call("koji")])
class TestCorrectNVR(helpers.PungiTestCase): class TestCorrectNVR(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestCorrectNVR, self).setUp() super(TestCorrectNVR, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {}) self.compose = helpers.DummyCompose(self.topdir, {})
@ -467,36 +474,39 @@ class TestCorrectNVR(helpers.PungiTestCase):
def test_new_nv(self): def test_new_nv(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nv) module_info = source_koji.variant_dict_from_str(self.compose, self.new_nv)
expected = { expected = {"name": "base-runtime", "stream": "f26"}
'name': 'base-runtime',
'stream': 'f26'}
self.assertEqual(module_info, expected) self.assertEqual(module_info, expected)
def test_new_nvr(self): def test_new_nvr(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvr) module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvr)
expected = { expected = {
'name': 'base-runtime', "name": "base-runtime",
'stream': 'f26', "stream": "f26",
'version': '20170502134116'} "version": "20170502134116",
}
self.assertEqual(module_info, expected) self.assertEqual(module_info, expected)
def test_new_nvrc(self): def test_new_nvrc(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvrc) module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvrc)
expected = { expected = {
'name': 'base-runtime', "name": "base-runtime",
'stream': 'f26', "stream": "f26",
'version': '20170502134116', "version": "20170502134116",
'context': '0123abcd'} "context": "0123abcd",
}
self.assertEqual(module_info, expected) self.assertEqual(module_info, expected)
def test_new_garbage_value(self): def test_new_garbage_value(self):
self.assertRaises(ValueError, source_koji.variant_dict_from_str, self.assertRaises(
self.compose, 'foo:bar:baz:quux:qaar') ValueError,
source_koji.variant_dict_from_str,
self.compose,
"foo:bar:baz:quux:qaar",
)
class TestFilterInherited(unittest.TestCase): class TestFilterInherited(unittest.TestCase):
def test_empty_module_list(self): def test_empty_module_list(self):
event = {"id": 123456} event = {"id": 123456}
koji_proxy = mock.Mock() koji_proxy = mock.Mock()
@ -504,7 +514,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag" top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [ koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"} {"name": "middle-tag"},
{"name": "bottom-tag"},
] ]
result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag) result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag)
@ -521,7 +532,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag" top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [ koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"} {"name": "middle-tag"},
{"name": "bottom-tag"},
] ]
module_builds = [ module_builds = [
{"name": "foo", "version": "1", "release": "1", "tag_name": "top-tag"}, {"name": "foo", "version": "1", "release": "1", "tag_name": "top-tag"},
@ -547,7 +559,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag" top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [ koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"} {"name": "middle-tag"},
{"name": "bottom-tag"},
] ]
module_builds = [ module_builds = [
{"name": "foo", "version": "1", "release": "2", "tag_name": "bottom-tag"}, {"name": "foo", "version": "1", "release": "2", "tag_name": "bottom-tag"},
@ -671,7 +684,6 @@ class MockModule(object):
@mock.patch("pungi.module_util.Modulemd.ModuleStream.read_file", new=MockModule) @mock.patch("pungi.module_util.Modulemd.ModuleStream.read_file", new=MockModule)
@unittest.skipIf(Modulemd is None, "Skipping tests, no module support") @unittest.skipIf(Modulemd is None, "Skipping tests, no module support")
class TestAddModuleToVariant(helpers.PungiTestCase): class TestAddModuleToVariant(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestAddModuleToVariant, self).setUp() super(TestAddModuleToVariant, self).setUp()
self.koji = mock.Mock() self.koji = mock.Mock()
@ -695,9 +707,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
} }
def test_adding_module(self): def test_adding_module(self):
variant = mock.Mock( variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
arches=["armhfp", "x86_64"], arch_mmds={}, modules=[]
)
source_koji._add_module_to_variant(self.koji, variant, self.buildinfo) source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
@ -705,10 +715,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds, variant.arch_mmds,
{ {
"armhfp": { "armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
}, },
"x86_64": { "x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
}, },
}, },
) )
@ -729,10 +743,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds, variant.arch_mmds,
{ {
"armhfp": { "armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
}, },
"x86_64": { "x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"), "m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
}, },
}, },
@ -742,9 +760,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
) )
def test_adding_module_with_add_module(self): def test_adding_module_with_add_module(self):
variant = mock.Mock( variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
arches=["armhfp", "x86_64"], arch_mmds={}, modules=[]
)
source_koji._add_module_to_variant( source_koji._add_module_to_variant(
self.koji, variant, self.buildinfo, add_to_variant_modules=True self.koji, variant, self.buildinfo, add_to_variant_modules=True
@ -754,10 +770,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds, variant.arch_mmds,
{ {
"armhfp": { "armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
}, },
"x86_64": { "x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
}, },
}, },
) )
@ -782,10 +802,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds, variant.arch_mmds,
{ {
"armhfp": { "armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
}, },
"x86_64": { "x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"), "module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"), "m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
}, },
}, },

View File

@ -11,95 +11,101 @@ from . import helpers
class RepoclosureWrapperTestCase(helpers.BaseTestCase): class RepoclosureWrapperTestCase(helpers.BaseTestCase):
def test_minimal_command(self): def test_minimal_command(self):
self.assertEqual(rc.get_repoclosure_cmd(), self.assertEqual(
['/usr/bin/repoclosure', '--tempcache']) rc.get_repoclosure_cmd(), ["/usr/bin/repoclosure", "--tempcache"]
)
def test_minimal_dnf_command(self): def test_minimal_dnf_command(self):
self.assertEqual(rc.get_repoclosure_cmd(backend='dnf'), self.assertEqual(rc.get_repoclosure_cmd(backend="dnf"), ["dnf", "repoclosure"])
['dnf', 'repoclosure'])
def test_unknown_backend(self): def test_unknown_backend(self):
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
rc.get_repoclosure_cmd(backend='rpm') rc.get_repoclosure_cmd(backend="rpm")
self.assertEqual(str(ctx.exception), 'Unknown repoclosure backend: rpm') self.assertEqual(str(ctx.exception), "Unknown repoclosure backend: rpm")
def test_multiple_arches(self): def test_multiple_arches(self):
self.assertEqual(rc.get_repoclosure_cmd(arch=['x86_64', 'ppc64']), self.assertEqual(
['/usr/bin/repoclosure', '--tempcache', '--arch=x86_64', '--arch=ppc64']) rc.get_repoclosure_cmd(arch=["x86_64", "ppc64"]),
["/usr/bin/repoclosure", "--tempcache", "--arch=x86_64", "--arch=ppc64"],
)
def test_full_command(self): def test_full_command(self):
repos = {'my-repo': '/mnt/koji/repo'} repos = {"my-repo": "/mnt/koji/repo"}
lookaside = {'fedora': 'http://kojipkgs.fp.o/repo'} lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
cmd = rc.get_repoclosure_cmd(arch='x86_64', repos=repos, lookaside=lookaside) cmd = rc.get_repoclosure_cmd(arch="x86_64", repos=repos, lookaside=lookaside)
self.assertEqual(cmd[0], '/usr/bin/repoclosure') self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[1:], cmd[1:],
[ [
'--tempcache', "--tempcache",
'--arch=x86_64', "--arch=x86_64",
'--repofrompath=my-repo,file:///mnt/koji/repo', "--repofrompath=my-repo,file:///mnt/koji/repo",
'--repofrompath=fedora,http://kojipkgs.fp.o/repo', "--repofrompath=fedora,http://kojipkgs.fp.o/repo",
'--repoid=my-repo', "--repoid=my-repo",
'--lookaside=fedora', "--lookaside=fedora",
] ],
) )
def test_full_dnf_command(self): def test_full_dnf_command(self):
repos = {'my-repo': '/mnt/koji/repo'} repos = {"my-repo": "/mnt/koji/repo"}
lookaside = {'fedora': 'http://kojipkgs.fp.o/repo'} lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
cmd = rc.get_repoclosure_cmd(backend='dnf', arch='x86_64', cmd = rc.get_repoclosure_cmd(
repos=repos, lookaside=lookaside) backend="dnf", arch="x86_64", repos=repos, lookaside=lookaside
self.assertEqual(cmd[:2], ['dnf', 'repoclosure']) )
self.assertEqual(cmd[:2], ["dnf", "repoclosure"])
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[2:], cmd[2:],
['--arch=x86_64', [
'--repofrompath=my-repo,file:///mnt/koji/repo', "--arch=x86_64",
'--repofrompath=fedora,http://kojipkgs.fp.o/repo', "--repofrompath=my-repo,file:///mnt/koji/repo",
'--repo=my-repo', "--repofrompath=fedora,http://kojipkgs.fp.o/repo",
'--check=my-repo', "--repo=my-repo",
'--repo=fedora']) "--check=my-repo",
"--repo=fedora",
],
)
def test_expand_repo(self): def test_expand_repo(self):
repos = { repos = {
'local': '/mnt/koji/repo', "local": "/mnt/koji/repo",
'remote': 'http://kojipkgs.fp.o/repo', "remote": "http://kojipkgs.fp.o/repo",
} }
cmd = rc.get_repoclosure_cmd(repos=repos) cmd = rc.get_repoclosure_cmd(repos=repos)
self.assertEqual(cmd[0], '/usr/bin/repoclosure') self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[1:], cmd[1:],
[ [
'--tempcache', "--tempcache",
'--repofrompath=local,file:///mnt/koji/repo', "--repofrompath=local,file:///mnt/koji/repo",
'--repofrompath=remote,http://kojipkgs.fp.o/repo', "--repofrompath=remote,http://kojipkgs.fp.o/repo",
'--repoid=local', "--repoid=local",
'--repoid=remote', "--repoid=remote",
] ],
) )
def test_expand_lookaside(self): def test_expand_lookaside(self):
repos = { repos = {
'local': '/mnt/koji/repo', "local": "/mnt/koji/repo",
'remote': 'http://kojipkgs.fp.o/repo', "remote": "http://kojipkgs.fp.o/repo",
} }
cmd = rc.get_repoclosure_cmd(lookaside=repos) cmd = rc.get_repoclosure_cmd(lookaside=repos)
self.assertEqual(cmd[0], '/usr/bin/repoclosure') self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual( six.assertCountEqual(
self, self,
cmd[1:], cmd[1:],
[ [
'--tempcache', "--tempcache",
'--repofrompath=local,file:///mnt/koji/repo', "--repofrompath=local,file:///mnt/koji/repo",
'--repofrompath=remote,http://kojipkgs.fp.o/repo', "--repofrompath=remote,http://kojipkgs.fp.o/repo",
'--lookaside=local', "--lookaside=local",
'--lookaside=remote', "--lookaside=remote",
] ],
) )
@ -118,7 +124,7 @@ class FusExtractorTestCase(helpers.PungiTestCase):
def test_error(self): def test_error(self):
helpers.touch( helpers.touch(
self.input1, self.input1,
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n" "fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n",
) )
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
rc.extract_from_fus_logs([self.input1], self.output) rc.extract_from_fus_logs([self.input1], self.output)
@ -130,11 +136,11 @@ class FusExtractorTestCase(helpers.PungiTestCase):
def test_errors_in_multiple_files(self): def test_errors_in_multiple_files(self):
helpers.touch( helpers.touch(
self.input1, self.input1,
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n" "fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n",
) )
helpers.touch( helpers.touch(
self.input2, self.input2,
"fus-DEBUG: Installing baz\nProblem 1/1\n - nothing provides quux\n" "fus-DEBUG: Installing baz\nProblem 1/1\n - nothing provides quux\n",
) )
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
rc.extract_from_fus_logs([self.input1, self.input2], self.output) rc.extract_from_fus_logs([self.input1, self.input2], self.output)

View File

@ -11,15 +11,16 @@ from tests import helpers
class TestRunrootOpenSSH(helpers.PungiTestCase): class TestRunrootOpenSSH(helpers.PungiTestCase):
def setUp(self): def setUp(self):
super(TestRunrootOpenSSH, self).setUp() super(TestRunrootOpenSSH, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, { self.compose = helpers.DummyCompose(
self.topdir,
{
"runroot": True, "runroot": True,
"runroot_method": "openssh", "runroot_method": "openssh",
"runroot_ssh_user": "root", "runroot_ssh_user": "root",
"runroot_ssh_hostnames": { "runroot_ssh_hostnames": {"x86_64": "localhost"},
"x86_64": "localhost"
},
"runroot_tag": "f28-build", "runroot_tag": "f28-build",
}) },
)
self.runroot = Runroot(self.compose) self.runroot = Runroot(self.compose)
@ -52,7 +53,7 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
""" """
logfile = ("/foo/runroot." + suffix + ".log") if suffix else "/foo/runroot.log" logfile = ("/foo/runroot." + suffix + ".log") if suffix else "/foo/runroot.log"
return mock.call( return mock.call(
['ssh', '-oBatchMode=yes', '-n', '-l', 'root', 'localhost', cmd], ["ssh", "-oBatchMode=yes", "-n", "-l", "root", "localhost", cmd],
logfile=logfile, logfile=logfile,
show_cmd=True, show_cmd=True,
) )
@ -61,12 +62,15 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
def test_run(self, run): def test_run(self, run):
run.return_value = (0, "dummy output\n") run.return_value = (0, "dummy output\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64") self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
run.assert_has_calls([ run.assert_has_calls(
self._ssh_call('df -h'), [
self._ssh_call("df -h"),
self._ssh_call( self._ssh_call(
"rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", suffix="rpms" "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
), ),
]) ]
)
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_get_buildroot_rpms(self, run): def test_get_buildroot_rpms(self, run):
@ -75,92 +79,123 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64") self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
rpms = self.runroot.get_buildroot_rpms() rpms = self.runroot.get_buildroot_rpms()
self.assertEqual( self.assertEqual(set(rpms), set(["foo-1-1.fc29.noarch", "bar-1-1.fc29.noarch"]))
set(rpms), set(["foo-1-1.fc29.noarch", "bar-1-1.fc29.noarch"]))
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_run_templates(self, run): def test_run_templates(self, run):
self.compose.conf["runroot_ssh_init_template"] = "/usr/sbin/init_runroot {runroot_tag}" self.compose.conf[
self.compose.conf["runroot_ssh_install_packages_template"] = \ "runroot_ssh_init_template"
"install {runroot_key} {packages}" ] = "/usr/sbin/init_runroot {runroot_tag}"
self.compose.conf[
"runroot_ssh_install_packages_template"
] = "install {runroot_key} {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {runroot_key} {command}" self.compose.conf["runroot_ssh_run_template"] = "run {runroot_key} {command}"
run.return_value = (0, "key\n") run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64", self.runroot.run(
packages=["lorax", "automake"]) "df -h",
run.assert_has_calls([ log_file="/foo/runroot.log",
self._ssh_call('/usr/sbin/init_runroot f28-build', suffix="init"), arch="x86_64",
self._ssh_call('install key lorax automake', suffix="install_packages"), packages=["lorax", "automake"],
self._ssh_call('run key df -h'), )
run.assert_has_calls(
[
self._ssh_call("/usr/sbin/init_runroot f28-build", suffix="init"),
self._ssh_call("install key lorax automake", suffix="install_packages"),
self._ssh_call("run key df -h"),
self._ssh_call( self._ssh_call(
"run key rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", "run key rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms", suffix="rpms",
), ),
]) ]
)
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_run_templates_no_init(self, run): def test_run_templates_no_init(self, run):
self.compose.conf["runroot_ssh_install_packages_template"] = \ self.compose.conf[
"install {packages}" "runroot_ssh_install_packages_template"
] = "install {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {command}" self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n") run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64", self.runroot.run(
packages=["lorax", "automake"]) "df -h",
run.assert_has_calls([ log_file="/foo/runroot.log",
self._ssh_call('install lorax automake', suffix="install_packages"), arch="x86_64",
self._ssh_call('run df -h'), packages=["lorax", "automake"],
)
run.assert_has_calls(
[
self._ssh_call("install lorax automake", suffix="install_packages"),
self._ssh_call("run df -h"),
self._ssh_call( self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", "run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms", suffix="rpms",
), ),
]) ]
)
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_run_templates_no_packages(self, run): def test_run_templates_no_packages(self, run):
self.compose.conf["runroot_ssh_install_packages_template"] = \ self.compose.conf[
"install {packages}" "runroot_ssh_install_packages_template"
] = "install {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {command}" self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n") run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64") self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
run.assert_has_calls([ run.assert_has_calls(
self._ssh_call('run df -h'), [
self._ssh_call("run df -h"),
self._ssh_call( self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", "run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms", suffix="rpms",
), ),
]) ]
)
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_run_templates_no_install_packages(self, run): def test_run_templates_no_install_packages(self, run):
self.compose.conf["runroot_ssh_run_template"] = "run {command}" self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n") run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64", self.runroot.run(
packages=["lorax", "automake"]) "df -h",
run.assert_has_calls([ log_file="/foo/runroot.log",
self._ssh_call('run df -h'), arch="x86_64",
packages=["lorax", "automake"],
)
run.assert_has_calls(
[
self._ssh_call("run df -h"),
self._ssh_call( self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", "run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms", suffix="rpms",
), ),
]) ]
)
@mock.patch("pungi.runroot.run") @mock.patch("pungi.runroot.run")
def test_run_templates_output_dir(self, run): def test_run_templates_output_dir(self, run):
self.compose.conf["runroot_ssh_run_template"] = "run {command}" self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n") run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64", self.runroot.run(
"df -h",
log_file="/foo/runroot.log",
arch="x86_64",
packages=["lorax", "automake"], packages=["lorax", "automake"],
chown_paths=["/mnt/foo/compose", "/mnt/foo/x"]) chown_paths=["/mnt/foo/compose", "/mnt/foo/x"],
run.assert_has_calls([ )
run.assert_has_calls(
[
self._ssh_call( self._ssh_call(
"run df -h && chmod -R a+r /mnt/foo/compose /mnt/foo/x && " "run df -h && chmod -R a+r /mnt/foo/compose /mnt/foo/x && "
"chown -R %d /mnt/foo/compose /mnt/foo/x" % os.getuid()), "chown -R %d /mnt/foo/compose /mnt/foo/x" % os.getuid()
),
self._ssh_call( self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", "run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms", suffix="rpms",
), ),
]) ]
)

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import mock import mock
try: try:
import unittest2 as unittest import unittest2 as unittest
except ImportError: except ImportError:
@ -53,52 +54,58 @@ class FileSCMTestCase(SCMBaseTest):
""" """
super(FileSCMTestCase, self).setUp() super(FileSCMTestCase, self).setUp()
self.srcdir = tempfile.mkdtemp() self.srcdir = tempfile.mkdtemp()
touch(os.path.join(self.srcdir, 'in_root')) touch(os.path.join(self.srcdir, "in_root"))
touch(os.path.join(self.srcdir, 'subdir', 'first')) touch(os.path.join(self.srcdir, "subdir", "first"))
touch(os.path.join(self.srcdir, 'subdir', 'second')) touch(os.path.join(self.srcdir, "subdir", "second"))
def tearDown(self): def tearDown(self):
super(FileSCMTestCase, self).tearDown() super(FileSCMTestCase, self).tearDown()
shutil.rmtree(self.srcdir) shutil.rmtree(self.srcdir)
def test_get_file_by_name(self): def test_get_file_by_name(self):
file = os.path.join(self.srcdir, 'in_root') file = os.path.join(self.srcdir, "in_root")
retval = scm.get_file_from_scm(file, self.destdir) retval = scm.get_file_from_scm(file, self.destdir)
self.assertStructure(retval, ['in_root']) self.assertStructure(retval, ["in_root"])
def test_get_file_by_dict(self): def test_get_file_by_dict(self):
retval = scm.get_file_from_scm({ retval = scm.get_file_from_scm(
'scm': 'file', 'repo': None, 'file': os.path.join(self.srcdir, 'subdir', 'first')}, {
self.destdir) "scm": "file",
self.assertStructure(retval, ['first']) "repo": None,
"file": os.path.join(self.srcdir, "subdir", "first"),
},
self.destdir,
)
self.assertStructure(retval, ["first"])
def test_get_dir_by_name(self): def test_get_dir_by_name(self):
retval = scm.get_dir_from_scm(os.path.join(self.srcdir, 'subdir'), self.destdir) retval = scm.get_dir_from_scm(os.path.join(self.srcdir, "subdir"), self.destdir)
self.assertStructure(retval, ['first', 'second']) self.assertStructure(retval, ["first", "second"])
def test_get_dir_by_dict(self): def test_get_dir_by_dict(self):
retval = scm.get_dir_from_scm( retval = scm.get_dir_from_scm(
{'scm': 'file', 'repo': None, 'dir': os.path.join(self.srcdir, 'subdir')}, {"scm": "file", "repo": None, "dir": os.path.join(self.srcdir, "subdir")},
self.destdir) self.destdir,
self.assertStructure(retval, ['first', 'second']) )
self.assertStructure(retval, ["first", "second"])
def test_get_missing_file(self): def test_get_missing_file(self):
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
scm.get_file_from_scm({'scm': 'file', scm.get_file_from_scm(
'repo': None, {"scm": "file", "repo": None, "file": "this-is-really-not-here.txt"},
'file': 'this-is-really-not-here.txt'}, self.destdir,
self.destdir) )
self.assertIn('No files matched', str(ctx.exception)) self.assertIn("No files matched", str(ctx.exception))
def test_get_missing_dir(self): def test_get_missing_dir(self):
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
scm.get_dir_from_scm({'scm': 'file', scm.get_dir_from_scm(
'repo': None, {"scm": "file", "repo": None, "dir": "this-is-really-not-here"},
'dir': 'this-is-really-not-here'}, self.destdir,
self.destdir) )
self.assertIn('No directories matched', str(ctx.exception)) self.assertIn("No directories matched", str(ctx.exception))
class GitSCMTestCase(SCMBaseTest): class GitSCMTestCase(SCMBaseTest):
@ -110,26 +117,30 @@ class GitSCMTestCase(SCMBaseTest):
["git", "init"], ["git", "init"],
["git", "fetch", "--depth=1", url, branch], ["git", "fetch", "--depth=1", url, branch],
["git", "checkout", "FETCH_HEAD"], ["git", "checkout", "FETCH_HEAD"],
] + command, ]
+ command,
) )
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_file(self, run): def test_get_file(self, run):
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
touch(os.path.join(workdir, 'some_file.txt')) touch(os.path.join(workdir, "some_file.txt"))
touch(os.path.join(workdir, 'other_file.txt')) touch(os.path.join(workdir, "other_file.txt"))
run.side_effect = process run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'git', retval = scm.get_file_from_scm(
'repo': 'git://example.com/git/repo.git', {
'file': 'some_file.txt'}, "scm": "git",
self.destdir) "repo": "git://example.com/git/repo.git",
self.assertStructure(retval, ['some_file.txt']) "file": "some_file.txt",
},
self.destdir,
)
self.assertStructure(retval, ["some_file.txt"])
self.assertCalls(run, "git://example.com/git/repo.git", "master") self.assertCalls(run, "git://example.com/git/repo.git", "master")
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_file_fetch_fails(self, run): def test_get_file_fetch_fails(self, run):
url = "git://example.com/git/repo.git" url = "git://example.com/git/repo.git"
@ -138,15 +149,15 @@ class GitSCMTestCase(SCMBaseTest):
exc = RuntimeError() exc = RuntimeError()
exc.output = "" exc.output = ""
raise exc raise exc
touch(os.path.join(workdir, 'some_file.txt')) touch(os.path.join(workdir, "some_file.txt"))
touch(os.path.join(workdir, 'other_file.txt')) touch(os.path.join(workdir, "other_file.txt"))
run.side_effect = process run.side_effect = process
retval = scm.get_file_from_scm( retval = scm.get_file_from_scm(
{"scm": "git", "repo": url, "file": "some_file.txt"}, self.destdir {"scm": "git", "repo": url, "file": "some_file.txt"}, self.destdir
) )
self.assertStructure(retval, ['some_file.txt']) self.assertStructure(retval, ["some_file.txt"])
self.assertEqual( self.assertEqual(
[call[0][0] for call in run.call_args_list], [call[0][0] for call in run.call_args_list],
[ [
@ -158,77 +169,85 @@ class GitSCMTestCase(SCMBaseTest):
], ],
) )
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_file_generated_by_command(self, run): def test_get_file_generated_by_command(self, run):
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git": if cmd[0] == "git":
touch(os.path.join(workdir, 'some_file.txt')) touch(os.path.join(workdir, "some_file.txt"))
return 0, '' return 0, ""
run.side_effect = process run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'git', retval = scm.get_file_from_scm(
'repo': 'git://example.com/git/repo.git', {
'file': 'some_file.txt', "scm": "git",
'command': 'make'}, "repo": "git://example.com/git/repo.git",
self.destdir) "file": "some_file.txt",
self.assertStructure(retval, ['some_file.txt']) "command": "make",
},
self.destdir,
)
self.assertStructure(retval, ["some_file.txt"])
self.assertCalls(run, "git://example.com/git/repo.git", "master", "make") self.assertCalls(run, "git://example.com/git/repo.git", "master", "make")
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_file_and_fail_to_generate(self, run): def test_get_file_and_fail_to_generate(self, run):
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git": if cmd[0] == "git":
touch(os.path.join(workdir, 'some_file.txt')) touch(os.path.join(workdir, "some_file.txt"))
return 0, "output" return 0, "output"
return 1, "output" return 1, "output"
run.side_effect = process run.side_effect = process
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
scm.get_file_from_scm({'scm': 'git', scm.get_file_from_scm(
'repo': 'git://example.com/git/repo.git', {
'file': 'some_file.txt', "scm": "git",
'command': 'make'}, "repo": "git://example.com/git/repo.git",
self.destdir) "file": "some_file.txt",
"command": "make",
},
self.destdir,
)
self.assertEqual(str(ctx.exception), "'make' failed with exit code 1") self.assertEqual(str(ctx.exception), "'make' failed with exit code 1")
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_dir(self, run): def test_get_dir(self, run):
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
touch(os.path.join(workdir, "subdir", 'first')) touch(os.path.join(workdir, "subdir", "first"))
touch(os.path.join(workdir, "subdir", 'second')) touch(os.path.join(workdir, "subdir", "second"))
run.side_effect = process run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'git', retval = scm.get_dir_from_scm(
'repo': 'git://example.com/git/repo.git', {"scm": "git", "repo": "git://example.com/git/repo.git", "dir": "subdir"},
'dir': 'subdir'}, self.destdir,
self.destdir) )
self.assertStructure(retval, ['first', 'second']) self.assertStructure(retval, ["first", "second"])
self.assertCalls(run, "git://example.com/git/repo.git", "master") self.assertCalls(run, "git://example.com/git/repo.git", "master")
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_dir_and_generate(self, run): def test_get_dir_and_generate(self, run):
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git": if cmd[0] == "git":
touch(os.path.join(workdir, 'subdir', 'first')) touch(os.path.join(workdir, "subdir", "first"))
touch(os.path.join(workdir, 'subdir', 'second')) touch(os.path.join(workdir, "subdir", "second"))
return 0, '' return 0, ""
run.side_effect = process run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'git', retval = scm.get_dir_from_scm(
'repo': 'git://example.com/git/repo.git', {
'dir': 'subdir', "scm": "git",
'command': 'make'}, "repo": "git://example.com/git/repo.git",
self.destdir) "dir": "subdir",
self.assertStructure(retval, ['first', 'second']) "command": "make",
},
self.destdir,
)
self.assertStructure(retval, ["first", "second"])
self.assertCalls(run, "git://example.com/git/repo.git", "master", "make") self.assertCalls(run, "git://example.com/git/repo.git", "master", "make")
@ -237,8 +256,11 @@ class RpmSCMTestCase(SCMBaseTest):
super(RpmSCMTestCase, self).setUp() super(RpmSCMTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp() self.tmpdir = tempfile.mkdtemp()
self.exploded = set() self.exploded = set()
self.rpms = [self.tmpdir + '/whatever.rpm', self.tmpdir + '/another.rpm'] self.rpms = [self.tmpdir + "/whatever.rpm", self.tmpdir + "/another.rpm"]
self.numbered = [self.tmpdir + x for x in ['/one1.rpm', '/one2.rpm', '/two1.rpm', '/two2.rpm']] self.numbered = [
self.tmpdir + x
for x in ["/one1.rpm", "/one2.rpm", "/two1.rpm", "/two2.rpm"]
]
for rpm in self.rpms + self.numbered: for rpm in self.rpms + self.numbered:
touch(rpm) touch(rpm)
@ -248,155 +270,180 @@ class RpmSCMTestCase(SCMBaseTest):
def _explode_rpm(self, path, dest): def _explode_rpm(self, path, dest):
self.exploded.add(path) self.exploded.add(path)
touch(os.path.join(dest, 'some-file.txt')) touch(os.path.join(dest, "some-file.txt"))
touch(os.path.join(dest, 'subdir', 'foo.txt')) touch(os.path.join(dest, "subdir", "foo.txt"))
touch(os.path.join(dest, 'subdir', 'bar.txt')) touch(os.path.join(dest, "subdir", "bar.txt"))
def _explode_multiple(self, path, dest): def _explode_multiple(self, path, dest):
self.exploded.add(path) self.exploded.add(path)
cnt = len(self.exploded) cnt = len(self.exploded)
touch(os.path.join(dest, 'some-file-%d.txt' % cnt)) touch(os.path.join(dest, "some-file-%d.txt" % cnt))
touch(os.path.join(dest, 'subdir-%d' % cnt, 'foo-%d.txt' % cnt)) touch(os.path.join(dest, "subdir-%d" % cnt, "foo-%d.txt" % cnt))
touch(os.path.join(dest, 'common', 'foo-%d.txt' % cnt)) touch(os.path.join(dest, "common", "foo-%d.txt" % cnt))
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_file(self, explode): def test_get_file(self, explode):
explode.side_effect = self._explode_rpm explode.side_effect = self._explode_rpm
retval = scm.get_file_from_scm( retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'file': 'some-file.txt'}, {"scm": "rpm", "repo": self.rpms[0], "file": "some-file.txt"}, self.destdir
self.destdir) )
self.assertStructure(retval, ['some-file.txt']) self.assertStructure(retval, ["some-file.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]])) self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_more_files(self, explode): def test_get_more_files(self, explode):
explode.side_effect = self._explode_rpm explode.side_effect = self._explode_rpm
retval = scm.get_file_from_scm( retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], {
'file': ['some-file.txt', 'subdir/foo.txt']}, "scm": "rpm",
self.destdir) "repo": self.rpms[0],
"file": ["some-file.txt", "subdir/foo.txt"],
},
self.destdir,
)
self.assertStructure(retval, ['some-file.txt', 'foo.txt']) self.assertStructure(retval, ["some-file.txt", "foo.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]])) self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_whole_dir(self, explode): def test_get_whole_dir(self, explode):
explode.side_effect = self._explode_rpm explode.side_effect = self._explode_rpm
retval = scm.get_dir_from_scm( retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'dir': 'subdir'}, {"scm": "rpm", "repo": self.rpms[0], "dir": "subdir"}, self.destdir
self.destdir) )
self.assertStructure(retval, ['subdir/foo.txt', 'subdir/bar.txt']) self.assertStructure(retval, ["subdir/foo.txt", "subdir/bar.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]])) self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_contents(self, explode): def test_get_dir_contents(self, explode):
explode.side_effect = self._explode_rpm explode.side_effect = self._explode_rpm
retval = scm.get_dir_from_scm( retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'dir': 'subdir/'}, {"scm": "rpm", "repo": self.rpms[0], "dir": "subdir/"}, self.destdir
self.destdir) )
self.assertStructure(retval, ['foo.txt', 'bar.txt']) self.assertStructure(retval, ["foo.txt", "bar.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]])) self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_files_from_two_rpms(self, explode): def test_get_files_from_two_rpms(self, explode):
explode.side_effect = self._explode_multiple explode.side_effect = self._explode_multiple
retval = scm.get_file_from_scm( retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms, {
'file': ['some-file-1.txt', 'some-file-2.txt']}, "scm": "rpm",
self.destdir) "repo": self.rpms,
"file": ["some-file-1.txt", "some-file-2.txt"],
},
self.destdir,
)
self.assertStructure(retval, ['some-file-1.txt', 'some-file-2.txt']) self.assertStructure(retval, ["some-file-1.txt", "some-file-2.txt"])
six.assertCountEqual(self, self.exploded, self.rpms) six.assertCountEqual(self, self.exploded, self.rpms)
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_files_from_glob_rpms(self, explode): def test_get_files_from_glob_rpms(self, explode):
explode.side_effect = self._explode_multiple explode.side_effect = self._explode_multiple
retval = scm.get_file_from_scm( retval = scm.get_file_from_scm(
{'scm': 'rpm', 'file': 'some-file-*.txt', {
'repo': [self.tmpdir + '/one*.rpm', self.tmpdir + '/two*.rpm']}, "scm": "rpm",
self.destdir) "file": "some-file-*.txt",
"repo": [self.tmpdir + "/one*.rpm", self.tmpdir + "/two*.rpm"],
},
self.destdir,
)
self.assertStructure(retval, self.assertStructure(
['some-file-1.txt', 'some-file-2.txt', 'some-file-3.txt', 'some-file-4.txt']) retval,
[
"some-file-1.txt",
"some-file-2.txt",
"some-file-3.txt",
"some-file-4.txt",
],
)
six.assertCountEqual(self, self.exploded, self.numbered) six.assertCountEqual(self, self.exploded, self.numbered)
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_from_two_rpms(self, explode): def test_get_dir_from_two_rpms(self, explode):
explode.side_effect = self._explode_multiple explode.side_effect = self._explode_multiple
retval = scm.get_dir_from_scm({'scm': 'rpm', retval = scm.get_dir_from_scm(
'repo': self.rpms, {"scm": "rpm", "repo": self.rpms, "dir": "common"}, self.destdir
'dir': 'common'}, )
self.destdir)
self.assertStructure(retval, ['common/foo-1.txt', 'common/foo-2.txt']) self.assertStructure(retval, ["common/foo-1.txt", "common/foo-2.txt"])
six.assertCountEqual(self, self.exploded, self.rpms) six.assertCountEqual(self, self.exploded, self.rpms)
@mock.patch('pungi.wrappers.scm.explode_rpm_package') @mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_from_glob_rpms(self, explode): def test_get_dir_from_glob_rpms(self, explode):
explode.side_effect = self._explode_multiple explode.side_effect = self._explode_multiple
retval = scm.get_dir_from_scm( retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'dir': 'common/', {
'repo': [self.tmpdir + '/one*.rpm', self.tmpdir + '/two*.rpm']}, "scm": "rpm",
self.destdir) "dir": "common/",
"repo": [self.tmpdir + "/one*.rpm", self.tmpdir + "/two*.rpm"],
},
self.destdir,
)
self.assertStructure(retval, self.assertStructure(
['foo-1.txt', 'foo-2.txt', 'foo-3.txt', 'foo-4.txt']) retval, ["foo-1.txt", "foo-2.txt", "foo-3.txt", "foo-4.txt"]
)
six.assertCountEqual(self, self.exploded, self.numbered) six.assertCountEqual(self, self.exploded, self.numbered)
class CvsSCMTestCase(SCMBaseTest): class CvsSCMTestCase(SCMBaseTest):
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_file(self, run): def test_get_file(self, run):
commands = [] commands = []
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
fname = cmd[-1] fname = cmd[-1]
touch(os.path.join(workdir, fname)) touch(os.path.join(workdir, fname))
commands.append(' '.join(cmd)) commands.append(" ".join(cmd))
run.side_effect = process run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'cvs', retval = scm.get_file_from_scm(
'repo': 'http://example.com/cvs', {"scm": "cvs", "repo": "http://example.com/cvs", "file": "some_file.txt"},
'file': 'some_file.txt'}, self.destdir,
self.destdir) )
self.assertStructure(retval, ['some_file.txt']) self.assertStructure(retval, ["some_file.txt"])
self.assertEqual( self.assertEqual(
commands, commands,
['/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD some_file.txt']) ["/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD some_file.txt"],
)
@mock.patch('pungi.wrappers.scm.run') @mock.patch("pungi.wrappers.scm.run")
def test_get_dir(self, run): def test_get_dir(self, run):
commands = [] commands = []
def process(cmd, workdir=None, **kwargs): def process(cmd, workdir=None, **kwargs):
fname = cmd[-1] fname = cmd[-1]
touch(os.path.join(workdir, fname, 'first')) touch(os.path.join(workdir, fname, "first"))
touch(os.path.join(workdir, fname, 'second')) touch(os.path.join(workdir, fname, "second"))
commands.append(' '.join(cmd)) commands.append(" ".join(cmd))
run.side_effect = process run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'cvs', retval = scm.get_dir_from_scm(
'repo': 'http://example.com/cvs', {"scm": "cvs", "repo": "http://example.com/cvs", "dir": "subdir"},
'dir': 'subdir'}, self.destdir,
self.destdir) )
self.assertStructure(retval, ['first', 'second']) self.assertStructure(retval, ["first", "second"])
self.assertEqual( self.assertEqual(
commands, commands,
['/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD subdir']) ["/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD subdir"],
)
@mock.patch("pungi.wrappers.scm.urlretrieve") @mock.patch("pungi.wrappers.scm.urlretrieve")

View File

@ -16,27 +16,36 @@ from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom
try: try:
import dnf import dnf
HAS_DNF = True HAS_DNF = True
except ImportError: except ImportError:
HAS_DNF = False HAS_DNF = False
try: try:
import yum import yum
HAS_YUM = True HAS_YUM = True
except ImportError: except ImportError:
HAS_YUM = False HAS_YUM = False
PAD = b'\0' * 100 PAD = b"\0" * 100
UNBOOTABLE_ISO = (b'\0' * 0x8001) + b'CD001' + PAD UNBOOTABLE_ISO = (b"\0" * 0x8001) + b"CD001" + PAD
ISO_WITH_MBR = (b'\0' * 0x1fe) + b'\x55\xAA' + (b'\0' * 0x7e01) + b'CD001' + PAD ISO_WITH_MBR = (b"\0" * 0x1FE) + b"\x55\xAA" + (b"\0" * 0x7E01) + b"CD001" + PAD
ISO_WITH_GPT = (b'\0' * 0x200) + b'EFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD ISO_WITH_GPT = (b"\0" * 0x200) + b"EFI PART" + (b"\0" * 0x7DF9) + b"CD001" + PAD
ISO_WITH_MBR_AND_GPT = (b'\0' * 0x1fe) + b'\x55\xAAEFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD ISO_WITH_MBR_AND_GPT = (
ISO_WITH_TORITO = (b'\0' * 0x8001) + b'CD001' + (b'\0' * 0x7fa) + b'\0CD001\1EL TORITO SPECIFICATION' + PAD (b"\0" * 0x1FE) + b"\x55\xAAEFI PART" + (b"\0" * 0x7DF9) + b"CD001" + PAD
)
ISO_WITH_TORITO = (
(b"\0" * 0x8001)
+ b"CD001"
+ (b"\0" * 0x7FA)
+ b"\0CD001\1EL TORITO SPECIFICATION"
+ PAD
)
class TestCheckImageSanity(PungiTestCase): class TestCheckImageSanity(PungiTestCase):
def test_missing_file_reports_error(self): def test_missing_file_reports_error(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
@ -45,141 +54,154 @@ class TestCheckImageSanity(PungiTestCase):
def test_missing_file_doesnt_report_if_failable(self): def test_missing_file_doesnt_report_if_failable(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.deliverable = 'iso' compose.image.deliverable = "iso"
compose.image.can_fail = True compose.image.can_fail = True
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Failable deliverable must not raise') self.fail("Failable deliverable must not raise")
def test_correct_iso_does_not_raise(self): def test_correct_iso_does_not_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO) touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Correct unbootable image must not raise') self.fail("Correct unbootable image must not raise")
def test_incorrect_iso_raises(self): def test_incorrect_iso_raises(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
touch(os.path.join(self.topdir, 'compose', compose.image.path), 'Hey there') touch(os.path.join(self.topdir, "compose", compose.image.path), "Hey there")
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
self.assertIn('does not look like an ISO file', str(ctx.exception)) self.assertIn("does not look like an ISO file", str(ctx.exception))
def test_bootable_iso_without_mbr_or_gpt_raises_on_x86_64(self): def test_bootable_iso_without_mbr_or_gpt_raises_on_x86_64(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.arch = 'x86_64' compose.image.arch = "x86_64"
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO) touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
with self.assertRaises(RuntimeError) as ctx: with self.assertRaises(RuntimeError) as ctx:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
self.assertIn('is supposed to be bootable, but does not have MBR nor GPT', self.assertIn(
str(ctx.exception)) "is supposed to be bootable, but does not have MBR nor GPT",
str(ctx.exception),
)
def test_bootable_iso_without_mbr_or_gpt_doesnt_raise_on_arm(self): def test_bootable_iso_without_mbr_or_gpt_doesnt_raise_on_arm(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.arch = 'armhfp' compose.image.arch = "armhfp"
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO) touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Failable deliverable must not raise') self.fail("Failable deliverable must not raise")
def test_failable_bootable_iso_without_mbr_gpt_doesnt_raise(self): def test_failable_bootable_iso_without_mbr_gpt_doesnt_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
compose.image.deliverable = 'iso' compose.image.deliverable = "iso"
compose.image.can_fail = True compose.image.can_fail = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO) touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Failable deliverable must not raise') self.fail("Failable deliverable must not raise")
def test_bootable_iso_with_mbr_does_not_raise(self): def test_bootable_iso_with_mbr_does_not_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR) touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_MBR)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Bootable image with MBR must not raise') self.fail("Bootable image with MBR must not raise")
def test_bootable_iso_with_gpt_does_not_raise(self): def test_bootable_iso_with_gpt_does_not_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_GPT) touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_GPT)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Bootable image with GPT must not raise') self.fail("Bootable image with GPT must not raise")
def test_bootable_iso_with_mbr_and_gpt_does_not_raise(self): def test_bootable_iso_with_mbr_and_gpt_does_not_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR_AND_GPT) touch(
os.path.join(self.topdir, "compose", compose.image.path),
ISO_WITH_MBR_AND_GPT,
)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Bootable image with MBR and GPT must not raise') self.fail("Bootable image with MBR and GPT must not raise")
def test_bootable_iso_with_el_torito_does_not_raise(self): def test_bootable_iso_with_el_torito_does_not_raise(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_TORITO) touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_TORITO)
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Bootable image with El Torito must not raise') self.fail("Bootable image with El Torito must not raise")
def test_checks_with_optional_variant(self): def test_checks_with_optional_variant(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.variants['Server'].variants = { compose.variants["Server"].variants = {
'optional': mock.Mock(uid='Server-optional', arches=['x86_64'], "optional": mock.Mock(
type='optional', is_empty=False) uid="Server-optional",
arches=["x86_64"],
type="optional",
is_empty=False,
)
} }
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = True compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR_AND_GPT) touch(
os.path.join(self.topdir, "compose", compose.image.path),
ISO_WITH_MBR_AND_GPT,
)
image = mock.Mock(path="Server/i386/optional/iso/image.iso", image = mock.Mock(
format='iso', bootable=False) path="Server/i386/optional/iso/image.iso", format="iso", bootable=False
compose.im.images['Server-optional'] = {'i386': [image]} )
compose.im.images["Server-optional"] = {"i386": [image]}
try: try:
test_phase.check_image_sanity(compose) test_phase.check_image_sanity(compose)
except Exception: except Exception:
self.fail('Checking optional variant must not raise') self.fail("Checking optional variant must not raise")
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock()) @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_iso(self): def test_too_big_iso(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]}) compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
@ -200,7 +222,7 @@ class TestCheckImageSanity(PungiTestCase):
"createiso_max_size_is_strict": [(".*", {"*": True})], "createiso_max_size_is_strict": [(".*", {"*": True})],
}, },
) )
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
@ -221,7 +243,7 @@ class TestCheckImageSanity(PungiTestCase):
"createiso_max_size_is_strict": [(".*", {"*": False})], "createiso_max_size_is_strict": [(".*", {"*": False})],
}, },
) )
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
@ -236,7 +258,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock()) @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_unified(self): def test_too_big_unified(self):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
compose.image.unified = True compose.image.unified = True
@ -253,10 +275,9 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock()) @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_unified_strict(self): def test_too_big_unified_strict(self):
compose = DummyCompose( compose = DummyCompose(
self.topdir, self.topdir, {"createiso_max_size_is_strict": [(".*", {"*": True})]},
{"createiso_max_size_is_strict": [(".*", {"*": True})]},
) )
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
compose.image.unified = True compose.image.unified = True
@ -273,7 +294,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock()) @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_fits_in_limit(self): def test_fits_in_limit(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 20})]}) compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 20})]})
compose.image.format = 'iso' compose.image.format = "iso"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 5 compose.image.size = 5
@ -284,7 +305,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock()) @mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_non_iso(self): def test_non_iso(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]}) compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]})
compose.image.format = 'qcow2' compose.image.format = "qcow2"
compose.image.bootable = False compose.image.bootable = False
compose.image.size = 20 compose.image.size = 20
@ -294,32 +315,32 @@ class TestCheckImageSanity(PungiTestCase):
class TestRepoclosure(PungiTestCase): class TestRepoclosure(PungiTestCase):
def setUp(self): def setUp(self):
super(TestRepoclosure, self).setUp() super(TestRepoclosure, self).setUp()
self.maxDiff = None self.maxDiff = None
def _get_repo(self, compose_id, variant, arch, path=None): def _get_repo(self, compose_id, variant, arch, path=None):
path = path or arch + '/os' path = path or arch + "/os"
return { return {
'%s-repoclosure-%s.%s' % (compose_id, variant, arch): self.topdir + '/compose/%s/%s' % (variant, path) "%s-repoclosure-%s.%s" % (compose_id, variant, arch): self.topdir
+ "/compose/%s/%s" % (variant, path)
} }
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_skip_if_disabled(self, mock_run, mock_grc): def test_repoclosure_skip_if_disabled(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'repoclosure_strictness': [('^.*$', {'*': 'off'})] self.topdir, {"repoclosure_strictness": [("^.*$", {"*": "off"})]}
}) )
test_phase.run_repoclosure(compose) test_phase.run_repoclosure(compose)
self.assertEqual(mock_grc.call_args_list, []) self.assertEqual(mock_grc.call_args_list, [])
@unittest.skipUnless(HAS_YUM, 'YUM is not available') @unittest.skipUnless(HAS_YUM, "YUM is not available")
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_default_backend(self, mock_run, mock_grc): def test_repoclosure_default_backend(self, mock_run, mock_grc):
with mock.patch('six.PY2', new=True): with mock.patch("six.PY2", new=True):
compose = DummyCompose(self.topdir, {}) compose = DummyCompose(self.topdir, {})
test_phase.run_repoclosure(compose) test_phase.run_repoclosure(compose)
@ -327,37 +348,83 @@ class TestRepoclosure(PungiTestCase):
six.assertCountEqual( six.assertCountEqual(
self, self,
mock_grc.call_args_list, mock_grc.call_args_list,
[mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, [
repos=self._get_repo(compose.compose_id, 'Everything', 'amd64')), mock.call(
mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, backend="yum",
repos=self._get_repo(compose.compose_id, 'Client', 'amd64')), arch=["amd64", "x86_64", "noarch"],
mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')), repos=self._get_repo(compose.compose_id, "Everything", "amd64"),
mock.call(backend='yum', arch=['x86_64', 'noarch'], lookaside={}, ),
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')), mock.call(
mock.call(backend='yum', arch=['x86_64', 'noarch'], lookaside={}, backend="yum",
repos=self._get_repo(compose.compose_id, 'Everything', 'x86_64'))]) arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Client", "amd64"),
),
mock.call(
backend="yum",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="yum",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
mock.call(
backend="yum",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "x86_64"),
),
],
)
@unittest.skipUnless(HAS_DNF, 'DNF is not available') @unittest.skipUnless(HAS_DNF, "DNF is not available")
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_dnf_backend(self, mock_run, mock_grc): def test_repoclosure_dnf_backend(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, {'repoclosure_backend': 'dnf'}) compose = DummyCompose(self.topdir, {"repoclosure_backend": "dnf"})
test_phase.run_repoclosure(compose) test_phase.run_repoclosure(compose)
six.assertCountEqual( six.assertCountEqual(
self, self,
mock_grc.call_args_list, mock_grc.call_args_list,
[mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, [
repos=self._get_repo(compose.compose_id, 'Everything', 'amd64')), mock.call(
mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, backend="dnf",
repos=self._get_repo(compose.compose_id, 'Client', 'amd64')), arch=["amd64", "x86_64", "noarch"],
mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')), repos=self._get_repo(compose.compose_id, "Everything", "amd64"),
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={}, ),
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')), mock.call(
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={}, backend="dnf",
repos=self._get_repo(compose.compose_id, 'Everything', 'x86_64'))]) arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Client", "amd64"),
),
mock.call(
backend="dnf",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "x86_64"),
),
],
)
@mock.patch("glob.glob") @mock.patch("glob.glob")
@mock.patch("pungi.wrappers.repoclosure.extract_from_fus_logs") @mock.patch("pungi.wrappers.repoclosure.extract_from_fus_logs")
@ -385,53 +452,71 @@ class TestRepoclosure(PungiTestCase):
mock.call([f], _log("amd64", "Server")), mock.call([f], _log("amd64", "Server")),
mock.call([f], _log("x86_64", "Server")), mock.call([f], _log("x86_64", "Server")),
mock.call([f], _log("x86_64", "Everything")), mock.call([f], _log("x86_64", "Everything")),
] ],
) )
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_report_error(self, mock_run, mock_grc): def test_repoclosure_report_error(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'repoclosure_strictness': [('^.*$', {'*': 'fatal'})] self.topdir, {"repoclosure_strictness": [("^.*$", {"*": "fatal"})]}
}) )
mock_run.side_effect = mk_boom(cls=RuntimeError) mock_run.side_effect = mk_boom(cls=RuntimeError)
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
test_phase.run_repoclosure(compose) test_phase.run_repoclosure(compose)
@unittest.skipUnless(HAS_DNF, 'DNF is not available') @unittest.skipUnless(HAS_DNF, "DNF is not available")
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_overwrite_options_creates_correct_commands(self, mock_run, mock_grc): def test_repoclosure_overwrite_options_creates_correct_commands(
compose = DummyCompose(self.topdir, { self, mock_run, mock_grc
'repoclosure_backend': 'dnf', ):
'repoclosure_strictness': [ compose = DummyCompose(
('^.*$', {'*': 'off'}), self.topdir,
('^Server$', {'*': 'fatal'}), {
] "repoclosure_backend": "dnf",
}) "repoclosure_strictness": [
("^.*$", {"*": "off"}),
("^Server$", {"*": "fatal"}),
],
},
)
test_phase.run_repoclosure(compose) test_phase.run_repoclosure(compose)
six.assertCountEqual( six.assertCountEqual(
self, self,
mock_grc.call_args_list, mock_grc.call_args_list,
[mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={}, [
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')), mock.call(
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={}, backend="dnf",
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')), arch=["amd64", "x86_64", "noarch"],
]) lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
],
)
@mock.patch('pungi.phases.test._delete_repoclosure_cache_dirs') @mock.patch("pungi.phases.test._delete_repoclosure_cache_dirs")
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd') @mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch('pungi.phases.test.run') @mock.patch("pungi.phases.test.run")
def test_repoclosure_uses_correct_behaviour(self, mock_run, mock_grc, mock_del): def test_repoclosure_uses_correct_behaviour(self, mock_run, mock_grc, mock_del):
compose = DummyCompose(self.topdir, { compose = DummyCompose(
'repoclosure_backend': 'dnf', self.topdir,
'repoclosure_strictness': [ {
('^.*$', {'*': 'off'}), "repoclosure_backend": "dnf",
('^Server$', {'*': 'fatal'}), "repoclosure_strictness": [
] ("^.*$", {"*": "off"}),
}) ("^Server$", {"*": "fatal"}),
],
},
)
mock_run.side_effect = mk_boom(cls=RuntimeError) mock_run.side_effect = mk_boom(cls=RuntimeError)
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -8,7 +8,8 @@ filename = *.py
# H306: imports not in alphabetical order # H306: imports not in alphabetical order
# E226: missing whitespace around arithmetic operator # E226: missing whitespace around arithmetic operator
# W503: line break occured before a binary operator # W503: line break occured before a binary operator
ignore = E501,E402,H301,H306,E226,W503 # E203: whitespace before ':'
ignore = E501,E402,H301,H306,E226,W503,E203
[run] [run]
omit = tests/* omit = tests/*