Format tests with black

JIRA: COMPOSE-4086
Signed-off-by: Haibo Lin <hlin@redhat.com>
This commit is contained in:
Haibo Lin 2020-01-22 18:02:22 +08:00
parent ef33d00f5b
commit 38142d30ba
51 changed files with 13767 additions and 9180 deletions

View File

@@ -76,6 +76,7 @@ setup(
]
},
tests_require = [
"black",
"mock",
"nose",
"nose-cov",

View File

@@ -23,23 +23,25 @@ from pungi.module_util import Modulemd
class BaseTestCase(unittest.TestCase):
def assertFilesEqual(self, fn1, fn2):
with open(fn1, 'rb') as f1:
lines1 = f1.read().decode('utf-8').splitlines()
with open(fn2, 'rb') as f2:
lines2 = f2.read().decode('utf-8').splitlines()
diff = '\n'.join(difflib.unified_diff(lines1, lines2,
fromfile='EXPECTED', tofile='ACTUAL'))
self.assertEqual(diff, '', 'Files differ:\n' + diff)
with open(fn1, "rb") as f1:
lines1 = f1.read().decode("utf-8").splitlines()
with open(fn2, "rb") as f2:
lines2 = f2.read().decode("utf-8").splitlines()
diff = "\n".join(
difflib.unified_diff(lines1, lines2, fromfile="EXPECTED", tofile="ACTUAL")
)
self.assertEqual(diff, "", "Files differ:\n" + diff)
def assertFileContent(self, fn, expected):
with open(fn, 'rb') as f:
lines = f.read().decode('utf-8').splitlines()
diff = '\n'.join(difflib.unified_diff(
lines, expected.splitlines(), fromfile='EXPECTED', tofile='ACTUAL')
with open(fn, "rb") as f:
lines = f.read().decode("utf-8").splitlines()
diff = "\n".join(
difflib.unified_diff(
lines, expected.splitlines(), fromfile="EXPECTED", tofile="ACTUAL"
)
self.assertEqual(diff, '', 'Files differ:\n' + diff)
)
self.assertEqual(diff, "", "Files differ:\n" + diff)
class PungiTestCase(BaseTestCase):
@@ -72,7 +74,7 @@ class PungiTestCase(BaseTestCase):
class MockVariant(mock.Mock):
def __init__(self, is_empty=False, name=None, *args, **kwargs):
super(MockVariant, self).__init__(*args, is_empty=is_empty, **kwargs)
self.parent = kwargs.get('parent', None)
self.parent = kwargs.get("parent", None)
self.arch_mmds = {}
self.module_uid_to_koji_tag = {}
self.variants = {}
@@ -85,8 +87,11 @@ class MockVariant(mock.Mock):
return self.uid
def get_variants(self, arch=None, types=None):
return [v for v in list(self.variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)]
return [
v
for v in list(self.variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)
]
def get_modules(self, arch=None, types=None):
return []
@@ -133,22 +138,19 @@ class IterableMock(mock.Mock):
class DummyCompose(object):
def __init__(self, topdir, config):
self.supported = True
self.compose_date = '20151203'
self.compose_type_suffix = '.t'
self.compose_type = 'test'
self.compose_date = "20151203"
self.compose_type_suffix = ".t"
self.compose_type = "test"
self.compose_respin = 0
self.compose_id = 'Test-20151203.0.t'
self.compose_id = "Test-20151203.0.t"
self.compose_label = None
self.compose_label_major_version = None
self.image_release = '20151203.t.0'
self.image_version = '25'
self.image_release = "20151203.t.0"
self.image_version = "25"
self.ci_base = mock.Mock(
release_id='Test-1.0',
release_id="Test-1.0",
release=mock.Mock(
short='test',
version='1.0',
is_layered=False,
type_suffix=''
short="test", version="1.0", is_layered=False, type_suffix=""
),
)
self.topdir = topdir
@@ -157,12 +159,27 @@ class DummyCompose(object):
self.paths = paths.Paths(self)
self.has_comps = True
self.variants = {
'Server': MockVariant(uid='Server', arches=['x86_64', 'amd64'],
type='variant', id='Server', name='Server'),
'Client': MockVariant(uid='Client', arches=['amd64'],
type='variant', id='Client', name='Client'),
'Everything': MockVariant(uid='Everything', arches=['x86_64', 'amd64'],
type='variant', id='Everything', name='Everything'),
"Server": MockVariant(
uid="Server",
arches=["x86_64", "amd64"],
type="variant",
id="Server",
name="Server",
),
"Client": MockVariant(
uid="Client",
arches=["amd64"],
type="variant",
id="Client",
name="Client",
),
"Everything": MockVariant(
uid="Everything",
arches=["x86_64", "amd64"],
type="variant",
id="Everything",
name="Everything",
),
}
self.all_variants = self.variants.copy()
@@ -174,13 +191,13 @@ class DummyCompose(object):
self.log_error = mock.Mock()
self.log_debug = mock.Mock()
self.log_warning = mock.Mock()
self.get_image_name = mock.Mock(return_value='image-name')
self.get_image_name = mock.Mock(return_value="image-name")
self.image = mock.Mock(
path='Client/i386/iso/image.iso', can_fail=False, size=123, _max_size=None,
path="Client/i386/iso/image.iso", can_fail=False, size=123, _max_size=None,
)
self.im = mock.Mock(images={'Client': {'amd64': [self.image]}})
self.im = mock.Mock(images={"Client": {"amd64": [self.image]}})
self.old_composes = []
self.config_dir = '/home/releng/config'
self.config_dir = "/home/releng/config"
self.notifier = None
self.attempt_deliverable = mock.Mock()
self.fail_deliverable = mock.Mock()
@@ -189,23 +206,32 @@ class DummyCompose(object):
self.cache_region = None
def setup_optional(self):
self.all_variants['Server-optional'] = MockVariant(
uid='Server-optional', arches=['x86_64'], type='optional')
self.all_variants['Server-optional'].parent = self.variants['Server']
self.variants['Server'].variants['optional'] = self.all_variants['Server-optional']
self.all_variants["Server-optional"] = MockVariant(
uid="Server-optional", arches=["x86_64"], type="optional"
)
self.all_variants["Server-optional"].parent = self.variants["Server"]
self.variants["Server"].variants["optional"] = self.all_variants[
"Server-optional"
]
def setup_addon(self):
self.all_variants['Server-HA'] = MockVariant(
uid='Server-HA', arches=['x86_64'], type='addon', is_empty=False)
self.all_variants['Server-HA'].parent = self.variants['Server']
self.variants['Server'].variants['HA'] = self.all_variants['Server-HA']
self.all_variants["Server-HA"] = MockVariant(
uid="Server-HA", arches=["x86_64"], type="addon", is_empty=False
)
self.all_variants["Server-HA"].parent = self.variants["Server"]
self.variants["Server"].variants["HA"] = self.all_variants["Server-HA"]
def get_variants(self, arch=None, types=None):
return [v for v in list(self.all_variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)]
return [
v
for v in list(self.all_variants.values())
if (not arch or arch in v.arches) and (not types or v.type in types)
]
def can_fail(self, variant, arch, deliverable):
failable = get_arch_variant_data(self.conf, 'failable_deliverables', arch, variant)
failable = get_arch_variant_data(
self.conf, "failable_deliverables", arch, variant
)
return deliverable in failable
def get_arches(self):
@@ -221,19 +247,19 @@ class DummyCompose(object):
def touch(path, content=None):
"""Helper utility that creates an dummy file in given location. Directories
will be created."""
content = content or (path + '\n')
content = content or (path + "\n")
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
if not isinstance(content, six.binary_type):
content = content.encode()
with open(path, 'wb') as f:
with open(path, "wb") as f:
f.write(content)
return path
FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixtures')
FIXTURE_DIR = os.path.join(os.path.dirname(__file__), "fixtures")
def copy_fixture(fixture_name, dest):
@@ -243,27 +269,25 @@ def copy_fixture(fixture_name, dest):
def boom(*args, **kwargs):
raise Exception('BOOM')
raise Exception("BOOM")
def mk_boom(cls=Exception, msg='BOOM'):
def mk_boom(cls=Exception, msg="BOOM"):
def b(*args, **kwargs):
raise cls(msg)
return b
PKGSET_REPOS = dict(
pkgset_source='repos',
pkgset_repos={},
)
PKGSET_REPOS = dict(pkgset_source="repos", pkgset_repos={},)
BASE_CONFIG = dict(
release_short='test',
release_name='Test',
release_version='1.0',
variants_file='variants.xml',
createrepo_checksum='sha256',
gather_method='deps',
release_short="test",
release_name="Test",
release_version="1.0",
variants_file="variants.xml",
createrepo_checksum="sha256",
gather_method="deps",
)

View File

@@ -6,49 +6,66 @@ import unittest
import os
import sys
from pungi.arch import (get_compatible_arches, get_valid_arches, get_valid_multilib_arches,
is_excluded, is_valid_arch, split_name_arch)
from pungi.arch import (
get_compatible_arches,
get_valid_arches,
get_valid_multilib_arches,
is_excluded,
is_valid_arch,
split_name_arch,
)
class TestArch(unittest.TestCase):
def test_i386(self):
arches = get_valid_arches("i386")
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch'])
self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch"])
arches = get_valid_arches("i386", multilib=False)
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch'])
self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch"])
arches = get_valid_arches("i386", add_src=True)
self.assertEqual(arches, ['i686', 'i586', 'i486', 'i386', 'noarch', 'src'])
self.assertEqual(arches, ["i686", "i586", "i486", "i386", "noarch", "src"])
def test_x86_64(self):
arches = get_valid_arches("x86_64")
self.assertEqual(arches, ['x86_64', 'athlon', 'i686', 'i586', 'i486', 'i386', 'noarch'])
self.assertEqual(
arches, ["x86_64", "athlon", "i686", "i586", "i486", "i386", "noarch"]
)
arches = get_valid_arches("x86_64", multilib=False)
self.assertEqual(arches, ['x86_64', 'noarch'])
self.assertEqual(arches, ["x86_64", "noarch"])
arches = get_valid_arches("x86_64", add_src=True)
self.assertEqual(arches, ['x86_64', 'athlon', 'i686', 'i586', 'i486', 'i386', 'noarch', 'src'])
self.assertEqual(
arches,
["x86_64", "athlon", "i686", "i586", "i486", "i386", "noarch", "src"],
)
def test_armhfp(self):
arches = get_valid_arches("armhfp")
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch'])
self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch"])
arches = get_valid_arches("armhfp", multilib=False)
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch'])
self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch"])
arches = get_valid_arches("armhfp", add_src=True)
self.assertEqual(arches, ['armv7hnl', 'armv7hl', 'armv6hl', 'noarch', 'src'])
self.assertEqual(arches, ["armv7hnl", "armv7hl", "armv6hl", "noarch", "src"])
def test_get_compatible_arches(self):
self.assertEqual(get_compatible_arches("noarch"), ["noarch"])
self.assertEqual(get_compatible_arches("i386"), get_valid_arches("i386"))
self.assertEqual(get_compatible_arches("i586"), get_valid_arches("i386"))
self.assertEqual(get_compatible_arches("x86_64"), get_valid_arches("x86_64", multilib=False))
self.assertEqual(get_compatible_arches("ppc64p7"), get_valid_arches("ppc64", multilib=False))
self.assertEqual(get_compatible_arches("armhfp"), get_valid_arches("armv7hnl", multilib=False))
self.assertEqual(
get_compatible_arches("x86_64"), get_valid_arches("x86_64", multilib=False)
)
self.assertEqual(
get_compatible_arches("ppc64p7"), get_valid_arches("ppc64", multilib=False)
)
self.assertEqual(
get_compatible_arches("armhfp"),
get_valid_arches("armv7hnl", multilib=False),
)
def test_is_valid_arch(self):
self.assertEqual(is_valid_arch("i386"), True)
@@ -63,29 +80,38 @@ class TestArch(unittest.TestCase):
self.assertEqual(split_name_arch("package"), ("package", None))
self.assertEqual(split_name_arch("package.x86_64"), ("package", "x86_64"))
self.assertEqual(split_name_arch("package.foo"), ("package.foo", None))
self.assertEqual(split_name_arch("i386"), ("i386", None)) # we suppose that $name is never empty
self.assertEqual(
split_name_arch("i386"), ("i386", None)
) # we suppose that $name is never empty
def test_get_valid_multilib_arches(self):
self.assertEqual(get_valid_multilib_arches("noarch"), [])
self.assertEqual(get_valid_multilib_arches("athlon"), [])
self.assertEqual(get_valid_multilib_arches("x86_64"), ['athlon', 'i686', 'i586', 'i486', 'i386'])
self.assertEqual(
get_valid_multilib_arches("x86_64"),
["athlon", "i686", "i586", "i486", "i386"],
)
class TestExclusiveExcludeArch(unittest.TestCase):
def test_no_exclude(self):
pkg = mock.Mock(excludearch=[], exclusivearch=[], file_name='pkg.rpm')
self.assertFalse(is_excluded(pkg, ['x86_64']))
pkg = mock.Mock(excludearch=[], exclusivearch=[], file_name="pkg.rpm")
self.assertFalse(is_excluded(pkg, ["x86_64"]))
def test_exclude_arch(self):
log = mock.Mock()
pkg = mock.Mock(excludearch=['x86_64'], exclusivearch=[], file_name='pkg.rpm')
self.assertTrue(is_excluded(pkg, ['x86_64'], logger=log))
self.assertEqual(log.mock_calls,
[mock.call.debug("Excluding (EXCLUDEARCH: ['x86_64']): pkg.rpm")])
pkg = mock.Mock(excludearch=["x86_64"], exclusivearch=[], file_name="pkg.rpm")
self.assertTrue(is_excluded(pkg, ["x86_64"], logger=log))
self.assertEqual(
log.mock_calls,
[mock.call.debug("Excluding (EXCLUDEARCH: ['x86_64']): pkg.rpm")],
)
def test_exclusive_arch(self):
log = mock.Mock()
pkg = mock.Mock(excludearch=[], exclusivearch=['aarch64'], file_name='pkg.rpm')
self.assertTrue(is_excluded(pkg, ['x86_64'], logger=log))
self.assertEqual(log.mock_calls,
[mock.call.debug("Excluding (EXCLUSIVEARCH: ['aarch64']): pkg.rpm")])
pkg = mock.Mock(excludearch=[], exclusivearch=["aarch64"], file_name="pkg.rpm")
self.assertTrue(is_excluded(pkg, ["x86_64"], logger=log))
self.assertEqual(
log.mock_calls,
[mock.call.debug("Excluding (EXCLUSIVEARCH: ['aarch64']): pkg.rpm")],
)

View File

@@ -1,4 +1,5 @@
import mock
try:
import unittest2 as unittest
except ImportError:
@@ -9,17 +10,16 @@ from pungi.scripts.pungi_koji import cli_main
class PungiKojiTestCase(unittest.TestCase):
@mock.patch('sys.argv', new=['prog', '--version'])
@mock.patch('sys.stderr', new_callable=six.StringIO)
@mock.patch('sys.stdout', new_callable=six.StringIO)
@mock.patch('pungi.scripts.pungi_koji.get_full_version', return_value='a-b-c.111')
@mock.patch("sys.argv", new=["prog", "--version"])
@mock.patch("sys.stderr", new_callable=six.StringIO)
@mock.patch("sys.stdout", new_callable=six.StringIO)
@mock.patch("pungi.scripts.pungi_koji.get_full_version", return_value="a-b-c.111")
def test_version(self, get_full_version, stdout, stderr):
with self.assertRaises(SystemExit) as cm:
cli_main()
self.assertEqual(cm.exception.code, 0)
# Python 2.7 prints the version to stderr, 3.4+ to stdout.
if six.PY3:
self.assertMultiLineEqual(stdout.getvalue(), 'a-b-c.111\n')
self.assertMultiLineEqual(stdout.getvalue(), "a-b-c.111\n")
else:
self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n')
self.assertMultiLineEqual(stderr.getvalue(), "a-b-c.111\n")

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import mock
try:
import unittest2 as unittest
except ImportError:
@@ -15,7 +16,6 @@ from pungi import checks
class CheckDependenciesTestCase(unittest.TestCase):
def dont_find(self, paths):
return lambda path: path not in paths
@@ -23,149 +23,147 @@ class CheckDependenciesTestCase(unittest.TestCase):
def custom_exists(path):
return False
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = custom_exists
result = checks.check({})
self.assertGreater(len(out.getvalue().strip().split('\n')), 1)
self.assertGreater(len(out.getvalue().strip().split("\n")), 1)
self.assertFalse(result)
def test_all_deps_ok(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'x86_64'
with mock.patch('os.path.exists') as exists:
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("platform.machine") as machine:
machine.return_value = "x86_64"
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find([])
result = checks.check({})
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_does_not_require_jigdo_if_not_configured(self):
conf = {
'create_jigdo': False
}
conf = {"create_jigdo": False}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'x86_64'
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/jigdo-lite'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("platform.machine") as machine:
machine.return_value = "x86_64"
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/jigdo-lite"])
result = checks.check(conf)
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_isohybrid_not_required_without_productimg_phase(self):
conf = {
'bootable': True,
'productimg': False,
'runroot_tag': 'dummy_tag',
"bootable": True,
"productimg": False,
"runroot_tag": "dummy_tag",
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf)
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_isohybrid_not_required_on_not_bootable(self):
conf = {
'bootable': False,
'runroot_tag': 'dummy_tag',
"bootable": False,
"runroot_tag": "dummy_tag",
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf)
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_isohybrid_not_required_on_arm(self):
conf = {
'buildinstall_method': 'lorax',
'runroot_tag': '',
"buildinstall_method": "lorax",
"runroot_tag": "",
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('platform.machine') as machine:
machine.return_value = 'armhfp'
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("platform.machine") as machine:
machine.return_value = "armhfp"
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf)
self.assertRegexpMatches(out.getvalue(), r'^Not checking.*Expect failures.*$')
self.assertRegexpMatches(out.getvalue(), r"^Not checking.*Expect failures.*$")
self.assertTrue(result)
def test_isohybrid_not_needed_in_runroot(self):
conf = {
'runroot_tag': 'dummy_tag',
"runroot_tag": "dummy_tag",
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/isohybrid"])
result = checks.check(conf)
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_genisoimg_not_needed_in_runroot(self):
conf = {
'runroot_tag': 'dummy_tag',
"runroot_tag": "dummy_tag",
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/genisoimage"])
result = checks.check(conf)
self.assertEqual('', out.getvalue())
self.assertEqual("", out.getvalue())
self.assertTrue(result)
def test_requires_modifyrepo(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/modifyrepo'])
result = checks.check({'createrepo_c': False})
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/modifyrepo"])
result = checks.check({"createrepo_c": False})
self.assertIn('createrepo', out.getvalue())
self.assertIn("createrepo", out.getvalue())
self.assertFalse(result)
def test_requires_modifyrepo_c(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/modifyrepo_c'])
result = checks.check({'createrepo_c': True})
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/modifyrepo_c"])
result = checks.check({"createrepo_c": True})
self.assertIn('createrepo_c', out.getvalue())
self.assertIn("createrepo_c", out.getvalue())
self.assertFalse(result)
def test_requires_createrepo_c(self):
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
result = checks.check({})
self.assertIn('createrepo_c', out.getvalue())
self.assertIn("createrepo_c", out.getvalue())
self.assertFalse(result)
def test_doesnt_require_createrepo_c_if_configured(self):
conf = {
'createrepo_c': False,
"createrepo_c": False,
}
with mock.patch('sys.stdout', new_callable=StringIO) as out:
with mock.patch('os.path.exists') as exists:
exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
result = checks.check(conf)
self.assertNotIn('createrepo_c', out.getvalue())
self.assertNotIn("createrepo_c", out.getvalue())
self.assertTrue(result)
@@ -175,7 +173,7 @@ class TestSchemaValidator(unittest.TestCase):
conf.load_from_string(string)
return conf
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_property(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -198,7 +196,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(len(warnings), 0)
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_alias_property(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -218,10 +216,13 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_required_is_missing(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -241,11 +242,13 @@ class TestSchemaValidator(unittest.TestCase):
config = self._load_conf_from_string(string)
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 1)
self.assertIn("Failed validation in : 'release_name' is a required property", errors)
self.assertIn(
"Failed validation in : 'release_name' is a required property", errors
)
self.assertEqual(len(warnings), 1)
self.assertIn("WARNING: Unrecognized config option: name.", warnings)
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_required_is_in_alias(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -266,10 +269,13 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_redundant_alias(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -290,12 +296,18 @@ class TestSchemaValidator(unittest.TestCase):
config = self._load_conf_from_string(string)
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 1)
self.assertRegexpMatches(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*")
self.assertRegexpMatches(
errors[0],
r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*",
)
self.assertEqual(len(warnings), 1)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_properties_in_deep(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -303,15 +315,10 @@ class TestSchemaValidator(unittest.TestCase):
"type": "object",
"properties": {
"release_name": {"type": "string", "alias": "product_name"},
"keys": {
"type": "array",
"items": {"type": "string"},
},
"keys": {"type": "array", "items": {"type": "string"}},
"foophase": {
"type": "object",
"properties": {
"repo": {"type": "string", "alias": "tree"},
},
"properties": {"repo": {"type": "string", "alias": "tree"}},
"additionalProperties": False,
"required": ["repo"],
},
@@ -331,22 +338,27 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*",
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os")
self.assertEqual(
config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os"
)
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_append_option(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration",
"type": "object",
"definitions": {
"list_of_strings": {
"type": "array",
"items": {"type": "string"},
},
"list_of_strings": {"type": "array", "items": {"type": "string"}},
"strings": {
"anyOf": [
{"type": "string"},
@@ -356,7 +368,7 @@ class TestSchemaValidator(unittest.TestCase):
},
"properties": {
"release_name": {"type": "string"},
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"}
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"},
},
"additionalProperties": False,
}
@@ -371,22 +383,25 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'",
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_append_to_nonexist_option(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration",
"type": "object",
"definitions": {
"list_of_strings": {
"type": "array",
"items": {"type": "string"},
},
"list_of_strings": {"type": "array", "items": {"type": "string"}},
"strings": {
"anyOf": [
{"type": "string"},
@@ -396,7 +411,7 @@ class TestSchemaValidator(unittest.TestCase):
},
"properties": {
"release_name": {"type": "string"},
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"}
"repo": {"$ref": "#/definitions/strings", "append": "repo_from"},
},
"additionalProperties": False,
}
@@ -410,22 +425,25 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_multiple_appends(self, make_schema):
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pungi Configuration",
"type": "object",
"definitions": {
"list_of_strings": {
"type": "array",
"items": {"type": "string"},
},
"list_of_strings": {"type": "array", "items": {"type": "string"}},
"strings": {
"anyOf": [
{"type": "string"},
@@ -437,8 +455,8 @@ class TestSchemaValidator(unittest.TestCase):
"release_name": {"type": "string"},
"repo": {
"$ref": "#/definitions/strings",
"append": ["repo_from", "source_repo_from"]
}
"append": ["repo_from", "source_repo_from"],
},
},
"additionalProperties": False,
}
@@ -453,14 +471,28 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 4)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")
self.assertRegexpMatches(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'")
self.assertRegexpMatches(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.")
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,",
)
self.assertRegexpMatches(
warnings[2],
r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'",
)
self.assertRegexpMatches(
warnings[3],
r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.",
)
self.assertEqual(config.get("release_name", None), "dummy product")
self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server", "Client"])
self.assertEqual(
config.get("repo", None), ["http://url/to/repo", "Server", "Client"]
)
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_anyof_validator_not_raise_our_warnings_as_error(self, make_schema):
# https://pagure.io/pungi/issue/598
schema = {
@ -470,26 +502,21 @@ class TestSchemaValidator(unittest.TestCase):
"definitions": {
"live_image_config": {
"type": "object",
"properties": {
"repo": {
"type": "string",
"append": "repo_from",
},
},
"properties": {"repo": {"type": "string", "append": "repo_from"}},
},
},
"properties": {
"live_images": checks._variant_arch_mapping({
"live_images": checks._variant_arch_mapping(
{
"anyOf": [
{"$ref": "#/definitions/live_image_config"},
{
"type": "array",
"items": {
"$ref": "#/definitions/live_image_config"
}
}
"items": {"$ref": "#/definitions/live_image_config"},
},
]
}),
}
),
},
}
make_schema.return_value = schema
@ -506,12 +533,20 @@ class TestSchemaValidator(unittest.TestCase):
errors, warnings = checks.validate(config)
self.assertEqual(len(errors), 0)
self.assertEqual(len(warnings), 2)
self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")
self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*")
self.assertEqual(config.get("live_images")[0][1]['armhfp']['repo'], 'Everything')
self.assertRegexpMatches(
warnings[0],
r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*",
)
self.assertRegexpMatches(
warnings[1],
r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*",
)
self.assertEqual(
config.get("live_images")[0][1]["armhfp"]["repo"], "Everything"
)
@mock.patch("pungi.util.resolve_git_url")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_resolve_url(self, make_schema, resolve_git_url):
resolve_git_url.return_value = "git://example.com/repo.git#CAFE"
make_schema.return_value = {
@ -527,7 +562,7 @@ class TestSchemaValidator(unittest.TestCase):
self.assertEqual(config["foo"], resolve_git_url.return_value)
@mock.patch("pungi.util.resolve_git_url")
@mock.patch('pungi.checks.make_schema')
@mock.patch("pungi.checks.make_schema")
def test_resolve_url_when_offline(self, make_schema, resolve_git_url):
make_schema.return_value = {
"$schema": "http://json-schema.org/draft-04/schema#",
@ -594,8 +629,13 @@ class TestUmask(unittest.TestCase):
checks.check_umask(logger)
self.assertEqual(
logger.mock_calls,
[mock.call.warning('Unusually strict umask detected (0%03o), '
'expect files with broken permissions.', 0o044)]
[
mock.call.warning(
"Unusually strict umask detected (0%03o), "
"expect files with broken permissions.",
0o044,
)
],
)

View File

@ -2,6 +2,7 @@
import logging
import mock
try:
import unittest2 as unittest
except ImportError:
@ -18,7 +19,9 @@ from pungi.compose import Compose
class ConfigWrapper(dict):
def __init__(self, *args, **kwargs):
super(ConfigWrapper, self).__init__(*args, **kwargs)
self._open_file = '%s/fixtures/config.conf' % os.path.abspath(os.path.dirname(__file__))
self._open_file = "%s/fixtures/config.conf" % os.path.abspath(
os.path.dirname(__file__)
)
class ComposeTestCase(unittest.TestCase):
@ -28,17 +31,19 @@ class ComposeTestCase(unittest.TestCase):
def tearDown(self):
shutil.rmtree(self.tmp_dir)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_setup_logger(self, ci):
conf = {}
logger = logging.getLogger('test_setup_logger')
logger = logging.getLogger("test_setup_logger")
compose = Compose(conf, self.tmp_dir, logger=logger)
self.assertEqual(len(logger.handlers), 2)
pungi_log = logger.handlers[0].stream.name
exclude_arch_log = logger.handlers[1].stream.name
self.assertEqual(os.path.basename(pungi_log), 'pungi.global.log')
self.assertEqual(os.path.basename(exclude_arch_log), 'excluding-arch.global.log')
self.assertEqual(os.path.basename(pungi_log), "pungi.global.log")
self.assertEqual(
os.path.basename(exclude_arch_log), "excluding-arch.global.log"
)
msg = "test log"
compose.log_info(msg)
@ -52,240 +57,322 @@ class ComposeTestCase(unittest.TestCase):
with open(exclude_arch_log) as f:
self.assertTrue(msg in f.read())
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_can_fail(self, ci):
conf = {
'failable_deliverables': [
('^.*$', {
'*': ['buildinstall'],
'i386': ['buildinstall', 'live', 'iso'],
}),
"failable_deliverables": [
(
"^.*$",
{"*": ["buildinstall"], "i386": ["buildinstall", "live", "iso"]},
),
]
}
compose = Compose(conf, self.tmp_dir)
variant = mock.Mock(uid='Server')
variant = mock.Mock(uid="Server")
self.assertTrue(compose.can_fail(variant, 'x86_64', 'buildinstall'))
self.assertFalse(compose.can_fail(variant, 'x86_64', 'live'))
self.assertTrue(compose.can_fail(variant, 'i386', 'live'))
self.assertTrue(compose.can_fail(variant, "x86_64", "buildinstall"))
self.assertFalse(compose.can_fail(variant, "x86_64", "live"))
self.assertTrue(compose.can_fail(variant, "i386", "live"))
self.assertFalse(compose.can_fail(None, 'x86_64', 'live'))
self.assertTrue(compose.can_fail(None, 'i386', 'live'))
self.assertFalse(compose.can_fail(None, "x86_64", "live"))
self.assertTrue(compose.can_fail(None, "i386", "live"))
self.assertTrue(compose.can_fail(variant, '*', 'buildinstall'))
self.assertFalse(compose.can_fail(variant, '*', 'live'))
self.assertTrue(compose.can_fail(variant, "*", "buildinstall"))
self.assertFalse(compose.can_fail(variant, "*", "live"))
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name(self, ci):
conf = {}
variant = mock.Mock(uid='Server', type='variant')
variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id'
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'RC-1.0'
ci.return_value.compose.label_major_version = '1'
ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0'
ci.return_value.release.short = 'rel_short'
ci.return_value.release.version = "3.0"
ci.return_value.release.short = "rel_short"
compose = Compose(conf, self.tmp_dir)
keys = ['arch', 'compose_id', 'date', 'disc_num', 'disc_type',
'label', 'label_major_version', 'release_short', 'respin',
'suffix', 'type', 'type_suffix', 'variant', 'version']
format = '-'.join(['%(' + k + ')s' for k in keys])
name = compose.get_image_name('x86_64', variant, format=format,
disc_num=7, disc_type='live', suffix='.iso')
keys = [
"arch",
"compose_id",
"date",
"disc_num",
"disc_type",
"label",
"label_major_version",
"release_short",
"respin",
"suffix",
"type",
"type_suffix",
"variant",
"version",
]
format = "-".join(["%(" + k + ")s" for k in keys])
name = compose.get_image_name(
"x86_64",
variant,
format=format,
disc_num=7,
disc_type="live",
suffix=".iso",
)
self.assertEqual(name, '-'.join(['x86_64', 'compose_id', '20160107', '7', 'live',
'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly',
'.n', 'Server', '3.0']))
self.assertEqual(
name,
"-".join(
[
"x86_64",
"compose_id",
"20160107",
"7",
"live",
"RC-1.0",
"1",
"rel_short",
"2",
".iso",
"nightly",
".n",
"Server",
"3.0",
]
),
)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_variant_mapping(self, ci):
conf = {"image_name_format": {"^Server$": "whatever"}}
variant = mock.Mock(uid='Server', type='variant')
variant = mock.Mock(uid="Server", type="variant")
compose = Compose(conf, self.tmp_dir)
name = compose.get_image_name(
'x86_64', variant, disc_num=7, disc_type='live', suffix='.iso'
"x86_64", variant, disc_num=7, disc_type="live", suffix=".iso"
)
self.assertEqual(name, "whatever")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_variant_mapping_no_match(self, ci):
conf = {"image_name_format": {"^Client$": "whatever"}}
variant = mock.Mock(uid='Server', type='variant')
ci.return_value.compose.id = 'compose_id'
variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.id = "compose_id"
compose = Compose(conf, self.tmp_dir)
name = compose.get_image_name(
'x86_64', variant, disc_num=7, disc_type='live', suffix='.iso'
"x86_64", variant, disc_num=7, disc_type="live", suffix=".iso"
)
self.assertEqual(name, "compose_id-Server-x86_64-live7.iso")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_layered_product(self, ci):
conf = {}
variant = mock.Mock(uid='Server-LP', type='layered-product')
variant.parent = mock.Mock(uid='Server')
variant = mock.Mock(uid="Server-LP", type="layered-product")
variant.parent = mock.Mock(uid="Server")
ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id'
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'RC-1.0'
ci.return_value.compose.label_major_version = '1'
ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0'
ci.return_value.release.short = 'rel_short'
ci.return_value.release.version = "3.0"
ci.return_value.release.short = "rel_short"
ci.return_value['Server-LP'].compose_id = 'Gluster 1.0'
ci.return_value["Server-LP"].compose_id = "Gluster 1.0"
compose = Compose(conf, self.tmp_dir)
format = '{compose_id} {variant}'
name = compose.get_image_name('x86_64', variant, format=format,
disc_num=7, disc_type='live', suffix='.iso')
format = "{compose_id} {variant}"
name = compose.get_image_name(
"x86_64",
variant,
format=format,
disc_num=7,
disc_type="live",
suffix=".iso",
)
self.assertEqual(name, 'Gluster 1.0 Server')
self.assertEqual(name, "Gluster 1.0 Server")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_image_name_type_netinst(self, ci):
conf = {}
variant = mock.Mock(uid='Server', type='variant')
variant = mock.Mock(uid="Server", type="variant")
ci.return_value.compose.respin = 2
ci.return_value.compose.id = 'compose_id'
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'RC-1.0'
ci.return_value.compose.label_major_version = '1'
ci.return_value.compose.id = "compose_id"
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "RC-1.0"
ci.return_value.compose.label_major_version = "1"
ci.return_value.release.version = '3.0'
ci.return_value.release.short = 'rel_short'
ci.return_value.release.version = "3.0"
ci.return_value.release.short = "rel_short"
compose = Compose(conf, self.tmp_dir)
keys = ['arch', 'compose_id', 'date', 'disc_num', 'disc_type',
'label', 'label_major_version', 'release_short', 'respin',
'suffix', 'type', 'type_suffix', 'variant', 'version']
format = '-'.join(['%(' + k + ')s' for k in keys])
name = compose.get_image_name('x86_64', variant, format=format,
disc_num=7, disc_type='netinst', suffix='.iso')
keys = [
"arch",
"compose_id",
"date",
"disc_num",
"disc_type",
"label",
"label_major_version",
"release_short",
"respin",
"suffix",
"type",
"type_suffix",
"variant",
"version",
]
format = "-".join(["%(" + k + ")s" for k in keys])
name = compose.get_image_name(
"x86_64",
variant,
format=format,
disc_num=7,
disc_type="netinst",
suffix=".iso",
)
self.assertEqual(name, '-'.join(['x86_64', 'compose_id', '20160107', '7', 'netinst',
'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly',
'.n', 'Server', '3.0']))
self.assertEqual(
name,
"-".join(
[
"x86_64",
"compose_id",
"20160107",
"7",
"netinst",
"RC-1.0",
"1",
"rel_short",
"2",
".iso",
"nightly",
".n",
"Server",
"3.0",
]
),
)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_release(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = None
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '20160107.n.2')
self.assertEqual(compose.image_release, "20160107.n.2")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_release_production(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'production'
ci.return_value.compose.type_suffix = ''
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = ""
ci.return_value.compose.label = None
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '20160107.2')
self.assertEqual(compose.image_release, "20160107.2")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_release_from_label(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'production'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'Alpha-1.2'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "Alpha-1.2"
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_release, '1.2')
self.assertEqual(compose.image_release, "1.2")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_version_without_label(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = None
ci.return_value.release.version = '25'
ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25')
self.assertEqual(compose.image_version, "25")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_version_with_label(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'Alpha-1.2'
ci.return_value.release.version = '25'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "Alpha-1.2"
ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25_Alpha')
self.assertEqual(compose.image_version, "25_Alpha")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_image_version_with_label_rc(self, ci):
conf = {}
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'nightly'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.label = 'RC-1.2'
ci.return_value.release.version = '25'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "nightly"
ci.return_value.compose.type_suffix = ".n"
ci.return_value.compose.label = "RC-1.2"
ci.return_value.release.version = "25"
compose = Compose(conf, self.tmp_dir)
self.assertEqual(compose.image_version, '25')
self.assertEqual(compose.image_version, "25")
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_without_filter(self, ci):
ci.return_value.compose.id = 'composeid'
ci.return_value.compose.id = "composeid"
conf = ConfigWrapper(
variants_file={'scm': 'file',
'repo': None,
'file': 'variants.xml'},
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
release_name="Test",
release_version="1.0",
release_short="test",
release_type="ga",
release_internal=False,
)
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Crashy', 'Live', 'Server'])
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])
self.assertEqual(
sorted(v.uid for v in compose.variants.values()),
["Client", "Crashy", "Live", "Server"],
)
self.assertEqual(
sorted(v.uid for v in compose.variants["Server"].variants.values()),
["Server-Gluster", "Server-ResilientStorage", "Server-optional"],
)
six.assertCountEqual(
self, compose.variants["Client"].arches, ["i386", "x86_64"]
)
@ -303,37 +390,48 @@ class ComposeTestCase(unittest.TestCase):
six.assertCountEqual(
self,
compose.variants["Server"].variants["optional"].arches,
["s390x", "x86_64"]
["s390x", "x86_64"],
)
self.assertEqual([v.uid for v in compose.get_variants()],
['Client', 'Crashy', 'Live', 'Server', 'Server-Gluster',
'Server-ResilientStorage', 'Server-optional'])
self.assertEqual(compose.get_arches(), ['i386', 'ppc64le', 's390x', 'x86_64'])
self.assertEqual(
[v.uid for v in compose.get_variants()],
[
"Client",
"Crashy",
"Live",
"Server",
"Server-Gluster",
"Server-ResilientStorage",
"Server-optional",
],
)
self.assertEqual(compose.get_arches(), ["i386", "ppc64le", "s390x", "x86_64"])
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_arch_filter(self, ci):
ci.return_value.compose.id = 'composeid'
ci.return_value.compose.id = "composeid"
conf = ConfigWrapper(
variants_file={'scm': 'file',
'repo': None,
'file': 'variants.xml'},
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
release_name="Test",
release_version="1.0",
release_short="test",
release_type="ga",
release_internal=False,
tree_arches=['x86_64'],
tree_arches=["x86_64"],
)
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Live', 'Server'])
self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),
['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])
self.assertEqual(
sorted(v.uid for v in compose.variants.values()),
["Client", "Live", "Server"],
)
self.assertEqual(
sorted(v.uid for v in compose.variants["Server"].variants.values()),
["Server-Gluster", "Server-ResilientStorage", "Server-optional"],
)
self.assertEqual(compose.variants["Client"].arches, ["x86_64"])
self.assertEqual(compose.variants["Live"].arches, ["x86_64"])
self.assertEqual(compose.variants["Server"].arches, ["x86_64"])
@ -347,36 +445,43 @@ class ComposeTestCase(unittest.TestCase):
compose.variants["Server"].variants["optional"].arches, ["x86_64"]
)
self.assertEqual(compose.get_arches(), ['x86_64'])
self.assertEqual([v.uid for v in compose.get_variants()],
['Client', 'Live', 'Server', 'Server-Gluster',
'Server-ResilientStorage', 'Server-optional'])
self.assertEqual(compose.get_arches(), ["x86_64"])
self.assertEqual(
[v.uid for v in compose.get_variants()],
[
"Client",
"Live",
"Server",
"Server-Gluster",
"Server-ResilientStorage",
"Server-optional",
],
)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_variant_filter(self, ci):
ci.return_value.compose.id = 'composeid'
ci.return_value.compose.id = "composeid"
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'production'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = ".n"
conf = ConfigWrapper(
variants_file={'scm': 'file',
'repo': None,
'file': 'variants.xml'},
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
release_name="Test",
release_version="1.0",
release_short="test",
release_type="ga",
release_internal=False,
tree_variants=['Server', 'Client', 'Server-Gluster'],
tree_variants=["Server", "Client", "Server-Gluster"],
)
compose = Compose(conf, self.tmp_dir)
compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Server'])
self.assertEqual(
sorted(v.uid for v in compose.variants.values()), ["Client", "Server"]
)
six.assertCountEqual(
self, compose.variants["Client"].arches, ["i386", "x86_64"]
)
@ -387,77 +492,84 @@ class ComposeTestCase(unittest.TestCase):
compose.variants["Server"].variants["Gluster"].arches, ["x86_64"]
)
self.assertEqual(compose.get_arches(), ['i386', 's390x', 'x86_64'])
self.assertEqual([v.uid for v in compose.get_variants()],
['Client', 'Server', 'Server-Gluster'])
self.assertEqual(compose.get_arches(), ["i386", "s390x", "x86_64"])
self.assertEqual(
[v.uid for v in compose.get_variants()],
["Client", "Server", "Server-Gluster"],
)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_get_variant_arches_with_both_filters(self, ci):
ci.return_value.compose.id = 'composeid'
ci.return_value.compose.id = "composeid"
ci.return_value.compose.respin = 2
ci.return_value.compose.date = '20160107'
ci.return_value.compose.type = 'production'
ci.return_value.compose.type_suffix = '.n'
ci.return_value.compose.date = "20160107"
ci.return_value.compose.type = "production"
ci.return_value.compose.type_suffix = ".n"
logger = mock.Mock()
logger.handlers = []
conf = ConfigWrapper(
variants_file={'scm': 'file',
'repo': None,
'file': 'variants.xml'},
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
release_name="Test",
release_version="1.0",
release_short="test",
release_type="ga",
release_internal=False,
tree_variants=['Server', 'Client', 'Server-optional'],
tree_arches=['x86_64'],
tree_variants=["Server", "Client", "Server-optional"],
tree_arches=["x86_64"],
)
compose = Compose(conf, self.tmp_dir, logger=logger)
compose.read_variants()
self.assertEqual(sorted(v.uid for v in compose.variants.values()),
['Client', 'Server'])
self.assertEqual(
sorted(v.uid for v in compose.variants.values()), ["Client", "Server"]
)
self.assertEqual(compose.variants["Client"].arches, ["x86_64"])
self.assertEqual(compose.variants["Server"].arches, ["x86_64"])
self.assertEqual(
compose.variants["Server"].variants["optional"].arches, ["x86_64"]
)
self.assertEqual(compose.get_arches(), ['x86_64'])
self.assertEqual([v.uid for v in compose.get_variants()],
['Client', 'Server', 'Server-optional'])
self.assertEqual(compose.get_arches(), ["x86_64"])
self.assertEqual(
[v.uid for v in compose.get_variants()],
["Client", "Server", "Server-optional"],
)
six.assertCountEqual(
self,
logger.info.call_args_list,
[mock.call('Excluding variant Live: filtered by configuration.'),
mock.call('Excluding variant Crashy: all its arches are filtered.'),
mock.call('Excluding variant Server-ResilientStorage: filtered by configuration.'),
mock.call('Excluding variant Server-Gluster: filtered by configuration.')]
[
mock.call("Excluding variant Live: filtered by configuration."),
mock.call("Excluding variant Crashy: all its arches are filtered."),
mock.call(
"Excluding variant Server-ResilientStorage: filtered by configuration."
),
mock.call(
"Excluding variant Server-Gluster: filtered by configuration."
),
],
)
@mock.patch('pungi.compose.ComposeInfo')
@mock.patch("pungi.compose.ComposeInfo")
def test_mkdtemp(self, ci):
ci.return_value.compose.id = 'composeid'
ci.return_value.compose.id = "composeid"
conf = ConfigWrapper(
variants_file={'scm': 'file',
'repo': None,
'file': 'variants.xml'},
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
variants_file={"scm": "file", "repo": None, "file": "variants.xml"},
release_name="Test",
release_version="1.0",
release_short="test",
release_type="ga",
release_internal=False,
tree_variants=['Server', 'Client', 'Server-optional'],
tree_arches=['x86_64'],
tree_variants=["Server", "Client", "Server-optional"],
tree_arches=["x86_64"],
)
compose = Compose(conf, self.tmp_dir)
d = compose.mkdtemp()
self.assertTrue(os.path.isdir(d))
d = compose.mkdtemp(prefix='tweak_buildinstall')
d = compose.mkdtemp(prefix="tweak_buildinstall")
self.assertTrue(os.path.isdir(d))
@ -466,7 +578,7 @@ class StatusTest(unittest.TestCase):
self.tmp_dir = tempfile.mkdtemp()
self.logger = mock.Mock()
self.logger.handlers = []
with mock.patch('pungi.compose.ComposeInfo'):
with mock.patch("pungi.compose.ComposeInfo"):
self.compose = Compose({}, self.tmp_dir, logger=self.logger)
def tearDown(self):
@ -477,78 +589,82 @@ class StatusTest(unittest.TestCase):
self.assertIsNone(status)
def test_get_status_existing(self):
with open(os.path.join(self.tmp_dir, 'STATUS'), 'w') as f:
f.write('FOOBAR')
with open(os.path.join(self.tmp_dir, "STATUS"), "w") as f:
f.write("FOOBAR")
self.assertEqual(self.compose.get_status(), 'FOOBAR')
self.assertEqual(self.compose.get_status(), "FOOBAR")
def test_get_status_is_dir(self):
os.mkdir(os.path.join(self.tmp_dir, 'STATUS'))
os.mkdir(os.path.join(self.tmp_dir, "STATUS"))
self.assertIsNone(self.compose.get_status())
def test_write_status(self):
self.compose.write_status('DOOMED')
self.compose.write_status("DOOMED")
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f:
self.assertEqual(f.read(), 'DOOMED\n')
with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), "DOOMED\n")
def test_write_non_standard_status(self):
self.compose.write_status('FOOBAR')
self.compose.write_status("FOOBAR")
self.assertEqual(self.logger.log.call_count, 1)
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f:
self.assertEqual(f.read(), 'FOOBAR\n')
with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), "FOOBAR\n")
def test_write_status_on_finished(self):
self.compose.write_status('FINISHED')
self.compose.write_status("FINISHED")
with self.assertRaises(RuntimeError):
self.compose.write_status('NOT REALLY')
self.compose.write_status("NOT REALLY")
def test_write_status_with_failed_deliverables(self):
self.compose.conf = {
'failable_deliverables': [
('^.+$', {
'*': ['live', 'build-image'],
})
]
"failable_deliverables": [("^.+$", {"*": ["live", "build-image"]})]
}
variant = mock.Mock(uid='Server')
self.compose.fail_deliverable(variant, 'x86_64', 'live')
self.compose.fail_deliverable(None, '*', 'build-image')
variant = mock.Mock(uid="Server")
self.compose.fail_deliverable(variant, "x86_64", "live")
self.compose.fail_deliverable(None, "*", "build-image")
self.compose.write_status('FINISHED')
self.compose.write_status("FINISHED")
self.logger.log.assert_has_calls(
[mock.call(20, 'Failed build-image on variant <>, arch <*>, subvariant <None>.'),
mock.call(20, 'Failed live on variant <Server>, arch <x86_64>, subvariant <None>.')],
any_order=True)
[
mock.call(
20, "Failed build-image on variant <>, arch <*>, subvariant <None>."
),
mock.call(
20,
"Failed live on variant <Server>, arch <x86_64>, subvariant <None>.",
),
],
any_order=True,
)
with open(os.path.join(self.tmp_dir, 'STATUS'), 'r') as f:
self.assertEqual(f.read(), 'FINISHED_INCOMPLETE\n')
with open(os.path.join(self.tmp_dir, "STATUS"), "r") as f:
self.assertEqual(f.read(), "FINISHED_INCOMPLETE\n")
def test_calls_notifier(self):
self.compose.notifier = mock.Mock()
self.compose.write_status('FINISHED')
self.compose.write_status("FINISHED")
self.assertTrue(self.compose.notifier.send.call_count, 1)
def test_no_database_with_dnf_backend(self):
self.compose.conf['gather_backend'] = 'dnf'
self.compose.conf["gather_backend"] = "dnf"
self.assertFalse(self.compose.should_create_yum_database)
def test_no_database_with_dnf_backend_config_override(self):
self.compose.conf['gather_backend'] = 'dnf'
self.compose.conf['createrepo_database'] = True
self.compose.conf["gather_backend"] = "dnf"
self.compose.conf["createrepo_database"] = True
self.assertTrue(self.compose.should_create_yum_database)
def test_no_database_with_yum_backend(self):
self.compose.conf['gather_backend'] = 'yum'
self.compose.conf["gather_backend"] = "yum"
self.assertTrue(self.compose.should_create_yum_database)
def test_no_database_with_yum_backend_config_override(self):
self.compose.conf['gather_backend'] = 'yum'
self.compose.conf['createrepo_database'] = False
self.compose.conf["gather_backend"] = "yum"
self.compose.conf["createrepo_database"] = False
self.assertFalse(self.compose.should_create_yum_database)

View File

@ -12,29 +12,40 @@ import sys
from pungi.wrappers.comps import CompsWrapper, CompsFilter, CompsValidationError
from tests.helpers import BaseTestCase, FIXTURE_DIR
COMPS_FILE = os.path.join(FIXTURE_DIR, 'comps.xml')
COMPS_FORMATTED_FILE = os.path.join(FIXTURE_DIR, 'comps-formatted.xml')
COMPS_GROUP_FILE = os.path.join(FIXTURE_DIR, 'comps-group.xml')
COMPS_ENVIRONMENT_FILE = os.path.join(FIXTURE_DIR, 'comps-env.xml')
COMPS_FILE_WITH_TYPO = os.path.join(FIXTURE_DIR, 'comps-typo.xml')
COMPS_FILE_WITH_WHITESPACE = os.path.join(FIXTURE_DIR, 'comps-ws.xml')
COMPS_FILE = os.path.join(FIXTURE_DIR, "comps.xml")
COMPS_FORMATTED_FILE = os.path.join(FIXTURE_DIR, "comps-formatted.xml")
COMPS_GROUP_FILE = os.path.join(FIXTURE_DIR, "comps-group.xml")
COMPS_ENVIRONMENT_FILE = os.path.join(FIXTURE_DIR, "comps-env.xml")
COMPS_FILE_WITH_TYPO = os.path.join(FIXTURE_DIR, "comps-typo.xml")
COMPS_FILE_WITH_WHITESPACE = os.path.join(FIXTURE_DIR, "comps-ws.xml")
class CompsWrapperTest(BaseTestCase):
def setUp(self):
self.file = tempfile.NamedTemporaryFile(prefix='comps-wrapper-test-')
self.file = tempfile.NamedTemporaryFile(prefix="comps-wrapper-test-")
def test_get_groups(self):
comps = CompsWrapper(COMPS_FILE)
self.assertEqual(
sorted(comps.get_comps_groups()),
sorted(['core', 'standard', 'text-internet', 'firefox', 'resilient-storage', 'basic-desktop']))
sorted(
[
"core",
"standard",
"text-internet",
"firefox",
"resilient-storage",
"basic-desktop",
]
),
)
def test_get_packages(self):
comps = CompsWrapper(COMPS_FILE)
self.assertEqual(
sorted(comps.get_packages('text-internet')),
sorted(['dummy-elinks', 'dummy-tftp']))
sorted(comps.get_packages("text-internet")),
sorted(["dummy-elinks", "dummy-tftp"]),
)
def test_get_langpacks(self):
comps = CompsWrapper(COMPS_FILE)
@ -44,13 +55,13 @@ class CompsWrapperTest(BaseTestCase):
"aspell": "aspell-%s",
"firefox": "firefox-langpack-%s",
"kdelibs": "kde-l10n-%s",
}
},
)
def test_get_packages_for_non_existing_group(self):
comps = CompsWrapper(COMPS_FILE)
with self.assertRaises(KeyError):
comps.get_packages('foo')
comps.get_packages("foo")
def test_write_comps(self):
comps = CompsWrapper(COMPS_FILE)
@ -59,44 +70,44 @@ class CompsWrapperTest(BaseTestCase):
def test_filter_groups(self):
comps = CompsWrapper(COMPS_FILE)
unmatched = comps.filter_groups([
unmatched = comps.filter_groups(
[
{"name": "core", "glob": False, "default": False, "uservisible": True},
{"name": "*a*", "glob": True, "default": None, "uservisible": None},
])
]
)
self.assertEqual(unmatched, set())
comps.write_comps(target_file=self.file.name)
self.assertFilesEqual(COMPS_GROUP_FILE, self.file.name)
def test_filter_groups_unused_filter(self):
comps = CompsWrapper(COMPS_FILE)
unmatched = comps.filter_groups([
{"name": "boom", "glob": False, "default": False, "uservisible": True},
])
unmatched = comps.filter_groups(
[{"name": "boom", "glob": False, "default": False, "uservisible": True}]
)
self.assertEqual(unmatched, set(["boom"]))
def test_filter_environments(self):
comps = CompsWrapper(COMPS_FILE)
comps.filter_environments([
{"name": "minimal", "display_order": 10}
])
comps.filter_environments([{"name": "minimal", "display_order": 10}])
comps.write_comps(target_file=self.file.name)
self.assertFilesEqual(COMPS_ENVIRONMENT_FILE, self.file.name)
def test_read_display_order(self):
comps = CompsWrapper(COMPS_FILE)
groups = [
{"name": "minimal", "display_order": None}
]
groups = [{"name": "minimal", "display_order": None}]
comps.filter_environments(groups)
self.assertEqual(groups, [{"name": "minimal", "display_order": 99, "groups": ["core"]}])
self.assertEqual(
groups, [{"name": "minimal", "display_order": 99, "groups": ["core"]}]
)
def test_report_typo_in_package_type(self):
comps = CompsWrapper(COMPS_FILE_WITH_TYPO)
with self.assertRaises(RuntimeError) as ctx:
comps.write_comps(target_file=self.file.name)
self.assertIn(
'Package dummy-bash in group core has unknown type',
str(ctx.exception))
"Package dummy-bash in group core has unknown type", str(ctx.exception)
)
def test_validate_correct(self):
comps = CompsWrapper(COMPS_FILE)
@ -121,81 +132,87 @@ class CompsWrapperTest(BaseTestCase):
)
COMPS_IN_FILE = os.path.join(FIXTURE_DIR, 'comps.xml.in')
COMPS_IN_FILE = os.path.join(FIXTURE_DIR, "comps.xml.in")
class CompsFilterTest(unittest.TestCase):
def setUp(self):
self.filter = CompsFilter(COMPS_IN_FILE, reindent=True)
self.output = tempfile.NamedTemporaryFile(prefix='comps-filter-test-')
self.output = tempfile.NamedTemporaryFile(prefix="comps-filter-test-")
def assertOutput(self, filepath):
self.filter.write(self.output)
self.output.flush()
with open(self.output.name, 'r') as f:
actual = f.read().strip().replace('utf-8', 'UTF-8')
with open(filepath, 'r') as f:
with open(self.output.name, "r") as f:
actual = f.read().strip().replace("utf-8", "UTF-8")
with open(filepath, "r") as f:
expected = f.read().strip()
self.maxDiff = None
self.assertEqual(expected, actual)
def test_filter_packages(self):
self.filter.filter_packages('ppc64le', None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-packages.xml'))
self.filter.filter_packages("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-packages.xml"))
def test_filter_packages_with_variant(self):
self.filter.filter_packages('ppc64le', 'Server')
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-packages-variant.xml'))
self.filter.filter_packages("ppc64le", "Server")
self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-packages-variant.xml")
)
def test_filter_groups(self):
self.filter.filter_groups('ppc64le', None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-groups.xml'))
self.filter.filter_groups("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-groups.xml"))
def test_filter_groups_with_variant(self):
self.filter.filter_groups('ppc64le', 'Server')
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-groups-variant.xml'))
self.filter.filter_groups("ppc64le", "Server")
self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-groups-variant.xml")
)
def test_filter_environments(self):
self.filter.filter_environments('ppc64le', None)
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-environments.xml'))
self.filter.filter_environments("ppc64le", None)
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-filtered-environments.xml"))
def test_filter_environments_variant(self):
self.filter.filter_environments('ppc64le', 'Client')
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-filtered-environments-variant.xml'))
self.filter.filter_environments("ppc64le", "Client")
self.assertOutput(
os.path.join(FIXTURE_DIR, "comps-filtered-environments-variant.xml")
)
def test_remove_categories(self):
self.filter.remove_categories()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-categories.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-categories.xml"))
def test_remove_langpacks(self):
self.filter.remove_langpacks()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-langpacks.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-langpacks.xml"))
def test_remove_translations(self):
self.filter.remove_translations()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-translations.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-translations.xml"))
def test_remove_environments(self):
self.filter.remove_environments()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-removed-environments.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-environments.xml"))
def test_cleanup(self):
self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup.xml"))
def test_cleanup_after_filter(self):
self.filter.filter_packages('ppc64le', None)
self.filter.filter_packages("ppc64le", None)
self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-filter.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-filter.xml"))
def test_cleanup_after_filter_keep_group(self):
self.filter.filter_packages('ppc64le', None)
self.filter.cleanup(['standard'])
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-keep.xml'))
self.filter.filter_packages("ppc64le", None)
self.filter.cleanup(["standard"])
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-keep.xml"))
def test_cleanup_all(self):
self.filter.filter_packages('ppc64le', None)
self.filter.filter_groups('ppc64le', None)
self.filter.filter_environments('ppc64le', None)
self.filter.filter_packages("ppc64le", None)
self.filter.filter_groups("ppc64le", None)
self.filter.filter_environments("ppc64le", None)
self.filter.cleanup()
self.assertOutput(os.path.join(FIXTURE_DIR, 'comps-cleanup-all.xml'))
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-all.xml"))

View File

@ -23,49 +23,46 @@ class ConfigTestCase(unittest.TestCase):
class PkgsetConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self):
cfg = load_config(
pkgset_source='koji',
)
cfg = load_config(pkgset_source="koji",)
self.assertValidation(cfg)
def test_validate_minimal_pkgset_repos(self):
cfg = load_config(
pkgset_source='repos',
pkgset_repos={'x86_64': '/first', 'ppc64': '/second'},
pkgset_source="repos",
pkgset_repos={"x86_64": "/first", "ppc64": "/second"},
)
self.assertValidation(cfg)
def test_pkgset_mismatch_repos(self):
cfg = load_config(
pkgset_source='repos',
pkgset_koji_tag='f25',
pkgset_koji_inherit=False,
pkgset_source="repos", pkgset_koji_tag="f25", pkgset_koji_inherit=False,
)
self.assertValidation(
cfg,
[checks.REQUIRES.format('pkgset_source', 'repos', 'pkgset_repos'),
checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_tag'),
checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_inherit')])
[
checks.REQUIRES.format("pkgset_source", "repos", "pkgset_repos"),
checks.CONFLICTS.format("pkgset_source", "repos", "pkgset_koji_tag"),
checks.CONFLICTS.format(
"pkgset_source", "repos", "pkgset_koji_inherit"
),
],
)
def test_pkgset_mismatch_koji(self):
cfg = load_config(
pkgset_source='koji',
pkgset_repos={'whatever': '/foo'},
)
cfg = load_config(pkgset_source="koji", pkgset_repos={"whatever": "/foo"},)
self.assertValidation(
cfg,
[checks.CONFLICTS.format('pkgset_source', 'koji', 'pkgset_repos')])
cfg, [checks.CONFLICTS.format("pkgset_source", "koji", "pkgset_repos")]
)
def test_pkgset_multiple_koji_tags(self):
cfg = load_config(
pkgset_source='koji',
pkgset_koji_tag=['f25', 'f25-extra'],
pkgset_source="koji",
pkgset_koji_tag=["f25", "f25-extra"],
pkgset_koji_inherit=False,
)
self.assertValidation(cfg)
@ -73,215 +70,222 @@ class PkgsetConfigTestCase(ConfigTestCase):
class ReleaseConfigTestCase(ConfigTestCase):
def test_set_release_is_layered(self):
cfg = load_config(
PKGSET_REPOS,
release_is_layered=True
)
cfg = load_config(PKGSET_REPOS, release_is_layered=True)
self.assertValidation(
cfg,
warnings=[
"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."])
"WARNING: Config option release_is_layered was removed and has no effect; remove it. It's layered if there's configuration for base product."
],
)
def test_only_config_base_product_name(self):
cfg = load_config(
PKGSET_REPOS,
base_product_name='Prod',
)
cfg = load_config(PKGSET_REPOS, base_product_name="Prod",)
self.assertValidation(
cfg,
[checks.REQUIRES.format('base_product_name', 'Prod', 'base_product_short'),
checks.REQUIRES.format('base_product_name', 'Prod', 'base_product_version'),
checks.CONFLICTS.format('base_product_short', None, 'base_product_name'),
checks.CONFLICTS.format('base_product_version', None, 'base_product_name')])
[
checks.REQUIRES.format(
"base_product_name", "Prod", "base_product_short"
),
checks.REQUIRES.format(
"base_product_name", "Prod", "base_product_version"
),
checks.CONFLICTS.format(
"base_product_short", None, "base_product_name"
),
checks.CONFLICTS.format(
"base_product_version", None, "base_product_name"
),
],
)
def test_only_config_base_product_short(self):
cfg = load_config(
PKGSET_REPOS,
base_product_short='bp',
)
cfg = load_config(PKGSET_REPOS, base_product_short="bp",)
self.assertValidation(
cfg,
[checks.REQUIRES.format('base_product_short', 'bp', 'base_product_name'),
checks.REQUIRES.format('base_product_short', 'bp', 'base_product_version'),
checks.CONFLICTS.format('base_product_name', None, 'base_product_short'),
checks.CONFLICTS.format('base_product_version', None, 'base_product_short')])
[
checks.REQUIRES.format("base_product_short", "bp", "base_product_name"),
checks.REQUIRES.format(
"base_product_short", "bp", "base_product_version"
),
checks.CONFLICTS.format(
"base_product_name", None, "base_product_short"
),
checks.CONFLICTS.format(
"base_product_version", None, "base_product_short"
),
],
)
def test_only_config_base_product_version(self):
cfg = load_config(
PKGSET_REPOS,
base_product_version='1.0',
)
cfg = load_config(PKGSET_REPOS, base_product_version="1.0",)
self.assertValidation(
cfg,
[checks.REQUIRES.format('base_product_version', '1.0', 'base_product_name'),
checks.REQUIRES.format('base_product_version', '1.0', 'base_product_short'),
checks.CONFLICTS.format('base_product_name', None, 'base_product_version'),
checks.CONFLICTS.format('base_product_short', None, 'base_product_version')])
[
checks.REQUIRES.format(
"base_product_version", "1.0", "base_product_name"
),
checks.REQUIRES.format(
"base_product_version", "1.0", "base_product_short"
),
checks.CONFLICTS.format(
"base_product_name", None, "base_product_version"
),
checks.CONFLICTS.format(
"base_product_short", None, "base_product_version"
),
],
)
class ImageNameConfigTestCase(ConfigTestCase):
def test_image_name_simple_string(self):
cfg = load_config(
PKGSET_REPOS,
image_name_format="foobar",
)
cfg = load_config(PKGSET_REPOS, image_name_format="foobar",)
self.assertValidation(cfg, [])
def test_image_name_variant_mapping(self):
cfg = load_config(
PKGSET_REPOS,
image_name_format={"^Server$": "foobar"},
)
cfg = load_config(PKGSET_REPOS, image_name_format={"^Server$": "foobar"},)
self.assertValidation(cfg, [])
class RunrootConfigTestCase(ConfigTestCase):
def test_set_runroot_true(self):
cfg = load_config(
PKGSET_REPOS,
runroot=True,
)
cfg = load_config(PKGSET_REPOS, runroot=True,)
self.assertValidation(
cfg,
warnings=["WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."])
warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
],
)
def test_set_runroot_false(self):
cfg = load_config(
PKGSET_REPOS,
runroot=False,
)
cfg = load_config(PKGSET_REPOS, runroot=False,)
self.assertValidation(
cfg,
warnings=["WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."])
warnings=[
"WARNING: Config option runroot was removed and has no effect; remove it. Please specify 'runroot_method' if you want to enable runroot, otherwise run things locally."
],
)
class BuildinstallConfigTestCase(ConfigTestCase):
def test_bootable_deprecated(self):
cfg = load_config(
PKGSET_REPOS,
bootable=True,
)
cfg = load_config(PKGSET_REPOS, bootable=True,)
self.assertValidation(
cfg,
warnings=['WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer.'])
warnings=[
"WARNING: Config option bootable was removed and has no effect; remove it. Setting buildinstall_method option if you want a bootable installer."
],
)
def test_buildinstall_method_without_bootable(self):
cfg = load_config(
PKGSET_REPOS,
buildinstall_method='lorax',
)
cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax",)
self.assertValidation(
cfg,
[])
self.assertValidation(cfg, [])
def test_buildinstall_with_lorax_options(self):
cfg = load_config(
PKGSET_REPOS,
buildinstall_method='buildinstall',
lorax_options=[('^Server$', {})]
buildinstall_method="buildinstall",
lorax_options=[("^Server$", {})],
)
self.assertValidation(
cfg,
[checks.CONFLICTS.format('buildinstall_method', 'buildinstall', 'lorax_options')])
[
checks.CONFLICTS.format(
"buildinstall_method", "buildinstall", "lorax_options"
)
],
)
def test_lorax_with_lorax_options(self):
cfg = load_config(
PKGSET_REPOS,
buildinstall_method='lorax',
lorax_options=[]
)
cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax", lorax_options=[])
self.assertValidation(cfg)
def test_lorax_options_without_bootable_and_method(self):
cfg = load_config(
PKGSET_REPOS,
lorax_options=[('^Server$', {})],
buildinstall_kickstart='foo',
lorax_options=[("^Server$", {})],
buildinstall_kickstart="foo",
)
self.assertValidation(
cfg,
[checks.CONFLICTS.format('buildinstall_method', 'None', 'lorax_options'),
checks.CONFLICTS.format('buildinstall_method', 'None', 'buildinstall_kickstart')])
[
checks.CONFLICTS.format("buildinstall_method", "None", "lorax_options"),
checks.CONFLICTS.format(
"buildinstall_method", "None", "buildinstall_kickstart"
),
],
)
class CreaterepoConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self):
cfg = load_config(
pkgset_source='koji',
pkgset_koji_tag="f25",
product_id_allow_missing=True,
pkgset_source="koji", pkgset_koji_tag="f25", product_id_allow_missing=True,
)
self.assertValidation(
cfg,
[checks.CONFLICTS.format('product_id', 'None', 'product_id_allow_missing')])
[checks.CONFLICTS.format("product_id", "None", "product_id_allow_missing")],
)
class GatherConfigTestCase(ConfigTestCase):
def test_dnf_backend_is_default_on_py3(self):
cfg = load_config(
pkgset_source='koji',
pkgset_koji_tag='f27',
)
cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
with mock.patch('six.PY2', new=False):
with mock.patch("six.PY2", new=False):
self.assertValidation(cfg, [])
self.assertEqual(cfg['gather_backend'], 'dnf')
self.assertEqual(cfg["gather_backend"], "dnf")
def test_yum_backend_is_default_on_py2(self):
cfg = load_config(
pkgset_source='koji',
pkgset_koji_tag='f27',
)
cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
with mock.patch('six.PY2', new=True):
with mock.patch("six.PY2", new=True):
self.assertValidation(cfg, [])
self.assertEqual(cfg['gather_backend'], 'yum')
self.assertEqual(cfg["gather_backend"], "yum")
def test_yum_backend_is_rejected_on_py3(self):
cfg = load_config(
pkgset_source='koji',
pkgset_koji_tag='f27',
gather_backend='yum',
pkgset_source="koji", pkgset_koji_tag="f27", gather_backend="yum",
)
with mock.patch('six.PY2', new=False):
with mock.patch("six.PY2", new=False):
self.assertValidation(
cfg,
["Failed validation in gather_backend: 'yum' is not one of ['dnf']"])
["Failed validation in gather_backend: 'yum' is not one of ['dnf']"],
)
class OSBSConfigTestCase(ConfigTestCase):
def test_validate(self):
cfg = load_config(
PKGSET_REPOS,
osbs={"^Server$": {
'url': 'http://example.com',
'target': 'f25-build',
'git_branch': 'f25',
}}
osbs={
"^Server$": {
"url": "http://example.com",
"target": "f25-build",
"git_branch": "f25",
}
},
)
self.assertValidation(cfg)
def test_validate_bad_conf(self):
cfg = load_config(
PKGSET_REPOS,
osbs='yes please'
)
cfg = load_config(PKGSET_REPOS, osbs="yes please")
self.assertNotEqual(checks.validate(cfg), ([], []))
@ -291,25 +295,25 @@ class OstreeConfigTestCase(ConfigTestCase):
cfg = load_config(
PKGSET_REPOS,
ostree=[
("^Atomic$", {
(
"^Atomic$",
{
"x86_64": {
"treefile": "fedora-atomic-docker-host.json",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"repo": "Everything",
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
"version": '!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN',
"version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
}
})
]
},
)
],
)
self.assertValidation(cfg)
def test_validate_bad_conf(self):
cfg = load_config(
PKGSET_REPOS,
ostree='yes please'
)
cfg = load_config(PKGSET_REPOS, ostree="yes please")
self.assertNotEqual(checks.validate(cfg), ([], []))
@ -319,26 +323,33 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
cfg = load_config(
PKGSET_REPOS,
ostree_installer=[
("^Atomic$", {
(
"^Atomic$",
{
"x86_64": {
"repo": "Everything",
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],
"add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
],
"add_template_var": [
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
"add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"],
"add_arch_template": [
"/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"
],
"rootfs_size": "3",
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
]
],
}
})
]
},
)
],
)
self.assertValidation(cfg)
@ -347,12 +358,16 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
cfg = load_config(
PKGSET_REPOS,
ostree_installer=[
("^Atomic$", {
(
"^Atomic$",
{
"x86_64": {
"repo": "Everything",
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],
"add_template": [
"/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"
],
"add_template_var": [
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
@ -362,10 +377,11 @@ class OstreeInstallerConfigTestCase(ConfigTestCase):
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
]
],
}
})
]
},
)
],
)
self.assertNotEqual(checks.validate(cfg), ([], []))
@ -376,10 +392,10 @@ class LiveMediaConfigTestCase(ConfigTestCase):
def test_global_config_validation(self, resolve_git_url):
cfg = load_config(
PKGSET_REPOS,
live_media_ksurl='git://example.com/repo.git#HEAD',
live_media_target='f24',
live_media_release='RRR',
live_media_version='Rawhide',
live_media_ksurl="git://example.com/repo.git#HEAD",
live_media_target="f24",
live_media_release="RRR",
live_media_version="Rawhide",
)
resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")
@ -388,53 +404,50 @@ class LiveMediaConfigTestCase(ConfigTestCase):
self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
def test_global_config_null_release(self):
cfg = load_config(
PKGSET_REPOS,
live_media_release=None,
)
cfg = load_config(PKGSET_REPOS, live_media_release=None,)
self.assertValidation(cfg)
class TestSuggestions(ConfigTestCase):
def test_with_a_typo(self):
cfg = load_config(PKGSET_REPOS,
product_pid=None)
cfg = load_config(PKGSET_REPOS, product_pid=None)
self.assertValidation(cfg, [], [checks.UNKNOWN_SUGGEST.format('product_pid', 'product_id')])
self.assertValidation(
cfg, [], [checks.UNKNOWN_SUGGEST.format("product_pid", "product_id")]
)
class TestRegexValidation(ConfigTestCase):
def test_incorrect_regular_expression(self):
cfg = load_config(PKGSET_REPOS,
multilib=[('^*$', {'*': []})])
cfg = load_config(PKGSET_REPOS, multilib=[("^*$", {"*": []})])
msg = 'Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat'
msg = "Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat"
if six.PY3:
msg += ' at position 1'
msg += " at position 1"
self.assertValidation(cfg, [msg], [])
class RepoclosureTestCase(ConfigTestCase):
def test_invalid_backend(self):
cfg = load_config(
PKGSET_REPOS,
repoclosure_backend='fnd', # Intentionally with a typo
PKGSET_REPOS, repoclosure_backend="fnd", # Intentionally with a typo
)
options = ['yum', 'dnf'] if six.PY2 else ['dnf']
options = ["yum", "dnf"] if six.PY2 else ["dnf"]
self.assertValidation(
cfg,
["Failed validation in repoclosure_backend: 'fnd' is not one of %s" % options])
[
"Failed validation in repoclosure_backend: 'fnd' is not one of %s"
% options
],
)
class VariantAsLookasideTestCase(ConfigTestCase):
def test_empty(self):
variant_as_lookaside = []
cfg = load_config(
PKGSET_REPOS,
variant_as_lookaside=variant_as_lookaside,
)
cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
self.assertValidation(cfg)
def test_basic(self):
@ -443,20 +456,14 @@ class VariantAsLookasideTestCase(ConfigTestCase):
("Server", "Client"),
("Everything", "Spin"),
]
cfg = load_config(
PKGSET_REPOS,
variant_as_lookaside=variant_as_lookaside,
)
cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
self.assertValidation(cfg)
class SkipPhasesTestCase(ConfigTestCase):
def test_empty(self):
skip_phases = []
cfg = load_config(
PKGSET_REPOS,
skip_phases=skip_phases,
)
cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
self.assertValidation(cfg)
def test_basic(self):
@ -464,10 +471,7 @@ class SkipPhasesTestCase(ConfigTestCase):
"buildinstall",
"gather",
]
cfg = load_config(
PKGSET_REPOS,
skip_phases=skip_phases,
)
cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
self.assertValidation(cfg)
def test_bad_phase_name(self):
@ -475,8 +479,5 @@ class SkipPhasesTestCase(ConfigTestCase):
"gather",
"non-existing-phase_name",
]
cfg = load_config(
PKGSET_REPOS,
skip_phases=skip_phases,
)
cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
self.assertNotEqual(checks.validate(cfg), ([], []))

File diff suppressed because it is too large Load Diff

View File

@ -11,206 +11,384 @@ from pungi import createiso
class CreateIsoScriptTest(helpers.PungiTestCase):
def setUp(self):
super(CreateIsoScriptTest, self).setUp()
self.outdir = os.path.join(self.topdir, 'isos')
self.outdir = os.path.join(self.topdir, "isos")
self.out = StringIO()
self.maxDiff = None
def assertScript(self, cmds):
script = self.out.getvalue().strip().split('\n')
self.assertEqual(script[:3],
['#!/bin/bash',
'set -ex',
'cd %s' % self.outdir])
script = self.out.getvalue().strip().split("\n")
self.assertEqual(script[:3], ["#!/bin/bash", "set -ex", "cd %s" % self.outdir])
self.assertEqual(script[3:], cmds)
def test_minimal_run(self):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='x86_64',
), self.out)
iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="x86_64",
),
self.out,
)
self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-input-charset', 'utf-8', '-x', './lost+found',
'-o', 'DP-1.0-20160405.t.3-x86_64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest']
[
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
]
)
def test_bootable_run(self):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='x86_64',
buildinstall_method='lorax',
), self.out)
iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="x86_64",
buildinstall_method="lorax",
),
self.out,
)
self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET,
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-input-charset', 'utf-8', '-x', './lost+found',
'-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat',
'-no-emul-boot',
'-boot-load-size', '4', '-boot-info-table',
'-eltorito-alt-boot', '-e', 'images/efiboot.img',
'-no-emul-boot',
'-o', 'DP-1.0-20160405.t.3-x86_64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/isohybrid', '--uefi', 'DP-1.0-20160405.t.3-x86_64.iso']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest']
[
createiso.FIND_TEMPLATE_SNIPPET,
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-b",
"isolinux/isolinux.bin",
"-c",
"isolinux/boot.cat",
"-no-emul-boot",
"-boot-load-size",
"4",
"-boot-info-table",
"-eltorito-alt-boot",
"-e",
"images/efiboot.img",
"-no-emul-boot",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(
["/usr/bin/isohybrid", "--uefi", "DP-1.0-20160405.t.3-x86_64.iso"]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
]
)
def test_bootable_run_on_i386(self):
# This will call isohybrid, but not with --uefi switch
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-i386.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='i386',
buildinstall_method='lorax',
), self.out)
iso_name="DP-1.0-20160405.t.3-i386.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="i386",
buildinstall_method="lorax",
),
self.out,
)
self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET,
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-input-charset', 'utf-8', '-x', './lost+found',
'-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat',
'-no-emul-boot',
'-boot-load-size', '4', '-boot-info-table',
'-o', 'DP-1.0-20160405.t.3-i386.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/isohybrid', 'DP-1.0-20160405.t.3-i386.iso']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-i386.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest']
[
createiso.FIND_TEMPLATE_SNIPPET,
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-b",
"isolinux/isolinux.bin",
"-c",
"isolinux/boot.cat",
"-no-emul-boot",
"-boot-load-size",
"4",
"-boot-info-table",
"-o",
"DP-1.0-20160405.t.3-i386.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/isohybrid", "DP-1.0-20160405.t.3-i386.iso"]),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-i386.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest",
]
)
def test_bootable_run_ppc64(self):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-ppc64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='ppc64',
buildinstall_method='lorax',
), self.out)
iso_name="DP-1.0-20160405.t.3-ppc64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="ppc64",
buildinstall_method="lorax",
),
self.out,
)
self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET,
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-x', './lost+found',
'-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop',
'-allow-multidot', '-chrp-boot', '-map', '$TEMPLATE/config_files/ppc/mapping',
'-hfs-bless', '/ppc/mac',
'-o', 'DP-1.0-20160405.t.3-ppc64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest']
[
createiso.FIND_TEMPLATE_SNIPPET,
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-x",
"./lost+found",
"-part",
"-hfs",
"-r",
"-l",
"-sysid",
"PPC",
"-no-desktop",
"-allow-multidot",
"-chrp-boot",
"-map",
"$TEMPLATE/config_files/ppc/mapping",
"-hfs-bless",
"/ppc/mac",
"-o",
"DP-1.0-20160405.t.3-ppc64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
]
)
def test_bootable_run_on_s390x(self):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-s390x.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='s390x',
buildinstall_method='lorax',
), self.out)
iso_name="DP-1.0-20160405.t.3-s390x.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="s390x",
buildinstall_method="lorax",
),
self.out,
)
self.assertScript(
[createiso.FIND_TEMPLATE_SNIPPET,
' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-input-charset', 'utf-8',
'-x', './lost+found',
'-eltorito-boot images/cdboot.img', '-no-emul-boot',
'-o', 'DP-1.0-20160405.t.3-s390x.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-s390x.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest']
[
createiso.FIND_TEMPLATE_SNIPPET,
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-eltorito-boot images/cdboot.img",
"-no-emul-boot",
"-o",
"DP-1.0-20160405.t.3-s390x.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-s390x.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-s390x.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-s390x.iso.manifest",
]
)
def test_bootable_run_buildinstall(self):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-ppc64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='ppc64',
buildinstall_method='buildinstall',
), self.out)
self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-x', './lost+found',
'-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop',
'-allow-multidot', '-chrp-boot',
'-map', '/usr/lib/anaconda-runtime/boot/mapping',
'-hfs-bless', '/ppc/mac',
'-o', 'DP-1.0-20160405.t.3-ppc64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest']
iso_name="DP-1.0-20160405.t.3-ppc64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="ppc64",
buildinstall_method="buildinstall",
),
self.out,
)
@mock.patch('sys.stderr')
@mock.patch('kobo.shortcuts.run')
self.assertScript(
[
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-x",
"./lost+found",
"-part",
"-hfs",
"-r",
"-l",
"-sysid",
"PPC",
"-no-desktop",
"-allow-multidot",
"-chrp-boot",
"-map",
"/usr/lib/anaconda-runtime/boot/mapping",
"-hfs-bless",
"/ppc/mac",
"-o",
"DP-1.0-20160405.t.3-ppc64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",
]
)
@mock.patch("sys.stderr")
@mock.patch("kobo.shortcuts.run")
def test_run_with_jigdo_bad_args(self, run, stderr):
with self.assertRaises(RuntimeError):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='x86_64',
jigdo_dir='%s/jigdo' % self.topdir,
), self.out)
iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="x86_64",
jigdo_dir="%s/jigdo" % self.topdir,
),
self.out,
)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_with_jigdo(self, run):
createiso.write_script(createiso.CreateIsoOpts(
createiso.write_script(
createiso.CreateIsoOpts(
output_dir=self.outdir,
iso_name='DP-1.0-20160405.t.3-x86_64.iso',
volid='DP-1.0-20160405.t.3',
graft_points='graft-list',
arch='x86_64',
jigdo_dir='%s/jigdo' % self.topdir,
os_tree='%s/os' % self.topdir,
), self.out)
iso_name="DP-1.0-20160405.t.3-x86_64.iso",
volid="DP-1.0-20160405.t.3",
graft_points="graft-list",
arch="x86_64",
jigdo_dir="%s/jigdo" % self.topdir,
os_tree="%s/os" % self.topdir,
),
self.out,
)
self.assertScript(
[' '.join(['/usr/bin/genisoimage', '-untranslated-filenames',
'-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
'-rational-rock', '-translation-table',
'-input-charset', 'utf-8', '-x', './lost+found',
'-o', 'DP-1.0-20160405.t.3-x86_64.iso',
'-graft-points', '-path-list', 'graft-list']),
' '.join(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso']),
'isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest',
' '.join(['jigdo-file', 'make-template', '--force',
'--image=%s/isos/DP-1.0-20160405.t.3-x86_64.iso' % self.topdir,
'--jigdo=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.jigdo' % self.topdir,
'--template=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.template' % self.topdir,
'--no-servers-section', '--report=noprogress', self.topdir + '/os//'])]
[
" ".join(
[
"/usr/bin/genisoimage",
"-untranslated-filenames",
"-volid",
"DP-1.0-20160405.t.3",
"-J",
"-joliet-long",
"-rational-rock",
"-translation-table",
"-input-charset",
"utf-8",
"-x",
"./lost+found",
"-o",
"DP-1.0-20160405.t.3-x86_64.iso",
"-graft-points",
"-path-list",
"graft-list",
]
),
" ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-x86_64.iso"]),
"isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest",
" ".join(
[
"jigdo-file",
"make-template",
"--force",
"--image=%s/isos/DP-1.0-20160405.t.3-x86_64.iso" % self.topdir,
"--jigdo=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.jigdo"
% self.topdir,
"--template=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.template"
% self.topdir,
"--no-servers-section",
"--report=noprogress",
self.topdir + "/os//",
]
),
]
)

View File

@ -14,12 +14,11 @@ from pungi.wrappers.createrepo import CreaterepoWrapper
class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_c_cmd_minimal(self):
repo = CreaterepoWrapper()
cmd = repo.get_createrepo_cmd('/test/dir')
cmd = repo.get_createrepo_cmd("/test/dir")
self.assertEqual(cmd[:2], ['createrepo_c', '/test/dir'])
self.assertEqual(cmd[:2], ["createrepo_c", "/test/dir"])
six.assertCountEqual(
self, cmd[2:], ["--update", "--database", "--unique-md-filenames"]
)
@ -27,35 +26,75 @@ class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_c_cmd_full(self):
repo = CreaterepoWrapper()
cmd = repo.get_createrepo_cmd(
'/test/dir', baseurl='http://base.example.com', excludes=['abc', 'xyz'],
pkglist='/test/pkglist', groupfile='/test/comps', cachedir='/test/cache',
update=False, update_md_path='/test/md_path', skip_stat=True, checkts=True,
split=True, pretty=False, database=False, checksum='sha256', unique_md_filenames=False,
distro='Fedora', content=['c1', 'c2'], repo=['r1', 'r2'], revision='rev', deltas=True,
oldpackagedirs='/test/old', num_deltas=2, workers=3, outputdir='/test/output',
"/test/dir",
baseurl="http://base.example.com",
excludes=["abc", "xyz"],
pkglist="/test/pkglist",
groupfile="/test/comps",
cachedir="/test/cache",
update=False,
update_md_path="/test/md_path",
skip_stat=True,
checkts=True,
split=True,
pretty=False,
database=False,
checksum="sha256",
unique_md_filenames=False,
distro="Fedora",
content=["c1", "c2"],
repo=["r1", "r2"],
revision="rev",
deltas=True,
oldpackagedirs="/test/old",
num_deltas=2,
workers=3,
outputdir="/test/output",
use_xz=True,
extra_args=["--zck", "--zck-primary-dict=/foo/bar"],
)
self.maxDiff = None
self.assertEqual(cmd[:2], ['createrepo_c', '/test/dir'])
self.assertEqual(cmd[:2], ["createrepo_c", "/test/dir"])
six.assertCountEqual(
self,
cmd[2:],
["--baseurl=http://base.example.com", "--excludes=abc", "--excludes=xyz",
"--pkglist=/test/pkglist", "--groupfile=/test/comps", "--cachedir=/test/cache",
"--skip-stat", "--update-md-path=/test/md_path", "--split", "--checkts",
"--checksum=sha256", "--distro=Fedora", "--simple-md-filenames", "--no-database",
"--content=c1", "--content=c2", "--repo=r1", "--repo=r2", "--revision=rev",
"--deltas", "--oldpackagedirs=/test/old", "--num-deltas=2", "--workers=3",
"--outputdir=/test/output", "--xz", "--zck", "--zck-primary-dict=/foo/bar"],
[
"--baseurl=http://base.example.com",
"--excludes=abc",
"--excludes=xyz",
"--pkglist=/test/pkglist",
"--groupfile=/test/comps",
"--cachedir=/test/cache",
"--skip-stat",
"--update-md-path=/test/md_path",
"--split",
"--checkts",
"--checksum=sha256",
"--distro=Fedora",
"--simple-md-filenames",
"--no-database",
"--content=c1",
"--content=c2",
"--repo=r1",
"--repo=r2",
"--revision=rev",
"--deltas",
"--oldpackagedirs=/test/old",
"--num-deltas=2",
"--workers=3",
"--outputdir=/test/output",
"--xz",
"--zck",
"--zck-primary-dict=/foo/bar",
],
)
def test_get_createrepo_cmd_minimal(self):
repo = CreaterepoWrapper(False)
cmd = repo.get_createrepo_cmd('/test/dir')
cmd = repo.get_createrepo_cmd("/test/dir")
self.assertEqual(cmd[:2], ['createrepo', '/test/dir'])
self.assertEqual(cmd[:2], ["createrepo", "/test/dir"])
six.assertCountEqual(
self,
cmd[2:],
@ -65,24 +104,61 @@ class CreateRepoWrapperTest(unittest.TestCase):
def test_get_createrepo_cmd_full(self):
repo = CreaterepoWrapper(False)
cmd = repo.get_createrepo_cmd(
'/test/dir', baseurl='http://base.example.com', excludes=['abc', 'xyz'],
pkglist='/test/pkglist', groupfile='/test/comps', cachedir='/test/cache',
update=False, update_md_path='/test/md_path', skip_stat=True, checkts=True,
split=True, pretty=False, database=False, checksum='sha256', unique_md_filenames=False,
distro='Fedora', content=['c1', 'c2'], repo=['r1', 'r2'], revision='rev', deltas=True,
oldpackagedirs='/test/old', num_deltas=2, workers=3, outputdir='/test/output'
"/test/dir",
baseurl="http://base.example.com",
excludes=["abc", "xyz"],
pkglist="/test/pkglist",
groupfile="/test/comps",
cachedir="/test/cache",
update=False,
update_md_path="/test/md_path",
skip_stat=True,
checkts=True,
split=True,
pretty=False,
database=False,
checksum="sha256",
unique_md_filenames=False,
distro="Fedora",
content=["c1", "c2"],
repo=["r1", "r2"],
revision="rev",
deltas=True,
oldpackagedirs="/test/old",
num_deltas=2,
workers=3,
outputdir="/test/output",
)
self.maxDiff = None
self.assertEqual(cmd[:2], ['createrepo', '/test/dir'])
self.assertEqual(cmd[:2], ["createrepo", "/test/dir"])
six.assertCountEqual(
self,
cmd[2:],
["--baseurl=http://base.example.com", "--excludes=abc", "--excludes=xyz",
"--pkglist=/test/pkglist", "--groupfile=/test/comps", "--cachedir=/test/cache",
"--skip-stat", "--update-md-path=/test/md_path", "--split", "--checkts",
"--checksum=sha256", "--distro=Fedora", "--simple-md-filenames", "--no-database",
"--content=c1", "--content=c2", "--repo=r1", "--repo=r2", "--revision=rev",
"--deltas", "--oldpackagedirs=/test/old", "--num-deltas=2", "--workers=3",
"--outputdir=/test/output"],
[
"--baseurl=http://base.example.com",
"--excludes=abc",
"--excludes=xyz",
"--pkglist=/test/pkglist",
"--groupfile=/test/comps",
"--cachedir=/test/cache",
"--skip-stat",
"--update-md-path=/test/md_path",
"--split",
"--checkts",
"--checksum=sha256",
"--distro=Fedora",
"--simple-md-filenames",
"--no-database",
"--content=c1",
"--content=c2",
"--repo=r1",
"--repo=r2",
"--revision=rev",
"--deltas",
"--oldpackagedirs=/test/old",
"--num-deltas=2",
"--workers=3",
"--outputdir=/test/output",
],
)

File diff suppressed because it is too large Load Diff

View File

@ -13,8 +13,7 @@ from tests import helpers
class TestExtraFilePhase(helpers.PungiTestCase):
@mock.patch('pungi.phases.extra_files.copy_extra_files')
@mock.patch("pungi.phases.extra_files.copy_extra_files")
def test_skips_unless_has_config(self, copy_extra_files):
compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None
@ -22,15 +21,13 @@ class TestExtraFilePhase(helpers.PungiTestCase):
phase = extra_files.ExtraFilesPhase(compose, mock.Mock())
self.assertTrue(phase.skip())
@mock.patch('pungi.phases.extra_files.copy_extra_files')
@mock.patch("pungi.phases.extra_files.copy_extra_files")
def test_runs_copy_files_for_each_variant(self, copy_extra_files):
cfg = mock.Mock()
pkgset_phase = mock.Mock()
compose = helpers.DummyCompose(self.topdir, {
'extra_files': [
('^.+$', {'x86_64': [cfg]})
]
})
compose = helpers.DummyCompose(
self.topdir, {"extra_files": [("^.+$", {"x86_64": [cfg]})]}
)
phase = extra_files.ExtraFilesPhase(compose, pkgset_phase)
phase.run()
@ -61,7 +58,6 @@ class TestExtraFilePhase(helpers.PungiTestCase):
class TestCopyFiles(helpers.PungiTestCase):
def setUp(self):
super(TestCopyFiles, self).setUp()
self.metadata = ExtraFiles()
@ -69,24 +65,27 @@ class TestCopyFiles(helpers.PungiTestCase):
self.variant = self.compose.variants["Server"]
def test_copy_local_file(self):
tgt = os.path.join(self.topdir, 'file')
tgt = os.path.join(self.topdir, "file")
helpers.touch(tgt)
cfg = {'scm': 'file', 'file': tgt, 'repo': None}
cfg = {"scm": "file", "file": tgt, "repo": None}
extra_files.copy_extra_files(
self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata
)
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file')))
self.assertTrue(
os.path.isfile(
os.path.join(self.topdir, "compose", "Server", "x86_64", "os", "file")
)
)
def test_copy_multiple_sources(self):
tgt1 = os.path.join(self.topdir, 'file')
tgt2 = os.path.join(self.topdir, 'gpl')
tgt1 = os.path.join(self.topdir, "file")
tgt2 = os.path.join(self.topdir, "gpl")
helpers.touch(tgt1)
helpers.touch(tgt2)
cfg1 = {'scm': 'file', 'file': tgt1, 'repo': None}
cfg2 = {'scm': 'file', 'file': tgt2, 'repo': None, 'target': 'license'}
cfg1 = {"scm": "file", "file": tgt1, "repo": None}
cfg2 = {"scm": "file", "file": tgt2, "repo": None, "target": "license"}
extra_files.copy_extra_files(
self.compose,
@ -94,32 +93,64 @@ class TestCopyFiles(helpers.PungiTestCase):
"x86_64",
self.variant,
mock.Mock(),
self.metadata
self.metadata,
)
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file')))
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'license', 'gpl')))
self.assertTrue(
os.path.isfile(
os.path.join(self.topdir, "compose", "Server", "x86_64", "os", "file")
)
)
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "license", "gpl"
)
)
)
def test_copy_local_dir(self):
helpers.touch(os.path.join(self.topdir, 'src', 'file'))
helpers.touch(os.path.join(self.topdir, 'src', 'another'))
cfg = {'scm': 'file', 'dir': os.path.join(self.topdir, 'src'),
'repo': None, 'target': 'subdir'}
helpers.touch(os.path.join(self.topdir, "src", "file"))
helpers.touch(os.path.join(self.topdir, "src", "another"))
cfg = {
"scm": "file",
"dir": os.path.join(self.topdir, "src"),
"repo": None,
"target": "subdir",
}
extra_files.copy_extra_files(
self.compose, [cfg], "x86_64", self.variant, mock.Mock(), self.metadata
)
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'subdir', 'file')))
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'subdir', 'another')))
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "subdir", "file"
)
)
)
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir,
"compose",
"Server",
"x86_64",
"os",
"subdir",
"another",
)
)
)
@mock.patch('pungi.phases.extra_files.get_file_from_scm')
@mock.patch('pungi.phases.extra_files.get_dir_from_scm')
@mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_external_rpm(self, get_dir_from_scm, get_file_from_scm):
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'http://example.com/package.rpm'}
cfg = {
"scm": "rpm",
"file": "file.txt",
"repo": "http://example.com/package.rpm",
}
get_file_from_scm.side_effect = self.fake_get_file
@ -129,26 +160,41 @@ class TestCopyFiles(helpers.PungiTestCase):
self.assertEqual(len(get_file_from_scm.call_args_list), 1)
self.assertEqual(get_dir_from_scm.call_args_list, [])
self.assertEqual(self.scm_dict,
{'scm': 'rpm', 'file': 'file.txt', 'repo': 'http://example.com/package.rpm'})
self.assertEqual(
self.scm_dict,
{
"scm": "rpm",
"file": "file.txt",
"repo": "http://example.com/package.rpm",
},
)
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file.txt')))
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "file.txt"
)
)
)
@mock.patch('pungi.phases.extra_files.get_file_from_scm')
@mock.patch('pungi.phases.extra_files.get_dir_from_scm')
@mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm):
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': '%(variant_uid)s-data*'}
cfg = {"scm": "rpm", "file": "file.txt", "repo": "%(variant_uid)s-data*"}
server_po, client_po, src_po = mock.Mock(), mock.Mock(), mock.Mock()
server_po.configure_mock(name='Server-data-1.1-1.fc24.x86_64.rpm',
file_path='/server/location',
arch='x86_64')
client_po.configure_mock(name='Client-data-1.1-1.fc24.x86_64.rpm',
file_path='/client/location',
arch='x86_64')
src_po.configure_mock(name='extra-data-1.1-1.fc24.src.rpm',
file_path='/src/location',
arch='src')
server_po.configure_mock(
name="Server-data-1.1-1.fc24.x86_64.rpm",
file_path="/server/location",
arch="x86_64",
)
client_po.configure_mock(
name="Client-data-1.1-1.fc24.x86_64.rpm",
file_path="/client/location",
arch="x86_64",
)
src_po.configure_mock(
name="extra-data-1.1-1.fc24.src.rpm", file_path="/src/location", arch="src"
)
package_sets = [
{
"x86_64": {
@ -168,21 +214,30 @@ class TestCopyFiles(helpers.PungiTestCase):
self.assertEqual(len(get_file_from_scm.call_args_list), 1)
self.assertEqual(get_dir_from_scm.call_args_list, [])
self.assertEqual(self.scm_dict,
{'scm': 'rpm', 'file': 'file.txt', 'repo': ['/server/location']})
self.assertEqual(
self.scm_dict,
{"scm": "rpm", "file": "file.txt", "repo": ["/server/location"]},
)
self.assertTrue(os.path.isfile(os.path.join(
self.topdir, 'compose', 'Server', 'x86_64', 'os', 'file.txt')))
self.assertTrue(
os.path.isfile(
os.path.join(
self.topdir, "compose", "Server", "x86_64", "os", "file.txt"
)
)
)
def fake_get_file(self, scm_dict, dest, compose):
self.scm_dict = scm_dict
helpers.touch(os.path.join(dest, scm_dict['file']))
return [scm_dict['file']]
helpers.touch(os.path.join(dest, scm_dict["file"]))
return [scm_dict["file"]]
@mock.patch('pungi.phases.extra_files.get_file_from_scm')
@mock.patch('pungi.phases.extra_files.get_dir_from_scm')
def test_copy_from_non_existing_rpm_in_compose(self, get_dir_from_scm, get_file_from_scm):
cfg = {'scm': 'rpm', 'file': 'file.txt', 'repo': 'bad-%(variant_uid_lower)s*'}
@mock.patch("pungi.phases.extra_files.get_file_from_scm")
@mock.patch("pungi.phases.extra_files.get_dir_from_scm")
def test_copy_from_non_existing_rpm_in_compose(
self, get_dir_from_scm, get_file_from_scm
):
cfg = {"scm": "rpm", "file": "file.txt", "repo": "bad-%(variant_uid_lower)s*"}
package_sets = [{"x86_64": {}}]
with self.assertRaises(RuntimeError) as ctx:
@ -191,7 +246,7 @@ class TestCopyFiles(helpers.PungiTestCase):
)
self.assertRegexpMatches(
str(ctx.exception), r'No.*package.*matching bad-server\*.*'
str(ctx.exception), r"No.*package.*matching bad-server\*.*"
)
self.assertEqual(len(get_file_from_scm.call_args_list), 0)

File diff suppressed because it is too large Load Diff

View File

@ -101,12 +101,11 @@ class TestGetCmd(unittest.TestCase):
"x86_64",
"--repo=lookaside-0,lookaside,http:///tmp",
"@conf",
]
],
)
class TestWriteConfig(PungiTestCase):
def test_write_sorted_mix(self):
f = os.path.join(self.topdir, "solvables")
fus.write_config(f, ["moda:master"], ["pkg", "foo"])
@ -149,8 +148,7 @@ class TestParseOutput(unittest.TestCase):
touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n")
packages, modules = fus.parse_output(self.file)
self.assertEqual(
packages,
set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
packages, set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
)
self.assertEqual(modules, set())

View File

@ -16,9 +16,11 @@ import logging
from six.moves import cStringIO
from pungi.wrappers.pungi import PungiWrapper
try:
from pungi.dnf_wrapper import DnfWrapper, Conf
from pungi.gather_dnf import Gather, GatherOptions, PkgFlag
HAS_DNF = True
except ImportError:
HAS_DNF = False
@ -36,18 +38,19 @@ def convert_pkg_map(data):
"""
result = {}
for pkg_type in data:
result[pkg_type] = sorted(set([os.path.basename(pkg['path'])
for pkg in data[pkg_type]]))
result[pkg_type] = sorted(
set([os.path.basename(pkg["path"]) for pkg in data[pkg_type]])
)
return result
class DepsolvingBase(object):
def setUp(self):
self.tmp_dir = tempfile.mkdtemp(prefix="test_compose_")
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/repo")
self.lookaside = os.path.join(os.path.dirname(__file__),
"fixtures/repos/repo-krb5-lookaside")
self.lookaside = os.path.join(
os.path.dirname(__file__), "fixtures/repos/repo-krb5-lookaside"
)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
@ -88,8 +91,13 @@ class DepsolvingBase(object):
packages = [
"dummy-kernel",
]
pkg_map = self.go(packages, None, greedy="none", fulltree=True,
fulltree_excludes=['dummy-kernel'])
pkg_map = self.go(
packages,
None,
greedy="none",
fulltree=True,
fulltree_excludes=["dummy-kernel"],
)
self.assertNotIn("dummy-kernel-3.1.0-1.i686.rpm", pkg_map["rpm"])
@ -181,9 +189,9 @@ class DepsolvingBase(object):
def test_bash_exclude_debuginfo(self):
packages = [
'dummy-bash',
'-dummy-bash-debuginfo',
'-dummy-bash-debugsource',
"dummy-bash",
"-dummy-bash-debuginfo",
"-dummy-bash-debugsource",
]
pkg_map = self.go(packages, None, greedy="none")
@ -219,9 +227,9 @@ class DepsolvingBase(object):
def test_bash_multilib_exclude_debuginfo(self):
packages = [
'dummy-bash.+',
'-dummy-bash-debuginfo',
'-dummy-bash-debugsource',
"dummy-bash.+",
"-dummy-bash-debuginfo",
"-dummy-bash-debugsource",
]
pkg_map = self.go(packages, None, greedy="none")
@ -439,8 +447,12 @@ class DepsolvingBase(object):
]
pkg_map = self.go(packages, None, greedy="none")
self.assertNotIn("dummy-release-client-workstation-1.0.0-1.i686.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-release-client-workstation-1.0.0-1.x86_64.rpm", pkg_map["rpm"])
self.assertNotIn(
"dummy-release-client-workstation-1.0.0-1.i686.rpm", pkg_map["rpm"]
)
self.assertNotIn(
"dummy-release-client-workstation-1.0.0-1.x86_64.rpm", pkg_map["rpm"]
)
self.assertNotIn("dummy-release-client-1.0.0-1.i686.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-release-client-1.0.0-1.x86_64.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-release-server-1.0.0-1.i686.rpm", pkg_map["rpm"])
@ -892,10 +904,11 @@ class DepsolvingBase(object):
#
# By default newer version should be pulled in.
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit")
self.lookaside = os.path.join(os.path.dirname(__file__),
"fixtures/repos/cockpit-lookaside")
self.lookaside = os.path.join(
os.path.dirname(__file__), "fixtures/repos/cockpit-lookaside"
)
packages = [
'dummy-cockpit-docker',
"dummy-cockpit-docker",
]
pkg_map = self.go(packages, None, lookaside=self.lookaside)
@ -920,11 +933,12 @@ class DepsolvingBase(object):
# satisfied by the older version in lookaside. No broken dependencies
# should be reported.
self.repo = os.path.join(os.path.dirname(__file__), "fixtures/repos/cockpit")
self.lookaside = os.path.join(os.path.dirname(__file__),
"fixtures/repos/cockpit-lookaside")
self.lookaside = os.path.join(
os.path.dirname(__file__), "fixtures/repos/cockpit-lookaside"
)
packages = [
'dummy-cockpit-docker',
'-dummy-cockpit-system',
"dummy-cockpit-docker",
"-dummy-cockpit-system",
]
pkg_map = self.go(packages, None, lookaside=self.lookaside)
@ -982,7 +996,9 @@ class DepsolvingBase(object):
packages = [
"Dummy-firefox",
]
pkg_map = self.go(packages, None, greedy="none", selfhosting=True, fulltree=True)
pkg_map = self.go(
packages, None, greedy="none", selfhosting=True, fulltree=True
)
self.assertNotIn("Dummy-firefox-16.0.1-2.i686.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-krb5-devel-1.10-5.i686.rpm", pkg_map["rpm"])
@ -1128,7 +1144,7 @@ class DepsolvingBase(object):
packages = [
"dummy-glibc*",
]
pkg_map = self.go(packages, None, multilib_blacklist=['dummy-glibc*'])
pkg_map = self.go(packages, None, multilib_blacklist=["dummy-glibc*"])
six.assertCountEqual(
self,
@ -1297,7 +1313,7 @@ class DepsolvingBase(object):
],
)
@unittest.skip('This test is broken')
@unittest.skip("This test is broken")
def test_bash_multilib_nogreedy(self):
packages = [
"dummy-bash.+",
@ -1429,9 +1445,7 @@ class DepsolvingBase(object):
"dummy-kmod-ipw3945-xen-1.2.0-4.20.x86_64.rpm",
],
)
self.assertEqual(
pkg_map["srpm"], ["dummy-ipw3945-kmod-1.2.0-4.20.src.rpm"]
)
self.assertEqual(pkg_map["srpm"], ["dummy-ipw3945-kmod-1.2.0-4.20.src.rpm"])
self.assertEqual(
pkg_map["debuginfo"], ["dummy-ipw3945-kmod-debuginfo-1.2.0-4.20.x86_64.rpm"]
)
@ -1440,8 +1454,13 @@ class DepsolvingBase(object):
packages = [
"dummy-lvm2-devel",
]
pkg_map = self.go(packages, None, greedy="none", fulltree=False,
multilib_methods=["devel", "runtime"])
pkg_map = self.go(
packages,
None,
greedy="none",
fulltree=False,
multilib_methods=["devel", "runtime"],
)
six.assertCountEqual(
self,
@ -1518,7 +1537,7 @@ class DepsolvingBase(object):
"dummy-freeipa-server-2.2.0-1.ppc64.rpm", # Important
"dummy-selinux-policy-minimal-3.10.0-121.noarch.rpm",
"dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", # Important
"dummy-selinux-policy-targeted-3.10.0-121.noarch.rpm"
"dummy-selinux-policy-targeted-3.10.0-121.noarch.rpm",
],
)
six.assertCountEqual(
@ -1538,7 +1557,9 @@ class DepsolvingBase(object):
]
pkg_map = self.go(packages, None, greedy="none", fulltree=False, arch="ppc64")
self.assertNotIn("dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", pkg_map["rpm"])
self.assertNotIn(
"dummy-selinux-policy-mls-3.10.0-121.noarch.rpm", pkg_map["rpm"]
)
six.assertCountEqual(
self,
@ -1559,9 +1580,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], [])
def test_selinux_policy_doc_fulltree(self):
packages = [
"dummy-selinux-policy-doc"
]
packages = ["dummy-selinux-policy-doc"]
pkg_map = self.go(packages, None, fulltree=True)
six.assertCountEqual(
@ -1637,10 +1656,10 @@ class DepsolvingBase(object):
packages = [
"dummy-imsettings",
]
groups = [
"basic-desktop"
]
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, arch="x86_64")
groups = ["basic-desktop"]
pkg_map = self.go(
packages, groups, greedy="none", fulltree=False, arch="x86_64"
)
self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
# prefers gnome over qt (condrequires in @basic-desktop)
@ -1660,11 +1679,10 @@ class DepsolvingBase(object):
packages = [
"dummy-imsettings",
]
groups = [
"basic-desktop"
]
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, nodeps=True,
arch="x86_64")
groups = ["basic-desktop"]
pkg_map = self.go(
packages, groups, greedy="none", fulltree=False, nodeps=True, arch="x86_64"
)
self.assertNotIn("dummy-imsettings-gnome-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
self.assertNotIn("dummy-imsettings-qt-1.2.9-1.x86_64.rpm", pkg_map["rpm"])
@ -1679,10 +1697,10 @@ class DepsolvingBase(object):
"dummy-imsettings",
"dummy-imsettings-qt",
]
groups = [
"basic-desktop"
]
pkg_map = self.go(packages, groups, greedy="none", fulltree=False, arch="x86_64")
groups = ["basic-desktop"]
pkg_map = self.go(
packages, groups, greedy="none", fulltree=False, arch="x86_64"
)
# prefers gnome over qt (condrequires in @basic-desktop)
six.assertCountEqual(
@ -1751,8 +1769,9 @@ class DepsolvingBase(object):
packages = [
"*",
]
pkg_map = self.go(packages, None, lookaside=self.repo,
nodeps=True, fulltree=True)
pkg_map = self.go(
packages, None, lookaside=self.repo, nodeps=True, fulltree=True
)
self.assertEqual(pkg_map["rpm"], [])
self.assertEqual(pkg_map["srpm"], [])
@ -1764,8 +1783,7 @@ class DepsolvingBase(object):
"-dummy-bas*",
"dummy-glibc",
]
pkg_map = self.go(packages, None,
greedy="none", nodeps=True, fulltree=True)
pkg_map = self.go(packages, None, greedy="none", nodeps=True, fulltree=True)
# neither dummy-bash or dummy-basesystem is pulled in
six.assertCountEqual(
@ -1796,10 +1814,7 @@ class DepsolvingBase(object):
six.assertCountEqual(
self,
pkg_map["rpm"],
[
"dummy-atlas-3.8.4-7.x86_64.rpm",
"dummy-atlas-devel-3.8.4-7.x86_64.rpm",
],
["dummy-atlas-3.8.4-7.x86_64.rpm", "dummy-atlas-devel-3.8.4-7.x86_64.rpm"],
)
self.assertEqual(pkg_map["srpm"], ["dummy-atlas-3.8.4-7.src.rpm"])
self.assertEqual(pkg_map["debuginfo"], [])
@ -1827,8 +1842,14 @@ class DepsolvingBase(object):
packages = [
"dummy-atlas-devel",
]
pkg_map = self.go(packages, None, greedy="build", multilib_methods=["devel", "runtime"],
fulltree=False, arch="x86_64")
pkg_map = self.go(
packages,
None,
greedy="build",
multilib_methods=["devel", "runtime"],
fulltree=False,
arch="x86_64",
)
six.assertCountEqual(
self,
@ -1847,8 +1868,14 @@ class DepsolvingBase(object):
packages = [
"dummy-atlas-devel.+",
]
pkg_map = self.go(packages, None, greedy="build", multilib_methods=["devel", "runtime"],
fulltree=False, arch="x86_64")
pkg_map = self.go(
packages,
None,
greedy="build",
multilib_methods=["devel", "runtime"],
fulltree=False,
arch="x86_64",
)
six.assertCountEqual(
self,
@ -1878,7 +1905,6 @@ class DepsolvingBase(object):
"dummy-atlas-sse-3.8.4-7.i686.rpm",
"dummy-atlas-sse2-3.8.4-7.i686.rpm",
"dummy-atlas-sse3-3.8.4-7.i686.rpm",
"dummy-atlas-3.8.4-7.x86_64.rpm",
"dummy-atlas-devel-3.8.4-7.x86_64.rpm",
"dummy-atlas-sse3-3.8.4-7.x86_64.rpm",
@ -2052,7 +2078,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["srpm"], ["dummy-mingw-qt5-qtbase-5.6.0-1.src.rpm"])
self.assertEqual(
pkg_map["debuginfo"],
["dummy-mingw32-qt5-qtbase-debuginfo-5.6.0-1.noarch.rpm"]
["dummy-mingw32-qt5-qtbase-debuginfo-5.6.0-1.noarch.rpm"],
)
def test_input_by_wildcard(self):
@ -2083,9 +2109,7 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], [])
def test_requires_pre_post(self):
packages = [
"dummy-perl"
]
packages = ["dummy-perl"]
pkg_map = self.go(packages, None)
six.assertCountEqual(
@ -2102,8 +2126,8 @@ class DepsolvingBase(object):
def test_multilib_exclude_pattern_does_not_match_noarch(self):
packages = [
'dummy-release-notes-en-US',
'-dummy-release-notes-en*.+',
"dummy-release-notes-en-US",
"-dummy-release-notes-en*.+",
]
pkg_map = self.go(packages, None)
@ -2113,9 +2137,8 @@ class DepsolvingBase(object):
self.assertEqual(pkg_map["debuginfo"], [])
@unittest.skipUnless(HAS_YUM, 'YUM only available on Python 2')
@unittest.skipUnless(HAS_YUM, "YUM only available on Python 2")
class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def setUp(self):
super(PungiYumDepsolvingTestCase, self).setUp()
self.ks = os.path.join(self.tmp_dir, "ks")
@ -2125,9 +2148,9 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
self.old_cwd = os.getcwd()
os.chdir(self.cwd)
logger = logging.getLogger('Pungi')
logger = logging.getLogger("Pungi")
if not logger.handlers:
formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s')
formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
console = logging.StreamHandler(sys.stdout)
console.setFormatter(formatter)
console.setLevel(logging.INFO)
@ -2137,9 +2160,17 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
os.chdir(self.old_cwd)
super(PungiYumDepsolvingTestCase, self).tearDown()
def go(self, packages, groups, lookaside=None, prepopulate=None,
fulltree_excludes=None, multilib_blacklist=None,
multilib_whitelist=None, **kwargs):
def go(
self,
packages,
groups,
lookaside=None,
prepopulate=None,
fulltree_excludes=None,
multilib_blacklist=None,
multilib_whitelist=None,
**kwargs
):
"""
Write a kickstart with given packages and groups, then run the
depsolving and parse the output.
@ -2147,19 +2178,25 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
p = PungiWrapper()
repos = {"repo": self.repo}
if lookaside:
repos['lookaside'] = lookaside
kwargs['lookaside_repos'] = ['lookaside']
p.write_kickstart(self.ks, repos, groups, packages, prepopulate=prepopulate,
repos["lookaside"] = lookaside
kwargs["lookaside_repos"] = ["lookaside"]
p.write_kickstart(
self.ks,
repos,
groups,
packages,
prepopulate=prepopulate,
multilib_whitelist=multilib_whitelist,
multilib_blacklist=multilib_blacklist,
fulltree_excludes=fulltree_excludes)
kwargs.setdefault('cache_dir', self.tmp_dir)
fulltree_excludes=fulltree_excludes,
)
kwargs.setdefault("cache_dir", self.tmp_dir)
# Unless the test specifies an arch, we need to default to x86_64.
# Otherwise the arch of current machine will be used, which will cause
# failure most of the time.
kwargs.setdefault('arch', 'x86_64')
kwargs.setdefault("arch", "x86_64")
p.run_pungi(self.ks, self.tmp_dir, 'DP', **kwargs)
p.run_pungi(self.ks, self.tmp_dir, "DP", **kwargs)
with open(self.out, "r") as f:
pkg_map, self.broken_deps, _ = p.parse_log(f)
return convert_pkg_map(pkg_map)
@ -2168,7 +2205,7 @@ class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def convert_dnf_packages(pkgs, flags):
convert_table = {
# Hawkey returns nosrc package as src
'dummy-AdobeReader_enu-9.5.1-1.src': 'dummy-AdobeReader_enu-9.5.1-1.nosrc',
"dummy-AdobeReader_enu-9.5.1-1.src": "dummy-AdobeReader_enu-9.5.1-1.nosrc",
}
result = set()
for p in pkgs:
@ -2178,20 +2215,20 @@ def convert_dnf_packages(pkgs, flags):
# Package is coming from lookaside repo, we don't want those in
# output.
continue
result.add(name + '.rpm')
result.add(name + ".rpm")
return sorted(result)
@unittest.skipUnless(HAS_DNF, 'Dependencies are not available')
@unittest.skipUnless(HAS_DNF, "Dependencies are not available")
class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def setUp(self):
super(DNFDepsolvingTestCase, self).setUp()
self.cachedir = os.path.join(self.tmp_dir, 'pungi_dnf_cache')
self.cachedir = os.path.join(self.tmp_dir, "pungi_dnf_cache")
self.get_langpacks = False
logger = logging.getLogger('gather_dnf')
logger = logging.getLogger("gather_dnf")
if not logger.handlers:
formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s')
formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
console = logging.StreamHandler(sys.stdout)
console.setFormatter(formatter)
console.setLevel(logging.INFO)
@ -2200,29 +2237,32 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
self.maxDiff = None
def go(self, packages, groups, lookaside=None, **kwargs):
arch = kwargs.pop('arch', 'x86_64')
if 'greedy' in kwargs:
kwargs['greedy_method'] = kwargs.pop('greedy')
if 'nodeps' in kwargs:
kwargs['resolve_deps'] = not kwargs.pop('nodeps')
arch = kwargs.pop("arch", "x86_64")
if "greedy" in kwargs:
kwargs["greedy_method"] = kwargs.pop("greedy")
if "nodeps" in kwargs:
kwargs["resolve_deps"] = not kwargs.pop("nodeps")
if lookaside:
kwargs['lookaside_repos'] = ['lookaside']
kwargs["lookaside_repos"] = ["lookaside"]
self.dnf = self.dnf_instance(arch, lookaside=lookaside, persistdir=self.tmp_dir)
if self.get_langpacks:
kwargs['langpacks'] = self.dnf.comps_wrapper.get_langpacks()
kwargs["langpacks"] = self.dnf.comps_wrapper.get_langpacks()
groups = groups or []
exclude_groups = []
_, conditional_packages = self.dnf.comps_wrapper.get_comps_packages(groups, exclude_groups)
_, conditional_packages = self.dnf.comps_wrapper.get_comps_packages(
groups, exclude_groups
)
self.g = Gather(self.dnf, GatherOptions(**kwargs))
self.g.logger.handlers = [h for h in self.g.logger.handlers
if h.name != 'capture-logs']
self.g.logger.handlers = [
h for h in self.g.logger.handlers if h.name != "capture-logs"
]
log_output = cStringIO()
handler = logging.StreamHandler(log_output)
handler.name = 'capture-logs'
handler.name = "capture-logs"
handler.setLevel(logging.WARNING)
self.g.logger.addHandler(handler)
@ -2231,12 +2271,15 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
_, self.broken_deps, _ = PungiWrapper().parse_log(log_output)
return {
'debuginfo': convert_dnf_packages(self.g.result_debug_packages,
self.g.result_package_flags),
'srpm': convert_dnf_packages(self.g.result_source_packages,
self.g.result_package_flags),
'rpm': convert_dnf_packages(self.g.result_binary_packages,
self.g.result_package_flags),
"debuginfo": convert_dnf_packages(
self.g.result_debug_packages, self.g.result_package_flags
),
"srpm": convert_dnf_packages(
self.g.result_source_packages, self.g.result_package_flags
),
"rpm": convert_dnf_packages(
self.g.result_binary_packages, self.g.result_package_flags
),
}
def dnf_instance(self, base_arch, exclude=None, lookaside=False, persistdir=None):
@ -2262,25 +2305,33 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
for pkg, flags in self.g.result_package_flags.items():
if nvra == "%s-%s-%s.%s" % (pkg.name, pkg.version, pkg.release, pkg.arch):
self.assertEqual(
flags, expected_flags,
"pkg: %s; flags: %s; expected flags: %s" % (nvra, flags, expected_flags))
flags,
expected_flags,
"pkg: %s; flags: %s; expected flags: %s"
% (nvra, flags, expected_flags),
)
found = True
if not found:
flags = set()
self.assertEqual(
flags, expected_flags,
"pkg: %s; flags: %s; expected flags: %s" % (nvra, flags, expected_flags))
flags,
expected_flags,
"pkg: %s; flags: %s; expected flags: %s"
% (nvra, flags, expected_flags),
)
def test_langpacks(self):
    """Re-run the shared langpacks scenario with langpack lookup enabled.

    Setting ``get_langpacks`` before delegating to the base-class test makes
    this class's ``go()`` helper resolve langpacks via
    ``self.dnf.comps_wrapper.get_langpacks()`` and pass them to the gather
    run, exercising the DNF langpacks code path.
    """
    self.get_langpacks = True
    super(DNFDepsolvingTestCase, self).test_langpacks()
@unittest.skip('DNF code does not support NVR as input')
@unittest.skip("DNF code does not support NVR as input")
def test_bash_older(self):
pass
def test_firefox_selfhosting_with_krb5_lookaside(self):
super(DNFDepsolvingTestCase, self).test_firefox_selfhosting_with_krb5_lookaside()
super(
DNFDepsolvingTestCase, self
).test_firefox_selfhosting_with_krb5_lookaside()
self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside])
self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside])
@ -2343,9 +2394,9 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
def test_bash_multilib_exclude_debuginfo(self):
packages = [
'dummy-bash.+',
'-dummy-bash-debuginfo',
'-dummy-bash-debugsource',
"dummy-bash.+",
"-dummy-bash-debuginfo",
"-dummy-bash-debugsource",
]
pkg_map = self.go(packages, None, greedy="none")
@ -2385,8 +2436,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
"-dummy-bas*",
"dummy-glibc",
]
pkg_map = self.go(packages, None,
greedy="none", nodeps=True, fulltree=True)
pkg_map = self.go(packages, None, greedy="none", nodeps=True, fulltree=True)
# neither dummy-bash or dummy-basesystem is pulled in
six.assertCountEqual(
@ -2465,8 +2515,13 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
packages = [
"dummy-lvm2-devel",
]
pkg_map = self.go(packages, None, greedy="none", fulltree=False,
multilib_methods=["devel", "runtime"])
pkg_map = self.go(
packages,
None,
greedy="none",
fulltree=False,
multilib_methods=["devel", "runtime"],
)
six.assertCountEqual(
self,

View File

@ -16,32 +16,46 @@ class TestWritePungiConfig(helpers.PungiTestCase):
def assertWritten(self, PungiWrapper, **kwargs):
wrapper = PungiWrapper.return_value
self.assertEqual(wrapper.mock_calls,
[mock.call.write_kickstart(**kwargs)])
self.assertEqual(wrapper.mock_calls, [mock.call.write_kickstart(**kwargs)])
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
@mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_correct(self, PungiWrapper):
pkgs = [('pkg1', None), ('pkg2', 'x86_64')]
grps = ['grp1']
filter = [('pkg3', None), ('pkg4', 'x86_64')]
pkgs = [("pkg1", None), ("pkg2", "x86_64")]
grps = ["grp1"]
filter = [("pkg3", None), ("pkg4", "x86_64")]
white = mock.Mock()
black = mock.Mock()
prepopulate = mock.Mock()
fulltree = mock.Mock()
deps.write_pungi_config(
self.compose, 'x86_64', self.compose.variants['Server'],
pkgs, grps, filter, white, black,
prepopulate=prepopulate, fulltree_excludes=fulltree,
self.compose,
"x86_64",
self.compose.variants["Server"],
pkgs,
grps,
filter,
white,
black,
prepopulate=prepopulate,
fulltree_excludes=fulltree,
package_sets=self.package_sets,
)
self.assertWritten(PungiWrapper, packages=['pkg1', 'pkg2.x86_64'],
ks_path=self.topdir + '/work/x86_64/pungi/Server.x86_64.conf',
lookaside_repos={}, multilib_whitelist=white, multilib_blacklist=black,
groups=['grp1'], prepopulate=prepopulate,
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
'comps-repo': self.topdir + '/work/x86_64/comps_repo_Server'},
exclude_packages=['pkg3', 'pkg4.x86_64'],
fulltree_excludes=fulltree)
self.assertWritten(
PungiWrapper,
packages=["pkg1", "pkg2.x86_64"],
ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
lookaside_repos={},
multilib_whitelist=white,
multilib_blacklist=black,
groups=["grp1"],
prepopulate=prepopulate,
repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=["pkg3", "pkg4.x86_64"],
fulltree_excludes=fulltree,
)
@mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_duplicated_package_name(self, PungiWrapper):
@ -53,73 +67,114 @@ class TestWritePungiConfig(helpers.PungiTestCase):
prepopulate = mock.Mock()
fulltree = mock.Mock()
deps.write_pungi_config(
self.compose, "x86_64", self.compose.variants["Server"],
pkgs, grps, filter, white, black,
prepopulate=prepopulate, fulltree_excludes=fulltree,
self.compose,
"x86_64",
self.compose.variants["Server"],
pkgs,
grps,
filter,
white,
black,
prepopulate=prepopulate,
fulltree_excludes=fulltree,
package_sets=self.package_sets,
)
self.assertWritten(PungiWrapper, packages=["pkg1", "pkg1.x86_64"],
self.assertWritten(
PungiWrapper,
packages=["pkg1", "pkg1.x86_64"],
ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
lookaside_repos={}, multilib_whitelist=white, multilib_blacklist=black,
groups=[], prepopulate=prepopulate,
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server"},
lookaside_repos={},
multilib_whitelist=white,
multilib_blacklist=black,
groups=[],
prepopulate=prepopulate,
repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=["pkg2", "pkg2.x86_64"],
fulltree_excludes=fulltree)
fulltree_excludes=fulltree,
)
@mock.patch('pungi.phases.gather.get_lookaside_repos')
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
@mock.patch("pungi.phases.gather.get_lookaside_repos")
@mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_with_lookaside(self, PungiWrapper, glr):
glr.return_value = ['http://example.com/repo']
pkgs = [('pkg1', None)]
glr.return_value = ["http://example.com/repo"]
pkgs = [("pkg1", None)]
deps.write_pungi_config(
self.compose, 'x86_64', self.compose.variants['Server'],
pkgs, [], [], [], [],
self.compose,
"x86_64",
self.compose.variants["Server"],
pkgs,
[],
[],
[],
[],
package_sets=self.package_sets,
)
self.assertWritten(PungiWrapper, packages=['pkg1'],
ks_path=self.topdir + '/work/x86_64/pungi/Server.x86_64.conf',
lookaside_repos={'lookaside-repo-0': 'http://example.com/repo'},
multilib_whitelist=[], multilib_blacklist=[],
groups=[], prepopulate=None,
repos={"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
'comps-repo': self.topdir + '/work/x86_64/comps_repo_Server'},
exclude_packages=[], fulltree_excludes=None)
self.assertEqual(glr.call_args_list,
[mock.call(self.compose, 'x86_64', self.compose.variants['Server'])])
self.assertWritten(
PungiWrapper,
packages=["pkg1"],
ks_path=self.topdir + "/work/x86_64/pungi/Server.x86_64.conf",
lookaside_repos={"lookaside-repo-0": "http://example.com/repo"},
multilib_whitelist=[],
multilib_blacklist=[],
groups=[],
prepopulate=None,
repos={
"pungi-repo-0": self.topdir + "/work/x86_64/repo/p1",
"comps-repo": self.topdir + "/work/x86_64/comps_repo_Server",
},
exclude_packages=[],
fulltree_excludes=None,
)
self.assertEqual(
glr.call_args_list,
[mock.call(self.compose, "x86_64", self.compose.variants["Server"])],
)
@mock.patch('pungi.phases.gather.methods.method_deps.PungiWrapper')
@mock.patch("pungi.phases.gather.methods.method_deps.PungiWrapper")
def test_without_input(self, PungiWrapper):
with self.assertRaises(RuntimeError) as ctx:
deps.write_pungi_config(self.compose, 'x86_64', self.compose.variants['Server'],
[], [], [], [], [])
deps.write_pungi_config(
self.compose,
"x86_64",
self.compose.variants["Server"],
[],
[],
[],
[],
[],
)
self.assertEqual(
str(ctx.exception),
'No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)')
"No packages included in Server.x86_64 (no comps groups, no input packages, no prepopulate)",
)
self.assertEqual(PungiWrapper.return_value.mock_calls, [])
class TestRaiseOnInvalidSigkeys(helpers.PungiTestCase):
def test_raise_on_invalid_sigkeys(self):
pkgset = {
"global": mock.Mock(),
}
pkgset["global"].invalid_sigkey_rpms = [{'name': 'pkg1'}]
pkgset["global"].raise_invalid_sigkeys_exception = mock.Mock(side_effect=RuntimeError())
pkgset["global"].invalid_sigkey_rpms = [{"name": "pkg1"}]
pkgset["global"].raise_invalid_sigkeys_exception = mock.Mock(
side_effect=RuntimeError()
)
result = {
'rpm': [{'path': 'pkg1-1-1.el7'}],
"rpm": [{"path": "pkg1-1-1.el7"}],
}
with self.assertRaises(RuntimeError):
deps.raise_on_invalid_sigkeys('', '', [pkgset], result)
deps.raise_on_invalid_sigkeys("", "", [pkgset], result)
class TestCheckDeps(helpers.PungiTestCase):
def setUp(self):
super(TestCheckDeps, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {})
self.arch = 'x86_64'
self.variant = self.compose.variants['Server']
self.arch = "x86_64"
self.variant = self.compose.variants["Server"]
def test_not_check_deps(self):
self.compose.conf["check_deps"] = False
@ -127,15 +182,16 @@ class TestCheckDeps(helpers.PungiTestCase):
def test_missing_deps(self):
self.compose.conf["check_deps"] = True
missing_deps = {'foo.noarch': set(['bar = 1.1'])}
missing_deps = {"foo.noarch": set(["bar = 1.1"])}
with self.assertRaises(RuntimeError) as ctx:
deps.check_deps(self.compose, self.arch, self.variant, missing_deps)
self.assertEqual(str(ctx.exception), 'Unresolved dependencies detected')
self.assertEqual(str(ctx.exception), "Unresolved dependencies detected")
self.assertEqual(
self.compose.log_error.call_args_list,
[
mock.call(
"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']" % (self.variant, self.arch)
"Unresolved dependencies for %s.%s in package foo.noarch: ['bar = 1.1']"
% (self.variant, self.arch)
)
]
],
)

View File

@ -244,6 +244,7 @@ class MockModule(object):
def get_runtime_streams(platform):
assert platform == "platform"
return [self.platform]
return [mock.Mock(get_runtime_streams=get_runtime_streams)]
def get_rpm_artifacts(self):
@ -368,9 +369,9 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
self.assertEqual(
self.compose.log_debug.call_args_list,
[
mock.call('[BEGIN] Running fus (arch: x86_64, variant: Server)'),
mock.call('[DONE ] Running fus (arch: x86_64, variant: Server)')
]
mock.call("[BEGIN] Running fus (arch: x86_64, variant: Server)"),
mock.call("[DONE ] Running fus (arch: x86_64, variant: Server)"),
],
)
def test_with_modules_with_devel(self, run, gc, po, wc):
@ -481,9 +482,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
"pkg-debuginfo-1.0-2.x86_64": dbg2,
}
self.phase.debuginfo = {
"x86_64": {
"pkg-debuginfo": [dbg1, dbg2],
},
"x86_64": {"pkg-debuginfo": [dbg1, dbg2]},
}
po.side_effect = [
([("pkg-1.0-1", "x86_64", frozenset())], []),
@ -639,15 +638,11 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
(
[
("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset())
("foo-1.0-1", "x86_64", frozenset()),
],
frozenset()),
(
[
("pkg-devel-1.0-1", "i686", frozenset()),
],
[],
frozenset(),
),
([("pkg-devel-1.0-1", "i686", frozenset())], []),
]
res = self.phase.run_solver(
@ -666,7 +661,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset()),
("pkg-devel-1.0-1", "i686", frozenset()),
]
],
)
self.assertEqual(res[1], set())
self.assertEqual(
@ -761,16 +756,11 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
(
[
("pkg-devel-1.0-1", "x86_64", frozenset()),
("foo-1.0-1", "x86_64", frozenset())
],
[],
),
(
[
("foo-1.0-1", "i686", frozenset()),
("foo-1.0-1", "x86_64", frozenset()),
],
[],
),
([("foo-1.0-1", "i686", frozenset())], []),
]
res = self.phase.run_solver(
@ -856,7 +846,7 @@ class TestExpandPackages(helpers.PungiTestCase):
nevra_to_pkg["pkg-debuginfo-3:1-2.%s" % debug_arch] = pkg._replace(
name="pkg-debuginfo",
arch=debug_arch,
file_path="/tmp/pkg-debuginfo.%s.rpm" % debug_arch
file_path="/tmp/pkg-debuginfo.%s.rpm" % debug_arch,
)
return nevra_to_pkg

View File

@ -9,7 +9,7 @@ import six
from pungi.phases.gather.methods import method_nodeps as nodeps
from tests import helpers
COMPS_FILE = os.path.join(helpers.FIXTURE_DIR, 'comps.xml')
COMPS_FILE = os.path.join(helpers.FIXTURE_DIR, "comps.xml")
class TestWritePungiConfig(helpers.PungiTestCase):
@ -19,7 +19,9 @@ class TestWritePungiConfig(helpers.PungiTestCase):
self.compose.paths.work.comps = mock.Mock(return_value=COMPS_FILE)
def test_expand_group(self):
packages = nodeps.expand_groups(self.compose, 'x86_64', None, ['core', 'text-internet'])
packages = nodeps.expand_groups(
self.compose, "x86_64", None, ["core", "text-internet"]
)
six.assertCountEqual(
self,
packages,

File diff suppressed because it is too large Load Diff

View File

@ -12,15 +12,14 @@ from pungi.graph import SimpleAcyclicOrientedGraph
class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def setUp(self):
    # Give every test a fresh, empty graph so edges added in one test
    # cannot leak into another.
    self.g = SimpleAcyclicOrientedGraph()
def test_simple_graph(self):
graph_data = (
('Client', 'Base'),
('Server', 'Base'),
('Workstation', 'Base'),
("Client", "Base"),
("Server", "Base"),
("Workstation", "Base"),
)
for start, end in graph_data:
@ -33,13 +32,13 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def test_complex_graph(self):
graph_data = (
('1', '3'), # 1 --> 3 --> 4 --> 5 ...
('3', '4'),
('4', '5'),
('4', '6'),
('2', '4'),
('7', '6'),
('6', '5'),
("1", "3"), # 1 --> 3 --> 4 --> 5 ...
("3", "4"),
("4", "5"),
("4", "6"),
("2", "4"),
("7", "6"),
("6", "5"),
)
for start, end in graph_data:
@ -47,13 +46,13 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
spanning_line = self.g.prune_graph()
# spanning line have to match completely to given graph
self.assertEqual(['1', '3', '2', '4', '7', '6', '5'], spanning_line)
self.assertEqual(["1", "3", "2", "4", "7", "6", "5"], spanning_line)
def test_cyclic_graph(self):
graph_data = (
('1', '2'),
('2', '3'),
('3', '1'),
("1", "2"),
("2", "3"),
("3", "1"),
)
with self.assertRaises(ValueError):
@ -62,28 +61,28 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
def test_two_separate_graph_lines(self):
graph_data = (
('1', '3'), # 1st graph
('3', '2'), # 1st graph
('6', '5'), # 2nd graph
("1", "3"), # 1st graph
("3", "2"), # 1st graph
("6", "5"), # 2nd graph
)
for start, end in graph_data:
self.g.add_edge(start, end)
spanning_line = self.g.prune_graph()
spanning_line_str = ''.join(spanning_line)
spanning_line_str = "".join(spanning_line)
self.assertEqual(5, len(spanning_line))
# Particular parts should match. Order of these parts is not crucial.
self.assertTrue(
"132" in spanning_line_str and "65" in spanning_line_str,
"Spanning line '%s' does not match to graphs" % spanning_line_str
"Spanning line '%s' does not match to graphs" % spanning_line_str,
)
def alternative_route_in_graph(self):
graph_data = (
('1', '3'),
('3', '2'),
('1', '2'),
("1", "3"),
("3", "2"),
("1", "2"),
)
for start, end in graph_data:
@ -91,4 +90,4 @@ class SimpleAcyclicOrientedGraphTestCase(unittest.TestCase):
spanning_line = self.g.prune_graph()
# spanning line have to match completely to given graph
self.assertEqual(['1', '3', '2'], spanning_line)
self.assertEqual(["1", "3", "2"], spanning_line)

File diff suppressed because it is too large Load Diff

View File

@ -16,162 +16,221 @@ from tests.helpers import DummyCompose, PungiTestCase
class TestImageChecksumPhase(PungiTestCase):
def test_phase_is_never_skipped(self):
    """The image checksum phase must run even with a completely empty config."""
    empty_conf_compose = DummyCompose(self.topdir, {})
    checksum_phase = ImageChecksumPhase(empty_conf_compose)
    self.assertFalse(checksum_phase.skip())
def test_config_skip_individual_with_multiple_algorithms(self):
compose = DummyCompose(self.topdir, {
'media_checksums': ['md5', 'sha1'],
'media_checksum_one_file': True
})
compose = DummyCompose(
self.topdir,
{"media_checksums": ["md5", "sha1"], "media_checksum_one_file": True},
)
phase = ImageChecksumPhase(compose)
with self.assertRaises(ValueError) as ctx:
phase.validate()
self.assertIn('media_checksum_one_file', str(ctx.exception))
self.assertIn("media_checksum_one_file", str(ctx.exception))
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_one_file(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['sha256'],
'media_checksum_one_file': True,
})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'sha256': 'cafebabe'}
phase.run()
dump_checksums.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/CHECKSUM',
set([('image.iso', 123, 'sha256', 'cafebabe')]))
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['sha256'])
compose.image.add_checksum.assert_called_once_with(None, 'sha256', 'cafebabe')
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
def test_checksum_save_individuals(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['md5', 'sha256'],
})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'}
phase.run()
dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')])),
mock.call(self.topdir + '/compose/Client/i386/iso/MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')]))],
any_order=True
compose = DummyCompose(
self.topdir,
{"media_checksums": ["sha256"], "media_checksum_one_file": True},
)
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
def test_checksum_one_file_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['sha256'],
'media_checksum_one_file': True,
'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s'
})
compose.compose_label = 'Alpha-1.0'
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'sha256': 'cafebabe'}
cc.return_value = {"sha256": "cafebabe"}
phase.run()
dump_checksums.assert_called_once_with(
self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM',
set([('image.iso', 123, 'sha256', 'cafebabe')]))
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['sha256'])
compose.image.add_checksum.assert_called_once_with(None, 'sha256', 'cafebabe')
self.topdir + "/compose/Client/i386/iso/CHECKSUM",
set([("image.iso", 123, "sha256", "cafebabe")]),
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["sha256"]
)
compose.image.add_checksum.assert_called_once_with(None, "sha256", "cafebabe")
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {"media_checksums": ["md5", "sha256"]})
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run()
dump_checksums.assert_has_calls(
[
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
)
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_one_file_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(
self.topdir,
{
"media_checksums": ["sha256"],
"media_checksum_one_file": True,
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s-%(dirname)s",
},
)
compose.compose_label = "Alpha-1.0"
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {"sha256": "cafebabe"}
phase.run()
dump_checksums.assert_called_once_with(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0_Alpha-1.0-iso-CHECKSUM",
set([("image.iso", 123, "sha256", "cafebabe")]),
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["sha256"]
)
compose.image.add_checksum.assert_called_once_with(None, "sha256", "cafebabe")
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals_custom_name(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['md5', 'sha256'],
'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s'
})
compose = DummyCompose(
self.topdir,
{
"media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s",
},
)
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'}
cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run()
dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')])),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')]))],
any_order=True
[
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
)
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')
@mock.patch('pungi.phases.image_checksum.dump_checksums')
def test_checksum_save_individuals_custom_name_str_format(self, dump_checksums, cc, exists):
compose = DummyCompose(self.topdir, {
'media_checksums': ['md5', 'sha256'],
'media_checksum_base_filename': '{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}'
})
@mock.patch("os.path.exists")
@mock.patch("kobo.shortcuts.compute_file_checksums")
@mock.patch("pungi.phases.image_checksum.dump_checksums")
def test_checksum_save_individuals_custom_name_str_format(
self, dump_checksums, cc, exists
):
compose = DummyCompose(
self.topdir,
{
"media_checksums": ["md5", "sha256"],
"media_checksum_base_filename": "{release_short}-{variant}-{version}-{date}{type_suffix}.{respin}",
},
)
phase = ImageChecksumPhase(compose)
exists.return_value = True
cc.return_value = {'md5': 'cafebabe', 'sha256': 'deadbeef'}
cc.return_value = {"md5": "cafebabe", "sha256": "deadbeef"}
phase.run()
dump_checksums.assert_has_calls(
[mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/image.iso.SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')])),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM',
set([('image.iso', 123, 'md5', 'cafebabe')])),
mock.call(self.topdir + '/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM',
set([('image.iso', 123, 'sha256', 'deadbeef')]))],
any_order=True
[
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir + "/compose/Client/i386/iso/image.iso.SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-MD5SUM",
set([("image.iso", 123, "md5", "cafebabe")]),
),
mock.call(
self.topdir
+ "/compose/Client/i386/iso/test-Client-1.0-20151203.t.0-SHA256SUM",
set([("image.iso", 123, "sha256", "deadbeef")]),
),
],
any_order=True,
)
cc.assert_called_once_with(
self.topdir + "/compose/Client/i386/iso/image.iso", ["md5", "sha256"]
)
compose.image.add_checksum.assert_has_calls(
[mock.call(None, "sha256", "deadbeef"), mock.call(None, "md5", "cafebabe")],
any_order=True,
)
cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['md5', 'sha256'])
compose.image.add_checksum.assert_has_calls([mock.call(None, 'sha256', 'deadbeef'),
mock.call(None, 'md5', 'cafebabe')],
any_order=True)
class TestDumpChecksums(unittest.TestCase):
@ -182,16 +241,20 @@ class TestDumpChecksums(unittest.TestCase):
shutil.rmtree(self.tmp_dir)
def test_dump_checksums(self):
dump_checksums(os.path.join(self.tmp_dir, 'CHECKSUM'),
[('file2.iso', 456, 'md5', 'cafebabe'),
('file1.iso', 123, 'md5', 'abcdef')])
dump_checksums(
os.path.join(self.tmp_dir, "CHECKSUM"),
[
("file2.iso", 456, "md5", "cafebabe"),
("file1.iso", 123, "md5", "abcdef"),
],
)
with open(os.path.join(self.tmp_dir, 'CHECKSUM'), 'r') as f:
data = f.read().rstrip().split('\n')
with open(os.path.join(self.tmp_dir, "CHECKSUM"), "r") as f:
data = f.read().rstrip().split("\n")
expected = [
'# file1.iso: 123 bytes',
'MD5 (file1.iso) = abcdef',
'# file2.iso: 456 bytes',
'MD5 (file2.iso) = cafebabe',
"# file1.iso: 123 bytes",
"MD5 (file1.iso) = abcdef",
"# file2.iso: 456 bytes",
"MD5 (file2.iso) = cafebabe",
]
self.assertEqual(expected, data)

View File

@ -14,7 +14,13 @@ import sys
from pungi.module_util import Modulemd
from pungi.phases import init
from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom, fake_run_in_threads
from tests.helpers import (
DummyCompose,
PungiTestCase,
touch,
mk_boom,
fake_run_in_threads,
)
@mock.patch("pungi.phases.init.run_in_threads", new=fake_run_in_threads)
@ -27,7 +33,6 @@ from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom, fake_run_
@mock.patch("pungi.phases.init.write_variant_comps")
@mock.patch("pungi.phases.init.write_prepopulate_file")
class TestInitPhase(PungiTestCase):
def test_run(
self,
write_prepopulate,
@ -56,23 +61,28 @@ class TestInitPhase(PungiTestCase):
six.assertCountEqual(
self,
create_comps.mock_calls,
[mock.call(compose, "x86_64", None), mock.call(compose, "amd64", None),
[
mock.call(compose, "x86_64", None),
mock.call(compose, "amd64", None),
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"]),
mock.call(compose, "x86_64", compose.all_variants["Server-optional"])],
mock.call(compose, "x86_64", compose.all_variants["Server-optional"]),
],
)
six.assertCountEqual(
self,
write_variant.mock_calls,
[mock.call(compose, "x86_64", compose.variants["Server"]),
[
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"]),
mock.call(compose, "x86_64", compose.all_variants["Server-optional"])],
mock.call(compose, "x86_64", compose.all_variants["Server-optional"]),
],
)
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
@ -91,8 +101,8 @@ class TestInitPhase(PungiTestCase):
compose = DummyCompose(self.topdir, {})
compose.has_comps = True
compose.has_module_defaults = False
compose.variants['Everything'].groups = []
compose.variants['Everything'].modules = []
compose.variants["Everything"].groups = []
compose.variants["Everything"].modules = []
phase = init.InitPhase(compose)
phase.run()
@ -109,21 +119,26 @@ class TestInitPhase(PungiTestCase):
six.assertCountEqual(
self,
create_comps.mock_calls,
[mock.call(compose, "x86_64", None), mock.call(compose, "amd64", None),
[
mock.call(compose, "x86_64", None),
mock.call(compose, "amd64", None),
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"])],
mock.call(compose, "amd64", compose.variants["Everything"]),
],
)
six.assertCountEqual(
self,
write_variant.mock_calls,
[mock.call(compose, "x86_64", compose.variants["Server"]),
[
mock.call(compose, "x86_64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Server"]),
mock.call(compose, "amd64", compose.variants["Client"]),
mock.call(compose, "x86_64", compose.variants["Everything"]),
mock.call(compose, "amd64", compose.variants["Everything"])],
mock.call(compose, "amd64", compose.variants["Everything"]),
],
)
self.assertEqual(write_defaults.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
@ -185,96 +200,140 @@ class TestInitPhase(PungiTestCase):
class TestWriteArchComps(PungiTestCase):
@mock.patch('pungi.phases.init.run')
@mock.patch("pungi.phases.init.run")
def test_run(self, run):
compose = DummyCompose(self.topdir, {})
init.write_arch_comps(compose, 'x86_64')
init.write_arch_comps(compose, "x86_64")
self.assertEqual(run.mock_calls,
[mock.call(['comps_filter', '--arch=x86_64', '--no-cleanup',
'--output=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir,
self.topdir + '/work/global/comps/comps-global.xml'])])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"comps_filter",
"--arch=x86_64",
"--no-cleanup",
"--output=%s/work/x86_64/comps/comps-x86_64.xml" % self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
class TestCreateCompsRepo(PungiTestCase):
@mock.patch('pungi.phases.init.run')
@mock.patch("pungi.phases.init.run")
def test_run(self, run):
compose = DummyCompose(self.topdir, {
'createrepo_checksum': 'sha256',
})
compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
init.create_comps_repo(compose, 'x86_64', None)
init.create_comps_repo(compose, "x86_64", None)
self.assertEqual(run.mock_calls,
[mock.call(['createrepo_c', self.topdir + '/work/x86_64/comps_repo',
'--outputdir=%s/work/x86_64/comps_repo' % self.topdir,
'--groupfile=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir,
'--update', '--no-database', '--checksum=sha256',
'--unique-md-filenames'],
logfile=self.topdir + '/logs/x86_64/comps_repo.x86_64.log',
show_cmd=True)])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"createrepo_c",
self.topdir + "/work/x86_64/comps_repo",
"--outputdir=%s/work/x86_64/comps_repo" % self.topdir,
"--groupfile=%s/work/x86_64/comps/comps-x86_64.xml"
% self.topdir,
"--update",
"--no-database",
"--checksum=sha256",
"--unique-md-filenames",
],
logfile=self.topdir + "/logs/x86_64/comps_repo.x86_64.log",
show_cmd=True,
)
],
)
@mock.patch('pungi.phases.init.run')
@mock.patch("pungi.phases.init.run")
def test_run_with_variant(self, run):
compose = DummyCompose(self.topdir, {
'createrepo_checksum': 'sha256',
})
compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
init.create_comps_repo(compose, 'x86_64', compose.variants['Server'])
init.create_comps_repo(compose, "x86_64", compose.variants["Server"])
self.assertEqual(run.mock_calls,
[mock.call(['createrepo_c', self.topdir + '/work/x86_64/comps_repo_Server',
'--outputdir=%s/work/x86_64/comps_repo_Server' % self.topdir,
'--groupfile=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,
'--update', '--no-database', '--checksum=sha256',
'--unique-md-filenames'],
logfile=self.topdir + '/logs/x86_64/comps_repo-Server.x86_64.log',
show_cmd=True)])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"createrepo_c",
self.topdir + "/work/x86_64/comps_repo_Server",
"--outputdir=%s/work/x86_64/comps_repo_Server" % self.topdir,
"--groupfile=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
"--update",
"--no-database",
"--checksum=sha256",
"--unique-md-filenames",
],
logfile=self.topdir + "/logs/x86_64/comps_repo-Server.x86_64.log",
show_cmd=True,
)
],
)
class TestWriteGlobalComps(PungiTestCase):
@mock.patch('pungi.phases.init.get_file_from_scm')
@mock.patch("pungi.phases.init.get_file_from_scm")
def test_run_local_file(self, get_file):
compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'})
compose = DummyCompose(self.topdir, {"comps_file": "some-file.xml"})
def gen_file(src, dest, compose=None):
self.assertEqual(src, '/home/releng/config/some-file.xml')
touch(os.path.join(dest, 'some-file.xml'))
self.assertEqual(src, "/home/releng/config/some-file.xml")
touch(os.path.join(dest, "some-file.xml"))
get_file.side_effect = gen_file
init.write_global_comps(compose)
self.assertTrue(os.path.isfile(self.topdir + '/work/global/comps/comps-global.xml'))
self.assertTrue(
os.path.isfile(self.topdir + "/work/global/comps/comps-global.xml")
)
class TestWriteVariantComps(PungiTestCase):
@mock.patch('pungi.phases.init.run')
@mock.patch('pungi.phases.init.CompsWrapper')
@mock.patch("pungi.phases.init.run")
@mock.patch("pungi.phases.init.CompsWrapper")
def test_run(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server']
variant = compose.variants["Server"]
comps = CompsWrapper.return_value
comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant)
init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls,
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts',
'--keep-empty-group=conflicts-server',
'--variant=Server',
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,
self.topdir + '/work/global/comps/comps-global.xml'])])
self.assertEqual(CompsWrapper.call_args_list,
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])
self.assertEqual(comps.filter_groups.call_args_list, [mock.call(variant.groups)])
self.assertEqual(comps.filter_environments.mock_calls,
[mock.call(variant.environments)])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"comps_filter",
"--arch=x86_64",
"--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(
comps.filter_groups.call_args_list, [mock.call(variant.groups)]
)
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch("pungi.phases.init.get_lookaside_groups")
@ -299,7 +358,8 @@ class TestWriteVariantComps(PungiTestCase):
"--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml" % self.topdir,
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
"--lookaside-group=foo",
"--lookaside-group=bar",
@ -319,81 +379,128 @@ class TestWriteVariantComps(PungiTestCase):
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run')
@mock.patch('pungi.phases.init.CompsWrapper')
@mock.patch("pungi.phases.init.run")
@mock.patch("pungi.phases.init.CompsWrapper")
def test_run_no_filter_without_groups(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server']
variant = compose.variants["Server"]
variant.groups = []
comps = CompsWrapper.return_value
comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant)
init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls,
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts',
'--keep-empty-group=conflicts-server',
'--variant=Server',
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,
self.topdir + '/work/global/comps/comps-global.xml'])])
self.assertEqual(CompsWrapper.call_args_list,
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"comps_filter",
"--arch=x86_64",
"--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(comps.filter_groups.call_args_list, [])
self.assertEqual(comps.filter_environments.mock_calls,
[mock.call(variant.environments)])
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run')
@mock.patch('pungi.phases.init.CompsWrapper')
@mock.patch("pungi.phases.init.run")
@mock.patch("pungi.phases.init.CompsWrapper")
def test_run_filter_for_modular(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server']
variant = compose.variants["Server"]
variant.groups = []
variant.modules = ['testmodule:2.0']
variant.modules = ["testmodule:2.0"]
comps = CompsWrapper.return_value
comps.filter_groups.return_value = []
init.write_variant_comps(compose, 'x86_64', variant)
init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls,
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts',
'--keep-empty-group=conflicts-server',
'--variant=Server',
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,
self.topdir + '/work/global/comps/comps-global.xml'])])
self.assertEqual(CompsWrapper.call_args_list,
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"comps_filter",
"--arch=x86_64",
"--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(comps.filter_groups.call_args_list, [mock.call([])])
self.assertEqual(comps.filter_environments.mock_calls,
[mock.call(variant.environments)])
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
@mock.patch('pungi.phases.init.run')
@mock.patch('pungi.phases.init.CompsWrapper')
@mock.patch("pungi.phases.init.run")
@mock.patch("pungi.phases.init.CompsWrapper")
def test_run_report_unmatched(self, CompsWrapper, run):
compose = DummyCompose(self.topdir, {})
variant = compose.variants['Server']
variant = compose.variants["Server"]
comps = CompsWrapper.return_value
comps.filter_groups.return_value = ['foo', 'bar']
comps.filter_groups.return_value = ["foo", "bar"]
init.write_variant_comps(compose, 'x86_64', variant)
init.write_variant_comps(compose, "x86_64", variant)
self.assertEqual(run.mock_calls,
[mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts',
'--keep-empty-group=conflicts-server',
'--variant=Server',
'--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,
self.topdir + '/work/global/comps/comps-global.xml'])])
self.assertEqual(CompsWrapper.call_args_list,
[mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])
self.assertEqual(comps.filter_groups.call_args_list, [mock.call(variant.groups)])
self.assertEqual(comps.filter_environments.mock_calls,
[mock.call(variant.environments)])
self.assertEqual(
run.mock_calls,
[
mock.call(
[
"comps_filter",
"--arch=x86_64",
"--keep-empty-group=conflicts",
"--keep-empty-group=conflicts-server",
"--variant=Server",
"--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
% self.topdir,
self.topdir + "/work/global/comps/comps-global.xml",
]
)
],
)
self.assertEqual(
CompsWrapper.call_args_list,
[mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
)
self.assertEqual(
comps.filter_groups.call_args_list, [mock.call(variant.groups)]
)
self.assertEqual(
comps.filter_environments.mock_calls, [mock.call(variant.environments)]
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
self.assertEqual(
compose.log_warning.call_args_list,
[mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'foo')),
mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'bar'))])
[
mock.call(init.UNMATCHED_GROUP_MSG % ("Server", "x86_64", "foo")),
mock.call(init.UNMATCHED_GROUP_MSG % ("Server", "x86_64", "bar")),
],
)
class TestGetLookasideGroups(PungiTestCase):
@ -426,7 +533,6 @@ class TestGetLookasideGroups(PungiTestCase):
@mock.patch("shutil.copytree")
@mock.patch("pungi.phases.init.get_dir_from_scm")
class TestWriteModuleDefaults(PungiTestCase):
def test_clone_git(self, gdfs, ct):
conf = {"scm": "git", "repo": "https://pagure.io/pungi.git", "dir": "."}
compose = DummyCompose(self.topdir, {"module_defaults_dir": conf})
@ -497,7 +603,6 @@ class TestWriteModuleDefaults(PungiTestCase):
@unittest.skipUnless(Modulemd, "Skipped test, no module support.")
class TestValidateModuleDefaults(PungiTestCase):
def _write_defaults(self, defs):
for mod_name, streams in defs.items():
for stream in streams:

View File

@ -4,6 +4,7 @@ import itertools
import mock
import os
import six
try:
import unittest2 as unittest
except ImportError:
@ -11,13 +12,13 @@ except ImportError:
from pungi.wrappers import iso
CORRECT_OUTPUT = '''dummy.iso: 31ff3e405e26ad01c63b62f6b11d30f6
CORRECT_OUTPUT = """dummy.iso: 31ff3e405e26ad01c63b62f6b11d30f6
Fragment sums: 6eb92e7bda221d7fe5f19b4d21468c9bf261d84c96d145d96c76444b9cbc
Fragment count: 20
Supported ISO: no
'''
"""
INCORRECT_OUTPUT = '''This should never happen: File not found'''
INCORRECT_OUTPUT = """This should never happen: File not found"""
# Cached to use in tests that mock os.listdir
orig_listdir = os.listdir
@ -35,40 +36,54 @@ def fake_listdir(pattern, result=None, exc=None):
raise exc
return result
return orig_listdir(path)
return worker
class TestIsoUtils(unittest.TestCase):
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.wrappers.iso.run")
def test_get_implanted_md5_correct(self, mock_run):
mock_run.return_value = (0, CORRECT_OUTPUT)
logger = mock.Mock()
self.assertEqual(iso.get_implanted_md5('dummy.iso', logger=logger),
'31ff3e405e26ad01c63b62f6b11d30f6')
self.assertEqual(mock_run.call_args_list,
[mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'],
universal_newlines=True)])
self.assertEqual(
iso.get_implanted_md5("dummy.iso", logger=logger),
"31ff3e405e26ad01c63b62f6b11d30f6",
)
self.assertEqual(
mock_run.call_args_list,
[
mock.call(
["/usr/bin/checkisomd5", "--md5sumonly", "dummy.iso"],
universal_newlines=True,
)
],
)
self.assertEqual(logger.mock_calls, [])
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.wrappers.iso.run")
def test_get_implanted_md5_incorrect(self, mock_run):
mock_run.return_value = (0, INCORRECT_OUTPUT)
logger = mock.Mock()
self.assertEqual(iso.get_implanted_md5('dummy.iso', logger=logger), None)
self.assertEqual(mock_run.call_args_list,
[mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'],
universal_newlines=True)])
self.assertEqual(iso.get_implanted_md5("dummy.iso", logger=logger), None)
self.assertEqual(
mock_run.call_args_list,
[
mock.call(
["/usr/bin/checkisomd5", "--md5sumonly", "dummy.iso"],
universal_newlines=True,
)
],
)
self.assertTrue(len(logger.mock_calls) > 0)
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_mount_iso(self, mock_run, mock_unmount):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('1' - guestmount is not available)
# for approach as a root, pair commands mount-umount are used
mock_run.side_effect = [(1, ''), (0, '')]
with iso.mount('dummy') as temp_dir:
mock_run.side_effect = [(1, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1])
@ -80,14 +95,14 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", ["root"]))
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_guestmount(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')]
with iso.mount('dummy') as temp_dir:
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1])
@ -99,14 +114,14 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", []))
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_guestmount_cleans_up_cache(self, mock_run, mock_unmount, mock_rmtree):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')]
with iso.mount('dummy') as temp_dir:
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1])
@ -118,14 +133,16 @@ class TestIsoUtils(unittest.TestCase):
@mock.patch("pungi.util.rmtree")
@mock.patch("os.listdir", new=fake_listdir("guestfs", OSError("No such file")))
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
def test_guestmount_handles_missing_cache(self, mock_run, mock_unmount, mock_rmtree):
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_guestmount_handles_missing_cache(
self, mock_run, mock_unmount, mock_rmtree
):
# first tuple is return value for command 'which guestmount'
# value determines type of the mount/unmount command ('0' - guestmount is available)
# for approach as a non-root, pair commands guestmount-fusermount are used
mock_run.side_effect = [(0, ''), (0, '')]
with iso.mount('dummy') as temp_dir:
mock_run.side_effect = [(0, ""), (0, "")]
with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2)
mount_call_str = str(mock_run.call_args_list[1])
@ -135,12 +152,12 @@ class TestIsoUtils(unittest.TestCase):
self.assertTrue(unmount_call_str.startswith("call(['fusermount'"))
self.assertFalse(os.path.isdir(temp_dir))
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_mount_iso_always_unmounts(self, mock_run, mock_unmount):
mock_run.side_effect = [(1, ''), (0, '')]
mock_run.side_effect = [(1, ""), (0, "")]
try:
with iso.mount('dummy') as temp_dir:
with iso.mount("dummy") as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
raise RuntimeError()
except RuntimeError:
@ -149,13 +166,13 @@ class TestIsoUtils(unittest.TestCase):
self.assertEqual(len(mock_unmount.call_args_list), 1)
self.assertFalse(os.path.isdir(temp_dir))
@mock.patch('pungi.util.run_unmount_cmd')
@mock.patch('pungi.wrappers.iso.run')
@mock.patch("pungi.util.run_unmount_cmd")
@mock.patch("pungi.wrappers.iso.run")
def test_mount_iso_raises_on_error(self, mock_run, mock_unmount):
log = mock.Mock()
mock_run.side_effect = [(1, ''), (1, 'Boom')]
mock_run.side_effect = [(1, ""), (1, "Boom")]
with self.assertRaises(RuntimeError):
with iso.mount('dummy', logger=log) as temp_dir:
with iso.mount("dummy", logger=log) as temp_dir:
self.assertTrue(os.path.isdir(temp_dir))
self.assertEqual(len(mock_run.call_args_list), 2)
self.assertEqual(len(mock_unmount.call_args_list), 0)
@ -166,29 +183,28 @@ class TestCmpGraftPoints(unittest.TestCase):
def assertSorted(self, *args):
"""Tests that all permutations of arguments yield the same sorted results."""
for perm in itertools.permutations(args):
self.assertEqual(sorted(perm, key=iso.graft_point_sort_key),
list(args))
self.assertEqual(sorted(perm, key=iso.graft_point_sort_key), list(args))
def test_eq(self):
self.assertSorted('pkgs/foo.rpm', 'pkgs/foo.rpm')
self.assertSorted("pkgs/foo.rpm", "pkgs/foo.rpm")
def test_rpms_sorted_alphabetically(self):
self.assertSorted('pkgs/bar.rpm', 'pkgs/foo.rpm')
self.assertSorted("pkgs/bar.rpm", "pkgs/foo.rpm")
def test_images_sorted_alphabetically(self):
self.assertSorted('aaa.img', 'images/foo', 'isolinux/foo')
self.assertSorted("aaa.img", "images/foo", "isolinux/foo")
def test_other_files_sorted_alphabetically(self):
self.assertSorted('bar.txt', 'foo.txt')
self.assertSorted("bar.txt", "foo.txt")
def test_rpms_after_images(self):
self.assertSorted('foo.ins', 'bar.rpm')
self.assertSorted("foo.ins", "bar.rpm")
def test_other_after_images(self):
self.assertSorted('EFI/anything', 'zzz.txt')
self.assertSorted("EFI/anything", "zzz.txt")
def test_rpm_after_other(self):
self.assertSorted('bbb.txt', 'aaa.rpm')
self.assertSorted("bbb.txt", "aaa.rpm")
def test_all_kinds(self):
self.assertSorted('etc/file', 'ppc/file', 'c.txt', 'd.txt', 'a.rpm', 'b.rpm')
self.assertSorted("etc/file", "ppc/file", "c.txt", "d.txt", "a.rpm", "b.rpm")

File diff suppressed because it is too large Load Diff

View File

@ -67,7 +67,6 @@ class TestLinkerBase(helpers.PungiTestCase):
class TestLinkerSymlink(TestLinkerBase):
def test_symlink(self):
path_dst = os.path.join(self.topdir, "symlink")
@ -81,7 +80,9 @@ class TestLinkerSymlink(TestLinkerBase):
self.linker.symlink(self.path_src, path_dst)
# linking existing file with different target must fail
self.assertRaises(OSError, self.linker.symlink, self.path_src, path_dst, relative=False)
self.assertRaises(
OSError, self.linker.symlink, self.path_src, path_dst, relative=False
)
def test_symlink_different_type(self):
# try to symlink 'symlink' -> 'another-file' ('symlink' already exists
@ -111,7 +112,6 @@ class TestLinkerSymlink(TestLinkerBase):
class TestLinkerHardlink(TestLinkerBase):
def test_hardlink(self):
path_dst = os.path.join(self.topdir, "hardlink")
@ -146,7 +146,7 @@ class TestLinkerCopy(TestLinkerBase):
def test_copy_to_existing_file_with_different_content(self):
path_dst = os.path.join(self.topdir, "b")
helpers.touch(path_dst, 'xxx')
helpers.touch(path_dst, "xxx")
self.assertRaises(Exception, self.linker.copy, self.path_src, path_dst)
def test_copy_to_directory(self):
@ -260,27 +260,37 @@ class TestLinkerLink(TestLinkerBase):
self.assertEqual(len(self.logger.mock_calls), 1)
def test_link_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link,
self.file1, self.file2, link_type="hardlink")
self.assertRaises(
OSError, self.linker.link, self.file1, self.file2, link_type="hardlink"
)
def test_symlink_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link,
self.file1, self.file2, link_type="symlink")
self.assertRaises(
OSError, self.linker.link, self.file1, self.file2, link_type="symlink"
)
def test_copy_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link,
self.file1, self.file2, link_type="copy")
self.assertRaises(
OSError, self.linker.link, self.file1, self.file2, link_type="copy"
)
def test_hardlink_or_copy_file_to_existing_destination(self):
self.assertRaises(OSError, self.linker.link,
self.file1, self.file2, link_type="hardlink-or-copy")
self.assertRaises(
OSError,
self.linker.link,
self.file1,
self.file2,
link_type="hardlink-or-copy",
)
def test_link_dir_hardlink(self):
self.linker.link(self.src_dir, self.dst_dir, link_type="hardlink")
self.assertTrue(os.path.isfile(self.dst_file1))
self.assertTrue(self.same_inode(self.file1, self.dst_file1))
self.assertTrue(self.same_inode(self.file3, self.dst_file3))
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3))
self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
# always preserve symlinks
self.assertEqual(os.readlink(self.dst_symlink1), "../file1")
@ -292,7 +302,9 @@ class TestLinkerLink(TestLinkerBase):
self.assertTrue(os.path.isfile(self.dst_file1))
self.assertFalse(self.same_inode(self.file1, self.dst_file1))
self.assertFalse(self.same_inode(self.file3, self.dst_file3))
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3))
self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
# always preserve symlinks
self.assertEqual(os.readlink(self.dst_symlink1), "../file1")
@ -323,7 +335,9 @@ class TestLinkerLink(TestLinkerBase):
self.assertTrue(os.path.isfile(self.dst_file1))
self.assertTrue(os.path.islink(self.dst_file1))
self.assertEqual(os.readlink(self.dst_file1), self.file1)
self.assertSameStat(os.path.dirname(self.file3), os.path.dirname(self.dst_file3))
self.assertSameStat(
os.path.dirname(self.file3), os.path.dirname(self.dst_file3)
)
self.assertTrue(os.path.isdir(os.path.dirname(self.file3)))
# always preserve symlinks

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -14,56 +14,79 @@ from pungi.wrappers.lorax import LoraxWrapper
class LoraxWrapperTest(unittest.TestCase):
def setUp(self):
self.lorax = LoraxWrapper()
def test_get_command_with_minimal_arguments(self):
cmd = self.lorax.get_lorax_cmd("product", "version", "release",
"/mnt/repo_baseurl", "/mnt/output_dir")
cmd = self.lorax.get_lorax_cmd(
"product", "version", "release", "/mnt/repo_baseurl", "/mnt/output_dir"
)
self.assertEqual(cmd[0], 'lorax')
self.assertEqual(cmd[0], "lorax")
six.assertCountEqual(
self,
cmd[1:],
["--product=product",
[
"--product=product",
"--version=version",
"--release=release",
"--source=file:///mnt/repo_baseurl",
"/mnt/output_dir"],
"/mnt/output_dir",
],
)
def test_get_command_with_all_arguments(self):
cmd = self.lorax.get_lorax_cmd("product", "version", "release",
"/mnt/repo_baseurl", "/mnt/output_dir",
variant="Server", bugurl="http://example.com/",
nomacboot=True, noupgrade=True, is_final=True,
buildarch='x86_64', volid='VOLUME_ID',
buildinstallpackages=['bash', 'vim'],
add_template=['t1', 't2'],
add_arch_template=['ta1', 'ta2'],
add_template_var=['v1', 'v2'],
add_arch_template_var=['va1', 'va2'],
cmd = self.lorax.get_lorax_cmd(
"product",
"version",
"release",
"/mnt/repo_baseurl",
"/mnt/output_dir",
variant="Server",
bugurl="http://example.com/",
nomacboot=True,
noupgrade=True,
is_final=True,
buildarch="x86_64",
volid="VOLUME_ID",
buildinstallpackages=["bash", "vim"],
add_template=["t1", "t2"],
add_arch_template=["ta1", "ta2"],
add_template_var=["v1", "v2"],
add_arch_template_var=["va1", "va2"],
log_dir="/tmp",
dracut_args=["--foo", "bar"])
dracut_args=["--foo", "bar"],
)
self.assertEqual(cmd[0], 'lorax')
self.assertEqual(cmd[0], "lorax")
six.assertCountEqual(
self,
cmd[1:],
["--product=product", "--version=version",
"--release=release", "--variant=Server",
[
"--product=product",
"--version=version",
"--release=release",
"--variant=Server",
"--source=file:///mnt/repo_baseurl",
"--bugurl=http://example.com/",
"--buildarch=x86_64", "--volid=VOLUME_ID",
"--nomacboot", "--noupgrade", "--isfinal",
"--installpkgs=bash", "--installpkgs=vim",
"--add-template=t1", "--add-template=t2",
"--add-arch-template=ta1", "--add-arch-template=ta2",
"--add-template-var=v1", "--add-template-var=v2",
"--add-arch-template-var=va1", "--add-arch-template-var=va2",
"--buildarch=x86_64",
"--volid=VOLUME_ID",
"--nomacboot",
"--noupgrade",
"--isfinal",
"--installpkgs=bash",
"--installpkgs=vim",
"--add-template=t1",
"--add-template=t2",
"--add-arch-template=ta1",
"--add-arch-template=ta2",
"--add-template-var=v1",
"--add-template-var=v2",
"--add-arch-template-var=va1",
"--add-arch-template-var=va2",
"--logfile=/tmp/lorax.log",
"--dracut-arg=--foo",
"--dracut-arg=bar",
"/mnt/output_dir"],
"/mnt/output_dir",
],
)

View File

@ -12,26 +12,25 @@ from pungi import media_split
class ConvertMediaSizeTestCase(unittest.TestCase):
def test_size_parser_correct_number_as_int(self):
self.assertEqual(media_split.convert_media_size(123), 123)
def test_size_parser_correct_number_as_str(self):
self.assertEqual(media_split.convert_media_size('123'), 123)
self.assertEqual(media_split.convert_media_size("123"), 123)
def test_size_parser_with_unit_b(self):
self.assertEqual(media_split.convert_media_size('123b'), 123)
self.assertEqual(media_split.convert_media_size("123b"), 123)
def test_size_parser_with_unit_k(self):
self.assertEqual(media_split.convert_media_size('123k'), 123 * 1024)
self.assertEqual(media_split.convert_media_size("123k"), 123 * 1024)
def test_size_parser_with_unit_M(self):
self.assertEqual(media_split.convert_media_size('123M'),
123 * 1024 * 1024)
self.assertEqual(media_split.convert_media_size("123M"), 123 * 1024 * 1024)
def test_size_parser_with_unit_G(self):
self.assertEqual(media_split.convert_media_size('123G'),
123 * 1024 * 1024 * 1024)
self.assertEqual(
media_split.convert_media_size("123G"), 123 * 1024 * 1024 * 1024
)
def test_size_parser_with_negative_number(self):
with self.assertRaises(ValueError):
@ -39,11 +38,10 @@ class ConvertMediaSizeTestCase(unittest.TestCase):
def test_size_parser_with_unknown_unit(self):
with self.assertRaises(ValueError):
media_split.convert_media_size('123X')
media_split.convert_media_size("123X")
class ConvertFileSizeTestCase(unittest.TestCase):
def test_round_up(self):
self.assertEqual(media_split.convert_file_size(123, 2048), 2048)
@ -56,71 +54,78 @@ def bl(s):
class MediaSplitterTestCase(unittest.TestCase):
def setUp(self):
self.compose = mock.Mock()
def test_sum_size(self):
ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20))
ms.add_file('second', bl(30))
ms.add_file('third', 10)
ms.add_file("first", bl(20))
ms.add_file("second", bl(30))
ms.add_file("third", 10)
self.assertEqual(ms.total_size, bl(50) + 10)
self.assertEqual(ms.total_size_in_blocks, bl(51))
def test_add_same_file_twice(self):
ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20))
ms.add_file('first', bl(20))
ms.add_file("first", bl(20))
ms.add_file("first", bl(20))
self.assertEqual(ms.total_size, bl(20))
def test_add_same_file_twice_with_different_size(self):
ms = media_split.MediaSplitter(bl(100))
ms.add_file('first', bl(20))
ms.add_file("first", bl(20))
with self.assertRaises(ValueError):
ms.add_file('first', bl(30))
ms.add_file("first", bl(30))
def test_add_too_big_file(self):
ms = media_split.MediaSplitter(bl(100))
with self.assertRaises(ValueError):
ms.add_file('too-big', bl(300))
ms.add_file("too-big", bl(300))
def test_fit_on_one(self):
ms = media_split.MediaSplitter(bl(100), compose=self.compose)
ms.add_file('first', bl(20))
ms.add_file('second', bl(30))
ms.add_file("first", bl(20))
ms.add_file("second", bl(30))
self.assertEqual(ms.split(),
[{'files': ['first', 'second'], 'size': bl(50)}])
self.assertEqual(ms.split(), [{"files": ["first", "second"], "size": bl(50)}])
def test_split_on_two_discs(self):
ms = media_split.MediaSplitter(bl(100), compose=self.compose)
ms.add_file('first', bl(25))
ms.add_file('second', bl(40))
ms.add_file('third', bl(80))
ms.add_file("first", bl(25))
ms.add_file("second", bl(40))
ms.add_file("third", bl(80))
self.assertEqual(ms.split(),
[{'files': ['first', 'second'], 'size': bl(65)},
{'files': ['third'], 'size': bl(80)}])
self.assertEqual(
ms.split(),
[
{"files": ["first", "second"], "size": bl(65)},
{"files": ["third"], "size": bl(80)},
],
)
def test_split_with_sticky_file(self):
ms = media_split.MediaSplitter(bl(100))
ms.add_file('sticky', bl(15), sticky=True)
ms.add_file('first', bl(25))
ms.add_file('second', bl(40))
ms.add_file('third', bl(80))
ms.add_file("sticky", bl(15), sticky=True)
ms.add_file("first", bl(25))
ms.add_file("second", bl(40))
ms.add_file("third", bl(80))
self.assertEqual(ms.split(),
[{'files': ['sticky', 'first', 'second'], 'size': bl(80)},
{'files': ['sticky', 'third'], 'size': bl(95)}])
self.assertEqual(
ms.split(),
[
{"files": ["sticky", "first", "second"], "size": bl(80)},
{"files": ["sticky", "third"], "size": bl(95)},
],
)
def test_split_unlimited_media(self):
ms = media_split.MediaSplitter(None, compose=self.compose)
ms.add_file('first', bl(25))
ms.add_file('second', bl(40))
ms.add_file('third', bl(80))
ms.add_file("first", bl(25))
ms.add_file("second", bl(40))
ms.add_file("third", bl(80))
self.assertEqual(ms.split(),
[{'files': ['first', 'second', 'third'], 'size': bl(145)}])
self.assertEqual(
ms.split(), [{"files": ["first", "second", "third"], "size": bl(145)}]
)

View File

@ -11,119 +11,127 @@ from pungi.compose_metadata import discinfo
class DiscInfoTestCase(helpers.PungiTestCase):
def setUp(self):
super(DiscInfoTestCase, self).setUp()
os.environ['SOURCE_DATE_EPOCH'] = '101010101'
self.path = os.path.join(self.topdir, 'compose/Server/x86_64/os/.discinfo')
os.environ["SOURCE_DATE_EPOCH"] = "101010101"
self.path = os.path.join(self.topdir, "compose/Server/x86_64/os/.discinfo")
def test_write_discinfo_variant(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
})
compose = helpers.DummyCompose(
self.topdir, {"release_name": "Test", "release_version": "1.0"}
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server'])
metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'),
['101010101',
'Test 1.0',
'x86_64',
'ALL'])
self.assertEqual(
f.read().strip().split("\n"), ["101010101", "Test 1.0", "x86_64", "ALL"]
)
self.assertEqual(discinfo.read_discinfo(self.path),
{'timestamp': '101010101',
'description': 'Test 1.0',
'disc_numbers': ['ALL'],
'arch': 'x86_64'})
self.assertEqual(
discinfo.read_discinfo(self.path),
{
"timestamp": "101010101",
"description": "Test 1.0",
"disc_numbers": ["ALL"],
"arch": "x86_64",
},
)
def test_write_discinfo_custom_description(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
'release_discinfo_description': 'Fuzzy %(variant_name)s.%(arch)s',
})
compose.variants['Server'].name = 'Server'
compose = helpers.DummyCompose(
self.topdir,
{
"release_name": "Test",
"release_version": "1.0",
"release_discinfo_description": "Fuzzy %(variant_name)s.%(arch)s",
},
)
compose.variants["Server"].name = "Server"
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server'])
metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'),
['101010101',
'Fuzzy Server.x86_64',
'x86_64',
'ALL'])
self.assertEqual(
f.read().strip().split("\n"),
["101010101", "Fuzzy Server.x86_64", "x86_64", "ALL"],
)
def test_write_discinfo_layered_product(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
'base_product_name': 'Base',
'base_product_version': 42,
})
compose = helpers.DummyCompose(
self.topdir,
{
"release_name": "Test",
"release_version": "1.0",
"base_product_name": "Base",
"base_product_version": 42,
},
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server'])
metadata.write_discinfo(compose, "x86_64", compose.variants["Server"])
with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'),
['101010101',
'Test 1.0 for Base 42',
'x86_64',
'ALL'])
self.assertEqual(
f.read().strip().split("\n"),
["101010101", "Test 1.0 for Base 42", "x86_64", "ALL"],
)
def test_write_discinfo_integrated_layered_product(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
})
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'],
type='layered-product', is_empty=False,
release_name='Integrated',
release_version='2.1',
parent=compose.variants['Server'])
compose = helpers.DummyCompose(
self.topdir, {"release_name": "Test", "release_version": "1.0"}
)
compose.variants["ILP"] = mock.Mock(
uid="Server",
arches=["x86_64"],
type="layered-product",
is_empty=False,
release_name="Integrated",
release_version="2.1",
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP'])
metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
with open(self.path) as f:
self.assertEqual(f.read().strip().split('\n'),
['101010101',
'Integrated 2.1 for Test 1',
'x86_64',
'ALL'])
self.assertEqual(
f.read().strip().split("\n"),
["101010101", "Integrated 2.1 for Test 1", "x86_64", "ALL"],
)
def test_addons_dont_have_discinfo(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
})
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'],
type='addon', is_empty=False,
parent=compose.variants['Server'])
compose = helpers.DummyCompose(
self.topdir, {"release_name": "Test", "release_version": "1.0"}
)
compose.variants["ILP"] = mock.Mock(
uid="Server",
arches=["x86_64"],
type="addon",
is_empty=False,
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP'])
metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
self.assertFalse(os.path.isfile(self.path))
class MediaRepoTestCase(helpers.PungiTestCase):
def setUp(self):
super(MediaRepoTestCase, self).setUp()
self.path = os.path.join(self.topdir, 'compose/Server/x86_64/os/media.repo')
self.path = os.path.join(self.topdir, "compose/Server/x86_64/os/media.repo")
def test_write_media_repo(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
})
compose = helpers.DummyCompose(
self.topdir, {"release_name": "Test", "release_version": "1.0"}
)
metadata.write_media_repo(compose, 'x86_64', compose.variants['Server'],
timestamp=123456)
metadata.write_media_repo(
compose, "x86_64", compose.variants["Server"], timestamp=123456
)
with open(self.path) as f:
lines = f.read().strip().split('\n')
self.assertEqual(lines[0], '[InstallMedia]')
lines = f.read().strip().split("\n")
self.assertEqual(lines[0], "[InstallMedia]")
six.assertCountEqual(
self,
lines[1:],
@ -137,15 +145,18 @@ class MediaRepoTestCase(helpers.PungiTestCase):
)
def test_addons_dont_have_media_repo(self):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
})
compose.variants['ILP'] = mock.Mock(uid='Server', arches=['x86_64'],
type='addon', is_empty=False,
parent=compose.variants['Server'])
compose = helpers.DummyCompose(
self.topdir, {"release_name": "Test", "release_version": "1.0"}
)
compose.variants["ILP"] = mock.Mock(
uid="Server",
arches=["x86_64"],
type="addon",
is_empty=False,
parent=compose.variants["Server"],
)
metadata.write_discinfo(compose, 'x86_64', compose.variants['ILP'])
metadata.write_discinfo(compose, "x86_64", compose.variants["ILP"])
self.assertFalse(os.path.isfile(self.path))
@ -155,7 +166,6 @@ BAR_MD5 = {"md5": "37b51d194a7513e45b56f6524f2d51f2"}
class TestPopulateExtraFiles(helpers.PungiTestCase):
def setUp(self):
super(TestPopulateExtraFiles, self).setUp()
self.variant = mock.Mock(uid="Server")
@ -185,12 +195,8 @@ class TestPopulateExtraFiles(helpers.PungiTestCase):
self,
self.metadata.mock_calls,
[
mock.call.add(
"Server", "x86_64", "Server/x86_64/os/foo", 3, FOO_MD5
),
mock.call.add(
"Server", "x86_64", "Server/x86_64/os/bar", 3, BAR_MD5
),
mock.call.add("Server", "x86_64", "Server/x86_64/os/foo", 3, FOO_MD5),
mock.call.add("Server", "x86_64", "Server/x86_64/os/bar", 3, BAR_MD5),
mock.call.dump_for_tree(
mock.ANY, "Server", "x86_64", "Server/x86_64/os/"
),

View File

@ -5,6 +5,7 @@ import json
import mock
import os
import sys
try:
import unittest2 as unittest
except ImportError:
@ -18,124 +19,131 @@ mock_datetime.utcnow.return_value = datetime(2017, 6, 28, 9, 34)
mock_datetime.side_effect = lambda *args, **kwargs: datetime(*args, **kwargs)
@mock.patch('pungi.util.makedirs')
@mock.patch('pungi.notifier.datetime', new=mock_datetime)
@mock.patch("pungi.util.makedirs")
@mock.patch("pungi.notifier.datetime", new=mock_datetime)
class TestNotifier(unittest.TestCase):
def setUp(self):
super(TestNotifier, self).setUp()
self.logfile = '/logs/notifications/notification-2017-06-28_09-34-00.log'
self.logfile = "/logs/notifications/notification-2017-06-28_09-34-00.log"
self.compose = mock.Mock(
compose_id='COMPOSE_ID',
compose_date='20171031',
compose_id="COMPOSE_ID",
compose_date="20171031",
compose_respin=1,
compose_label='Updates-20171031.1021',
compose_type='production',
compose_label="Updates-20171031.1021",
compose_type="production",
log_warning=mock.Mock(),
conf={
'release_name': 'Layer',
'release_short': 'L',
'release_version': '27',
'release_type': 'updates',
'release_is_layered': True,
'base_product_name': 'Base',
'base_product_short': 'B',
'base_product_version': '1',
'base_product_type': 'ga',
"release_name": "Layer",
"release_short": "L",
"release_version": "27",
"release_type": "updates",
"release_is_layered": True,
"base_product_name": "Base",
"base_product_short": "B",
"base_product_version": "1",
"base_product_type": "ga",
},
paths=mock.Mock(
compose=mock.Mock(
topdir=mock.Mock(return_value='/a/b')
compose=mock.Mock(topdir=mock.Mock(return_value="/a/b")),
log=mock.Mock(topdir=mock.Mock(return_value="/logs")),
),
log=mock.Mock(
topdir=mock.Mock(return_value='/logs')
)
)
)
self.data = {'foo': 'bar', 'baz': 'quux'}
self.data = {"foo": "bar", "baz": "quux"}
def _call(self, script, cmd, **kwargs):
data = self.data.copy()
data['compose_id'] = 'COMPOSE_ID'
data['location'] = '/a/b'
data['compose_date'] = '20171031'
data['compose_type'] = 'production'
data['compose_respin'] = 1
data['compose_label'] = 'Updates-20171031.1021'
data['release_short'] = 'L'
data['release_name'] = 'Layer'
data['release_version'] = '27'
data['release_type'] = 'updates'
data['release_is_layered'] = True
data['base_product_name'] = 'Base'
data['base_product_version'] = '1'
data['base_product_short'] = 'B'
data['base_product_type'] = 'ga'
data["compose_id"] = "COMPOSE_ID"
data["location"] = "/a/b"
data["compose_date"] = "20171031"
data["compose_type"] = "production"
data["compose_respin"] = 1
data["compose_label"] = "Updates-20171031.1021"
data["release_short"] = "L"
data["release_name"] = "Layer"
data["release_version"] = "27"
data["release_type"] = "updates"
data["release_is_layered"] = True
data["base_product_name"] = "Base"
data["base_product_version"] = "1"
data["base_product_short"] = "B"
data["base_product_type"] = "ga"
data.update(kwargs)
return mock.call((script, cmd),
return mock.call(
(script, cmd),
stdin_data=json.dumps(data),
can_fail=True, return_stdout=False,
can_fail=True,
return_stdout=False,
workdir=self.compose.paths.compose.topdir.return_value,
universal_newlines=True, show_cmd=True, logfile=self.logfile)
universal_newlines=True,
show_cmd=True,
logfile=self.logfile,
)
@mock.patch('pungi.util.translate_path')
@mock.patch('kobo.shortcuts.run')
@mock.patch("pungi.util.translate_path")
@mock.patch("kobo.shortcuts.run")
def test_invokes_script(self, run, translate_path, makedirs):
run.return_value = (0, None)
translate_path.side_effect = lambda compose, x: x
n = PungiNotifier(['run-notify'])
n = PungiNotifier(["run-notify"])
n.compose = self.compose
n.send('cmd', **self.data)
n.send("cmd", **self.data)
makedirs.assert_called_once_with('/logs/notifications')
self.assertEqual(run.call_args_list, [self._call('run-notify', 'cmd')])
makedirs.assert_called_once_with("/logs/notifications")
self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
@mock.patch('pungi.util.translate_path')
@mock.patch('kobo.shortcuts.run')
@mock.patch("pungi.util.translate_path")
@mock.patch("kobo.shortcuts.run")
def test_invokes_multiple_scripts(self, run, translate_path, makedirs):
run.return_value = (0, None)
translate_path.side_effect = lambda compose, x: x
n = PungiNotifier(['run-notify', 'ping-user'])
n = PungiNotifier(["run-notify", "ping-user"])
n.compose = self.compose
n.send('cmd', **self.data)
n.send("cmd", **self.data)
self.assertEqual(
sorted(run.call_args_list),
sorted([self._call('run-notify', 'cmd'),
self._call('ping-user', 'cmd')]))
sorted([self._call("run-notify", "cmd"), self._call("ping-user", "cmd")]),
)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_translates_path(self, run, makedirs):
self.compose.paths.compose.topdir.return_value = '/root/a/b'
self.compose.conf["translate_paths"] = [("/root/", "http://example.com/compose/")]
self.compose.paths.compose.topdir.return_value = "/root/a/b"
self.compose.conf["translate_paths"] = [
("/root/", "http://example.com/compose/")
]
run.return_value = (0, None)
n = PungiNotifier(['run-notify'])
n = PungiNotifier(["run-notify"])
n.compose = self.compose
n.send('cmd', **self.data)
n.send("cmd", **self.data)
self.assertEqual(
run.call_args_list,
[self._call('run-notify', 'cmd', location='http://example.com/compose/a/b')])
[
self._call(
"run-notify", "cmd", location="http://example.com/compose/a/b"
)
],
)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_does_not_run_without_config(self, run, makedirs):
n = PungiNotifier(None)
n.send('cmd', foo='bar', baz='quux')
n.send("cmd", foo="bar", baz="quux")
self.assertFalse(run.called)
@mock.patch('pungi.util.translate_path')
@mock.patch('kobo.shortcuts.run')
@mock.patch("pungi.util.translate_path")
@mock.patch("kobo.shortcuts.run")
def test_logs_warning_on_failure(self, run, translate_path, makedirs):
translate_path.side_effect = lambda compose, x: x
run.return_value = (1, None)
n = PungiNotifier(['run-notify'])
n = PungiNotifier(["run-notify"])
n.compose = self.compose
n.send('cmd', **self.data)
n.send("cmd", **self.data)
self.assertEqual(run.call_args_list, [self._call('run-notify', 'cmd')])
self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
self.assertTrue(self.compose.log_warning.called)

View File

@ -730,9 +730,7 @@ class TestPrepareComposeDir(PungiTestCase):
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global")))
self.assertFileContent(
os.path.join(self.topdir, "STATUS"), "STARTED"
)
self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
def test_restarting_compose(self, gtd):
args = mock.Mock(name="args", spec=["label", "compose_path"])
@ -914,7 +912,7 @@ class TestSendNotification(BaseTestCase):
self.assertEqual(len(notif.mock_calls), 2)
self.assertEqual(notif.mock_calls[0], mock.call(["handler"]))
_, args, kwargs = notif.mock_calls[1]
self.assertEqual(args, ("status-change", ))
self.assertEqual(args, ("status-change",))
self.assertEqual(
kwargs,
{

View File

@ -13,13 +13,10 @@ from pungi.phases import osbs
class OSBSPhaseTest(helpers.PungiTestCase):
@mock.patch('pungi.phases.osbs.ThreadPool')
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_run(self, ThreadPool):
cfg = helpers.IterableMock()
compose = helpers.DummyCompose(self.topdir, {
'osbs': {'^Everything$': cfg}
})
compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": cfg}})
pool = ThreadPool.return_value
@ -27,10 +24,12 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.run()
self.assertEqual(len(pool.add.call_args_list), 1)
self.assertEqual(pool.queue_put.call_args_list,
[mock.call((compose, compose.variants['Everything'], cfg))])
self.assertEqual(
pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], cfg))],
)
@mock.patch('pungi.phases.osbs.ThreadPool')
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_skip_without_config(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None
@ -38,11 +37,9 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase = osbs.OSBSPhase(compose)
self.assertTrue(phase.skip())
@mock.patch('pungi.phases.osbs.ThreadPool')
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_dump_metadata(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {
'osbs': {'^Everything$': {}}
})
compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
compose.just_phases = None
compose.skip_phases = []
compose.notifier = mock.Mock()
@ -52,11 +49,11 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.pool.metadata = METADATA
phase.dump_metadata()
with open(self.topdir + '/compose/metadata/osbs.json') as f:
with open(self.topdir + "/compose/metadata/osbs.json") as f:
data = json.load(f)
self.assertEqual(data, METADATA)
@mock.patch('pungi.phases.osbs.ThreadPool')
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_dump_metadata_after_skip(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None
@ -66,13 +63,11 @@ class OSBSPhaseTest(helpers.PungiTestCase):
phase.stop()
phase.dump_metadata()
self.assertFalse(os.path.isfile(self.topdir + '/compose/metadata/osbs.json'))
self.assertFalse(os.path.isfile(self.topdir + "/compose/metadata/osbs.json"))
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_request_push(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {
"osbs": {"^Everything$": {}}
})
compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
compose.just_phases = None
compose.skip_phases = []
compose.notifier = mock.Mock()
@ -87,112 +82,124 @@ class OSBSPhaseTest(helpers.PungiTestCase):
self.assertEqual(data, phase.pool.registries)
self.assertEqual(
compose.notifier.call_args_list,
[],
compose.notifier.call_args_list, [],
)
TASK_RESULT = {
'koji_builds': ['54321'],
'repositories': [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632',
]
"koji_builds": ["54321"],
"repositories": [
"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
],
}
BUILD_INFO = {
'completion_time': '2016-06-17 18:25:30',
'completion_ts': 1466187930.0,
'creation_event_id': 13227702,
'creation_time': '2016-06-17 18:25:57.611172',
'creation_ts': 1466187957.61117,
'epoch': None,
'extra': {'container_koji_task_id': '12345', 'image': {}},
'id': 54321,
'name': 'my-name',
'nvr': 'my-name-1.0-1',
'owner_id': 3436,
'owner_name': 'osbs',
'package_id': 50072,
'package_name': 'my-name',
'release': '1',
'source': 'git://example.com/repo?#BEEFCAFE',
'start_time': '2016-06-17 18:16:37',
'start_ts': 1466187397.0,
'state': 1,
'task_id': None,
'version': '1.0',
'volume_id': 0,
'volume_name': 'DEFAULT'
"completion_time": "2016-06-17 18:25:30",
"completion_ts": 1466187930.0,
"creation_event_id": 13227702,
"creation_time": "2016-06-17 18:25:57.611172",
"creation_ts": 1466187957.61117,
"epoch": None,
"extra": {"container_koji_task_id": "12345", "image": {}},
"id": 54321,
"name": "my-name",
"nvr": "my-name-1.0-1",
"owner_id": 3436,
"owner_name": "osbs",
"package_id": 50072,
"package_name": "my-name",
"release": "1",
"source": "git://example.com/repo?#BEEFCAFE",
"start_time": "2016-06-17 18:16:37",
"start_ts": 1466187397.0,
"state": 1,
"task_id": None,
"version": "1.0",
"volume_id": 0,
"volume_name": "DEFAULT",
}
ARCHIVES = [
{'build_id': 54321,
'buildroot_id': 2955357,
'checksum': 'a2922842dc80873ac782da048c54f6cc',
'checksum_type': 0,
'extra': {
'docker': {
'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7',
'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e',
'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']},
'image': {'arch': 'x86_64'}},
'filename': 'docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz',
'id': 1436049,
'metadata_only': False,
'size': 174038795,
'type_description': 'Tar file',
'type_extensions': 'tar tar.gz tar.bz2 tar.xz',
'type_id': 4,
'type_name': 'tar'}
{
"build_id": 54321,
"buildroot_id": 2955357,
"checksum": "a2922842dc80873ac782da048c54f6cc",
"checksum_type": 0,
"extra": {
"docker": {
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
"repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
},
"image": {"arch": "x86_64"},
},
"filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",
"id": 1436049,
"metadata_only": False,
"size": 174038795,
"type_description": "Tar file",
"type_extensions": "tar tar.gz tar.bz2 tar.xz",
"type_id": 4,
"type_name": "tar",
}
]
METADATA = {
'Server': {'x86_64': [{
'name': 'my-name',
'version': '1.0',
'release': '1',
'nvr': 'my-name-1.0-1',
'creation_time': BUILD_INFO['creation_time'],
'filename': ARCHIVES[0]['filename'],
'size': ARCHIVES[0]['size'],
'docker': {
'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7',
'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e',
'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']},
'image': {'arch': 'x86_64'},
'checksum': ARCHIVES[0]['checksum'],
}]}
"Server": {
"x86_64": [
{
"name": "my-name",
"version": "1.0",
"release": "1",
"nvr": "my-name-1.0-1",
"creation_time": BUILD_INFO["creation_time"],
"filename": ARCHIVES[0]["filename"],
"size": ARCHIVES[0]["size"],
"docker": {
"id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
"parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
"repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"],
},
"image": {"arch": "x86_64"},
"checksum": ARCHIVES[0]["checksum"],
}
]
}
}
SCRATCH_TASK_RESULT = {
'koji_builds': [],
'repositories': [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632',
]
"koji_builds": [],
"repositories": [
"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
],
}
SCRATCH_METADATA = {
"Server": {'scratch': [{
"Server": {
"scratch": [
{
"koji_task": 12345,
"repositories": [
'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632',
"registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632",
],
}
]
}]}
}
}
class OSBSThreadTest(helpers.PungiTestCase):
def setUp(self):
super(OSBSThreadTest, self).setUp()
self.pool = mock.Mock(metadata={}, registries={})
self.t = osbs.OSBSThread(self.pool)
self.compose = helpers.DummyCompose(self.topdir, {
'koji_profile': 'koji',
'translate_paths': [
(self.topdir, 'http://root'),
]
})
self.compose = helpers.DummyCompose(
self.topdir,
{
"koji_profile": "koji",
"translate_paths": [(self.topdir, "http://root")],
},
)
def _setupMock(self, KojiWrapper, scratch=False):
self.wrapper = KojiWrapper.return_value
@ -203,7 +210,10 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.wrapper.koji_proxy.getTaskResult.return_value = TASK_RESULT
self.wrapper.koji_proxy.getBuild.return_value = BUILD_INFO
self.wrapper.koji_proxy.listArchives.return_value = ARCHIVES
self.wrapper.koji_proxy.getLatestBuilds.return_value = [mock.Mock(), mock.Mock()]
self.wrapper.koji_proxy.getLatestBuilds.return_value = [
mock.Mock(),
mock.Mock(),
]
self.wrapper.koji_proxy.getNextRelease.return_value = 3
self.wrapper.watch_task.return_value = 0
@ -211,149 +221,168 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.maxDiff = None
if scratch:
metadata = copy.deepcopy(SCRATCH_METADATA)
metadata['Server']['scratch'][0]['compose_id'] = self.compose.compose_id
metadata['Server']['scratch'][0]['koji_task'] = 12345
metadata["Server"]["scratch"][0]["compose_id"] = self.compose.compose_id
metadata["Server"]["scratch"][0]["koji_task"] = 12345
else:
metadata = copy.deepcopy(METADATA)
metadata['Server']['x86_64'][0]['compose_id'] = self.compose.compose_id
metadata['Server']['x86_64'][0]['koji_task'] = 12345
metadata["Server"]["x86_64"][0]["compose_id"] = self.compose.compose_id
metadata["Server"]["x86_64"][0]["koji_task"] = 12345
self.assertEqual(self.pool.metadata, metadata)
def _assertCorrectCalls(self, opts, setupCalls=None, scratch=False):
setupCalls = setupCalls or []
options = {'yum_repourls': ['http://root/work/global/tmp-Server/compose-rpms-Server-1.repo']}
options = {
"yum_repourls": [
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo"
]
}
if scratch:
options['scratch'] = True
options["scratch"] = True
options.update(opts)
expect_calls = [mock.call.login()] + setupCalls
expect_calls.extend([
expect_calls.extend(
[
mock.call.koji_proxy.buildContainer(
'git://example.com/repo?#BEEFCAFE',
'f24-docker-candidate',
"git://example.com/repo?#BEEFCAFE",
"f24-docker-candidate",
options,
priority=None),
priority=None,
),
mock.call.watch_task(
12345, self.topdir + '/logs/global/osbs/Server-1-watch-task.log'),
mock.call.koji_proxy.getTaskResult(12345)])
12345, self.topdir + "/logs/global/osbs/Server-1-watch-task.log"
),
mock.call.koji_proxy.getTaskResult(12345),
]
)
if not scratch:
expect_calls.extend([mock.call.koji_proxy.getBuild(54321),
mock.call.koji_proxy.listArchives(54321)])
expect_calls.extend(
[
mock.call.koji_proxy.getBuild(54321),
mock.call.koji_proxy.listArchives(54321),
]
)
self.assertEqual(self.wrapper.mock_calls, expect_calls)
def _assertRepoFile(self, variants=None, gpgkey=None):
variants = variants or ['Server']
variants = variants or ["Server"]
for variant in variants:
with open(self.topdir + '/work/global/tmp-%s/compose-rpms-%s-1.repo' % (variant, variant)) as f:
lines = f.read().split('\n')
self.assertIn('baseurl=http://root/compose/%s/$basearch/os' % variant, lines)
with open(
self.topdir
+ "/work/global/tmp-%s/compose-rpms-%s-1.repo" % (variant, variant)
) as f:
lines = f.read().split("\n")
self.assertIn(
"baseurl=http://root/compose/%s/$basearch/os" % variant, lines
)
if gpgkey:
self.assertIn('gpgcheck=1', lines)
self.assertIn('gpgkey=%s' % gpgkey, lines)
self.assertIn("gpgcheck=1", lines)
self.assertIn("gpgkey=%s" % gpgkey, lines)
def _assertConfigCorrect(self, cfg):
config = copy.deepcopy(self.compose.conf)
config['osbs'] = {
'^Server$': cfg
}
config["osbs"] = {"^Server$": cfg}
self.assertEqual(([], []), checks.validate(config, offline=True))
def _assertConfigMissing(self, cfg, key):
config = copy.deepcopy(self.compose.conf)
config['osbs'] = {
'^Server$': cfg
}
config["osbs"] = {"^Server$": cfg}
errors, warnings = checks.validate(config, offline=True)
self.assertIn(
"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas" % cfg,
"Failed validation in osbs.^Server$: %r is not valid under any of the given schemas"
% cfg,
errors,
)
self.assertIn(" Possible reason: %r is a required property" % key, errors)
self.assertEqual([], warnings)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_minimal_run(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
}
self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'})
self._assertCorrectCalls({"git_branch": "f24-docker"})
self._assertCorrectMetadata()
self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_failable(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'failable': ['*']
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"failable": ["*"],
}
self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'})
self._assertCorrectCalls({"git_branch": "f24-docker"})
self._assertCorrectMetadata()
self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_more_args(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
'version': '1.0',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
}
self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'name': 'my-name', 'version': '1.0', 'git_branch': 'f24-docker'})
self._assertCorrectCalls(
{"name": "my-name", "version": "1.0", "git_branch": "f24-docker"}
)
self._assertCorrectMetadata()
self._assertRepoFile()
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
'version': '1.0',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
"repo": ["Everything", "http://pkgs.example.com/my.repo", "/extra/repo"],
}
self.compose.conf["translate_paths"].append(("/extra", "http://example.com"))
self._setupMock(KojiWrapper)
self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
options = {
'name': 'my-name',
'version': '1.0',
'git_branch': 'f24-docker',
'yum_repourls': [
'http://root/work/global/tmp-Server/compose-rpms-Server-1.repo',
'http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo',
'http://pkgs.example.com/my.repo',
"name": "my-name",
"version": "1.0",
"git_branch": "f24-docker",
"yum_repourls": [
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo",
"http://root/work/global/tmp/compose-rpms-local-1.repo",
]
],
}
self._assertCorrectCalls(options)
self._assertCorrectMetadata()
self._assertRepoFile(['Server', 'Everything'])
self._assertRepoFile(["Server", "Everything"])
with open(os.path.join(self.topdir, "work/global/tmp/compose-rpms-local-1.repo")) as f:
with open(
os.path.join(self.topdir, "work/global/tmp/compose-rpms-local-1.repo")
) as f:
self.assertIn("baseurl=http://example.com/repo\n", f)
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
@ -380,7 +409,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo",
]
],
}
self._assertCorrectCalls(options)
self._assertCorrectMetadata()
@ -411,146 +440,148 @@ class OSBSThreadTest(helpers.PungiTestCase):
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo",
]
],
}
self._assertCorrectCalls(options)
self._assertCorrectMetadata()
self._assertRepoFile(["Server", "Everything"])
self.assertEqual(self.t.pool.registries, {"my-name-1.0-1": [{"foo": "bar"}]})
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos_in_list(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
'version': '1.0',
'repo': ['Everything', 'Client', 'http://pkgs.example.com/my.repo'],
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
"repo": ["Everything", "Client", "http://pkgs.example.com/my.repo"],
}
self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
options = {
'name': 'my-name',
'version': '1.0',
'git_branch': 'f24-docker',
'yum_repourls': [
'http://root/work/global/tmp-Server/compose-rpms-Server-1.repo',
'http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo',
'http://root/work/global/tmp-Client/compose-rpms-Client-1.repo',
'http://pkgs.example.com/my.repo',
]
"name": "my-name",
"version": "1.0",
"git_branch": "f24-docker",
"yum_repourls": [
"http://root/work/global/tmp-Server/compose-rpms-Server-1.repo",
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://root/work/global/tmp-Client/compose-rpms-Client-1.repo",
"http://pkgs.example.com/my.repo",
],
}
self._assertCorrectCalls(options)
self._assertCorrectMetadata()
self._assertRepoFile(['Server', 'Everything', 'Client'])
self._assertRepoFile(["Server", "Everything", "Client"])
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_gpgkey_enabled(self, KojiWrapper):
gpgkey = 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release'
gpgkey = "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release"
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
'version': '1.0',
'repo': ['Everything', 'Client', 'http://pkgs.example.com/my.repo'],
'gpgkey': gpgkey,
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
"repo": ["Everything", "Client", "http://pkgs.example.com/my.repo"],
"gpgkey": gpgkey,
}
self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertRepoFile(['Server', 'Everything', 'Client'], gpgkey=gpgkey)
self._assertRepoFile(["Server", "Everything", "Client"], gpgkey=gpgkey)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_run_with_extra_repos_missing_variant(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
'version': '1.0',
'repo': 'Gold',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
"repo": "Gold",
}
self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper)
with self.assertRaises(RuntimeError) as ctx:
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self.assertIn('no variant Gold', str(ctx.exception))
self.assertIn("no variant Gold", str(ctx.exception))
def test_run_with_missing_url(self):
cfg = {
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'name': 'my-name',
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
}
self._assertConfigMissing(cfg, 'url')
self._assertConfigMissing(cfg, "url")
def test_run_with_missing_target(self):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'git_branch': 'f24-docker',
'name': 'my-name',
"url": "git://example.com/repo?#BEEFCAFE",
"git_branch": "f24-docker",
"name": "my-name",
}
self._assertConfigMissing(cfg, 'target')
self._assertConfigMissing(cfg, "target")
def test_run_with_missing_git_branch(self):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
}
self._assertConfigMissing(cfg, 'git_branch')
self._assertConfigMissing(cfg, "git_branch")
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_failing_task(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'fedora-24-docker-candidate',
'git_branch': 'f24-docker',
"url": "git://example.com/repo?#BEEFCAFE",
"target": "fedora-24-docker-candidate",
"git_branch": "f24-docker",
}
self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper)
self.wrapper.watch_task.return_value = 1
with self.assertRaises(RuntimeError) as ctx:
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self.assertRegexpMatches(str(ctx.exception), r"task 12345 failed: see .+ for details")
self.assertRegexpMatches(
str(ctx.exception), r"task 12345 failed: see .+ for details"
)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_failing_task_with_failable(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'fedora-24-docker-candidate',
'git_branch': 'f24-docker',
'failable': ['*']
"url": "git://example.com/repo?#BEEFCAFE",
"target": "fedora-24-docker-candidate",
"git_branch": "f24-docker",
"failable": ["*"],
}
self._assertConfigCorrect(cfg)
self._setupMock(KojiWrapper)
self.wrapper.watch_task.return_value = 1
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
@mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
def test_scratch_metadata(self, KojiWrapper):
cfg = {
'url': 'git://example.com/repo?#BEEFCAFE',
'target': 'f24-docker-candidate',
'git_branch': 'f24-docker',
'scratch': True,
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"scratch": True,
}
self._setupMock(KojiWrapper, scratch=True)
self._assertConfigCorrect(cfg)
self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
self._assertCorrectCalls({'git_branch': 'f24-docker'}, scratch=True)
self._assertCorrectCalls({"git_branch": "f24-docker"}, scratch=True)
self._assertCorrectMetadata(scratch=True)
self._assertRepoFile()

File diff suppressed because it is too large Load Diff

View File

@ -12,19 +12,17 @@ from pungi.phases import ostree
class OSTreePhaseTest(helpers.PungiTestCase):
@mock.patch('pungi.phases.ostree.ThreadPool')
@mock.patch("pungi.phases.ostree.ThreadPool")
def test_run(self, ThreadPool):
cfg = helpers.IterableMock()
compose = helpers.DummyCompose(self.topdir, {
'ostree': [
('^Everything$', {'x86_64': cfg})
],
'runroot': True,
"translate_paths": [
(self.topdir, "http://example.com")
],
})
compose = helpers.DummyCompose(
self.topdir,
{
"ostree": [("^Everything$", {"x86_64": cfg})],
"runroot": True,
"translate_paths": [(self.topdir, "http://example.com")],
},
)
pool = ThreadPool.return_value
@ -39,10 +37,12 @@ class OSTreePhaseTest(helpers.PungiTestCase):
"http://example.com/work/$basearch/repo/p2",
],
)
self.assertEqual(pool.queue_put.call_args_list,
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))])
self.assertEqual(
pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], "x86_64", cfg))],
)
@mock.patch('pungi.phases.ostree.ThreadPool')
@mock.patch("pungi.phases.ostree.ThreadPool")
def test_skip_without_config(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None
@ -50,14 +50,10 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase = ostree.OSTreePhase(compose)
self.assertTrue(phase.skip())
@mock.patch('pungi.phases.ostree.ThreadPool')
@mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config(self, ThreadPool):
cfg = helpers.IterableMock(get=lambda x, y: None)
compose = helpers.DummyCompose(self.topdir, {
'ostree': {
'^Everything$': cfg
}
})
compose = helpers.DummyCompose(self.topdir, {"ostree": {"^Everything$": cfg}})
pool = ThreadPool.return_value
@ -65,18 +61,18 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run()
self.assertEqual(len(pool.add.call_args_list), 2)
self.assertEqual(pool.queue_put.call_args_list,
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg)),
mock.call((compose, compose.variants['Everything'], 'amd64', cfg))])
self.assertEqual(
pool.queue_put.call_args_list,
[
mock.call((compose, compose.variants["Everything"], "x86_64", cfg)),
mock.call((compose, compose.variants["Everything"], "amd64", cfg)),
],
)
@mock.patch('pungi.phases.ostree.ThreadPool')
@mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config_limit_arches(self, ThreadPool):
cfg = helpers.IterableMock(get=lambda x, y: ['x86_64'])
compose = helpers.DummyCompose(self.topdir, {
'ostree': {
'^Everything$': cfg
}
})
cfg = helpers.IterableMock(get=lambda x, y: ["x86_64"])
compose = helpers.DummyCompose(self.topdir, {"ostree": {"^Everything$": cfg}})
pool = ThreadPool.return_value
@ -84,18 +80,18 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run()
self.assertEqual(len(pool.add.call_args_list), 1)
self.assertEqual(pool.queue_put.call_args_list,
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))])
self.assertEqual(
pool.queue_put.call_args_list,
[mock.call((compose, compose.variants["Everything"], "x86_64", cfg))],
)
@mock.patch('pungi.phases.ostree.ThreadPool')
@mock.patch("pungi.phases.ostree.ThreadPool")
def test_run_with_simple_config_limit_arches_two_blocks(self, ThreadPool):
cfg1 = helpers.IterableMock(get=lambda x, y: ['x86_64'])
cfg2 = helpers.IterableMock(get=lambda x, y: ['s390x'])
compose = helpers.DummyCompose(self.topdir, {
'ostree': {
'^Everything$': [cfg1, cfg2],
}
})
cfg1 = helpers.IterableMock(get=lambda x, y: ["x86_64"])
cfg2 = helpers.IterableMock(get=lambda x, y: ["s390x"])
compose = helpers.DummyCompose(
self.topdir, {"ostree": {"^Everything$": [cfg1, cfg2]}}
)
pool = ThreadPool.return_value
@ -103,219 +99,319 @@ class OSTreePhaseTest(helpers.PungiTestCase):
phase.run()
self.assertEqual(len(pool.add.call_args_list), 2)
self.assertEqual(pool.queue_put.call_args_list,
[mock.call((compose, compose.variants['Everything'], 'x86_64', cfg1)),
mock.call((compose, compose.variants['Everything'], 's390x', cfg2))])
self.assertEqual(
pool.queue_put.call_args_list,
[
mock.call((compose, compose.variants["Everything"], "x86_64", cfg1)),
mock.call((compose, compose.variants["Everything"], "s390x", cfg2)),
],
)
class OSTreeThreadTest(helpers.PungiTestCase):
def setUp(self):
super(OSTreeThreadTest, self).setUp()
self.repo = os.path.join(self.topdir, 'place/for/atomic')
os.makedirs(os.path.join(self.repo, 'refs', 'heads'))
self.repo = os.path.join(self.topdir, "place/for/atomic")
os.makedirs(os.path.join(self.repo, "refs", "heads"))
self.cfg = {
'repo': 'Everything',
'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'config_branch': 'f24',
'treefile': 'fedora-atomic-docker-host.json',
'ostree_repo': self.repo,
"repo": "Everything",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"config_branch": "f24",
"treefile": "fedora-atomic-docker-host.json",
"ostree_repo": self.repo,
}
self.compose = helpers.DummyCompose(self.topdir, {
'koji_profile': 'koji',
'runroot_tag': 'rrt',
'translate_paths': [
(self.topdir, 'http://example.com')
],
})
self.compose = helpers.DummyCompose(
self.topdir,
{
"koji_profile": "koji",
"runroot_tag": "rrt",
"translate_paths": [(self.topdir, "http://example.com")],
},
)
self.pool = mock.Mock()
def _dummy_config_repo(self, scm_dict, target, compose=None):
os.makedirs(target)
helpers.touch(os.path.join(target, 'fedora-atomic-docker-host.json'),
json.dumps({'ref': 'fedora-atomic/25/x86_64',
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']}))
helpers.touch(os.path.join(target, 'fedora-rawhide.repo'),
'[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall')
helpers.touch(os.path.join(target, 'fedora-24.repo'),
'[fedora-24]\nmetalink=who-is-the-fairest-of-them-all')
helpers.touch(os.path.join(target, 'fedora-23.repo'),
'[fedora-23]\nbaseurl=why-not-zoidberg?')
helpers.touch(
os.path.join(target, "fedora-atomic-docker-host.json"),
json.dumps(
{
"ref": "fedora-atomic/25/x86_64",
"repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
}
),
)
helpers.touch(
os.path.join(target, "fedora-rawhide.repo"),
"[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall",
)
helpers.touch(
os.path.join(target, "fedora-24.repo"),
"[fedora-24]\nmetalink=who-is-the-fairest-of-them-all",
)
helpers.touch(
os.path.join(target, "fedora-23.repo"),
"[fedora-23]\nbaseurl=why-not-zoidberg?",
)
def _mock_runroot(self, retcode, writefiles=None):
"""Pretend to run a task in runroot, creating a log file with given line
Also allows for writing other files of requested"""
def fake_runroot(self, log_file, **kwargs):
if writefiles:
logdir = os.path.dirname(log_file)
for filename in writefiles:
helpers.touch(os.path.join(logdir, filename),
'\n'.join(writefiles[filename]))
helpers.touch(
os.path.join(logdir, filename), "\n".join(writefiles[filename])
)
helpers.touch(os.path.join(logdir, filename + ".stamp"))
return {'task_id': 1234, 'retcode': retcode, 'output': 'Foo bar\n'}
return {"task_id": 1234, "retcode": retcode, "output": "Foo bar\n"}
return fake_runroot
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_extra_config_content(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.conf['runroot_weights'] = {'ostree': 123}
self.compose.conf["runroot_weights"] = {"ostree": 123}
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
extra_config_file = os.path.join(self.topdir, 'work/ostree-1/extra_config.json')
extra_config_file = os.path.join(self.topdir, "work/ostree-1/extra_config.json")
self.assertFalse(os.path.isfile(extra_config_file))
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertTrue(os.path.isfile(extra_config_file))
with open(extra_config_file, 'r') as f:
with open(extra_config_file, "r") as f:
extraconf_content = json.load(f)
proper_extraconf_content = {
"repo": [
{"name": "http:__example.com_repo_1",
"baseurl": "http://example.com/repo/1"},
{"name": "http:__example.com_work__basearch_comps_repo_Everything",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything"}
{
"name": "http:__example.com_repo_1",
"baseurl": "http://example.com/repo/1",
},
{
"name": "http:__example.com_work__basearch_comps_repo_Everything",
"baseurl": "http://example.com/work/$basearch/comps_repo_Everything",
},
]
}
self.assertEqual(proper_extraconf_content, extraconf_content)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.conf['runroot_weights'] = {'ostree': 123}
self.compose.conf["runroot_weights"] = {"ostree": 123}
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list,
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'branch': 'f24', 'dir': '.'},
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)])
self.assertEqual(koji.get_runroot_cmd.call_args_list,
[mock.call('rrt', 'x86_64',
['pungi-make-ostree',
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir,
'--treefile=%s/fedora-atomic-docker-host.json' % (
self.topdir + '/work/ostree-1/config_repo'),
'--extra-config=%s/extra_config.json' % (self.topdir + '/work/ostree-1')],
channel=None, mounts=[self.topdir, self.repo],
packages=['pungi', 'ostree', 'rpm-ostree'],
use_shell=True, new_chroot=True, weight=123)])
self.assertEqual(koji.run_runroot_cmd.call_args_list,
[mock.call(koji.get_runroot_cmd.return_value,
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')])
self.assertEqual(
get_dir_from_scm.call_args_list,
[
mock.call(
{
"scm": "git",
"repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
"branch": "f24",
"dir": ".",
},
self.topdir + "/work/ostree-1/config_repo",
compose=self.compose,
)
],
)
self.assertEqual(
koji.get_runroot_cmd.call_args_list,
[
mock.call(
"rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--extra-config=%s/extra_config.json"
% (self.topdir + "/work/ostree-1"),
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=123,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
self.assertTrue(os.path.isfile(os.path.join(self.topdir, 'work/ostree-1/extra_config.json')))
self.assertTrue(
os.path.isfile(os.path.join(self.topdir, "work/ostree-1/extra_config.json"))
)
self.assertTrue(os.path.isdir(self.repo))
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_fail(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg['failable'] = ['*']
self.cfg["failable"] = ["*"]
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(1)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.compose._logger.error.assert_has_calls([
mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'),
mock.call('Runroot task failed: 1234. See %s for more details.'
% (self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log'))
])
self.compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
),
mock.call(
"Runroot task failed: 1234. See %s for more details."
% (self.topdir + "/logs/x86_64/Everything/ostree-1/runroot.log")
),
]
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_handle_exception(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg['failable'] = ['*']
self.cfg["failable"] = ["*"]
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = helpers.boom
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.compose._logger.error.assert_has_calls([
mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'),
mock.call('BOOM')
])
self.compose._logger.error.assert_has_calls(
[
mock.call(
"[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway."
),
mock.call("BOOM"),
]
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.compose.notifier = mock.Mock()
self.compose.conf['translate_paths'] = [(self.topdir, 'http://example.com/')]
self.compose.conf["translate_paths"] = [(self.topdir, "http://example.com/")]
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(
0,
{'commitid.log': 'fca3465861a',
'create-ostree-repo.log':
['Doing work', 'fedora-atomic/25/x86_64 -> fca3465861a']})
{
"commitid.log": "fca3465861a",
"create-ostree-repo.log": [
"Doing work",
"fedora-atomic/25/x86_64 -> fca3465861a",
],
},
)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls,
[mock.call('ostree',
variant='Everything',
arch='x86_64',
ref='fedora-atomic/25/x86_64',
commitid='fca3465861a',
repo_path='http://example.com/place/for/atomic',
local_repo_path=self.repo)])
self.assertEqual(
self.compose.notifier.send.mock_calls,
[
mock.call(
"ostree",
variant="Everything",
arch="x86_64",
ref="fedora-atomic/25/x86_64",
commitid="fca3465861a",
repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message_custom_ref(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
self.cfg["ostree_ref"] = "my/${basearch}"
self.compose.notifier = mock.Mock()
self.compose.conf['translate_paths'] = [(self.topdir, 'http://example.com/')]
self.compose.conf["translate_paths"] = [(self.topdir, "http://example.com/")]
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(
0,
{'commitid.log': 'fca3465861a',
'create-ostree-repo.log':
['Doing work', 'fedora-atomic/25/x86_64 -> fca3465861a']})
{
"commitid.log": "fca3465861a",
"create-ostree-repo.log": [
"Doing work",
"fedora-atomic/25/x86_64 -> fca3465861a",
],
},
)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls,
[mock.call('ostree',
variant='Everything',
arch='x86_64',
ref='my/x86_64',
commitid='fca3465861a',
repo_path='http://example.com/place/for/atomic',
local_repo_path=self.repo)])
self.assertEqual(
self.compose.notifier.send.mock_calls,
[
mock.call(
"ostree",
variant="Everything",
arch="x86_64",
ref="my/x86_64",
commitid="fca3465861a",
repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_message_without_commit_id(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -323,23 +419,31 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji = KojiWrapper.return_value
koji.run_runroot_cmd.side_effect = self._mock_runroot(
0,
{'create-ostree-repo.log': ['Doing work', 'Weird output']})
0, {"create-ostree-repo.log": ["Doing work", "Weird output"]}
)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(self.compose.notifier.send.mock_calls,
[mock.call('ostree',
variant='Everything',
arch='x86_64',
ref='fedora-atomic/25/x86_64',
self.assertEqual(
self.compose.notifier.send.mock_calls,
[
mock.call(
"ostree",
variant="Everything",
arch="x86_64",
ref="fedora-atomic/25/x86_64",
commitid=None,
repo_path='http://example.com/place/for/atomic',
local_repo_path=self.repo)])
repo_path="http://example.com/place/for/atomic",
local_repo_path=self.repo,
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_send_no_message_on_failure(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -349,15 +453,18 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji.run_runroot_cmd.side_effect = self._mock_runroot(1)
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
self.assertRaises(RuntimeError, t.process,
(self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg),
1)
self.assertRaises(
RuntimeError,
t.process,
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg),
1,
)
self.assertEqual(self.compose.notifier.send.mock_calls, [])
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_update_summary(self, KojiWrapper, get_dir_from_scm):
self.cfg['update_summary'] = True
self.cfg["update_summary"] = True
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -366,33 +473,66 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list,
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'branch': 'f24', 'dir': '.'},
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)])
self.assertEqual(koji.get_runroot_cmd.call_args_list,
[mock.call('rrt', 'x86_64',
['pungi-make-ostree',
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir,
'--treefile=%s/fedora-atomic-docker-host.json' % (
self.topdir + '/work/ostree-1/config_repo'),
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir,
'--update-summary'],
channel=None, mounts=[self.topdir, self.repo],
packages=['pungi', 'ostree', 'rpm-ostree'],
use_shell=True, new_chroot=True, weight=None)])
self.assertEqual(koji.run_runroot_cmd.call_args_list,
[mock.call(koji.get_runroot_cmd.return_value,
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')])
self.assertEqual(
get_dir_from_scm.call_args_list,
[
mock.call(
{
"scm": "git",
"repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
"branch": "f24",
"dir": ".",
},
self.topdir + "/work/ostree-1/config_repo",
compose=self.compose,
)
],
)
self.assertEqual(
koji.get_runroot_cmd.call_args_list,
[
mock.call(
"rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
"--update-summary",
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_versioning_metadata(self, KojiWrapper, get_dir_from_scm):
self.cfg['version'] = '24'
self.cfg["version"] = "24"
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -401,33 +541,68 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list,
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'branch': 'f24', 'dir': '.'},
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)])
self.assertEqual(koji.get_runroot_cmd.call_args_list,
[mock.call('rrt', 'x86_64',
['pungi-make-ostree',
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir,
'--treefile=%s/fedora-atomic-docker-host.json' % (
self.topdir + '/work/ostree-1/config_repo'),
'--version=24',
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir],
channel=None, mounts=[self.topdir, self.repo],
packages=['pungi', 'ostree', 'rpm-ostree'],
use_shell=True, new_chroot=True, weight=None)])
self.assertEqual(koji.run_runroot_cmd.call_args_list,
[mock.call(koji.get_runroot_cmd.return_value,
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')])
self.assertEqual(
get_dir_from_scm.call_args_list,
[
mock.call(
{
"scm": "git",
"repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
"branch": "f24",
"dir": ".",
},
self.topdir + "/work/ostree-1/config_repo",
compose=self.compose,
)
],
)
self.assertEqual(
koji.get_runroot_cmd.call_args_list,
[
mock.call(
"rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--version=24",
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
def test_run_with_generated_versioning_metadata(self, KojiWrapper, get_dir_from_scm):
self.cfg['version'] = '!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN'
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run_with_generated_versioning_metadata(
self, KojiWrapper, get_dir_from_scm
):
self.cfg["version"] = "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN"
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -436,31 +611,64 @@ class OSTreeThreadTest(helpers.PungiTestCase):
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', self.cfg), 1)
t.process(
(self.compose, self.compose.variants["Everything"], "x86_64", self.cfg), 1
)
self.assertEqual(get_dir_from_scm.call_args_list,
[mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'branch': 'f24', 'dir': '.'},
self.topdir + '/work/ostree-1/config_repo', compose=self.compose)])
self.assertEqual(koji.get_runroot_cmd.call_args_list,
[mock.call('rrt', 'x86_64',
['pungi-make-ostree',
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s/logs/x86_64/Everything/ostree-1' % self.topdir,
'--treefile=%s/fedora-atomic-docker-host.json' % (
self.topdir + '/work/ostree-1/config_repo'),
'--version=25.20151203.t.0',
'--extra-config=%s/work/ostree-1/extra_config.json' % self.topdir],
channel=None, mounts=[self.topdir, self.repo],
packages=['pungi', 'ostree', 'rpm-ostree'],
use_shell=True, new_chroot=True, weight=None)])
self.assertEqual(koji.run_runroot_cmd.call_args_list,
[mock.call(koji.get_runroot_cmd.return_value,
log_file=self.topdir + '/logs/x86_64/Everything/ostree-1/runroot.log')])
self.assertEqual(
get_dir_from_scm.call_args_list,
[
mock.call(
{
"scm": "git",
"repo": "https://git.fedorahosted.org/git/fedora-atomic.git",
"branch": "f24",
"dir": ".",
},
self.topdir + "/work/ostree-1/config_repo",
compose=self.compose,
)
],
)
self.assertEqual(
koji.get_runroot_cmd.call_args_list,
[
mock.call(
"rrt",
"x86_64",
[
"pungi-make-ostree",
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s/logs/x86_64/Everything/ostree-1" % self.topdir,
"--treefile=%s/fedora-atomic-docker-host.json"
% (self.topdir + "/work/ostree-1/config_repo"),
"--version=25.20151203.t.0",
"--extra-config=%s/work/ostree-1/extra_config.json"
% self.topdir,
],
channel=None,
mounts=[self.topdir, self.repo],
packages=["pungi", "ostree", "rpm-ostree"],
use_shell=True,
new_chroot=True,
weight=None,
)
],
)
self.assertEqual(
koji.run_runroot_cmd.call_args_list,
[
mock.call(
koji.get_runroot_cmd.return_value,
log_file=self.topdir
+ "/logs/x86_64/Everything/ostree-1/runroot.log",
)
],
)
@mock.patch('pungi.wrappers.scm.get_dir_from_scm')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.wrappers.scm.get_dir_from_scm")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_write_extra_config_file(self, KojiWrapper, get_dir_from_scm):
get_dir_from_scm.side_effect = self._dummy_config_repo
@ -468,38 +676,44 @@ class OSTreeThreadTest(helpers.PungiTestCase):
koji.run_runroot_cmd.side_effect = self._mock_runroot(0)
cfg = {
'repo': [ # Variant type repos will not be included into extra_config. This part of the config is deprecated
'Everything', # do not include
"repo": [ # Variant type repos will not be included into extra_config. This part of the config is deprecated
"Everything", # do not include
{
'name': 'repo_a',
'baseurl': 'http://url/to/repo/a',
'exclude': 'systemd-container'
"name": "repo_a",
"baseurl": "http://url/to/repo/a",
"exclude": "systemd-container",
},
{ # do not include
'name': 'Server',
'baseurl': 'Server',
'exclude': 'systemd-container'
}
"name": "Server",
"baseurl": "Server",
"exclude": "systemd-container",
},
],
'keep_original_sources': True,
'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git',
'config_branch': 'f24',
'treefile': 'fedora-atomic-docker-host.json',
'ostree_repo': self.repo
"keep_original_sources": True,
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"config_branch": "f24",
"treefile": "fedora-atomic-docker-host.json",
"ostree_repo": self.repo,
}
t = ostree.OSTreeThread(self.pool, ["http://example.com/repo/1"])
t.process((self.compose, self.compose.variants['Everything'], 'x86_64', cfg), 1)
t.process((self.compose, self.compose.variants["Everything"], "x86_64", cfg), 1)
extra_config_file = os.path.join(self.topdir, 'work/ostree-1/extra_config.json')
extra_config_file = os.path.join(self.topdir, "work/ostree-1/extra_config.json")
self.assertTrue(os.path.isfile(extra_config_file))
with open(extra_config_file, 'r') as extra_config_fd:
with open(extra_config_file, "r") as extra_config_fd:
extra_config = json.load(extra_config_fd)
self.assertTrue(extra_config.get('keep_original_sources', False))
self.assertTrue(extra_config.get("keep_original_sources", False))
# should equal to number of valid repositories in cfg['repo'] + default repository + comps repository
self.assertEqual(len(extra_config.get('repo', [])), 3)
self.assertEqual(extra_config.get('repo').pop()['baseurl'],
'http://example.com/work/$basearch/comps_repo_Everything')
self.assertEqual(extra_config.get("repo").pop()["baseurl"], "http://example.com/repo/1")
self.assertEqual(extra_config.get('repo').pop()['baseurl'], 'http://url/to/repo/a')
self.assertEqual(len(extra_config.get("repo", [])), 3)
self.assertEqual(
extra_config.get("repo").pop()["baseurl"],
"http://example.com/work/$basearch/comps_repo_Everything",
)
self.assertEqual(
extra_config.get("repo").pop()["baseurl"], "http://example.com/repo/1"
)
self.assertEqual(
extra_config.get("repo").pop()["baseurl"], "http://url/to/repo/a"
)

View File

@ -14,24 +14,41 @@ from pungi import ostree
class OstreeTreeScriptTest(helpers.PungiTestCase):
def setUp(self):
super(OstreeTreeScriptTest, self).setUp()
self.repo = os.path.join(self.topdir, "atomic")
def _make_dummy_config_dir(self, path):
helpers.touch(os.path.join(path, 'fedora-atomic-docker-host.json'),
json.dumps({'ref': 'fedora-atomic/25/x86_64',
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']}))
helpers.touch(os.path.join(path, 'fedora-atomic-docker-host.yaml'),
yaml.dump({'ref': 'fedora-atomic/25/x86_64',
'repos': ['fedora-rawhide', 'fedora-24', 'fedora-23']}))
helpers.touch(os.path.join(path, 'fedora-rawhide.repo'),
'[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall')
helpers.touch(os.path.join(path, 'fedora-24.repo'),
'[fedora-24]\nmetalink=who-is-the-fairest-of-them-all')
helpers.touch(os.path.join(path, 'fedora-23.repo'),
'[fedora-23]\nbaseurl=why-not-zoidberg?')
helpers.touch(
os.path.join(path, "fedora-atomic-docker-host.json"),
json.dumps(
{
"ref": "fedora-atomic/25/x86_64",
"repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
}
),
)
helpers.touch(
os.path.join(path, "fedora-atomic-docker-host.yaml"),
yaml.dump(
{
"ref": "fedora-atomic/25/x86_64",
"repos": ["fedora-rawhide", "fedora-24", "fedora-23"],
}
),
)
helpers.touch(
os.path.join(path, "fedora-rawhide.repo"),
"[fedora-rawhide]\nmirrorlist=mirror-mirror-on-the-wall",
)
helpers.touch(
os.path.join(path, "fedora-24.repo"),
"[fedora-24]\nmetalink=who-is-the-fairest-of-them-all",
)
helpers.touch(
os.path.join(path, "fedora-23.repo"),
"[fedora-23]\nbaseurl=why-not-zoidberg?",
)
def assertCorrectCall(self, mock_run, extra_calls=[], extra_args=[]):
six.assertCountEqual(
@ -44,65 +61,76 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
"compose",
"tree",
"--repo=%s" % self.repo,
"--write-commitid-to=%s" % (self.topdir + "/logs/Atomic/commitid.log"),
"--touch-if-changed=%s.stamp" % (self.topdir + "/logs/Atomic/commitid.log"),
] + extra_args + [
self.topdir + "/fedora-atomic-docker-host.json"
],
"--write-commitid-to=%s"
% (self.topdir + "/logs/Atomic/commitid.log"),
"--touch-if-changed=%s.stamp"
% (self.topdir + "/logs/Atomic/commitid.log"),
]
+ extra_args
+ [self.topdir + "/fedora-atomic-docker-host.json"],
logfile=self.topdir + "/logs/Atomic/create-ostree-repo.log",
show_cmd=True,
stdout=True,
universal_newlines=True,
)
] + extra_calls
]
+ extra_calls,
)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_full_run(self, run):
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_on_existing_empty_dir(self, run):
os.mkdir(self.repo)
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_on_initialized_repo(self, run):
helpers.touch(os.path.join(self.repo, 'initialized'))
helpers.touch(os.path.join(self.repo, "initialized"))
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
]
)
self.assertCorrectCall(run)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_update_summary(self, run):
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
'--update-summary',
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
"--update-summary",
]
)
self.assertCorrectCall(
run,
@ -114,133 +142,137 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
stdout=True,
universal_newlines=True,
)
],
)
@mock.patch("kobo.shortcuts.run")
def test_versioning_metadata(self, run):
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
"--version=24",
]
)
@mock.patch('kobo.shortcuts.run')
def test_versioning_metadata(self, run):
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
'--version=24',
])
self.assertCorrectCall(run, extra_args=["--add-metadata-string=version=24"])
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_ostree_ref(self, run):
self._make_dummy_config_dir(self.topdir)
treefile = os.path.join(self.topdir, 'fedora-atomic-docker-host.json')
treefile = os.path.join(self.topdir, "fedora-atomic-docker-host.json")
with open(treefile, 'r') as f:
with open(treefile, "r") as f:
treefile_content = json.load(f)
original_repos = treefile_content['repos']
original_ref = treefile_content['ref']
replacing_ref = original_ref + '-changed'
original_repos = treefile_content["repos"]
original_ref = treefile_content["ref"]
replacing_ref = original_ref + "-changed"
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s' % treefile,
'--ostree-ref=%s' % replacing_ref,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s" % treefile,
"--ostree-ref=%s" % replacing_ref,
]
)
with open(treefile, 'r') as f:
with open(treefile, "r") as f:
treefile_content = json.load(f)
new_repos = treefile_content['repos']
new_ref = treefile_content['ref']
new_repos = treefile_content["repos"]
new_ref = treefile_content["ref"]
# ref value in treefile should be overrided with new ref
self.assertEqual(replacing_ref, new_ref)
# repos should stay unchanged
self.assertEqual(original_repos, new_repos)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_with_yaml_file(self, run):
self._make_dummy_config_dir(self.topdir)
treefile = os.path.join(self.topdir, 'fedora-atomic-docker-host.yaml')
treefile = os.path.join(self.topdir, "fedora-atomic-docker-host.yaml")
with open(treefile, 'r') as f:
with open(treefile, "r") as f:
# Read initial content from YAML file
treefile_content = yaml.safe_load(f)
original_repos = treefile_content['repos']
original_ref = treefile_content['ref']
replacing_ref = original_ref + '-changed'
original_repos = treefile_content["repos"]
original_ref = treefile_content["ref"]
replacing_ref = original_ref + "-changed"
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s' % treefile,
'--ostree-ref=%s' % replacing_ref,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s" % treefile,
"--ostree-ref=%s" % replacing_ref,
]
)
with open(treefile.replace(".yaml", ".json"), 'r') as f:
with open(treefile.replace(".yaml", ".json"), "r") as f:
# There is now a tweaked JSON file
treefile_content = json.load(f)
new_repos = treefile_content['repos']
new_ref = treefile_content['ref']
new_repos = treefile_content["repos"]
new_ref = treefile_content["ref"]
# ref value in treefile should be overrided with new ref
self.assertEqual(replacing_ref, new_ref)
# repos should stay unchanged
self.assertEqual(original_repos, new_repos)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_force_new_commit(self, run):
helpers.touch(os.path.join(self.repo, 'initialized'))
helpers.touch(os.path.join(self.repo, "initialized"))
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
'--force-new-commit',
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
"--force-new-commit",
]
)
self.assertCorrectCall(run, extra_args=["--force-nocache"])
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_extra_config_with_extra_repos(self, run):
configdir = os.path.join(self.topdir, 'config')
configdir = os.path.join(self.topdir, "config")
self._make_dummy_config_dir(configdir)
treefile = os.path.join(configdir, 'fedora-atomic-docker-host.json')
treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
extra_config_file = os.path.join(self.topdir, 'extra_config.json')
extra_config_file = os.path.join(self.topdir, "extra_config.json")
extra_config = {
"repo": [
{
"name": "server",
"baseurl": "http://www.example.com/Server/repo",
},
{"name": "server", "baseurl": "http://www.example.com/Server/repo"},
{
"name": "optional",
"baseurl": "http://example.com/repo/x86_64/optional",
"exclude": "systemd-container",
"gpgcheck": False
"gpgcheck": False,
},
{
"name": "extra",
"baseurl": "http://example.com/repo/x86_64/extra",
}
{"name": "extra", "baseurl": "http://example.com/repo/x86_64/extra"},
]
}
helpers.touch(extra_config_file, json.dumps(extra_config))
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s' % treefile,
'--extra-config=%s' % extra_config_file,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s" % treefile,
"--extra-config=%s" % extra_config_file,
]
)
pungi_repo = os.path.join(configdir, "pungi.repo")
self.assertTrue(os.path.isfile(pungi_repo))
with open(pungi_repo, 'r') as f:
with open(pungi_repo, "r") as f:
content = f.read().strip()
result_template = (
"[repo-0]",
@ -257,57 +289,59 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
"baseurl=http://www.example.com/Server/repo",
"gpgcheck=0",
)
result = '\n'.join(result_template).strip()
result = "\n".join(result_template).strip()
self.assertEqual(content, result)
treeconf = json.load(open(treefile, 'r'))
repos = treeconf['repos']
treeconf = json.load(open(treefile, "r"))
repos = treeconf["repos"]
self.assertEqual(len(repos), 3)
for name in ("repo-0", "repo-1", "repo-2"):
self.assertIn(name, repos)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_extra_config_with_keep_original_sources(self, run):
configdir = os.path.join(self.topdir, 'config')
configdir = os.path.join(self.topdir, "config")
self._make_dummy_config_dir(configdir)
treefile = os.path.join(configdir, 'fedora-atomic-docker-host.json')
treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
extra_config_file = os.path.join(self.topdir, 'extra_config.json')
extra_config_file = os.path.join(self.topdir, "extra_config.json")
extra_config = {
"repo": [
{
"name": "server",
"baseurl": "http://www.example.com/Server/repo",
},
{"name": "server", "baseurl": "http://www.example.com/Server/repo"},
{
"name": "optional",
"baseurl": "http://example.com/repo/x86_64/optional",
"exclude": "systemd-container",
"gpgcheck": False
"gpgcheck": False,
},
{
"name": "extra",
"baseurl": "http://example.com/repo/x86_64/extra",
}
{"name": "extra", "baseurl": "http://example.com/repo/x86_64/extra"},
],
"keep_original_sources": True
"keep_original_sources": True,
}
helpers.touch(extra_config_file, json.dumps(extra_config))
ostree.main([
'tree',
'--repo=%s' % self.repo,
'--log-dir=%s' % os.path.join(self.topdir, 'logs', 'Atomic'),
'--treefile=%s' % treefile,
'--extra-config=%s' % extra_config_file,
])
ostree.main(
[
"tree",
"--repo=%s" % self.repo,
"--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
"--treefile=%s" % treefile,
"--extra-config=%s" % extra_config_file,
]
)
treeconf = json.load(open(treefile, 'r'))
repos = treeconf['repos']
treeconf = json.load(open(treefile, "r"))
repos = treeconf["repos"]
self.assertEqual(len(repos), 6)
for name in ['fedora-rawhide', 'fedora-24', 'fedora-23',
'repo-0', 'repo-1', 'repo-2']:
for name in [
"fedora-rawhide",
"fedora-24",
"fedora-23",
"repo-0",
"repo-1",
"repo-2",
]:
self.assertIn(name, repos)
@ -317,30 +351,32 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
self.product = "dummyproduct"
self.version = "1.0"
self.release = "20160101.t.0"
self.output = os.path.join(self.topdir, 'output')
self.logdir = os.path.join(self.topdir, 'logs')
self.volid = '%s-%s' % (self.product, self.version)
self.variant = 'dummy'
self.output = os.path.join(self.topdir, "output")
self.logdir = os.path.join(self.topdir, "logs")
self.volid = "%s-%s" % (self.product, self.version)
self.variant = "dummy"
self.rootfs_size = None
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_with_args(self, run):
args = ['installer',
'--product=%s' % self.product,
'--version=%s' % self.version,
'--release=%s' % self.release,
'--output=%s' % self.output,
'--variant=%s' % self.variant,
'--rootfs-size=%s' % self.rootfs_size,
'--nomacboot',
'--isfinal']
args.append('--source=%s' % 'http://www.example.com/dummy/repo')
args.append('--installpkgs=dummy-foo')
args.append('--installpkgs=dummy-bar')
args.append('--add-template=/path/to/lorax.tmpl')
args.append('--add-template-var=ostree_osname=dummy')
args.append('--add-arch-template=/path/to/lorax-embed.tmpl')
args.append('--add-arch-template-var=ostree_repo=http://www.example.com/ostree')
args = [
"installer",
"--product=%s" % self.product,
"--version=%s" % self.version,
"--release=%s" % self.release,
"--output=%s" % self.output,
"--variant=%s" % self.variant,
"--rootfs-size=%s" % self.rootfs_size,
"--nomacboot",
"--isfinal",
]
args.append("--source=%s" % "http://www.example.com/dummy/repo")
args.append("--installpkgs=dummy-foo")
args.append("--installpkgs=dummy-bar")
args.append("--add-template=/path/to/lorax.tmpl")
args.append("--add-template-var=ostree_osname=dummy")
args.append("--add-arch-template=/path/to/lorax-embed.tmpl")
args.append("--add-arch-template-var=ostree_repo=http://www.example.com/ostree")
ostree.main(args)
self.maxDiff = None
six.assertCountEqual(
@ -370,29 +406,41 @@ class OstreeInstallerScriptTest(helpers.PungiTestCase):
],
)
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_run_with_extra_config_file(self, run):
extra_config_file = os.path.join(self.topdir, 'extra_config.json')
helpers.touch(extra_config_file,
json.dumps({'repo': 'http://www.example.com/another/repo',
'installpkgs': ['dummy-foo', 'dummy-bar'],
'add_template': ['/path/to/lorax.tmpl'],
'add_template_var': ['ostree_osname=dummy-atomic',
'ostree_ref=dummy/x86_64/docker'],
'add_arch_template': ['/path/to/lorax-embed.tmpl'],
'add_arch_template_var': ['ostree_osname=dummy-atomic',
'ostree_repo=http://www.example.com/ostree']}))
args = ['installer',
'--product=%s' % self.product,
'--version=%s' % self.version,
'--release=%s' % self.release,
'--output=%s' % self.output,
'--variant=%s' % self.variant,
'--rootfs-size=%s' % self.rootfs_size,
'--nomacboot',
'--isfinal']
args.append('--source=%s' % 'http://www.example.com/dummy/repo')
args.append('--extra-config=%s' % extra_config_file)
extra_config_file = os.path.join(self.topdir, "extra_config.json")
helpers.touch(
extra_config_file,
json.dumps(
{
"repo": "http://www.example.com/another/repo",
"installpkgs": ["dummy-foo", "dummy-bar"],
"add_template": ["/path/to/lorax.tmpl"],
"add_template_var": [
"ostree_osname=dummy-atomic",
"ostree_ref=dummy/x86_64/docker",
],
"add_arch_template": ["/path/to/lorax-embed.tmpl"],
"add_arch_template_var": [
"ostree_osname=dummy-atomic",
"ostree_repo=http://www.example.com/ostree",
],
}
),
)
args = [
"installer",
"--product=%s" % self.product,
"--version=%s" % self.version,
"--release=%s" % self.release,
"--output=%s" % self.output,
"--variant=%s" % self.variant,
"--rootfs-size=%s" % self.rootfs_size,
"--nomacboot",
"--isfinal",
]
args.append("--source=%s" % "http://www.example.com/dummy/repo")
args.append("--extra-config=%s" % extra_config_file)
ostree.main(args)
self.maxDiff = None
six.assertCountEqual(

View File

@ -3,6 +3,7 @@
import mock
import os
import sys
try:
import unittest2 as unittest
except ImportError:
@ -17,39 +18,39 @@ class TestUnifiedIsos(unittest.TestCase):
class TestGetLoraxDir(unittest.TestCase):
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_success(self, mock_run):
mock_run.return_value = (0, 'hello')
self.assertEqual(patch_iso.get_lorax_dir(None), 'hello')
mock_run.return_value = (0, "hello")
self.assertEqual(patch_iso.get_lorax_dir(None), "hello")
self.assertEqual(1, len(mock_run.call_args_list))
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_crash(self, mock_run):
mock_run.side_effect = boom
self.assertEqual(patch_iso.get_lorax_dir('hello'), 'hello')
self.assertEqual(patch_iso.get_lorax_dir("hello"), "hello")
self.assertEqual(1, len(mock_run.call_args_list))
class TestSh(unittest.TestCase):
@mock.patch('kobo.shortcuts.run')
@mock.patch("kobo.shortcuts.run")
def test_cmd(self, mock_run):
mock_run.return_value = (0, 'ok')
mock_run.return_value = (0, "ok")
log = mock.Mock()
patch_iso.sh(log, ['ls'], foo='bar')
self.assertEqual(mock_run.call_args_list,
[mock.call(['ls'], foo='bar', universal_newlines=True)])
self.assertEqual(log.info.call_args_list,
[mock.call('Running: %s', 'ls')])
self.assertEqual(log.debug.call_args_list,
[mock.call('%s', 'ok')])
patch_iso.sh(log, ["ls"], foo="bar")
self.assertEqual(
mock_run.call_args_list,
[mock.call(["ls"], foo="bar", universal_newlines=True)],
)
self.assertEqual(log.info.call_args_list, [mock.call("Running: %s", "ls")])
self.assertEqual(log.debug.call_args_list, [mock.call("%s", "ok")])
class TestAsBool(unittest.TestCase):
def test_true(self):
self.assertTrue(patch_iso.as_bool('true'))
self.assertTrue(patch_iso.as_bool("true"))
def test_false(self):
self.assertFalse(patch_iso.as_bool('false'))
self.assertFalse(patch_iso.as_bool("false"))
def test_anything_else(self):
obj = mock.Mock()
@ -61,171 +62,206 @@ class EqualsAny(object):
return True
def __repr__(self):
return u'ANYTHING'
return u"ANYTHING"
ANYTHING = EqualsAny()
class TestPatchingIso(unittest.TestCase):
@mock.patch('pungi_utils.patch_iso.util.copy_all')
@mock.patch('pungi_utils.patch_iso.iso')
@mock.patch('pungi_utils.patch_iso.sh')
@mock.patch("pungi_utils.patch_iso.util.copy_all")
@mock.patch("pungi_utils.patch_iso.iso")
@mock.patch("pungi_utils.patch_iso.sh")
def test_whole(self, sh, iso, copy_all):
iso.mount.return_value.__enter__.return_value = 'mounted-iso-dir'
iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"
def _create_files(src, dest):
touch(os.path.join(dest, 'dir', 'file.txt'), 'Hello')
touch(os.path.join(dest, "dir", "file.txt"), "Hello")
copy_all.side_effect = _create_files
log = mock.Mock(name='logger')
log = mock.Mock(name="logger")
opts = mock.Mock(
target='test.iso',
source='source.iso',
target="test.iso",
source="source.iso",
force_arch=None,
volume_id='FOOBAR',
dirs=[]
)
patch_iso.run(log, opts)
self.assertEqual(iso.get_mkisofs_cmd.call_args_list,
[mock.call(os.path.abspath(opts.target), None,
boot_args=None,
exclude=['./lost+found'],
graft_points=ANYTHING,
input_charset=None,
volid='FOOBAR')])
self.assertEqual(iso.mount.call_args_list,
[mock.call('source.iso')])
self.assertEqual(copy_all.mock_calls,
[mock.call('mounted-iso-dir', ANYTHING)])
self.assertEqual(
sh.call_args_list,
[mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value)])
@mock.patch('pungi_utils.patch_iso.util.copy_all')
@mock.patch('pungi_utils.patch_iso.iso')
@mock.patch('pungi_utils.patch_iso.sh')
def test_detect_arch_discinfo(self, sh, iso, copy_all):
iso.mount.return_value.__enter__.return_value = 'mounted-iso-dir'
def _create_files(src, dest):
touch(os.path.join(dest, 'dir', 'file.txt'), 'Hello')
touch(os.path.join(dest, '.discinfo'),
'1487578537.111417\nDummy Product 1.0\nppc64\n1')
copy_all.side_effect = _create_files
log = mock.Mock(name='logger')
opts = mock.Mock(
target='test.iso',
source='source.iso',
force_arch=None,
volume_id=None,
dirs=[]
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list,
[mock.call('source.iso')])
self.assertEqual(iso.get_mkisofs_cmd.call_args_list,
[mock.call(os.path.abspath(opts.target), None,
boot_args=iso.get_boot_options.return_value,
exclude=['./lost+found'],
graft_points=ANYTHING,
input_charset=None,
volid=iso.get_volume_id.return_value)])
self.assertEqual(copy_all.mock_calls,
[mock.call('mounted-iso-dir', ANYTHING)])
self.assertEqual(
sh.call_args_list,
[mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value)])
@mock.patch('pungi_utils.patch_iso.util.copy_all')
@mock.patch('pungi_utils.patch_iso.iso')
@mock.patch('pungi_utils.patch_iso.sh')
def test_run_isohybrid(self, sh, iso, copy_all):
iso.mount.return_value.__enter__.return_value = 'mounted-iso-dir'
def _create_files(src, dest):
touch(os.path.join(dest, 'dir', 'file.txt'), 'Hello')
copy_fixture(
'DP-1.0-20161013.t.4/compose/Server/x86_64/os/.treeinfo',
os.path.join(dest, '.treeinfo')
)
copy_all.side_effect = _create_files
log = mock.Mock(name='logger')
opts = mock.Mock(
target='test.iso',
source='source.iso',
force_arch=None,
volume_id=None,
dirs=[]
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list,
[mock.call('source.iso')])
self.assertEqual(iso.get_mkisofs_cmd.call_args_list,
[mock.call(os.path.abspath(opts.target), None,
boot_args=iso.get_boot_options.return_value,
exclude=['./lost+found'],
graft_points=ANYTHING,
input_charset='utf-8',
volid=iso.get_volume_id.return_value)])
self.assertEqual(copy_all.mock_calls,
[mock.call('mounted-iso-dir', ANYTHING)])
self.assertEqual(
sh.call_args_list,
[mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_isohybrid_cmd.return_value),
mock.call(log, iso.get_implantisomd5_cmd.return_value)])
@mock.patch('pungi_utils.patch_iso.tweak_configs')
@mock.patch('pungi_utils.patch_iso.util.copy_all')
@mock.patch('pungi_utils.patch_iso.iso')
@mock.patch('pungi_utils.patch_iso.sh')
def test_add_ks_cfg(self, sh, iso, copy_all, tweak_configs):
iso.mount.return_value.__enter__.return_value = 'mounted-iso-dir'
iso.get_graft_points.return_value = {
'ks.cfg': 'path/to/ks.cfg',
}
def _create_files(src, dest):
touch(os.path.join(dest, 'dir', 'file.txt'), 'Hello')
copy_all.side_effect = _create_files
log = mock.Mock(name='logger')
opts = mock.Mock(
target='test.iso',
source='source.iso',
force_arch='s390',
volume_id='foobar',
volume_id="FOOBAR",
dirs=[],
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list,
[mock.call('source.iso')])
self.assertEqual(iso.get_mkisofs_cmd.call_args_list,
[mock.call(os.path.abspath(opts.target), None,
boot_args=iso.get_boot_options.return_value,
exclude=['./lost+found'],
self.assertEqual(
iso.get_mkisofs_cmd.call_args_list,
[
mock.call(
os.path.abspath(opts.target),
None,
boot_args=None,
exclude=["./lost+found"],
graft_points=ANYTHING,
input_charset='utf-8',
volid='foobar')])
self.assertEqual(tweak_configs.call_args_list,
[mock.call(ANYTHING, 'foobar', 'path/to/ks.cfg', logger=log)])
self.assertEqual(copy_all.mock_calls,
[mock.call('mounted-iso-dir', ANYTHING)])
input_charset=None,
volid="FOOBAR",
)
],
)
self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
self.assertEqual(
sh.call_args_list,
[mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value)])
[
mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value),
],
)
@mock.patch("pungi_utils.patch_iso.util.copy_all")
@mock.patch("pungi_utils.patch_iso.iso")
@mock.patch("pungi_utils.patch_iso.sh")
def test_detect_arch_discinfo(self, sh, iso, copy_all):
iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"
def _create_files(src, dest):
touch(os.path.join(dest, "dir", "file.txt"), "Hello")
touch(
os.path.join(dest, ".discinfo"),
"1487578537.111417\nDummy Product 1.0\nppc64\n1",
)
copy_all.side_effect = _create_files
log = mock.Mock(name="logger")
opts = mock.Mock(
target="test.iso",
source="source.iso",
force_arch=None,
volume_id=None,
dirs=[],
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
self.assertEqual(
iso.get_mkisofs_cmd.call_args_list,
[
mock.call(
os.path.abspath(opts.target),
None,
boot_args=iso.get_boot_options.return_value,
exclude=["./lost+found"],
graft_points=ANYTHING,
input_charset=None,
volid=iso.get_volume_id.return_value,
)
],
)
self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
self.assertEqual(
sh.call_args_list,
[
mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value),
],
)
@mock.patch("pungi_utils.patch_iso.util.copy_all")
@mock.patch("pungi_utils.patch_iso.iso")
@mock.patch("pungi_utils.patch_iso.sh")
def test_run_isohybrid(self, sh, iso, copy_all):
iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"
def _create_files(src, dest):
touch(os.path.join(dest, "dir", "file.txt"), "Hello")
copy_fixture(
"DP-1.0-20161013.t.4/compose/Server/x86_64/os/.treeinfo",
os.path.join(dest, ".treeinfo"),
)
copy_all.side_effect = _create_files
log = mock.Mock(name="logger")
opts = mock.Mock(
target="test.iso",
source="source.iso",
force_arch=None,
volume_id=None,
dirs=[],
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
self.assertEqual(
iso.get_mkisofs_cmd.call_args_list,
[
mock.call(
os.path.abspath(opts.target),
None,
boot_args=iso.get_boot_options.return_value,
exclude=["./lost+found"],
graft_points=ANYTHING,
input_charset="utf-8",
volid=iso.get_volume_id.return_value,
)
],
)
self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
self.assertEqual(
sh.call_args_list,
[
mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_isohybrid_cmd.return_value),
mock.call(log, iso.get_implantisomd5_cmd.return_value),
],
)
@mock.patch("pungi_utils.patch_iso.tweak_configs")
@mock.patch("pungi_utils.patch_iso.util.copy_all")
@mock.patch("pungi_utils.patch_iso.iso")
@mock.patch("pungi_utils.patch_iso.sh")
def test_add_ks_cfg(self, sh, iso, copy_all, tweak_configs):
iso.mount.return_value.__enter__.return_value = "mounted-iso-dir"
iso.get_graft_points.return_value = {
"ks.cfg": "path/to/ks.cfg",
}
def _create_files(src, dest):
touch(os.path.join(dest, "dir", "file.txt"), "Hello")
copy_all.side_effect = _create_files
log = mock.Mock(name="logger")
opts = mock.Mock(
target="test.iso",
source="source.iso",
force_arch="s390",
volume_id="foobar",
dirs=[],
)
patch_iso.run(log, opts)
self.assertEqual(iso.mount.call_args_list, [mock.call("source.iso")])
self.assertEqual(
iso.get_mkisofs_cmd.call_args_list,
[
mock.call(
os.path.abspath(opts.target),
None,
boot_args=iso.get_boot_options.return_value,
exclude=["./lost+found"],
graft_points=ANYTHING,
input_charset="utf-8",
volid="foobar",
)
],
)
self.assertEqual(
tweak_configs.call_args_list,
[mock.call(ANYTHING, "foobar", "path/to/ks.cfg", logger=log)],
)
self.assertEqual(copy_all.mock_calls, [mock.call("mounted-iso-dir", ANYTHING)])
self.assertEqual(
sh.call_args_list,
[
mock.call(log, iso.get_mkisofs_cmd.return_value, workdir=ANYTHING),
mock.call(log, iso.get_implantisomd5_cmd.return_value),
],
)

View File

@ -32,7 +32,6 @@ class TestHeadTailSplit(unittest.TestCase):
class TestPathMatch(unittest.TestCase):
def setUp(self):
self.pm = PathMatch()
@ -56,7 +55,9 @@ class TestPathMatch(unittest.TestCase):
self.pm["/*/*"] = "/star/star1"
self.assertEqual(list(self.pm._patterns.keys()), ["*"])
self.assertEqual(list(self.pm._patterns["*"]._final_patterns.keys()), ["*"])
self.assertEqual(self.pm._patterns["*"]._final_patterns["*"]._values, ["/star/star1"])
self.assertEqual(
self.pm._patterns["*"]._final_patterns["*"]._values, ["/star/star1"]
)
self.assertEqual(sorted(self.pm["/lib/asd"]), ["/star/star1"])
self.pm["/*"] = "/star2"

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import mock
try:
import unittest2 as unittest
except ImportError:
@ -78,7 +79,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start()
weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception))
self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1)
self.assertInterrupted(self.p2)
self.assertMissed(self.p3)
@ -92,7 +93,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start()
weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception))
self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1)
self.assertInterrupted(self.p2)
self.assertFinalized(self.p3)
@ -107,7 +108,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start()
weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception))
self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1)
self.assertInterrupted(self.p2)
self.assertMissed(self.p3)
@ -125,7 +126,7 @@ class TestWeaver(unittest.TestCase):
weaver_phase.start()
weaver_phase.stop()
self.assertEqual('BOOM', str(ctx.exception))
self.assertEqual("BOOM", str(ctx.exception))
self.assertFinalized(self.p1)
self.assertInterrupted(self.p2)
self.assertMissed(self.p3)

View File

@ -4,6 +4,7 @@ import mock
import os
import six
import sys
try:
import unittest2 as unittest
except ImportError:
@ -25,24 +26,24 @@ class MockPathInfo(object):
return self.topdir
def get_filename(self, rpm_info):
return '{name}@{version}@{release}@{arch}'.format(**rpm_info)
return "{name}@{version}@{release}@{arch}".format(**rpm_info)
def signed(self, rpm_info, sigkey):
return os.path.join('signed', sigkey, self.get_filename(rpm_info))
return os.path.join("signed", sigkey, self.get_filename(rpm_info))
def rpm(self, rpm_info):
return os.path.join('rpms', self.get_filename(rpm_info))
return os.path.join("rpms", self.get_filename(rpm_info))
class MockFile(object):
def __init__(self, path):
if path.startswith('/tmp'):
if path.startswith("/tmp"):
# Drop /tmp/something/ from path
path = path.split('/', 3)[-1]
path = path.split("/", 3)[-1]
self.file_path = path
self.file_name = os.path.basename(path)
self.name, self.version, self.release, self.arch = self.file_name.split('@')
self.sourcerpm = '{0.name}-{0.version}-{0.release}.{0.arch}'.format(self)
self.name, self.version, self.release, self.arch = self.file_name.split("@")
self.sourcerpm = "{0.name}-{0.version}-{0.release}.{0.arch}".format(self)
self.exclusivearch = []
self.excludearch = []
@ -78,6 +79,7 @@ class MockFileCache(dict):
"""Mock for kobo.pkgset.FileCache.
It gets data from filename and does not touch filesystem.
"""
def __init__(self, _wrapper):
super(MockFileCache, self).__init__()
self.file_cache = self
@ -93,6 +95,7 @@ class FakePool(object):
It implements the same interface, but uses only the last added worker to
process all tasks sequentially.
"""
def __init__(self, package_set, logger=None):
self.queue = []
self.worker = None
@ -128,13 +131,12 @@ class PkgsetCompareMixin(object):
self.assertEqual({}, actual)
@mock.patch('pungi.phases.pkgset.pkgsets.ReaderPool', new=FakePool)
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache)
@mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
@mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
def setUp(self):
super(TestKojiPkgset, self).setUp()
with open(os.path.join(helpers.FIXTURE_DIR, 'tagged-rpms.json')) as f:
with open(os.path.join(helpers.FIXTURE_DIR, "tagged-rpms.json")) as f:
self.tagged_rpms = json.load(f)
self.path_info = MockPathInfo(self.topdir)
@ -152,163 +154,208 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
self.assertIn(k, actual)
v2 = actual.pop(k)
six.assertCountEqual(self, v1, v2)
self.assertEqual({}, actual, msg='Some architectures were missing')
self.assertEqual({}, actual, msg="Some architectures were missing")
def test_all_arches(self):
self._touch_files([
'rpms/pungi@4.1.3@3.fc25@noarch',
'rpms/pungi@4.1.3@3.fc25@src',
'rpms/bash@4.3.42@4.fc24@i686',
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@src',
'rpms/bash-debuginfo@4.3.42@4.fc24@i686',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
self._touch_files(
[
"rpms/pungi@4.1.3@3.fc25@noarch",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/bash@4.3.42@4.fc24@i686",
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@src",
"rpms/bash-debuginfo@4.3.42@4.fc24@i686",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
pkgset = pkgsets.KojiPackageSet("pkgset", self.koji_wrapper, [None])
result = pkgset.populate('f25')
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'src': ['rpms/pungi@4.1.3@3.fc25@src',
'rpms/bash@4.3.42@4.fc24@src'],
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'],
'i686': ['rpms/bash@4.3.42@4.fc24@i686',
'rpms/bash-debuginfo@4.3.42@4.fc24@i686'],
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"src": ["rpms/pungi@4.1.3@3.fc25@src", "rpms/bash@4.3.42@4.fc24@src"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
"i686": [
"rpms/bash@4.3.42@4.fc24@i686",
"rpms/bash-debuginfo@4.3.42@4.fc24@i686",
],
"x86_64": [
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
],
},
)
def test_only_one_arch(self):
self._touch_files([
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], arches=['x86_64']
self._touch_files(
[
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
result = pkgset.populate('f25')
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_find_signed_with_preference(self):
self._touch_files([
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64',
'signed/deadbeef/bash@4.3.42@4.fc24@x86_64',
'signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', 'deadbeef'], arches=['x86_64']
self._touch_files(
[
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
"signed/deadbeef/bash@4.3.42@4.fc24@x86_64",
"signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
result = pkgset.populate('f25')
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ["cafebabe", "deadbeef"], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'x86_64': ['signed/cafebabe/bash@4.3.42@4.fc24@x86_64',
'signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"x86_64": [
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
"signed/deadbeef/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
},
)
def test_find_signed_fallback_unsigned(self):
self._touch_files([
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', None], arches=['x86_64']
self._touch_files(
[
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
result = pkgset.populate('f25')
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
)
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'signed/cafebabe/bash@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"signed/cafebabe/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_can_not_find_signed_package(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe'], arches=['x86_64']
"pkgset", self.koji_wrapper, ["cafebabe"], arches=["x86_64"]
)
with self.assertRaises(RuntimeError) as ctx:
pkgset.populate('f25')
pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
figure = re.compile(
r'^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$',
re.DOTALL)
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure)
def test_can_not_find_signed_package_allow_invalid_sigkeys(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
['cafebabe'],
arches=['x86_64'],
["cafebabe"],
arches=["x86_64"],
allow_invalid_sigkeys=True,
)
pkgset.populate('f25')
pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
with self.assertRaises(RuntimeError) as ctx:
pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)
figure = re.compile(
r'^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$',
re.DOTALL)
r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",
re.DOTALL,
)
self.assertRegexpMatches(str(ctx.exception), figure)
def test_can_not_find_any_package(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, ['cafebabe', None], arches=['x86_64']
"pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
)
with self.assertRaises(RuntimeError) as ctx:
pkgset.populate('f25')
pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertRegexpMatches(
str(ctx.exception),
r'^RPM\(s\) not found for sigs: .+Check log for details.+')
r"^RPM\(s\) not found for sigs: .+Check log for details.+",
)
def test_packages_attribute(self):
self._touch_files([
'rpms/pungi@4.1.3@3.fc25@noarch',
'rpms/pungi@4.1.3@3.fc25@src',
'rpms/bash@4.3.42@4.fc24@i686',
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@src',
'rpms/bash-debuginfo@4.3.42@4.fc24@i686',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
self._touch_files(
[
"rpms/pungi@4.1.3@3.fc25@noarch",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/bash@4.3.42@4.fc24@i686",
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@src",
"rpms/bash-debuginfo@4.3.42@4.fc24@i686",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
pkgset = pkgsets.KojiPackageSet(
"pkgset",
@ -318,56 +365,75 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
populate_only_packages=True,
)
result = pkgset.populate('f25')
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'src': ['rpms/bash@4.3.42@4.fc24@src'],
'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"src": ["rpms/bash@4.3.42@4.fc24@src"],
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"x86_64": ["rpms/bash@4.3.42@4.fc24@x86_64"],
},
)
def test_get_latest_rpms_cache(self):
self._touch_files([
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
self._touch_files(
[
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
[None],
arches=['x86_64'],
arches=["x86_64"],
cache_region=cache_region,
)
# Try calling the populate twice, but expect just single listTaggedRPMs
# call - that means the caching worked.
for i in range(2):
result = pkgset.populate('f25')
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[
mock.call.listTaggedRPMS(
"f25", event=None, inherit=True, latest=True
)
],
)
self.assertPkgsetEqual(
result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@x86_64']})
{
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_get_latest_rpms_cache_different_id(self):
self._touch_files([
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
self._touch_files(
[
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
[None],
arches=['x86_64'],
arches=["x86_64"],
cache_region=cache_region,
)
@ -376,185 +442,215 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
expected_calls = []
for i in range(2):
expected_calls.append(
mock.call.listTaggedRPMS('f25', event=i, inherit=True, latest=True))
result = pkgset.populate('f25', event={"id": i})
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
expected_calls)
mock.call.listTaggedRPMS("f25", event=i, inherit=True, latest=True)
)
result = pkgset.populate("f25", event={"id": i})
self.assertEqual(self.koji_wrapper.koji_proxy.mock_calls, expected_calls)
self.assertPkgsetEqual(
result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@x86_64']})
{
"x86_64": [
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@x86_64",
]
},
)
def test_extra_builds_attribute(self):
self._touch_files([
'rpms/pungi@4.1.3@3.fc25@noarch',
'rpms/pungi@4.1.3@3.fc25@src',
'rpms/bash@4.3.42@4.fc24@i686',
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@src',
'rpms/bash-debuginfo@4.3.42@4.fc24@i686',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
self._touch_files(
[
"rpms/pungi@4.1.3@3.fc25@noarch",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/bash@4.3.42@4.fc24@i686",
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash@4.3.42@4.fc24@src",
"rpms/bash-debuginfo@4.3.42@4.fc24@i686",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
]
)
# Return "pungi" RPMs and builds using "get_latest_rpms" which gets
# them from Koji multiCall.
extra_rpms = [rpm for rpm in self.tagged_rpms[0]
if rpm["name"] == "pungi"]
extra_builds = [build for build in self.tagged_rpms[1]
if build["package_name"] == "pungi"]
extra_rpms = [rpm for rpm in self.tagged_rpms[0] if rpm["name"] == "pungi"]
extra_builds = [
build for build in self.tagged_rpms[1] if build["package_name"] == "pungi"
]
self.koji_wrapper.retrying_multicall_map.side_effect = [
extra_builds, [extra_rpms]]
extra_builds,
[extra_rpms],
]
# Do not return "pungi" RPMs and builds using the listTaggedRPMs, so
# we can be sure "pungi" gets into compose using the `extra_builds`.
self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = [
[rpm for rpm in self.tagged_rpms[0] if rpm["name"] != "pungi"],
[b for b in self.tagged_rpms[1] if b["package_name"] != "pungi"]]
[b for b in self.tagged_rpms[1] if b["package_name"] != "pungi"],
]
pkgset = pkgsets.KojiPackageSet(
"pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"]
)
result = pkgset.populate('f25')
result = pkgset.populate("f25")
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
[mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
)
self.assertPkgsetEqual(result,
{'src': ['rpms/pungi@4.1.3@3.fc25@src',
'rpms/bash@4.3.42@4.fc24@src'],
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'],
'i686': ['rpms/bash@4.3.42@4.fc24@i686',
'rpms/bash-debuginfo@4.3.42@4.fc24@i686'],
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64']})
self.assertPkgsetEqual(
result,
{
"src": ["rpms/pungi@4.1.3@3.fc25@src", "rpms/bash@4.3.42@4.fc24@src"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
"i686": [
"rpms/bash@4.3.42@4.fc24@i686",
"rpms/bash-debuginfo@4.3.42@4.fc24@i686",
],
"x86_64": [
"rpms/bash@4.3.42@4.fc24@x86_64",
"rpms/bash-debuginfo@4.3.42@4.fc24@x86_64",
],
},
)
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache)
@mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):
def test_merge_in_another_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@noarch', 'rpms/pungi@4.1.3@3.fc25@src']:
for name in ["rpms/pungi@4.1.3@3.fc25@noarch", "rpms/pungi@4.1.3@3.fc25@src"]:
pkg = first.file_cache.add(name)
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
for name in ['rpms/bash@4.3.42@4.fc24@i686']:
for name in ["rpms/bash@4.3.42@4.fc24@i686"]:
pkg = second.file_cache.add(name)
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686'])
first.merge(second, "i386", ["i686"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'src': ['rpms/pungi@4.1.3@3.fc25@src'],
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch'],
'i686': ['rpms/bash@4.3.42@4.fc24@i686']})
self.assertPkgsetEqual(
first.rpms_by_arch,
{
"src": ["rpms/pungi@4.1.3@3.fc25@src"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
},
)
def test_merge_includes_noarch_with_different_exclude_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch')
pkg.excludearch = ['x86_64']
pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.excludearch = ["x86_64"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch'])
first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch']})
self.assertPkgsetEqual(
first.rpms_by_arch,
{
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
},
)
def test_merge_excludes_noarch_exclude_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch')
pkg.excludearch = ['i686']
pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.excludearch = ["i686"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch'])
first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'noarch': []})
self.assertPkgsetEqual(
first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
)
def test_merge_excludes_noarch_exclusive_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch')
pkg.exclusivearch = ['x86_64']
pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.exclusivearch = ["x86_64"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch'])
first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'noarch': []})
self.assertPkgsetEqual(
first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
)
def test_merge_includes_noarch_with_same_exclusive_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@noarch')
pkg.exclusivearch = ['i686']
pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.exclusivearch = ["i686"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'noarch'])
first.merge(second, "i386", ["i686", "noarch"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'noarch': ['rpms/pungi@4.1.3@3.fc25@noarch']})
self.assertPkgsetEqual(
first.rpms_by_arch,
{
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
},
)
def test_merge_skips_package_in_cache(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = second.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686'])
first.merge(second, "i386", ["i686"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686']})
self.assertPkgsetEqual(
first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"]}
)
def test_merge_skips_src_without_binary(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])
pkg = first.file_cache.add('rpms/bash@4.3.42@4.fc24@i686')
pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkg = second.file_cache.add('rpms/pungi@4.1.3@3.fc25@src')
pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@src")
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
first.merge(second, 'i386', ['i686', 'src'])
first.merge(second, "i386", ["i686", "src"])
self.assertPkgsetEqual(first.rpms_by_arch,
{'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'src': [],
'nosrc': []})
self.assertPkgsetEqual(
first.rpms_by_arch,
{"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "src": [], "nosrc": []},
)
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache)
@mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestSaveFileList(unittest.TestCase):
def setUp(self):
fd, self.tmpfile = tempfile.mkstemp()
@ -565,30 +661,37 @@ class TestSaveFileList(unittest.TestCase):
def test_save_arches_alphabetically(self):
pkgset = pkgsets.PackageSetBase("pkgset", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@x86_64',
'rpms/pungi@4.1.3@3.fc25@src',
'rpms/pungi@4.1.3@3.fc25@ppc64']:
for name in [
"rpms/pungi@4.1.3@3.fc25@x86_64",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/pungi@4.1.3@3.fc25@ppc64",
]:
pkg = pkgset.file_cache.add(name)
pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkgset.save_file_list(self.tmpfile)
with open(self.tmpfile) as f:
rpms = f.read().strip().split('\n')
self.assertEqual(rpms, ['rpms/pungi@4.1.3@3.fc25@ppc64',
'rpms/pungi@4.1.3@3.fc25@src',
'rpms/pungi@4.1.3@3.fc25@x86_64'])
rpms = f.read().strip().split("\n")
self.assertEqual(
rpms,
[
"rpms/pungi@4.1.3@3.fc25@ppc64",
"rpms/pungi@4.1.3@3.fc25@src",
"rpms/pungi@4.1.3@3.fc25@x86_64",
],
)
def test_save_strip_prefix(self):
pkgset = pkgsets.PackageSetBase("pkgset", [None])
for name in ['rpms/pungi@4.1.3@3.fc25@noarch', 'rpms/pungi@4.1.3@3.fc25@src']:
for name in ["rpms/pungi@4.1.3@3.fc25@noarch", "rpms/pungi@4.1.3@3.fc25@src"]:
pkg = pkgset.file_cache.add(name)
pkgset.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
pkgset.save_file_list(self.tmpfile, remove_path_prefix='rpms/')
pkgset.save_file_list(self.tmpfile, remove_path_prefix="rpms/")
with open(self.tmpfile) as f:
rpms = f.read().strip().split('\n')
rpms = f.read().strip().split("\n")
six.assertCountEqual(
self, rpms, ["pungi@4.1.3@3.fc25@noarch", "pungi@4.1.3@3.fc25@src"]
)

View File

@ -6,6 +6,7 @@ import os
import re
import six
import sys
try:
import unittest2 as unittest
except ImportError:
@ -15,29 +16,26 @@ from pungi.phases.pkgset.sources import source_koji
from tests import helpers
from pungi.module_util import Modulemd
EVENT_INFO = {'id': 15681980, 'ts': 1460956382.81936}
EVENT_INFO = {"id": 15681980, "ts": 1460956382.81936}
TAG_INFO = {
"maven_support": False,
"locked": False,
"name": "f25",
"extra": {
"mock.package_manager": "dnf"
},
"extra": {"mock.package_manager": "dnf"},
"perm": None,
"id": 335,
"arches": None,
"maven_include_all": None,
"perm_id": None
"perm_id": None,
}
class TestGetKojiEvent(helpers.PungiTestCase):
def setUp(self):
super(TestGetKojiEvent, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {})
self.event_file = self.topdir + '/work/global/koji-event'
self.event_file = self.topdir + "/work/global/koji-event"
def test_use_preconfigured_event(self):
koji_wrapper = mock.Mock()
@ -49,9 +47,8 @@ class TestGetKojiEvent(helpers.PungiTestCase):
self.assertEqual(event, EVENT_INFO)
six.assertCountEqual(
self,
koji_wrapper.mock_calls,
[mock.call.koji_proxy.getEvent(123456)])
self, koji_wrapper.mock_calls, [mock.call.koji_proxy.getEvent(123456)]
)
with open(self.event_file) as f:
self.assertEqual(json.load(f), EVENT_INFO)
@ -65,9 +62,8 @@ class TestGetKojiEvent(helpers.PungiTestCase):
self.assertEqual(event, EVENT_INFO)
six.assertCountEqual(
self,
koji_wrapper.mock_calls,
[mock.call.koji_proxy.getLastEvent()])
self, koji_wrapper.mock_calls, [mock.call.koji_proxy.getLastEvent()]
)
with open(self.event_file) as f:
self.assertEqual(json.load(f), EVENT_INFO)
@ -75,16 +71,19 @@ class TestGetKojiEvent(helpers.PungiTestCase):
class TestPopulateGlobalPkgset(helpers.PungiTestCase):
def setUp(self):
super(TestPopulateGlobalPkgset, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {
'pkgset_koji_tag': 'f25',
'sigkeys': ["foo", "bar"],
})
self.compose = helpers.DummyCompose(
self.topdir, {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"]}
)
self.koji_wrapper = mock.Mock()
self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle')
self.koji_module_path = os.path.join(self.topdir, 'work', 'global', 'koji-module-Server.yaml')
self.pkgset_path = os.path.join(
self.topdir, "work", "global", "pkgset_global.pickle"
)
self.koji_module_path = os.path.join(
self.topdir, "work", "global", "koji-module-Server.yaml"
)
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')
@mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet")
def test_populate(self, KojiPackageSet, materialize):
materialize.side_effect = self.mock_materialize
@ -106,14 +105,12 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
return pkgset
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')
def test_populate_with_multiple_koji_tags(
self, KojiPackageSet, materialize
):
self.compose = helpers.DummyCompose(self.topdir, {
'pkgset_koji_tag': ['f25', 'f25-extra'],
'sigkeys': ["foo", "bar"],
})
@mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet")
def test_populate_with_multiple_koji_tags(self, KojiPackageSet, materialize):
self.compose = helpers.DummyCompose(
self.topdir,
{"pkgset_koji_tag": ["f25", "f25-extra"], "sigkeys": ["foo", "bar"]},
)
materialize.side_effect = self.mock_materialize
@ -123,7 +120,9 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
self.assertEqual(len(pkgsets), 2)
init_calls = KojiPackageSet.call_args_list
six.assertCountEqual(self, [call[0][0] for call in init_calls], ["f25", "f25-extra"])
six.assertCountEqual(
self, [call[0][0] for call in init_calls], ["f25", "f25-extra"]
)
six.assertCountEqual(
self, [call[0][1] for call in init_calls], [self.koji_wrapper] * 2
)
@ -143,22 +142,24 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
)
@mock.patch("pungi.phases.pkgset.sources.source_koji.MaterializedPackageSet.create")
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list')
@mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet.populate")
@mock.patch("pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list")
def test_populate_packages_to_gather(self, save_file_list, popuplate, materialize):
self.compose = helpers.DummyCompose(self.topdir, {
'gather_method': 'nodeps',
'pkgset_koji_tag': 'f25',
'sigkeys': ["foo", "bar"],
'additional_packages': [
('.*', {'*': ['pkg', 'foo.x86_64']}),
]
})
self.compose = helpers.DummyCompose(
self.topdir,
{
"gather_method": "nodeps",
"pkgset_koji_tag": "f25",
"sigkeys": ["foo", "bar"],
"additional_packages": [(".*", {"*": ["pkg", "foo.x86_64"]})],
},
)
materialize.side_effect = self.mock_materialize
pkgsets = source_koji.populate_global_pkgset(
self.compose, self.koji_wrapper, '/prefix', 123456)
self.compose, self.koji_wrapper, "/prefix", 123456
)
self.assertEqual(len(pkgsets), 1)
six.assertCountEqual(self, pkgsets[0].packages, ["pkg", "foo"])
@ -166,57 +167,55 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
class TestGetPackageSetFromKoji(helpers.PungiTestCase):
def setUp(self):
super(TestGetPackageSetFromKoji, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {
'pkgset_koji_tag': 'f25',
})
self.compose = helpers.DummyCompose(self.topdir, {"pkgset_koji_tag": "f25"})
self.compose.koji_event = None
self.koji_wrapper = mock.Mock()
self.koji_wrapper.koji_proxy.getLastEvent.return_value = EVENT_INFO
self.koji_wrapper.koji_proxy.getTag.return_value = TAG_INFO
@mock.patch('pungi.phases.pkgset.sources.source_koji.populate_global_pkgset')
@mock.patch("pungi.phases.pkgset.sources.source_koji.populate_global_pkgset")
def test_get_package_sets(self, pgp):
pkgsets = source_koji.get_pkgset_from_koji(
self.compose, self.koji_wrapper, "/prefix"
)
six.assertCountEqual(
self,
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.getLastEvent()]
self, self.koji_wrapper.koji_proxy.mock_calls, [mock.call.getLastEvent()]
)
self.assertEqual(pkgsets, pgp.return_value)
self.assertEqual(
pgp.call_args_list,
[mock.call(self.compose, self.koji_wrapper, '/prefix', EVENT_INFO)],
[mock.call(self.compose, self.koji_wrapper, "/prefix", EVENT_INFO)],
)
def test_get_koji_modules(self):
mock_build_ids = [{'id': 1065873, 'name': 'testmodule2-master_dash-20180406051653.96c371af'}]
mock_build_ids = [
{"id": 1065873, "name": "testmodule2-master_dash-20180406051653.96c371af"}
]
mock_extra = {
'typeinfo': {
'module': {
'content_koji_tag': 'module-b62270b82443edde',
'modulemd_str': mock.Mock(),
'name': 'testmodule2',
'stream': 'master',
'version': '20180406051653',
'context': '96c371af',
"typeinfo": {
"module": {
"content_koji_tag": "module-b62270b82443edde",
"modulemd_str": mock.Mock(),
"name": "testmodule2",
"stream": "master",
"version": "20180406051653",
"context": "96c371af",
}
}
}
mock_build_md = [
{
'id': 1065873,
'epoch': None,
'extra': mock_extra,
'name': 'testmodule2',
'nvr': 'testmodule2-master_dash-20180406051653.2e6f5e0a',
'release': '20180406051653.2e6f5e0a',
'state': 1,
'version': 'master_dash',
'completion_ts': 1433473124.0,
"id": 1065873,
"epoch": None,
"extra": mock_extra,
"name": "testmodule2",
"nvr": "testmodule2-master_dash-20180406051653.2e6f5e0a",
"release": "20180406051653.2e6f5e0a",
"state": 1,
"version": "master_dash",
"completion_ts": 1433473124.0,
}
]
@ -239,9 +238,12 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.assertIn("tag", module)
expected_query = "testmodule2-master_dash-20180406051653.96c371af"
self.koji_wrapper.koji_proxy.search.assert_called_once_with(expected_query, "build",
"glob")
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"])
self.koji_wrapper.koji_proxy.search.assert_called_once_with(
expected_query, "build", "glob"
)
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
def test_get_koji_modules_filter_by_event(self):
mock_build_ids = [
@ -251,7 +253,8 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
"typeinfo": {
"module": {
"content_koji_tag": "module-b62270b82443edde",
"modulemd_str": mock.Mock()}
"modulemd_str": mock.Mock(),
}
}
}
mock_build_md = [
@ -282,64 +285,66 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"testmodule2-master_dash-*", "build", "glob"
)
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"])
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
self.koji_wrapper.koji_proxy.listArchives.assert_not_called()
self.koji_wrapper.koji_proxy.listRPMs.assert_not_called()
def test_get_koji_modules_no_version(self):
mock_build_ids = [
{'id': 1065873, 'name': 'testmodule2-master-20180406051653.2e6f5e0a'},
{'id': 1065874, 'name': 'testmodule2-master-20180406051653.96c371af'}
{"id": 1065873, "name": "testmodule2-master-20180406051653.2e6f5e0a"},
{"id": 1065874, "name": "testmodule2-master-20180406051653.96c371af"},
]
mock_extra = [
{
'typeinfo': {
'module': {
'content_koji_tag': 'module-b62270b82443edde',
'modulemd_str': mock.Mock(),
'name': 'testmodule2',
'stream': 'master',
'version': '20180406051653',
'context': '2e6f5e0a',
"typeinfo": {
"module": {
"content_koji_tag": "module-b62270b82443edde",
"modulemd_str": mock.Mock(),
"name": "testmodule2",
"stream": "master",
"version": "20180406051653",
"context": "2e6f5e0a",
}
}
},
{
'typeinfo': {
'module': {
'content_koji_tag': 'module-52e40b9cdd3c0f7d',
'modulemd_str': mock.Mock(),
'name': 'testmodule2',
'stream': 'master',
'version': '20180406051653',
'context': '96c371af',
}
"typeinfo": {
"module": {
"content_koji_tag": "module-52e40b9cdd3c0f7d",
"modulemd_str": mock.Mock(),
"name": "testmodule2",
"stream": "master",
"version": "20180406051653",
"context": "96c371af",
}
}
},
]
mock_build_md = [
{
'id': 1065873,
'epoch': None,
'extra': mock_extra[0],
'name': 'testmodule2',
'nvr': 'testmodule2-master-20180406051653.2e6f5e0a',
'release': '20180406051653.2e6f5e0a',
'state': 1,
'version': 'master',
'completion_ts': 1433473124.0,
"id": 1065873,
"epoch": None,
"extra": mock_extra[0],
"name": "testmodule2",
"nvr": "testmodule2-master-20180406051653.2e6f5e0a",
"release": "20180406051653.2e6f5e0a",
"state": 1,
"version": "master",
"completion_ts": 1433473124.0,
},
{
'id': 1065874,
'epoch': None,
'extra': mock_extra[1],
'name': 'testmodule2',
'nvr': 'testmodule2-master-20180406051653.96c371af',
'release': '20180406051653.96c371af',
'state': 1,
'version': 'master',
'completion_ts': 1433473124.0,
}
"id": 1065874,
"epoch": None,
"extra": mock_extra[1],
"name": "testmodule2",
"nvr": "testmodule2-master-20180406051653.96c371af",
"release": "20180406051653.96c371af",
"state": 1,
"version": "master",
"completion_ts": 1433473124.0,
},
]
self.koji_wrapper.koji_proxy.search.return_value = mock_build_ids
@ -362,10 +367,14 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.assertIn("module_context", module)
expected_query = "testmodule2-master-*"
self.koji_wrapper.koji_proxy.search.assert_called_once_with(expected_query, "build",
"glob")
self.koji_wrapper.koji_proxy.search.assert_called_once_with(
expected_query, "build", "glob"
)
expected_calls = [mock.call(mock_build_ids[0]["id"]), mock.call(mock_build_ids[1]["id"])]
expected_calls = [
mock.call(mock_build_ids[0]["id"]),
mock.call(mock_build_ids[1]["id"]),
]
self.koji_wrapper.koji_proxy.getBuild.mock_calls == expected_calls
def test_get_koji_modules_ignore_deleted(self):
@ -416,31 +425,29 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
self.koji_wrapper.koji_proxy.search.assert_called_once_with(
"testmodule2-master_dash-*", "build", "glob"
)
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(mock_build_ids[0]["id"])
self.koji_wrapper.koji_proxy.getBuild.assert_called_once_with(
mock_build_ids[0]["id"]
)
self.koji_wrapper.koji_proxy.listArchives.assert_not_called()
self.koji_wrapper.koji_proxy.listRPMs.assert_not_called()
class TestSourceKoji(helpers.PungiTestCase):
@mock.patch('pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji')
@mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')
@mock.patch("pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji")
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_run(self, KojiWrapper, gpfk):
compose = helpers.DummyCompose(self.topdir, {
'koji_profile': 'koji'
})
KojiWrapper.return_value.koji_module.config.topdir = '/prefix'
compose = helpers.DummyCompose(self.topdir, {"koji_profile": "koji"})
KojiWrapper.return_value.koji_module.config.topdir = "/prefix"
phase = source_koji.PkgsetSourceKoji(compose)
pkgsets, path_prefix = phase()
self.assertEqual(pkgsets, gpfk.return_value)
self.assertEqual(path_prefix, '/prefix/')
self.assertEqual(KojiWrapper.mock_calls, [mock.call('koji')])
self.assertEqual(path_prefix, "/prefix/")
self.assertEqual(KojiWrapper.mock_calls, [mock.call("koji")])
class TestCorrectNVR(helpers.PungiTestCase):
def setUp(self):
super(TestCorrectNVR, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {})
@ -467,36 +474,39 @@ class TestCorrectNVR(helpers.PungiTestCase):
def test_new_nv(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nv)
expected = {
'name': 'base-runtime',
'stream': 'f26'}
expected = {"name": "base-runtime", "stream": "f26"}
self.assertEqual(module_info, expected)
def test_new_nvr(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvr)
expected = {
'name': 'base-runtime',
'stream': 'f26',
'version': '20170502134116'}
"name": "base-runtime",
"stream": "f26",
"version": "20170502134116",
}
self.assertEqual(module_info, expected)
def test_new_nvrc(self):
module_info = source_koji.variant_dict_from_str(self.compose, self.new_nvrc)
expected = {
'name': 'base-runtime',
'stream': 'f26',
'version': '20170502134116',
'context': '0123abcd'}
"name": "base-runtime",
"stream": "f26",
"version": "20170502134116",
"context": "0123abcd",
}
self.assertEqual(module_info, expected)
def test_new_garbage_value(self):
self.assertRaises(ValueError, source_koji.variant_dict_from_str,
self.compose, 'foo:bar:baz:quux:qaar')
self.assertRaises(
ValueError,
source_koji.variant_dict_from_str,
self.compose,
"foo:bar:baz:quux:qaar",
)
class TestFilterInherited(unittest.TestCase):
def test_empty_module_list(self):
event = {"id": 123456}
koji_proxy = mock.Mock()
@ -504,7 +514,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"}
{"name": "middle-tag"},
{"name": "bottom-tag"},
]
result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag)
@ -521,7 +532,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"}
{"name": "middle-tag"},
{"name": "bottom-tag"},
]
module_builds = [
{"name": "foo", "version": "1", "release": "1", "tag_name": "top-tag"},
@ -547,7 +559,8 @@ class TestFilterInherited(unittest.TestCase):
top_tag = "top-tag"
koji_proxy.getFullInheritance.return_value = [
{"name": "middle-tag"}, {"name": "bottom-tag"}
{"name": "middle-tag"},
{"name": "bottom-tag"},
]
module_builds = [
{"name": "foo", "version": "1", "release": "2", "tag_name": "bottom-tag"},
@ -671,7 +684,6 @@ class MockModule(object):
@mock.patch("pungi.module_util.Modulemd.ModuleStream.read_file", new=MockModule)
@unittest.skipIf(Modulemd is None, "Skipping tests, no module support")
class TestAddModuleToVariant(helpers.PungiTestCase):
def setUp(self):
super(TestAddModuleToVariant, self).setUp()
self.koji = mock.Mock()
@ -695,9 +707,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
}
def test_adding_module(self):
variant = mock.Mock(
arches=["armhfp", "x86_64"], arch_mmds={}, modules=[]
)
variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
@ -705,10 +715,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds,
{
"armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
},
"x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
},
},
)
@ -729,10 +743,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds,
{
"armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
},
"x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
},
},
@ -742,9 +760,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
)
def test_adding_module_with_add_module(self):
variant = mock.Mock(
arches=["armhfp", "x86_64"], arch_mmds={}, modules=[]
)
variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
source_koji._add_module_to_variant(
self.koji, variant, self.buildinfo, add_to_variant_modules=True
@ -754,10 +770,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds,
{
"armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
},
"x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
},
},
)
@ -782,10 +802,14 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant.arch_mmds,
{
"armhfp": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.armv7hl.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.armv7hl.txt"
),
},
"x86_64": {
"module:master:20190318:abcdef": MockModule("/koji/modulemd.x86_64.txt"),
"module:master:20190318:abcdef": MockModule(
"/koji/modulemd.x86_64.txt"
),
"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
},
},

View File

@ -11,95 +11,101 @@ from . import helpers
class RepoclosureWrapperTestCase(helpers.BaseTestCase):
def test_minimal_command(self):
self.assertEqual(rc.get_repoclosure_cmd(),
['/usr/bin/repoclosure', '--tempcache'])
self.assertEqual(
rc.get_repoclosure_cmd(), ["/usr/bin/repoclosure", "--tempcache"]
)
def test_minimal_dnf_command(self):
self.assertEqual(rc.get_repoclosure_cmd(backend='dnf'),
['dnf', 'repoclosure'])
self.assertEqual(rc.get_repoclosure_cmd(backend="dnf"), ["dnf", "repoclosure"])
def test_unknown_backend(self):
with self.assertRaises(RuntimeError) as ctx:
rc.get_repoclosure_cmd(backend='rpm')
rc.get_repoclosure_cmd(backend="rpm")
self.assertEqual(str(ctx.exception), 'Unknown repoclosure backend: rpm')
self.assertEqual(str(ctx.exception), "Unknown repoclosure backend: rpm")
def test_multiple_arches(self):
self.assertEqual(rc.get_repoclosure_cmd(arch=['x86_64', 'ppc64']),
['/usr/bin/repoclosure', '--tempcache', '--arch=x86_64', '--arch=ppc64'])
self.assertEqual(
rc.get_repoclosure_cmd(arch=["x86_64", "ppc64"]),
["/usr/bin/repoclosure", "--tempcache", "--arch=x86_64", "--arch=ppc64"],
)
def test_full_command(self):
repos = {'my-repo': '/mnt/koji/repo'}
lookaside = {'fedora': 'http://kojipkgs.fp.o/repo'}
repos = {"my-repo": "/mnt/koji/repo"}
lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
cmd = rc.get_repoclosure_cmd(arch='x86_64', repos=repos, lookaside=lookaside)
self.assertEqual(cmd[0], '/usr/bin/repoclosure')
cmd = rc.get_repoclosure_cmd(arch="x86_64", repos=repos, lookaside=lookaside)
self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual(
self,
cmd[1:],
[
'--tempcache',
'--arch=x86_64',
'--repofrompath=my-repo,file:///mnt/koji/repo',
'--repofrompath=fedora,http://kojipkgs.fp.o/repo',
'--repoid=my-repo',
'--lookaside=fedora',
]
"--tempcache",
"--arch=x86_64",
"--repofrompath=my-repo,file:///mnt/koji/repo",
"--repofrompath=fedora,http://kojipkgs.fp.o/repo",
"--repoid=my-repo",
"--lookaside=fedora",
],
)
def test_full_dnf_command(self):
repos = {'my-repo': '/mnt/koji/repo'}
lookaside = {'fedora': 'http://kojipkgs.fp.o/repo'}
repos = {"my-repo": "/mnt/koji/repo"}
lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
cmd = rc.get_repoclosure_cmd(backend='dnf', arch='x86_64',
repos=repos, lookaside=lookaside)
self.assertEqual(cmd[:2], ['dnf', 'repoclosure'])
cmd = rc.get_repoclosure_cmd(
backend="dnf", arch="x86_64", repos=repos, lookaside=lookaside
)
self.assertEqual(cmd[:2], ["dnf", "repoclosure"])
six.assertCountEqual(
self,
cmd[2:],
['--arch=x86_64',
'--repofrompath=my-repo,file:///mnt/koji/repo',
'--repofrompath=fedora,http://kojipkgs.fp.o/repo',
'--repo=my-repo',
'--check=my-repo',
'--repo=fedora'])
[
"--arch=x86_64",
"--repofrompath=my-repo,file:///mnt/koji/repo",
"--repofrompath=fedora,http://kojipkgs.fp.o/repo",
"--repo=my-repo",
"--check=my-repo",
"--repo=fedora",
],
)
def test_expand_repo(self):
repos = {
'local': '/mnt/koji/repo',
'remote': 'http://kojipkgs.fp.o/repo',
"local": "/mnt/koji/repo",
"remote": "http://kojipkgs.fp.o/repo",
}
cmd = rc.get_repoclosure_cmd(repos=repos)
self.assertEqual(cmd[0], '/usr/bin/repoclosure')
self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual(
self,
cmd[1:],
[
'--tempcache',
'--repofrompath=local,file:///mnt/koji/repo',
'--repofrompath=remote,http://kojipkgs.fp.o/repo',
'--repoid=local',
'--repoid=remote',
]
"--tempcache",
"--repofrompath=local,file:///mnt/koji/repo",
"--repofrompath=remote,http://kojipkgs.fp.o/repo",
"--repoid=local",
"--repoid=remote",
],
)
def test_expand_lookaside(self):
repos = {
'local': '/mnt/koji/repo',
'remote': 'http://kojipkgs.fp.o/repo',
"local": "/mnt/koji/repo",
"remote": "http://kojipkgs.fp.o/repo",
}
cmd = rc.get_repoclosure_cmd(lookaside=repos)
self.assertEqual(cmd[0], '/usr/bin/repoclosure')
self.assertEqual(cmd[0], "/usr/bin/repoclosure")
six.assertCountEqual(
self,
cmd[1:],
[
'--tempcache',
'--repofrompath=local,file:///mnt/koji/repo',
'--repofrompath=remote,http://kojipkgs.fp.o/repo',
'--lookaside=local',
'--lookaside=remote',
]
"--tempcache",
"--repofrompath=local,file:///mnt/koji/repo",
"--repofrompath=remote,http://kojipkgs.fp.o/repo",
"--lookaside=local",
"--lookaside=remote",
],
)
@ -118,7 +124,7 @@ class FusExtractorTestCase(helpers.PungiTestCase):
def test_error(self):
helpers.touch(
self.input1,
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n"
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n",
)
with self.assertRaises(RuntimeError) as ctx:
rc.extract_from_fus_logs([self.input1], self.output)
@ -130,11 +136,11 @@ class FusExtractorTestCase(helpers.PungiTestCase):
def test_errors_in_multiple_files(self):
helpers.touch(
self.input1,
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n"
"fus-DEBUG: Installing bar\nProblem 1/1\n - nothing provides foo\n",
)
helpers.touch(
self.input2,
"fus-DEBUG: Installing baz\nProblem 1/1\n - nothing provides quux\n"
"fus-DEBUG: Installing baz\nProblem 1/1\n - nothing provides quux\n",
)
with self.assertRaises(RuntimeError) as ctx:
rc.extract_from_fus_logs([self.input1, self.input2], self.output)

View File

@ -11,15 +11,16 @@ from tests import helpers
class TestRunrootOpenSSH(helpers.PungiTestCase):
def setUp(self):
super(TestRunrootOpenSSH, self).setUp()
self.compose = helpers.DummyCompose(self.topdir, {
self.compose = helpers.DummyCompose(
self.topdir,
{
"runroot": True,
"runroot_method": "openssh",
"runroot_ssh_user": "root",
"runroot_ssh_hostnames": {
"x86_64": "localhost"
},
"runroot_ssh_hostnames": {"x86_64": "localhost"},
"runroot_tag": "f28-build",
})
},
)
self.runroot = Runroot(self.compose)
@ -52,7 +53,7 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
"""
logfile = ("/foo/runroot." + suffix + ".log") if suffix else "/foo/runroot.log"
return mock.call(
['ssh', '-oBatchMode=yes', '-n', '-l', 'root', 'localhost', cmd],
["ssh", "-oBatchMode=yes", "-n", "-l", "root", "localhost", cmd],
logfile=logfile,
show_cmd=True,
)
@ -61,12 +62,15 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
def test_run(self, run):
run.return_value = (0, "dummy output\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
run.assert_has_calls([
self._ssh_call('df -h'),
run.assert_has_calls(
[
self._ssh_call("df -h"),
self._ssh_call(
"rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'", suffix="rpms"
"rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)
@mock.patch("pungi.runroot.run")
def test_get_buildroot_rpms(self, run):
@ -75,92 +79,123 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
rpms = self.runroot.get_buildroot_rpms()
self.assertEqual(
set(rpms), set(["foo-1-1.fc29.noarch", "bar-1-1.fc29.noarch"]))
self.assertEqual(set(rpms), set(["foo-1-1.fc29.noarch", "bar-1-1.fc29.noarch"]))
@mock.patch("pungi.runroot.run")
def test_run_templates(self, run):
self.compose.conf["runroot_ssh_init_template"] = "/usr/sbin/init_runroot {runroot_tag}"
self.compose.conf["runroot_ssh_install_packages_template"] = \
"install {runroot_key} {packages}"
self.compose.conf[
"runroot_ssh_init_template"
] = "/usr/sbin/init_runroot {runroot_tag}"
self.compose.conf[
"runroot_ssh_install_packages_template"
] = "install {runroot_key} {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {runroot_key} {command}"
run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64",
packages=["lorax", "automake"])
run.assert_has_calls([
self._ssh_call('/usr/sbin/init_runroot f28-build', suffix="init"),
self._ssh_call('install key lorax automake', suffix="install_packages"),
self._ssh_call('run key df -h'),
self.runroot.run(
"df -h",
log_file="/foo/runroot.log",
arch="x86_64",
packages=["lorax", "automake"],
)
run.assert_has_calls(
[
self._ssh_call("/usr/sbin/init_runroot f28-build", suffix="init"),
self._ssh_call("install key lorax automake", suffix="install_packages"),
self._ssh_call("run key df -h"),
self._ssh_call(
"run key rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)
@mock.patch("pungi.runroot.run")
def test_run_templates_no_init(self, run):
self.compose.conf["runroot_ssh_install_packages_template"] = \
"install {packages}"
self.compose.conf[
"runroot_ssh_install_packages_template"
] = "install {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64",
packages=["lorax", "automake"])
run.assert_has_calls([
self._ssh_call('install lorax automake', suffix="install_packages"),
self._ssh_call('run df -h'),
self.runroot.run(
"df -h",
log_file="/foo/runroot.log",
arch="x86_64",
packages=["lorax", "automake"],
)
run.assert_has_calls(
[
self._ssh_call("install lorax automake", suffix="install_packages"),
self._ssh_call("run df -h"),
self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)
@mock.patch("pungi.runroot.run")
def test_run_templates_no_packages(self, run):
self.compose.conf["runroot_ssh_install_packages_template"] = \
"install {packages}"
self.compose.conf[
"runroot_ssh_install_packages_template"
] = "install {packages}"
self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64")
run.assert_has_calls([
self._ssh_call('run df -h'),
run.assert_has_calls(
[
self._ssh_call("run df -h"),
self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)
@mock.patch("pungi.runroot.run")
def test_run_templates_no_install_packages(self, run):
self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64",
packages=["lorax", "automake"])
run.assert_has_calls([
self._ssh_call('run df -h'),
self.runroot.run(
"df -h",
log_file="/foo/runroot.log",
arch="x86_64",
packages=["lorax", "automake"],
)
run.assert_has_calls(
[
self._ssh_call("run df -h"),
self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)
@mock.patch("pungi.runroot.run")
def test_run_templates_output_dir(self, run):
self.compose.conf["runroot_ssh_run_template"] = "run {command}"
run.return_value = (0, "key\n")
self.runroot.run("df -h", log_file="/foo/runroot.log", arch="x86_64",
self.runroot.run(
"df -h",
log_file="/foo/runroot.log",
arch="x86_64",
packages=["lorax", "automake"],
chown_paths=["/mnt/foo/compose", "/mnt/foo/x"])
run.assert_has_calls([
chown_paths=["/mnt/foo/compose", "/mnt/foo/x"],
)
run.assert_has_calls(
[
self._ssh_call(
"run df -h && chmod -R a+r /mnt/foo/compose /mnt/foo/x && "
"chown -R %d /mnt/foo/compose /mnt/foo/x" % os.getuid()),
"chown -R %d /mnt/foo/compose /mnt/foo/x" % os.getuid()
),
self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
suffix="rpms",
),
])
]
)

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import mock
try:
import unittest2 as unittest
except ImportError:
@ -53,52 +54,58 @@ class FileSCMTestCase(SCMBaseTest):
"""
super(FileSCMTestCase, self).setUp()
self.srcdir = tempfile.mkdtemp()
touch(os.path.join(self.srcdir, 'in_root'))
touch(os.path.join(self.srcdir, 'subdir', 'first'))
touch(os.path.join(self.srcdir, 'subdir', 'second'))
touch(os.path.join(self.srcdir, "in_root"))
touch(os.path.join(self.srcdir, "subdir", "first"))
touch(os.path.join(self.srcdir, "subdir", "second"))
def tearDown(self):
super(FileSCMTestCase, self).tearDown()
shutil.rmtree(self.srcdir)
def test_get_file_by_name(self):
file = os.path.join(self.srcdir, 'in_root')
file = os.path.join(self.srcdir, "in_root")
retval = scm.get_file_from_scm(file, self.destdir)
self.assertStructure(retval, ['in_root'])
self.assertStructure(retval, ["in_root"])
def test_get_file_by_dict(self):
retval = scm.get_file_from_scm({
'scm': 'file', 'repo': None, 'file': os.path.join(self.srcdir, 'subdir', 'first')},
self.destdir)
self.assertStructure(retval, ['first'])
retval = scm.get_file_from_scm(
{
"scm": "file",
"repo": None,
"file": os.path.join(self.srcdir, "subdir", "first"),
},
self.destdir,
)
self.assertStructure(retval, ["first"])
def test_get_dir_by_name(self):
retval = scm.get_dir_from_scm(os.path.join(self.srcdir, 'subdir'), self.destdir)
self.assertStructure(retval, ['first', 'second'])
retval = scm.get_dir_from_scm(os.path.join(self.srcdir, "subdir"), self.destdir)
self.assertStructure(retval, ["first", "second"])
def test_get_dir_by_dict(self):
retval = scm.get_dir_from_scm(
{'scm': 'file', 'repo': None, 'dir': os.path.join(self.srcdir, 'subdir')},
self.destdir)
self.assertStructure(retval, ['first', 'second'])
{"scm": "file", "repo": None, "dir": os.path.join(self.srcdir, "subdir")},
self.destdir,
)
self.assertStructure(retval, ["first", "second"])
def test_get_missing_file(self):
with self.assertRaises(RuntimeError) as ctx:
scm.get_file_from_scm({'scm': 'file',
'repo': None,
'file': 'this-is-really-not-here.txt'},
self.destdir)
scm.get_file_from_scm(
{"scm": "file", "repo": None, "file": "this-is-really-not-here.txt"},
self.destdir,
)
self.assertIn('No files matched', str(ctx.exception))
self.assertIn("No files matched", str(ctx.exception))
def test_get_missing_dir(self):
with self.assertRaises(RuntimeError) as ctx:
scm.get_dir_from_scm({'scm': 'file',
'repo': None,
'dir': 'this-is-really-not-here'},
self.destdir)
scm.get_dir_from_scm(
{"scm": "file", "repo": None, "dir": "this-is-really-not-here"},
self.destdir,
)
self.assertIn('No directories matched', str(ctx.exception))
self.assertIn("No directories matched", str(ctx.exception))
class GitSCMTestCase(SCMBaseTest):
@ -110,26 +117,30 @@ class GitSCMTestCase(SCMBaseTest):
["git", "init"],
["git", "fetch", "--depth=1", url, branch],
["git", "checkout", "FETCH_HEAD"],
] + command,
]
+ command,
)
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_file(self, run):
def process(cmd, workdir=None, **kwargs):
touch(os.path.join(workdir, 'some_file.txt'))
touch(os.path.join(workdir, 'other_file.txt'))
touch(os.path.join(workdir, "some_file.txt"))
touch(os.path.join(workdir, "other_file.txt"))
run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'git',
'repo': 'git://example.com/git/repo.git',
'file': 'some_file.txt'},
self.destdir)
self.assertStructure(retval, ['some_file.txt'])
retval = scm.get_file_from_scm(
{
"scm": "git",
"repo": "git://example.com/git/repo.git",
"file": "some_file.txt",
},
self.destdir,
)
self.assertStructure(retval, ["some_file.txt"])
self.assertCalls(run, "git://example.com/git/repo.git", "master")
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_file_fetch_fails(self, run):
url = "git://example.com/git/repo.git"
@ -138,15 +149,15 @@ class GitSCMTestCase(SCMBaseTest):
exc = RuntimeError()
exc.output = ""
raise exc
touch(os.path.join(workdir, 'some_file.txt'))
touch(os.path.join(workdir, 'other_file.txt'))
touch(os.path.join(workdir, "some_file.txt"))
touch(os.path.join(workdir, "other_file.txt"))
run.side_effect = process
retval = scm.get_file_from_scm(
{"scm": "git", "repo": url, "file": "some_file.txt"}, self.destdir
)
self.assertStructure(retval, ['some_file.txt'])
self.assertStructure(retval, ["some_file.txt"])
self.assertEqual(
[call[0][0] for call in run.call_args_list],
[
@ -158,77 +169,85 @@ class GitSCMTestCase(SCMBaseTest):
],
)
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_file_generated_by_command(self, run):
def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git":
touch(os.path.join(workdir, 'some_file.txt'))
return 0, ''
touch(os.path.join(workdir, "some_file.txt"))
return 0, ""
run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'git',
'repo': 'git://example.com/git/repo.git',
'file': 'some_file.txt',
'command': 'make'},
self.destdir)
self.assertStructure(retval, ['some_file.txt'])
retval = scm.get_file_from_scm(
{
"scm": "git",
"repo": "git://example.com/git/repo.git",
"file": "some_file.txt",
"command": "make",
},
self.destdir,
)
self.assertStructure(retval, ["some_file.txt"])
self.assertCalls(run, "git://example.com/git/repo.git", "master", "make")
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_file_and_fail_to_generate(self, run):
def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git":
touch(os.path.join(workdir, 'some_file.txt'))
touch(os.path.join(workdir, "some_file.txt"))
return 0, "output"
return 1, "output"
run.side_effect = process
with self.assertRaises(RuntimeError) as ctx:
scm.get_file_from_scm({'scm': 'git',
'repo': 'git://example.com/git/repo.git',
'file': 'some_file.txt',
'command': 'make'},
self.destdir)
scm.get_file_from_scm(
{
"scm": "git",
"repo": "git://example.com/git/repo.git",
"file": "some_file.txt",
"command": "make",
},
self.destdir,
)
self.assertEqual(str(ctx.exception), "'make' failed with exit code 1")
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_dir(self, run):
def process(cmd, workdir=None, **kwargs):
touch(os.path.join(workdir, "subdir", 'first'))
touch(os.path.join(workdir, "subdir", 'second'))
touch(os.path.join(workdir, "subdir", "first"))
touch(os.path.join(workdir, "subdir", "second"))
run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'git',
'repo': 'git://example.com/git/repo.git',
'dir': 'subdir'},
self.destdir)
self.assertStructure(retval, ['first', 'second'])
retval = scm.get_dir_from_scm(
{"scm": "git", "repo": "git://example.com/git/repo.git", "dir": "subdir"},
self.destdir,
)
self.assertStructure(retval, ["first", "second"])
self.assertCalls(run, "git://example.com/git/repo.git", "master")
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_dir_and_generate(self, run):
def process(cmd, workdir=None, **kwargs):
if cmd[0] == "git":
touch(os.path.join(workdir, 'subdir', 'first'))
touch(os.path.join(workdir, 'subdir', 'second'))
return 0, ''
touch(os.path.join(workdir, "subdir", "first"))
touch(os.path.join(workdir, "subdir", "second"))
return 0, ""
run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'git',
'repo': 'git://example.com/git/repo.git',
'dir': 'subdir',
'command': 'make'},
self.destdir)
self.assertStructure(retval, ['first', 'second'])
retval = scm.get_dir_from_scm(
{
"scm": "git",
"repo": "git://example.com/git/repo.git",
"dir": "subdir",
"command": "make",
},
self.destdir,
)
self.assertStructure(retval, ["first", "second"])
self.assertCalls(run, "git://example.com/git/repo.git", "master", "make")
@ -237,8 +256,11 @@ class RpmSCMTestCase(SCMBaseTest):
super(RpmSCMTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
self.exploded = set()
self.rpms = [self.tmpdir + '/whatever.rpm', self.tmpdir + '/another.rpm']
self.numbered = [self.tmpdir + x for x in ['/one1.rpm', '/one2.rpm', '/two1.rpm', '/two2.rpm']]
self.rpms = [self.tmpdir + "/whatever.rpm", self.tmpdir + "/another.rpm"]
self.numbered = [
self.tmpdir + x
for x in ["/one1.rpm", "/one2.rpm", "/two1.rpm", "/two2.rpm"]
]
for rpm in self.rpms + self.numbered:
touch(rpm)
@ -248,155 +270,180 @@ class RpmSCMTestCase(SCMBaseTest):
def _explode_rpm(self, path, dest):
self.exploded.add(path)
touch(os.path.join(dest, 'some-file.txt'))
touch(os.path.join(dest, 'subdir', 'foo.txt'))
touch(os.path.join(dest, 'subdir', 'bar.txt'))
touch(os.path.join(dest, "some-file.txt"))
touch(os.path.join(dest, "subdir", "foo.txt"))
touch(os.path.join(dest, "subdir", "bar.txt"))
def _explode_multiple(self, path, dest):
self.exploded.add(path)
cnt = len(self.exploded)
touch(os.path.join(dest, 'some-file-%d.txt' % cnt))
touch(os.path.join(dest, 'subdir-%d' % cnt, 'foo-%d.txt' % cnt))
touch(os.path.join(dest, 'common', 'foo-%d.txt' % cnt))
touch(os.path.join(dest, "some-file-%d.txt" % cnt))
touch(os.path.join(dest, "subdir-%d" % cnt, "foo-%d.txt" % cnt))
touch(os.path.join(dest, "common", "foo-%d.txt" % cnt))
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_file(self, explode):
explode.side_effect = self._explode_rpm
retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'file': 'some-file.txt'},
self.destdir)
{"scm": "rpm", "repo": self.rpms[0], "file": "some-file.txt"}, self.destdir
)
self.assertStructure(retval, ['some-file.txt'])
self.assertStructure(retval, ["some-file.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_more_files(self, explode):
explode.side_effect = self._explode_rpm
retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0],
'file': ['some-file.txt', 'subdir/foo.txt']},
self.destdir)
{
"scm": "rpm",
"repo": self.rpms[0],
"file": ["some-file.txt", "subdir/foo.txt"],
},
self.destdir,
)
self.assertStructure(retval, ['some-file.txt', 'foo.txt'])
self.assertStructure(retval, ["some-file.txt", "foo.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_whole_dir(self, explode):
explode.side_effect = self._explode_rpm
retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'dir': 'subdir'},
self.destdir)
{"scm": "rpm", "repo": self.rpms[0], "dir": "subdir"}, self.destdir
)
self.assertStructure(retval, ['subdir/foo.txt', 'subdir/bar.txt'])
self.assertStructure(retval, ["subdir/foo.txt", "subdir/bar.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_contents(self, explode):
explode.side_effect = self._explode_rpm
retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'repo': self.rpms[0], 'dir': 'subdir/'},
self.destdir)
{"scm": "rpm", "repo": self.rpms[0], "dir": "subdir/"}, self.destdir
)
self.assertStructure(retval, ['foo.txt', 'bar.txt'])
self.assertStructure(retval, ["foo.txt", "bar.txt"])
self.assertEqual(self.exploded, set([self.rpms[0]]))
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_files_from_two_rpms(self, explode):
explode.side_effect = self._explode_multiple
retval = scm.get_file_from_scm(
{'scm': 'rpm', 'repo': self.rpms,
'file': ['some-file-1.txt', 'some-file-2.txt']},
self.destdir)
{
"scm": "rpm",
"repo": self.rpms,
"file": ["some-file-1.txt", "some-file-2.txt"],
},
self.destdir,
)
self.assertStructure(retval, ['some-file-1.txt', 'some-file-2.txt'])
self.assertStructure(retval, ["some-file-1.txt", "some-file-2.txt"])
six.assertCountEqual(self, self.exploded, self.rpms)
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_files_from_glob_rpms(self, explode):
explode.side_effect = self._explode_multiple
retval = scm.get_file_from_scm(
{'scm': 'rpm', 'file': 'some-file-*.txt',
'repo': [self.tmpdir + '/one*.rpm', self.tmpdir + '/two*.rpm']},
self.destdir)
{
"scm": "rpm",
"file": "some-file-*.txt",
"repo": [self.tmpdir + "/one*.rpm", self.tmpdir + "/two*.rpm"],
},
self.destdir,
)
self.assertStructure(retval,
['some-file-1.txt', 'some-file-2.txt', 'some-file-3.txt', 'some-file-4.txt'])
self.assertStructure(
retval,
[
"some-file-1.txt",
"some-file-2.txt",
"some-file-3.txt",
"some-file-4.txt",
],
)
six.assertCountEqual(self, self.exploded, self.numbered)
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_from_two_rpms(self, explode):
explode.side_effect = self._explode_multiple
retval = scm.get_dir_from_scm({'scm': 'rpm',
'repo': self.rpms,
'dir': 'common'},
self.destdir)
retval = scm.get_dir_from_scm(
{"scm": "rpm", "repo": self.rpms, "dir": "common"}, self.destdir
)
self.assertStructure(retval, ['common/foo-1.txt', 'common/foo-2.txt'])
self.assertStructure(retval, ["common/foo-1.txt", "common/foo-2.txt"])
six.assertCountEqual(self, self.exploded, self.rpms)
@mock.patch('pungi.wrappers.scm.explode_rpm_package')
@mock.patch("pungi.wrappers.scm.explode_rpm_package")
def test_get_dir_from_glob_rpms(self, explode):
explode.side_effect = self._explode_multiple
retval = scm.get_dir_from_scm(
{'scm': 'rpm', 'dir': 'common/',
'repo': [self.tmpdir + '/one*.rpm', self.tmpdir + '/two*.rpm']},
self.destdir)
{
"scm": "rpm",
"dir": "common/",
"repo": [self.tmpdir + "/one*.rpm", self.tmpdir + "/two*.rpm"],
},
self.destdir,
)
self.assertStructure(retval,
['foo-1.txt', 'foo-2.txt', 'foo-3.txt', 'foo-4.txt'])
self.assertStructure(
retval, ["foo-1.txt", "foo-2.txt", "foo-3.txt", "foo-4.txt"]
)
six.assertCountEqual(self, self.exploded, self.numbered)
class CvsSCMTestCase(SCMBaseTest):
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_file(self, run):
commands = []
def process(cmd, workdir=None, **kwargs):
fname = cmd[-1]
touch(os.path.join(workdir, fname))
commands.append(' '.join(cmd))
commands.append(" ".join(cmd))
run.side_effect = process
retval = scm.get_file_from_scm({'scm': 'cvs',
'repo': 'http://example.com/cvs',
'file': 'some_file.txt'},
self.destdir)
self.assertStructure(retval, ['some_file.txt'])
retval = scm.get_file_from_scm(
{"scm": "cvs", "repo": "http://example.com/cvs", "file": "some_file.txt"},
self.destdir,
)
self.assertStructure(retval, ["some_file.txt"])
self.assertEqual(
commands,
['/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD some_file.txt'])
["/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD some_file.txt"],
)
@mock.patch('pungi.wrappers.scm.run')
@mock.patch("pungi.wrappers.scm.run")
def test_get_dir(self, run):
commands = []
def process(cmd, workdir=None, **kwargs):
fname = cmd[-1]
touch(os.path.join(workdir, fname, 'first'))
touch(os.path.join(workdir, fname, 'second'))
commands.append(' '.join(cmd))
touch(os.path.join(workdir, fname, "first"))
touch(os.path.join(workdir, fname, "second"))
commands.append(" ".join(cmd))
run.side_effect = process
retval = scm.get_dir_from_scm({'scm': 'cvs',
'repo': 'http://example.com/cvs',
'dir': 'subdir'},
self.destdir)
self.assertStructure(retval, ['first', 'second'])
retval = scm.get_dir_from_scm(
{"scm": "cvs", "repo": "http://example.com/cvs", "dir": "subdir"},
self.destdir,
)
self.assertStructure(retval, ["first", "second"])
self.assertEqual(
commands,
['/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD subdir'])
["/usr/bin/cvs -q -d http://example.com/cvs export -r HEAD subdir"],
)
@mock.patch("pungi.wrappers.scm.urlretrieve")

View File

@ -16,27 +16,36 @@ from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom
try:
import dnf
HAS_DNF = True
except ImportError:
HAS_DNF = False
try:
import yum
HAS_YUM = True
except ImportError:
HAS_YUM = False
PAD = b'\0' * 100
UNBOOTABLE_ISO = (b'\0' * 0x8001) + b'CD001' + PAD
ISO_WITH_MBR = (b'\0' * 0x1fe) + b'\x55\xAA' + (b'\0' * 0x7e01) + b'CD001' + PAD
ISO_WITH_GPT = (b'\0' * 0x200) + b'EFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD
ISO_WITH_MBR_AND_GPT = (b'\0' * 0x1fe) + b'\x55\xAAEFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD
ISO_WITH_TORITO = (b'\0' * 0x8001) + b'CD001' + (b'\0' * 0x7fa) + b'\0CD001\1EL TORITO SPECIFICATION' + PAD
PAD = b"\0" * 100
UNBOOTABLE_ISO = (b"\0" * 0x8001) + b"CD001" + PAD
ISO_WITH_MBR = (b"\0" * 0x1FE) + b"\x55\xAA" + (b"\0" * 0x7E01) + b"CD001" + PAD
ISO_WITH_GPT = (b"\0" * 0x200) + b"EFI PART" + (b"\0" * 0x7DF9) + b"CD001" + PAD
ISO_WITH_MBR_AND_GPT = (
(b"\0" * 0x1FE) + b"\x55\xAAEFI PART" + (b"\0" * 0x7DF9) + b"CD001" + PAD
)
ISO_WITH_TORITO = (
(b"\0" * 0x8001)
+ b"CD001"
+ (b"\0" * 0x7FA)
+ b"\0CD001\1EL TORITO SPECIFICATION"
+ PAD
)
class TestCheckImageSanity(PungiTestCase):
def test_missing_file_reports_error(self):
compose = DummyCompose(self.topdir, {})
@ -45,141 +54,154 @@ class TestCheckImageSanity(PungiTestCase):
def test_missing_file_doesnt_report_if_failable(self):
compose = DummyCompose(self.topdir, {})
compose.image.deliverable = 'iso'
compose.image.deliverable = "iso"
compose.image.can_fail = True
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Failable deliverable must not raise')
self.fail("Failable deliverable must not raise")
def test_correct_iso_does_not_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO)
touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Correct unbootable image must not raise')
self.fail("Correct unbootable image must not raise")
def test_incorrect_iso_raises(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
touch(os.path.join(self.topdir, 'compose', compose.image.path), 'Hey there')
touch(os.path.join(self.topdir, "compose", compose.image.path), "Hey there")
with self.assertRaises(RuntimeError) as ctx:
test_phase.check_image_sanity(compose)
self.assertIn('does not look like an ISO file', str(ctx.exception))
self.assertIn("does not look like an ISO file", str(ctx.exception))
def test_bootable_iso_without_mbr_or_gpt_raises_on_x86_64(self):
compose = DummyCompose(self.topdir, {})
compose.image.arch = 'x86_64'
compose.image.format = 'iso'
compose.image.arch = "x86_64"
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO)
touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
with self.assertRaises(RuntimeError) as ctx:
test_phase.check_image_sanity(compose)
self.assertIn('is supposed to be bootable, but does not have MBR nor GPT',
str(ctx.exception))
self.assertIn(
"is supposed to be bootable, but does not have MBR nor GPT",
str(ctx.exception),
)
def test_bootable_iso_without_mbr_or_gpt_doesnt_raise_on_arm(self):
compose = DummyCompose(self.topdir, {})
compose.image.arch = 'armhfp'
compose.image.format = 'iso'
compose.image.arch = "armhfp"
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO)
touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Failable deliverable must not raise')
self.fail("Failable deliverable must not raise")
def test_failable_bootable_iso_without_mbr_gpt_doesnt_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
compose.image.deliverable = 'iso'
compose.image.deliverable = "iso"
compose.image.can_fail = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), UNBOOTABLE_ISO)
touch(os.path.join(self.topdir, "compose", compose.image.path), UNBOOTABLE_ISO)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Failable deliverable must not raise')
self.fail("Failable deliverable must not raise")
def test_bootable_iso_with_mbr_does_not_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR)
touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_MBR)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Bootable image with MBR must not raise')
self.fail("Bootable image with MBR must not raise")
def test_bootable_iso_with_gpt_does_not_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_GPT)
touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_GPT)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Bootable image with GPT must not raise')
self.fail("Bootable image with GPT must not raise")
def test_bootable_iso_with_mbr_and_gpt_does_not_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR_AND_GPT)
touch(
os.path.join(self.topdir, "compose", compose.image.path),
ISO_WITH_MBR_AND_GPT,
)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Bootable image with MBR and GPT must not raise')
self.fail("Bootable image with MBR and GPT must not raise")
def test_bootable_iso_with_el_torito_does_not_raise(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_TORITO)
touch(os.path.join(self.topdir, "compose", compose.image.path), ISO_WITH_TORITO)
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Bootable image with El Torito must not raise')
self.fail("Bootable image with El Torito must not raise")
def test_checks_with_optional_variant(self):
compose = DummyCompose(self.topdir, {})
compose.variants['Server'].variants = {
'optional': mock.Mock(uid='Server-optional', arches=['x86_64'],
type='optional', is_empty=False)
compose.variants["Server"].variants = {
"optional": mock.Mock(
uid="Server-optional",
arches=["x86_64"],
type="optional",
is_empty=False,
)
}
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = True
touch(os.path.join(self.topdir, 'compose', compose.image.path), ISO_WITH_MBR_AND_GPT)
touch(
os.path.join(self.topdir, "compose", compose.image.path),
ISO_WITH_MBR_AND_GPT,
)
image = mock.Mock(path="Server/i386/optional/iso/image.iso",
format='iso', bootable=False)
compose.im.images['Server-optional'] = {'i386': [image]}
image = mock.Mock(
path="Server/i386/optional/iso/image.iso", format="iso", bootable=False
)
compose.im.images["Server-optional"] = {"i386": [image]}
try:
test_phase.check_image_sanity(compose)
except Exception:
self.fail('Checking optional variant must not raise')
self.fail("Checking optional variant must not raise")
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_iso(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 20
@ -200,7 +222,7 @@ class TestCheckImageSanity(PungiTestCase):
"createiso_max_size_is_strict": [(".*", {"*": True})],
},
)
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 20
@ -221,7 +243,7 @@ class TestCheckImageSanity(PungiTestCase):
"createiso_max_size_is_strict": [(".*", {"*": False})],
},
)
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 20
@ -236,7 +258,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_unified(self):
compose = DummyCompose(self.topdir, {})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 20
compose.image.unified = True
@ -253,10 +275,9 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_unified_strict(self):
compose = DummyCompose(
self.topdir,
{"createiso_max_size_is_strict": [(".*", {"*": True})]},
self.topdir, {"createiso_max_size_is_strict": [(".*", {"*": True})]},
)
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 20
compose.image.unified = True
@ -273,7 +294,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_fits_in_limit(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 20})]})
compose.image.format = 'iso'
compose.image.format = "iso"
compose.image.bootable = False
compose.image.size = 5
@ -284,7 +305,7 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_non_iso(self):
compose = DummyCompose(self.topdir, {"createiso_max_size": [(".*", {"*": 10})]})
compose.image.format = 'qcow2'
compose.image.format = "qcow2"
compose.image.bootable = False
compose.image.size = 20
@ -294,32 +315,32 @@ class TestCheckImageSanity(PungiTestCase):
class TestRepoclosure(PungiTestCase):
def setUp(self):
super(TestRepoclosure, self).setUp()
self.maxDiff = None
def _get_repo(self, compose_id, variant, arch, path=None):
path = path or arch + '/os'
path = path or arch + "/os"
return {
'%s-repoclosure-%s.%s' % (compose_id, variant, arch): self.topdir + '/compose/%s/%s' % (variant, path)
"%s-repoclosure-%s.%s" % (compose_id, variant, arch): self.topdir
+ "/compose/%s/%s" % (variant, path)
}
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_skip_if_disabled(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, {
'repoclosure_strictness': [('^.*$', {'*': 'off'})]
})
compose = DummyCompose(
self.topdir, {"repoclosure_strictness": [("^.*$", {"*": "off"})]}
)
test_phase.run_repoclosure(compose)
self.assertEqual(mock_grc.call_args_list, [])
@unittest.skipUnless(HAS_YUM, 'YUM is not available')
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
@unittest.skipUnless(HAS_YUM, "YUM is not available")
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_default_backend(self, mock_run, mock_grc):
with mock.patch('six.PY2', new=True):
with mock.patch("six.PY2", new=True):
compose = DummyCompose(self.topdir, {})
test_phase.run_repoclosure(compose)
@ -327,37 +348,83 @@ class TestRepoclosure(PungiTestCase):
six.assertCountEqual(
self,
mock_grc.call_args_list,
[mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Everything', 'amd64')),
mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Client', 'amd64')),
mock.call(backend='yum', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')),
mock.call(backend='yum', arch=['x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')),
mock.call(backend='yum', arch=['x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Everything', 'x86_64'))])
[
mock.call(
backend="yum",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "amd64"),
),
mock.call(
backend="yum",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Client", "amd64"),
),
mock.call(
backend="yum",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="yum",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
mock.call(
backend="yum",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "x86_64"),
),
],
)
@unittest.skipUnless(HAS_DNF, 'DNF is not available')
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
@unittest.skipUnless(HAS_DNF, "DNF is not available")
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_dnf_backend(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, {'repoclosure_backend': 'dnf'})
compose = DummyCompose(self.topdir, {"repoclosure_backend": "dnf"})
test_phase.run_repoclosure(compose)
six.assertCountEqual(
self,
mock_grc.call_args_list,
[mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Everything', 'amd64')),
mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Client', 'amd64')),
mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')),
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')),
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Everything', 'x86_64'))])
[
mock.call(
backend="dnf",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "amd64"),
),
mock.call(
backend="dnf",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Client", "amd64"),
),
mock.call(
backend="dnf",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Everything", "x86_64"),
),
],
)
@mock.patch("glob.glob")
@mock.patch("pungi.wrappers.repoclosure.extract_from_fus_logs")
@ -385,53 +452,71 @@ class TestRepoclosure(PungiTestCase):
mock.call([f], _log("amd64", "Server")),
mock.call([f], _log("x86_64", "Server")),
mock.call([f], _log("x86_64", "Everything")),
]
],
)
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_report_error(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, {
'repoclosure_strictness': [('^.*$', {'*': 'fatal'})]
})
compose = DummyCompose(
self.topdir, {"repoclosure_strictness": [("^.*$", {"*": "fatal"})]}
)
mock_run.side_effect = mk_boom(cls=RuntimeError)
with self.assertRaises(RuntimeError):
test_phase.run_repoclosure(compose)
@unittest.skipUnless(HAS_DNF, 'DNF is not available')
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
def test_repoclosure_overwrite_options_creates_correct_commands(self, mock_run, mock_grc):
compose = DummyCompose(self.topdir, {
'repoclosure_backend': 'dnf',
'repoclosure_strictness': [
('^.*$', {'*': 'off'}),
('^Server$', {'*': 'fatal'}),
]
})
@unittest.skipUnless(HAS_DNF, "DNF is not available")
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_overwrite_options_creates_correct_commands(
self, mock_run, mock_grc
):
compose = DummyCompose(
self.topdir,
{
"repoclosure_backend": "dnf",
"repoclosure_strictness": [
("^.*$", {"*": "off"}),
("^Server$", {"*": "fatal"}),
],
},
)
test_phase.run_repoclosure(compose)
six.assertCountEqual(
self,
mock_grc.call_args_list,
[mock.call(backend='dnf', arch=['amd64', 'x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'amd64')),
mock.call(backend='dnf', arch=['x86_64', 'noarch'], lookaside={},
repos=self._get_repo(compose.compose_id, 'Server', 'x86_64')),
])
[
mock.call(
backend="dnf",
arch=["amd64", "x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "amd64"),
),
mock.call(
backend="dnf",
arch=["x86_64", "noarch"],
lookaside={},
repos=self._get_repo(compose.compose_id, "Server", "x86_64"),
),
],
)
@mock.patch('pungi.phases.test._delete_repoclosure_cache_dirs')
@mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')
@mock.patch('pungi.phases.test.run')
@mock.patch("pungi.phases.test._delete_repoclosure_cache_dirs")
@mock.patch("pungi.wrappers.repoclosure.get_repoclosure_cmd")
@mock.patch("pungi.phases.test.run")
def test_repoclosure_uses_correct_behaviour(self, mock_run, mock_grc, mock_del):
compose = DummyCompose(self.topdir, {
'repoclosure_backend': 'dnf',
'repoclosure_strictness': [
('^.*$', {'*': 'off'}),
('^Server$', {'*': 'fatal'}),
]
})
compose = DummyCompose(
self.topdir,
{
"repoclosure_backend": "dnf",
"repoclosure_strictness": [
("^.*$", {"*": "off"}),
("^Server$", {"*": "fatal"}),
],
},
)
mock_run.side_effect = mk_boom(cls=RuntimeError)
with self.assertRaises(RuntimeError):

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -8,7 +8,8 @@ filename = *.py
# H306: imports not in alphabetical order
# E226: missing whitespace around arithmetic operator
# W503: line break occured before a binary operator
ignore = E501,E402,H301,H306,E226,W503
# E203: whitespace before ':'
ignore = E501,E402,H301,H306,E226,W503,E203
[run]
omit = tests/*