[osbs] Add a phase to build images in OSBS
The new phase takes an RPM repo from a variant in this compose and a Dockerfile from a configured Git repository and uses them to build an image. The built images are uploaded to a Docker registry by OSBS and are not directly part of the compose (because there is no export function). A new metadata file `osbs.json` contains information that can be used to find the images.

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
Parent: 95cfbfb3fe, commit: b4fc97be03
@@ -233,6 +233,7 @@ def run_compose(compose):
     liveimages_phase = pungi.phases.LiveImagesPhase(compose)
     livemedia_phase = pungi.phases.LiveMediaPhase(compose)
     image_build_phase = pungi.phases.ImageBuildPhase(compose)
+    osbs_phase = pungi.phases.OSBSPhase(compose)
     image_checksum_phase = pungi.phases.ImageChecksumPhase(compose)
     test_phase = pungi.phases.TestPhase(compose)

@@ -241,7 +242,8 @@ def run_compose(compose):
                   buildinstall_phase, productimg_phase, gather_phase,
                   extrafiles_phase, createiso_phase, liveimages_phase,
                   livemedia_phase, image_build_phase, image_checksum_phase,
-                  test_phase, ostree_phase, ostree_installer_phase):
+                  test_phase, ostree_phase, ostree_installer_phase,
+                  osbs_phase):
         if phase.skip():
             continue
         try:

@@ -348,17 +350,20 @@ def run_compose(compose):
     image_build_phase.start()
     livemedia_phase.start()
     ostree_installer_phase.start()
+    osbs_phase.start()

     createiso_phase.stop()
     liveimages_phase.stop()
     image_build_phase.stop()
     livemedia_phase.stop()
     ostree_installer_phase.stop()
+    osbs_phase.stop()

     image_checksum_phase.start()
     image_checksum_phase.stop()

     pungi.metadata.write_compose_info(compose)
+    osbs_phase.dump_metadata()

     # TEST phase
     test_phase.start()
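A condensed sketch of the lifecycle these hunks wire up (other phases and error handling omitted; ``dump_metadata()`` is a no-op when the phase was skipped)::

    osbs_phase = pungi.phases.OSBSPhase(compose)   # reads the 'osbs' config block

    osbs_phase.start()            # queues one Koji container-build task per matching variant configuration
    osbs_phase.stop()             # blocks until all queued tasks finish
    osbs_phase.dump_metadata()    # writes compose/metadata/osbs.json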
@@ -148,6 +148,7 @@ Options
     * live-media
     * ostree
     * ostree-installer
+    * osbs

 .. note::

@@ -327,11 +328,13 @@ Example
     signing_key_password_file = '~/password_for_fedora-24_key'


+.. _git-urls:
+
 Git URLs
 ========

-In multiple places the config requires URL of a Git repository to download
-kickstart file from. This URL is passed on to *Koji*. It is possible to specify which
+In multiple places the config requires URL of a Git repository to download some
+file from. This URL is passed on to *Koji*. It is possible to specify which
 commit to use using this syntax: ::

     git://git.example.com/git/repo-name.git?#<rev_spec>
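The OSBS phase added below resolves such URLs before handing them to Koji, so a symbolic ``rev_spec`` such as ``HEAD`` is pinned to a concrete commit. A minimal sketch (the resolved value shown in the comment is illustrative)::

    from pungi import util

    url = util.resolve_git_url('git://git.example.com/git/repo-name.git?#HEAD')
    # -> 'git://git.example.com/git/repo-name.git?#<full commit hash>'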
@@ -1138,6 +1141,54 @@ Example config
     ]


+OSBS Settings
+=============
+
+*Pungi* can build docker images in OSBS. The build is initiated through the
+Koji ``container-build`` plugin. The base image will use RPMs from the current
+compose and a ``Dockerfile`` from the specified Git repository.
+
+Please note that the image is uploaded to a Docker v2 registry and not exported
+into the compose directory. There will be a metadata file in
+``compose/metadata/osbs.json`` with details about the built images (assuming
+they are not scratch builds).
+
+**osbs**
+    (*dict*) -- a mapping from variant regexes to configuration blocks. The
+    format should be ``{variant_uid_regex: [config_dict]}``.
+
+    The configuration for each image must have at least these keys:
+
+    * ``url`` -- (*str*) URL pointing to a Git repository with ``Dockerfile``.
+      Please see the :ref:`git-urls` section for more details.
+    * ``target`` -- (*str*) A Koji target to build the image for.
+
+    The configuration will pass other attributes directly to the Koji task.
+    This includes ``name``, ``version``, ``release``, ``scratch`` and
+    ``priority``.
+
+    If ``release`` is set explicitly to ``None``, the value will be retrieved
+    from Koji. If this feature is used, a ``name`` key must be set as well.
+
+    A value for ``yum_repourls`` will be created automatically and point at a
+    repository in the current compose.
+
+
+Example config
+--------------
+::
+
+    osbs = {
+        "^Server$": {
+            "url": "git://example.com/dockerfiles.git?#HEAD",
+            "name": "fedora-docker-base",
+            "target": "f24-docker-candidate",
+            "version": "24",
+            "release": None,
+        }
+    }
+
+
 Media Checksums Settings
 ========================

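For reference, the ``osbs.json`` file described above maps variant UID to architecture to a list of image records. A sketch of its shape, using the field names the phase records and illustrative values matching the test data added in this commit::

    {
        "Server": {
            "x86_64": [
                {
                    "name": "my-name",
                    "version": "1.0",
                    "release": "1",
                    "creation_time": "2016-06-17 18:25:57.611172",
                    "filename": "docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz",
                    "size": 174038795,
                    "checksum": "a2922842dc80873ac782da048c54f6cc",
                    "docker": {
                        "id": "408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7",
                        "parent_id": "6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e",
                        "repositories": ["registry.example.com:8888/rcm/buildroot:1.0-1"]
                    },
                    "image": {"arch": "x86_64"}
                }
            ]
        }
    }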
@@ -31,3 +31,4 @@ from image_checksum import ImageChecksumPhase  # noqa
 from livemedia_phase import LiveMediaPhase  # noqa
 from ostree import OSTreePhase  # noqa
 from ostree_installer import OstreeInstallerPhase  # noqa
+from osbs import OSBSPhase  # noqa
new file: pungi/phases/osbs.py (146 lines)
@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-

import json
import os
from kobo.threads import ThreadPool, WorkerThread

from .base import ConfigGuardedPhase
from .. import util
from ..wrappers import kojiwrapper
from ..paths import translate_path


class OSBSPhase(ConfigGuardedPhase):
    name = 'osbs'

    config_options = [
        {
            "name": "osbs",
            "expected_types": [dict],
            "optional": True,
        }
    ]

    def __init__(self, compose):
        super(OSBSPhase, self).__init__(compose)
        self.pool = ThreadPool(logger=self.compose._logger)
        self.pool.metadata = {}

    def run(self):
        for variant in self.compose.get_variants():
            for conf in util.get_variant_data(self.compose.conf, self.name, variant):
                self.pool.add(OSBSThread(self.pool))
                self.pool.queue_put((self.compose, variant, conf))

        self.pool.start()

    def dump_metadata(self):
        """Create a file with image metadata if the phase actually ran."""
        if self._skipped:
            return
        with open(self.compose.paths.compose.metadata('osbs.json'), 'w') as f:
            json.dump(self.pool.metadata, f, indent=4, sort_keys=True,
                      separators=(',', ': '))


class OSBSThread(WorkerThread):
    def process(self, item, num):
        compose, variant, config = item
        self.num = num
        with util.failable(compose, variant, '*', 'osbs'):
            self.worker(compose, variant, config)

    def worker(self, compose, variant, config):
        msg = 'OSBS phase for variant %s' % variant.uid
        self.pool.log_info('[BEGIN] %s' % msg)
        koji = kojiwrapper.KojiWrapper(compose.conf['koji_profile'])
        koji.login()

        # Start task
        try:
            source = util.resolve_git_url(config.pop('url'))
            target = config.pop('target')

            # Set release dynamically
            if 'release' in config and config['release'] is None:
                config['release'] = self._get_release(koji, target, config['name'])
        except KeyError as exc:
            raise RuntimeError('OSBS: missing config key %s for %s'
                               % (exc, variant.uid))
        priority = config.pop('priority', None)

        config['yum_repourls'] = [self._get_repo(compose, variant)]

        task_id = koji.koji_proxy.buildContainer(source, target, config,
                                                 priority=priority)

        # Wait for it to finish and capture the output into log file (even
        # though there is not much there).
        log_dir = os.path.join(compose.paths.log.topdir(), 'osbs')
        util.makedirs(log_dir)
        log_file = os.path.join(log_dir, '%s-%s-watch-task.log'
                                % (variant.uid, self.num))
        if koji.watch_task(task_id, log_file) != 0:
            raise RuntimeError('OSBS: task %s failed: see %s for details'
                               % (task_id, log_file))

        # Only real builds get the metadata.
        if not config.get('scratch', False):
            self._add_metadata(koji.koji_proxy, variant, task_id)

        self.pool.log_info('[DONE ] %s' % msg)

    def _add_metadata(self, koji_proxy, variant, task_id):
        # Create metadata
        result = koji_proxy.getTaskResult(task_id)
        build_id = result['koji_builds'][0]
        buildinfo = koji_proxy.getBuild(build_id)
        archives = koji_proxy.listArchives(build_id)

        metadata = {
            'name': buildinfo['name'],
            'version': buildinfo['version'],
            'release': buildinfo['release'],
            'creation_time': buildinfo['creation_time'],
        }
        for archive in archives:
            data = {
                'filename': archive['filename'],
                'size': archive['size'],
                'checksum': archive['checksum'],
            }
            data.update(archive['extra'])
            data.update(metadata)
            arch = archive['extra']['image']['arch']
            self.pool.metadata.setdefault(
                variant.uid, {}).setdefault(arch, []).append(data)

    def _get_repo(self, compose, variant):
        """
        Write a .repo file pointing to current variant and return URL to the
        file.
        """
        os_tree = compose.paths.compose.os_tree('$basearch', variant,
                                                create_dir=False)
        repo_file = os.path.join(compose.paths.work.tmp_dir(None, variant),
                                 'compose-rpms-%s.repo' % self.num)

        with open(repo_file, 'w') as f:
            f.write('[%s]\n' % compose.compose_id)
            f.write('name=Compose %s (RPMs)\n' % compose.compose_id)
            f.write('baseurl=%s\n' % translate_path(compose, os_tree))
            f.write('enabled=1\n')
            f.write('gpgcheck=0\n')

        return translate_path(compose, repo_file)

    def _get_release(self, koji, target, name):
        """
        Get next release value based on last build. If no build has been done
        yet (in given target), use 1 as initial value.
        """
        latest_builds = koji.koji_proxy.getLatestBuilds(target, package=name)
        try:
            return koji.koji_proxy.getNextRelease(latest_builds[0])
        except IndexError:
            return 1
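To make ``_get_repo`` above concrete: for a variant ``Server`` and thread number 1 it writes a ``compose-rpms-1.repo`` file roughly like the following (the compose ID and the translated base URL are illustrative)::

    [Fedora-24-20160622.0]
    name=Compose Fedora-24-20160622.0 (RPMs)
    baseurl=http://example.com/compose/Server/$basearch/os
    enabled=1
    gpgcheck=0

The translated URL of this file is what ends up in the ``yum_repourls`` option passed to ``buildContainer``.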
@@ -36,7 +36,29 @@ class KojiWrapper(object):
         self.executable = self.profile.replace("_", "-")
         with self.lock:
             self.koji_module = koji.get_profile_module(profile)
-            self.koji_proxy = koji.ClientSession(self.koji_module.config.server)
+            session_opts = {}
+            for key in ('krbservice', 'timeout', 'keepalive',
+                        'max_retries', 'retry_interval', 'anon_retry',
+                        'offline_retry', 'offline_retry_interval',
+                        'debug', 'debug_xmlrpc',
+                        'use_fast_upload'):
+                value = getattr(self.koji_module.config, key, None)
+                if value is not None:
+                    session_opts[key] = value
+            self.koji_proxy = koji.ClientSession(self.koji_module.config.server, session_opts)
+
+    def login(self):
+        """Authenticate to the hub."""
+        auth_type = self.koji_module.config.authtype
+        if auth_type == 'ssl' or (os.path.isfile(os.path.expanduser(self.koji_module.config.cert))
+                                  and auth_type is None):
+            self.koji_proxy.ssl_login(os.path.expanduser(self.koji_module.config.cert),
+                                      os.path.expanduser(self.koji_module.config.ca),
+                                      os.path.expanduser(self.koji_module.config.serverca))
+        elif auth_type == 'kerberos':
+            self.koji_proxy.krb_login()
+        else:
+            raise RuntimeError('Unsupported authentication type in Koji')

     def get_runroot_cmd(self, target, arch, command, quiet=False, use_shell=True, channel=None, packages=None, mounts=None, weight=None, task_id=True):
         cmd = [self.executable, "runroot"]

@@ -287,6 +309,10 @@ class KojiWrapper(object):
             "task_id": task_id,
         }

+    def watch_task(self, task_id, log_file=None, max_retries=None):
+        retcode, _ = self._wait_for_task(task_id, logfile=log_file, max_retries=max_retries)
+        return retcode
+
     def get_image_paths(self, task_id):
         """
         Given an image task in Koji, get a mapping from arches to a list of
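A minimal sketch of how the OSBS thread uses this extended wrapper (the profile name, target, URLs and log path below are placeholders; see ``pungi/phases/osbs.py`` above for the real call sites)::

    from pungi.wrappers import kojiwrapper

    koji = kojiwrapper.KojiWrapper('koji')   # profile name comes from compose.conf['koji_profile']
    koji.login()                             # SSL or Kerberos, depending on the profile's authtype

    source = 'git://example.com/dockerfiles.git?#<commit>'   # already resolved by resolve_git_url
    opts = {'yum_repourls': ['http://example.com/compose-rpms-1.repo']}
    task_id = koji.koji_proxy.buildContainer(source, 'f24-docker-candidate', opts, priority=None)

    if koji.watch_task(task_id, '/tmp/osbs-watch-task.log') != 0:
        raise RuntimeError('container build failed')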
new file: tests/test_osbs_phase.py (379 lines, executable)
@@ -0,0 +1,379 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

try:
    import unittest2 as unittest
except ImportError:
    import unittest
import mock
import json

import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from tests import helpers
from pungi.phases import osbs


class OSBSPhaseTest(helpers.PungiTestCase):

    def test_validate(self):
        compose = helpers.DummyCompose(self.topdir, {
            'osbs': {"^Server$": {}}
        })

        phase = osbs.OSBSPhase(compose)
        try:
            phase.validate()
        except:
            self.fail('Correct config must validate')

    def test_validate_bad_conf(self):
        compose = helpers.DummyCompose(self.topdir, {
            'osbs': 'yes please'
        })

        phase = osbs.OSBSPhase(compose)
        with self.assertRaises(ValueError):
            phase.validate()

    @mock.patch('pungi.phases.osbs.ThreadPool')
    def test_run(self, ThreadPool):
        cfg = mock.Mock()
        compose = helpers.DummyCompose(self.topdir, {
            'osbs': {'^Everything$': cfg}
        })

        pool = ThreadPool.return_value

        phase = osbs.OSBSPhase(compose)
        phase.run()

        self.assertEqual(len(pool.add.call_args_list), 1)
        self.assertEqual(pool.queue_put.call_args_list,
                         [mock.call((compose, compose.variants['Everything'], cfg))])

    @mock.patch('pungi.phases.osbs.ThreadPool')
    def test_skip_without_config(self, ThreadPool):
        compose = helpers.DummyCompose(self.topdir, {})
        compose.just_phases = None
        compose.skip_phases = []
        phase = osbs.OSBSPhase(compose)
        self.assertTrue(phase.skip())

    @mock.patch('pungi.phases.osbs.ThreadPool')
    def test_dump_metadata(self, ThreadPool):
        compose = helpers.DummyCompose(self.topdir, {
            'osbs': {'^Everything$': {}}
        })
        compose.just_phases = None
        compose.skip_phases = []
        compose.notifier = mock.Mock()
        phase = osbs.OSBSPhase(compose)
        phase.start()
        phase.stop()
        phase.pool.metadata = METADATA
        phase.dump_metadata()

        with open(self.topdir + '/compose/metadata/osbs.json') as f:
            data = json.load(f)
            self.assertEqual(data, METADATA)

    @mock.patch('pungi.phases.osbs.ThreadPool')
    def test_dump_metadata_after_skip(self, ThreadPool):
        compose = helpers.DummyCompose(self.topdir, {})
        compose.just_phases = None
        compose.skip_phases = []
        phase = osbs.OSBSPhase(compose)
        phase.start()
        phase.stop()
        phase.dump_metadata()

        self.assertFalse(os.path.isfile(self.topdir + '/compose/metadata/osbs.json'))


TASK_RESULT = {
    'koji_builds': ['54321'],
    'repositories': [
        'registry.example.com:8888/rcm/buildroot:f24-docker-candidate-20160617141632',
    ]
}

BUILD_INFO = {
    'completion_time': '2016-06-17 18:25:30',
    'completion_ts': 1466187930.0,
    'creation_event_id': 13227702,
    'creation_time': '2016-06-17 18:25:57.611172',
    'creation_ts': 1466187957.61117,
    'epoch': None,
    'extra': {'container_koji_task_id': '12345', 'image': {}},
    'id': 54321,
    'name': 'my-name',
    'nvr': 'my-name-1.0-1',
    'owner_id': 3436,
    'owner_name': 'osbs',
    'package_id': 50072,
    'package_name': 'my-name',
    'release': '1',
    'source': 'git://example.com/repo?#BEEFCAFE',
    'start_time': '2016-06-17 18:16:37',
    'start_ts': 1466187397.0,
    'state': 1,
    'task_id': None,
    'version': '1.0',
    'volume_id': 0,
    'volume_name': 'DEFAULT'
}

ARCHIVES = [
    {'build_id': 54321,
     'buildroot_id': 2955357,
     'checksum': 'a2922842dc80873ac782da048c54f6cc',
     'checksum_type': 0,
     'extra': {
         'docker': {
             'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7',
             'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e',
             'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']},
         'image': {'arch': 'x86_64'}},
     'filename': 'docker-image-408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7.x86_64.tar.gz',
     'id': 1436049,
     'metadata_only': False,
     'size': 174038795,
     'type_description': 'Tar file',
     'type_extensions': 'tar tar.gz tar.bz2 tar.xz',
     'type_id': 4,
     'type_name': 'tar'}
]

METADATA = {
    'Server': {'x86_64': [{
        'name': 'my-name',
        'version': '1.0',
        'release': '1',
        'creation_time': BUILD_INFO['creation_time'],
        'filename': ARCHIVES[0]['filename'],
        'size': ARCHIVES[0]['size'],
        'docker': {
            'id': '408c4cd37a87a807bec65dd13b049a32fe090d2fa1a8e891f65e3e3e683996d7',
            'parent_id': '6c3a84d798dc449313787502060b6d5b4694d7527d64a7c99ba199e3b2df834e',
            'repositories': ['registry.example.com:8888/rcm/buildroot:1.0-1']},
        'image': {'arch': 'x86_64'},
        'checksum': ARCHIVES[0]['checksum'],
    }]}
}


class OSBSThreadTest(helpers.PungiTestCase):

    def setUp(self):
        super(OSBSThreadTest, self).setUp()
        self.pool = mock.Mock(metadata={})
        self.t = osbs.OSBSThread(self.pool)
        self.compose = helpers.DummyCompose(self.topdir, {
            'koji_profile': 'koji',
            'translate_paths': [
                (self.topdir, 'http://root'),
            ]
        })

    def _setupMock(self, KojiWrapper, resolve_git_url):
        resolve_git_url.return_value = 'git://example.com/repo?#BEEFCAFE'
        self.wrapper = KojiWrapper.return_value
        self.wrapper.koji_proxy.buildContainer.return_value = 12345
        self.wrapper.koji_proxy.getTaskResult.return_value = TASK_RESULT
        self.wrapper.koji_proxy.getBuild.return_value = BUILD_INFO
        self.wrapper.koji_proxy.listArchives.return_value = ARCHIVES
        self.wrapper.koji_proxy.getLatestBuilds.return_value = [mock.Mock(), mock.Mock()]
        self.wrapper.koji_proxy.getNextRelease.return_value = 3
        self.wrapper.watch_task.return_value = 0

    def _assertCorrectMetadata(self):
        self.maxDiff = None
        self.assertEqual(self.pool.metadata, METADATA)

    def _assertCorrectCalls(self, opts, setupCalls=None):
        setupCalls = setupCalls or []
        options = {'yum_repourls': ['http://root/work/global/tmp-Server/compose-rpms-1.repo']}
        options.update(opts)
        self.assertEqual(
            self.wrapper.mock_calls,
            [mock.call.login()] + setupCalls + [
                mock.call.koji_proxy.buildContainer(
                    'git://example.com/repo?#BEEFCAFE',
                    'f24-docker-candidate',
                    options,
                    priority=None),
                mock.call.watch_task(
                    12345, self.topdir + '/logs/global/osbs/Server-1-watch-task.log'),
                mock.call.koji_proxy.getTaskResult(12345),
                mock.call.koji_proxy.getBuild('54321'),
                mock.call.koji_proxy.listArchives('54321')])

    def _assertRepoFile(self):
        with open(self.topdir + '/work/global/tmp-Server/compose-rpms-1.repo') as f:
            lines = f.read().split('\n')
            self.assertIn('baseurl=http://root/compose/Server/$baseurl/os', lines)

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_minimal_run(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'f24-docker-candidate',
        }
        self._setupMock(KojiWrapper, resolve_git_url)

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self._assertCorrectCalls({})
        self._assertCorrectMetadata()

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_run_with_more_args(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'f24-docker-candidate',
            'name': 'my-name',
            'version': '1.0',
        }
        self._setupMock(KojiWrapper, resolve_git_url)

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self._assertCorrectCalls({'name': 'my-name', 'version': '1.0'})
        self._assertCorrectMetadata()

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_run_with_missing_url(self, KojiWrapper, resolve_git_url):
        cfg = {
            'target': 'f24-docker-candidate',
            'name': 'my-name',
        }
        self._setupMock(KojiWrapper, resolve_git_url)

        with self.assertRaises(RuntimeError) as ctx:
            self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self.assertIn("missing config key 'url' for Server", str(ctx.exception))

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_run_with_missing_target(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'name': 'my-name',
        }
        self._setupMock(KojiWrapper, resolve_git_url)

        with self.assertRaises(RuntimeError) as ctx:
            self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self.assertIn("missing config key 'target' for Server", str(ctx.exception))

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_set_release_dynamically(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'f24-docker-candidate',
            'release': None,
            'name': 'fedora-server-docker',
        }
        self._setupMock(KojiWrapper, resolve_git_url)
        last_build = mock.Mock()
        self.wrapper.koji_proxy.getLatestBuilds.return_value = [last_build, mock.Mock()]
        self.wrapper.koji_proxy.getNextRelease.return_value = 3

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self._assertCorrectCalls(
            {'release': 3, 'name': 'fedora-server-docker'},
            [mock.call.koji_proxy.getLatestBuilds(
                'f24-docker-candidate', package='fedora-server-docker'),
             mock.call.koji_proxy.getNextRelease(last_build)])

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_set_release_dynamically_no_previous_build(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'f24-docker-candidate',
            'release': None,
            'name': 'fedora-server-docker',
        }
        self._setupMock(KojiWrapper, resolve_git_url)
        self.wrapper.koji_proxy.getLatestBuilds.return_value = []

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self._assertCorrectCalls(
            {'release': 1, 'name': 'fedora-server-docker'},
            [mock.call.koji_proxy.getLatestBuilds(
                'f24-docker-candidate', package='fedora-server-docker')])

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_set_release_dynamically_missing_name(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'fedora-24-docker-candidate',
            'release': None,
        }
        self._setupMock(KojiWrapper, resolve_git_url)
        self.wrapper.koji_proxy.getLatestBuilds.return_value = []

        with self.assertRaises(RuntimeError) as ctx:
            self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self.assertIn("missing config key 'name' for Server", str(ctx.exception))

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_failing_task(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'fedora-24-docker-candidate',
        }
        self._setupMock(KojiWrapper, resolve_git_url)
        self.wrapper.watch_task.return_value = 1

        with self.assertRaises(RuntimeError) as ctx:
            self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self.assertRegexpMatches(str(ctx.exception), r"task 12345 failed: see .+ for details")

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_failing_task_with_failable(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'fedora-24-docker-candidate',
        }
        self._setupMock(KojiWrapper, resolve_git_url)
        self.wrapper.watch_task.return_value = 1
        self.compose.conf['failable_deliverables'] = [('.*', {'*': ['osbs']})]

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

    @mock.patch('pungi.util.resolve_git_url')
    @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')
    def test_scratch_has_no_metadata(self, KojiWrapper, resolve_git_url):
        cfg = {
            'url': 'git://example.com/repo?#HEAD',
            'target': 'fedora-24-docker-candidate',
            'scratch': True,
        }
        self._setupMock(KojiWrapper, resolve_git_url)

        self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

        self.assertEqual(self.pool.metadata, {})


if __name__ == '__main__':
    unittest.main()