Merge #444 Add script to generate unified ISOs

Dennis Gilmore 2017-01-04 14:42:23 +00:00
commit 345432ac90
60 changed files with 1745 additions and 34 deletions

@@ -8,6 +8,7 @@ include share/*
 include share/multilib/*
 include doc/*
 include tests/*
+exclude tests/*.pyc
 include tests/data/*
 include tests/data/*/*
-recursive-include tests/fixtures *.json *.xml *.bz2 *.gz
+recursive-include tests/fixtures *.json *.xml *.bz2 *.gz *.iso *.log MD5SUM SHA1SUM SHA256SUM treeinfo

bin/pungi-create-unified-isos (new executable file, 42 lines)

@@ -0,0 +1,42 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This script creates unified ISOs for a specified compose.
Unified ISOs are created per architecture and contain all variant packages and
repos.
"""

import argparse
import os
import sys

here = sys.path[0]
if here != '/usr/bin':
    # Git checkout
    sys.path[0] = os.path.dirname(here)

from pungi_utils.unified_isos import UnifiedISO


def parse_args():
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument(
        'compose',
        metavar='<compose-path>',
        nargs=1,
        help='path to compose',
    )
    return parser.parse_args()


def main():
    args = parse_args()
    iso = UnifiedISO(args.compose[0])
    iso.create(delete_temp=True)


if __name__ == '__main__':
    main()
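
The wrapper above only parses the compose path and hands it to the UnifiedISO class added in pungi_utils/unified_isos.py below, so the same thing can be done directly from Python. A minimal sketch, not part of the commit; the compose path is a hypothetical example:

    # Minimal sketch: drive UnifiedISO without the CLI wrapper.
    from pungi_utils.unified_isos import UnifiedISO

    iso = UnifiedISO('/mnt/compose/DP-1.0-20161013.t.4')   # hypothetical path
    iso.create(delete_temp=True)                           # same call main() makes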

@@ -44,6 +44,16 @@ BuildArch: noarch
 %description
 A tool to create anaconda based installation trees/isos of a set of rpms.
 
+%package utils
+Summary: Utilities for working with finished composes
+Requires: pungi = %{version}-%{release}
+
+%description utils
+These utilities work with finished composes produced by Pungi. They can be used
+for creating unified ISO images, validating config file or sending progress
+notification to Fedora Message Bus.
+
 %prep
 %setup -q
@@ -62,9 +72,18 @@ rm -rf %{buildroot}
 %doc AUTHORS
 %{python_sitelib}/%{name}
 %{python_sitelib}/%{name}-%{version}-py?.?.egg-info
-%{_bindir}/*
-%{_datadir}/pungi
-/var/cache/pungi
+%{_bindir}/%{name}
+%{_bindir}/%{name}-koji
+%{_bindir}/comps_filter
+%{_bindir}/%{name}-make-ostree
+%{_datadir}/%{name}
+/var/cache/%{name}
 
+%files utils
+%{python_sitelib}/%{name}_utils
+%{_bindir}/%{name}-create-unified-isos
+%{_bindir}/%{name}-config-validate
+%{_bindir}/%{name}-fedmsg-notification
+
 %check
 ./tests/data/specs/build.sh

@@ -68,33 +68,39 @@ class ImageChecksumPhase(PhaseBase):
         return base_checksum_name
 
     def run(self):
-        for (variant, arch, path), images in self._get_images().iteritems():
-            checksums = {}
+        topdir = self.compose.paths.compose.topdir()
+        for (variant, arch, path), images in get_images(topdir, self.compose.im).iteritems():
             base_checksum_name = self._get_base_filename(variant, arch)
+            make_checksums(variant, arch, path, images,
+                           self.checksums, base_checksum_name, self.one_file)
+
+
+def make_checksums(variant, arch, path, images, checksum_types, base_checksum_name, one_file):
+    checksums = {}
     for image in images:
         filename = os.path.basename(image.path)
         full_path = os.path.join(path, filename)
         if not os.path.exists(full_path):
             continue
-        digests = shortcuts.compute_file_checksums(full_path, self.checksums)
+        digests = shortcuts.compute_file_checksums(full_path, checksum_types)
         for checksum, digest in digests.iteritems():
             checksums.setdefault(checksum, {})[filename] = digest
             image.add_checksum(None, checksum, digest)
-            if not self.one_file:
+            if not one_file:
                 dump_checksums(path, checksum,
                                {filename: digest},
                                '%s.%sSUM' % (filename, checksum.upper()))
     if not checksums:
-        continue
-    if self.one_file:
-        dump_checksums(path, self.checksums[0],
-                       checksums[self.checksums[0]],
+        return
+    if one_file:
+        dump_checksums(path, checksum_types[0],
+                       checksums[checksum_types[0]],
                        base_checksum_name + 'CHECKSUM')
     else:
-        for checksum in self.checksums:
+        for checksum in checksums:
             dump_checksums(path, checksum,
                            checksums[checksum],
                            '%s%sSUM' % (base_checksum_name, checksum.upper()))
@@ -108,6 +114,22 @@ def dump_checksums(dir, alg, checksums, filename):
     :param checksums: mapping from filenames to checksums
     :param filename: what to call the file
     """
-    with open(os.path.join(dir, filename), 'w') as f:
+    checksum_file = os.path.join(dir, filename)
+    with open(checksum_file, 'w') as f:
         for file, checksum in checksums.iteritems():
             f.write('%s (%s) = %s\n' % (alg.upper(), file, checksum))
+    return checksum_file
+
+
+def get_images(top_dir, manifest):
+    """Returns a mapping from directories to sets of ``Image``s.
+
+    The paths to dirs are absolute.
+    """
+    images = {}
+    for variant in manifest.images:
+        for arch in manifest.images[variant]:
+            for image in manifest.images[variant][arch]:
+                path = os.path.dirname(os.path.join(top_dir, image.path))
+                images.setdefault((variant, arch, path), []).append(image)
+    return images
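
The refactor above pulls the per-directory checksum logic out of ImageChecksumPhase into module-level helpers so it can be reused outside a compose run, which is exactly what pungi_utils/unified_isos.py below does. A minimal sketch of standalone reuse, not part of the commit; the compose path is a hypothetical example:

    # Minimal sketch of reusing the refactored helpers outside the phase.
    import os
    import productmd.images
    from pungi.phases.image_checksum import get_images, make_checksums

    compose_dir = '/mnt/compose/DP-1.0-20161013.t.4/compose'    # hypothetical path
    manifest = productmd.images.Images()
    manifest.load(os.path.join(compose_dir, 'metadata', 'images.json'))

    for (variant, arch, path), images in get_images(compose_dir, manifest).iteritems():
        # dump_checksums() writes BSD-style "MD5 (file) = digest" lines, so this
        # produces per-image *SUM files plus one aggregate file per checksum type.
        make_checksums(variant, arch, path, images,
                       ['md5', 'sha256'],   # checksum_types
                       '',                  # base_checksum_name (no common prefix)
                       False)               # one_file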

pungi_utils/__init__.py (new empty file)

pungi_utils/unified_isos.py (new file, 380 lines)

@@ -0,0 +1,380 @@
# -*- coding: utf-8 -*-
"""
This script creates unified ISOs for a specified compose.
Unified ISOs are created per architecture and
contain all variant packages and repos.
TODO:
* jigdo
"""
from __future__ import print_function
import copy
import errno
import glob
import json
import os
import shutil
import sys
import tempfile
import productmd
import productmd.compose
import productmd.images
import productmd.treeinfo
from kobo.shortcuts import run, compute_file_checksums
import pungi.linker
import pungi.wrappers.createrepo
from pungi.util import makedirs
from pungi.compose_metadata.discinfo import write_discinfo as create_discinfo
from pungi.wrappers import iso
from pungi.phases.image_checksum import dump_checksums, get_images, make_checksums
def ti_merge(one, two):
assert one.tree.arch == two.tree.arch
for variant in two.variants.get_variants(recursive=False):
if variant.uid in one.variants:
continue
var = productmd.treeinfo.Variant(one)
var.id = variant.id
var.uid = variant.uid
var.name = variant.name
var.type = variant.type
for i in ("debug_packages", "debug_repository", "packages", "repository",
"source_packages", "source_repository"):
setattr(var, i, getattr(variant, i, None))
one.variants.add(var)
class UnifiedISO(object):
def __init__(self, compose_path, output_path=None):
self.compose_path = os.path.abspath(compose_path)
compose_subdir = os.path.join(self.compose_path, "compose")
if os.path.exists(compose_subdir):
self.compose_path = compose_subdir
self.compose = productmd.compose.Compose(compose_path)
self.ci = self.compose.info
self.linker = pungi.linker.Linker()
temp_topdir = os.path.abspath(os.path.join(self.compose_path, "..", "work"))
self.temp_dir = tempfile.mkdtemp(prefix="unified_isos_", dir=temp_topdir)
self.treeinfo = {} # {arch/src: TreeInfo}
self.repos = {} # {arch/src: {variant: new_path}
self.comps = {} # {arch/src: {variant: old_path}
self.productid = {} # {arch/src: {variant: old_path}
self.images = {} # {arch/src: [*.iso, *.iso.{md5,sha1,sha256}sum]}
self.conf = self.read_config()
def create(self, delete_temp=True):
print("Creating unified ISOs for: {0}".format(self.compose_path))
try:
self.link_to_temp()
self.createrepo()
self.discinfo()
self.createiso()
self.link_to_compose()
self.update_checksums()
finally:
if delete_temp:
shutil.rmtree(self.temp_dir)
def _link_tree(self, dir, variant, arch):
blacklist_files = [".treeinfo", ".discinfo", "boot.iso", "media.repo"]
blacklist_dirs = ["repodata"]
for root, dirs, files in os.walk(dir):
for i in blacklist_dirs:
if i in dirs:
dirs.remove(i)
for fn in files:
if fn in blacklist_files:
continue
old_path = os.path.join(root, fn)
if fn.endswith(".rpm"):
new_path = os.path.join(self.temp_dir, "trees", arch, variant.uid, fn)
self.repos.setdefault(arch, {})[variant.uid] = os.path.dirname(new_path)
else:
old_relpath = os.path.relpath(old_path, dir)
new_path = os.path.join(self.temp_dir, "trees", arch, old_relpath)
makedirs(os.path.dirname(new_path))
self.linker.link(old_path, new_path)
def link_to_temp(self):
# copy files to new location; change RPM location to $variant_uid
for variant in self.ci.get_variants(recursive=False):
for arch in variant.arches:
print("Processing: {0}.{1}".format(variant.uid, arch))
tree_dir = os.path.join(self.compose_path, variant.paths.os_tree[arch])
ti = productmd.treeinfo.TreeInfo()
try:
ti.load(os.path.join(tree_dir, ".treeinfo"))
except IOError as exc:
if exc.errno != errno.ENOENT:
raise
print('Tree %s.%s has no .treeinfo, skipping...'
% (variant.uid, arch),
file=sys.stderr)
continue
arch_ti = self.treeinfo.get(arch)
if arch_ti is None:
arch_ti = ti
self.treeinfo[arch] = arch_ti
else:
ti_merge(arch_ti, ti)
if arch_ti.tree.arch != arch:
raise RuntimeError('Treeinfo arch mismatch')
# override paths
arch_ti[variant.uid].repository = variant.uid
arch_ti[variant.uid].packages = variant.uid
comps_path = glob.glob(os.path.join(self.compose_path,
variant.paths.repository[arch],
"repodata", "*comps*.xml"))
if comps_path:
self.comps.setdefault(arch, {})[variant.uid] = comps_path[0]
productid_path = os.path.join(self.compose_path, variant.paths.repository[arch],
"repodata", "productid")
self.productid.setdefault(arch, {})[variant.uid] = productid_path
self._link_tree(tree_dir, variant, arch)
# sources
print("Processing: {0}.{1}".format(variant.uid, "src"))
tree_dir = os.path.join(self.compose_path, variant.paths.source_tree[arch])
ti = productmd.treeinfo.TreeInfo()
ti.load(os.path.join(tree_dir, ".treeinfo"))
arch_ti = self.treeinfo.get("src")
if arch_ti is None:
arch_ti = ti
self.treeinfo["src"] = arch_ti
else:
ti_merge(arch_ti, ti)
if arch_ti.tree.arch != "src":
raise RuntimeError('Treeinfo arch mismatch')
# override paths
arch_ti[variant.uid].repository = variant.uid
arch_ti[variant.uid].packages = variant.uid
# set to None, replace with source_*; requires productmd changes or upstream version
# arch_ti[variant.uid].source_repository = variant.uid
# arch_ti[variant.uid].source_packages = variant.uid
self._link_tree(tree_dir, variant, 'src')
def createrepo(self):
# remove old repomd.xml checksums from treeinfo
for arch, ti in self.treeinfo.iteritems():
print("Removing old repomd.xml checksums from treeinfo: {0}".format(arch))
for i in ti.checksums.checksums.keys():
if "repomd.xml" in i:
del ti.checksums.checksums[i]
# write new per-variant repodata
cr = pungi.wrappers.createrepo.CreaterepoWrapper(createrepo_c=True)
for arch in self.repos:
ti = self.treeinfo[arch]
for variant in self.repos[arch]:
print("Creating repodata: {0}.{1}".format(variant, arch))
tree_dir = os.path.join(self.temp_dir, "trees", arch)
repo_path = self.repos[arch][variant]
comps_path = self.comps.get(arch, {}).get(variant, None)
cmd = cr.get_createrepo_cmd(repo_path, groupfile=comps_path, update=True)
run(cmd, show_cmd=True)
productid_path = self.productid.get(arch, {}).get(variant, None)
if productid_path:
print("Adding productid to repodata: {0}.{1}".format(variant, arch))
repo_dir = os.path.join(self.repos[arch][variant], "repodata")
new_path = os.path.join(repo_dir, os.path.basename(productid_path))
if os.path.exists(productid_path):
shutil.copy2(productid_path, new_path)
cmd = cr.get_modifyrepo_cmd(repo_dir, new_path, compress_type="gz")
run(cmd)
else:
print("WARNING: productid not found in {0}.{1}".format(variant, arch))
print("Inserting new repomd.xml checksum to treeinfo: {0}.{1}".format(variant, arch))
# insert new repomd.xml checksum to treeinfo
repomd_path = os.path.join(repo_path, "repodata", "repomd.xml")
ti.checksums.add(os.path.relpath(repomd_path, tree_dir), 'sha256', root_dir=tree_dir)
# write treeinfo
for arch, ti in self.treeinfo.iteritems():
print("Writing treeinfo: {0}".format(arch))
ti_path = os.path.join(self.temp_dir, "trees", arch, ".treeinfo")
ti.dump(ti_path)
def discinfo(self):
# write discinfo and media repo
for arch, ti in self.treeinfo.iteritems():
di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")
description = "%s %s" % (ti.release.name, ti.release.version)
if ti.release.is_layered:
description += " for %s %s" % (ti.base_product.name, ti.base_product.version)
create_discinfo(di_path, description, arch)
def read_config(self):
try:
conf_dump = glob.glob(os.path.join(self.compose_path,
'../logs/global/config-dump*.global.log'))[0]
except IndexError:
raise RuntimeError('Config dump not found, can not generate checksums...')
with open(conf_dump) as f:
return json.load(f)
def createiso(self):
# create ISOs
im = self.compose.images
for arch, ti in self.treeinfo.items():
source_dir = os.path.join(self.temp_dir, "trees", arch)
# XXX: HARDCODED
disc_type = "dvd"
iso_arch = arch
if arch == "src":
iso_arch = "source"
iso_name = "%s-%s-%s.iso" % (self.ci.compose.id, iso_arch, disc_type)
iso_dir = os.path.join(self.temp_dir, "iso", iso_arch)
iso_path = os.path.join(iso_dir, iso_name)
print("Creating ISO for {0}: {1}".format(arch, iso_name))
makedirs(iso_dir)
volid = "%s %s %s" % (ti.release.short, ti.release.version, arch)
# create ISO
run(iso.get_mkisofs_cmd(iso_path, [source_dir], volid=volid, exclude=["./lost+found"]))
# implant MD5
supported = True
run(iso.get_implantisomd5_cmd(iso_path, supported))
checksums = compute_file_checksums(iso_path, self.conf['media_checksums'])
# write manifest file
run(iso.get_manifest_cmd(iso_path))
img = productmd.images.Image(im)
# temporary path, just a file name; to be replaced with variant specific path
img.path = os.path.basename(iso_path)
img.mtime = int(os.stat(iso_path).st_mtime)
img.size = os.path.getsize(iso_path)
img.arch = arch
# XXX: HARDCODED
img.type = "dvd"
img.format = "iso"
img.disc_number = 1
img.disc_count = 1
img.bootable = False
img.unified = True
self.images.setdefault(arch, set()).add(iso_path)
self.images.setdefault(arch, set()).add(iso_path + ".manifest")
for checksum_type, checksum in checksums.iteritems():
if not self.conf['media_checksum_one_file']:
checksum_path = dump_checksums(iso_dir, checksum_type,
{iso_name: checksum},
'%s.%sSUM' % (iso_name, checksum_type.upper()))
self.images.setdefault(arch, set()).add(checksum_path)
img.add_checksum(self.compose_path, checksum_type=checksum_type, checksum_value=checksum)
img.implant_md5 = iso.get_implanted_md5(iso_path)
try:
img.volume_id = iso.get_volume_id(iso_path)
except RuntimeError:
pass
if arch == "src":
all_arches = [i for i in self.treeinfo if i != "src"]
else:
all_arches = [arch]
for tree_arch in all_arches:
ti = self.treeinfo[tree_arch]
for variant_uid in ti.variants:
variant = ti.variants[variant_uid]
# We don't want to copy the manifest.
img.parent = None
variant_img = copy.deepcopy(img)
variant_img.parent = im
variant_img.subvariant = variant.id
paths_attr = 'isos' if arch != 'src' else 'source_isos'
paths = getattr(self.ci.variants[variant.uid].paths, paths_attr)
variant_img.path = os.path.join(
paths.get(tree_arch, os.path.join(variant.uid, tree_arch, "iso")),
os.path.basename(img.path)
)
im.add(variant.uid, tree_arch, variant_img)
im.dump(os.path.join(self.compose_path, 'metadata', 'images.json'))
def link_to_compose(self):
for variant in self.ci.get_variants(recursive=False):
for arch in variant.arches | set(['src']):
if arch == 'src':
path_type, dir = 'source_isos', 'source'
else:
path_type, dir = 'isos', arch
default_path = os.path.join(variant.uid, dir, "iso")
isos = os.path.join(self.compose_path,
getattr(variant.paths, path_type).get(arch, default_path))
makedirs(isos)
for image in self.images.get(arch, []):
dst = os.path.join(isos, os.path.basename(image))
print("Linking {0} -> {1}".format(image, dst))
self.linker.link(image, dst)
def _get_base_filename(self, variant, arch):
substs = {
'compose_id': self.compose.info.compose.id,
'release_short': self.compose.info.release.short,
'version': self.compose.info.release.version,
'date': self.compose.info.compose.date,
'respin': self.compose.info.compose.respin,
'type': self.compose.info.compose.type,
'type_suffix': self.compose.info.compose.type_suffix,
'label': self.compose.info.compose.label,
'label_major_version': self.compose.info.compose.label_major_version,
'variant': variant,
'arch': arch,
}
base_name = self.conf['media_checksum_base_filename']
if base_name:
base_name = (base_name % substs).format(**substs)
base_name += '-'
return base_name
def update_checksums(self):
for (variant, arch, path), images in get_images(self.compose_path, self.compose.images).iteritems():
base_checksum_name = self._get_base_filename(variant, arch)
make_checksums(variant, arch, path, images,
self.conf['media_checksums'],
base_checksum_name,
self.conf['media_checksum_one_file'])
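
One detail in update_checksums() and _get_base_filename() worth noting: the media_checksum_base_filename option is expanded twice, first with old-style '%' substitution and then with str.format(), so both placeholder styles work. A small illustration with made-up values, not part of the commit; the template string is a hypothetical config value:

    substs = {'release_short': 'DP', 'variant': 'Client', 'arch': 'x86_64'}
    base_name = '%(release_short)s-{variant}-{arch}'      # hypothetical config value
    base_name = (base_name % substs).format(**substs)     # -> 'DP-Client-x86_64'
    base_name += '-'                                       # trailing dash, as in the code
    print(base_name)                                       # DP-Client-x86_64-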

@@ -14,7 +14,7 @@ distutils.command.sdist.sdist.default_format = {"posix": "bztar"}
 
 # recursively scan for python modules to be included
-package_root_dirs = ["pungi"]
+package_root_dirs = ["pungi", "pungi_utils"]
 packages = set()
 for package_root_dir in package_root_dirs:
     for root, dirs, files in os.walk(package_root_dir):
@@ -37,6 +37,7 @@ setup(
         'bin/comps_filter',
         'bin/pungi',
         'bin/pungi-config-validate',
+        'bin/pungi-create-unified-isos',
         'bin/pungi-fedmsg-notification',
         'bin/pungi-koji',
         'bin/pungi-make-ostree',

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Client-i386-dvd1.iso) = 7d1e43fdb5e921a8c5d4841d1ec69f03

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Client-i386-dvd1.iso) = e9f5fd1b38febf0b8cdeafe1a4c9beb94f985c0d

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Client-i386-dvd1.iso) = 1157a2fbc0c959d6bf2ea1223cd4b097659212d0f14e621fe2801ab06c4f8413

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = i386
family = Dummy Product
name = Dummy Product 1.0
platforms = i386
timestamp = 1476343975
variant = Client
variants = Client
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = i386
build_timestamp = 1476343975
platforms = i386
variants = Client
[variant-Client]
id = Client
name = Client
type = variant
uid = Client

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Client-source-dvd1.iso) = c61c9adc0c553dc8c65a1af0fc822952

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Client-source-dvd1.iso) = 9ad372897848ee7d89f483d9db5b6412a6cafa85

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Client-source-dvd1.iso) = 4993325bec4cb83fe83feaf2a66f9dfaa88b95b7a66491a6f26c104e69885586

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = src
family = Dummy Product
name = Dummy Product 1.0
platforms = src
timestamp = 1476343975
variant = Client
variants = Client
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = src
build_timestamp = 1476343975
platforms = src
variants = Client
[variant-Client]
id = Client
name = Client
type = variant
uid = Client

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Client-x86_64-dvd1.iso) = 96f7e1e30fdc6681007f549ff01e4fe9

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Client-x86_64-dvd1.iso) = d4f158227510ad98d4f2f59e0885fccfb2813bee

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Client-x86_64-dvd1.iso) = 4baa0baf4f9e6c5e31c8fb278c0b4e7209ca6c041001d54c3c70f5110f54cbaa

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = x86_64
family = Dummy Product
name = Dummy Product 1.0
platforms = x86_64
timestamp = 1476343975
variant = Client
variants = Client
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = x86_64
build_timestamp = 1476343975
platforms = x86_64
variants = Client
[variant-Client]
id = Client
name = Client
type = variant
uid = Client

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Server-s390x-dvd1.iso) = b236231385152239560a90d96746f44f

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Server-s390x-dvd1.iso) = 6645d6f24c7c3e88709991597a4a80420a10e461

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Server-s390x-dvd1.iso) = 2038d477509691e19940e545169328e9c9a9a2cc16a582d5fc82a89f4482b105

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = s390x
family = Dummy Product
name = Dummy Product 1.0
platforms = s390x
timestamp = 1476343975
variant = Server
variants = Server
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = s390x
build_timestamp = 1476343975
platforms = s390x
variants = Server
[variant-Server]
id = Server
name = Server
type = variant
uid = Server

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Server-source-dvd1.iso) = 319e0e5e81da4726e8826d66405d4eff

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Server-source-dvd1.iso) = f4620a41bce51db1ac99ca09c5ae74b91bea2869

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Server-source-dvd1.iso) = 555c53081e0c8413b761898e436932d9066b3ca6794d7f042afae4388b3700f6

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = src
family = Dummy Product
name = Dummy Product 1.0
platforms = src
timestamp = 1476343975
variant = Server
variants = Server
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = src
build_timestamp = 1476343975
platforms = src
variants = Server
[variant-Server]
id = Server
name = Server
type = variant
uid = Server

@@ -0,0 +1 @@
MD5 (DP-1.0-20161013.t.4-Server-x86_64-dvd1.iso) = d2b474a3e7656781e23b4c1745e31c79

@@ -0,0 +1 @@
SHA1 (DP-1.0-20161013.t.4-Server-x86_64-dvd1.iso) = c5d91e52dec6b7edd9d5eeaf6e1926c97b5d7a3d

@@ -0,0 +1 @@
SHA256 (DP-1.0-20161013.t.4-Server-x86_64-dvd1.iso) = a435be0622d9e61febdad160907967303fbf381350cd5ee4d31916f403f4fb28

@@ -0,0 +1,33 @@
[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = x86_64
family = Dummy Product
name = Dummy Product 1.0
platforms = x86_64
timestamp = 1476343975
variant = Server
variants = Server
version = 1.0
[header]
type = productmd.treeinfo
version = 1.2
[release]
name = Dummy Product
short = DP
version = 1.0
[tree]
arch = x86_64
build_timestamp = 1476343975
platforms = x86_64
variants = Server
[variant-Server]
id = Server
name = Server
type = variant
uid = Server

@@ -0,0 +1,135 @@
{
"header": {
"type": "productmd.composeinfo",
"version": "1.2"
},
"payload": {
"compose": {
"date": "20161013",
"id": "DP-1.0-20161013.t.4",
"respin": 4,
"type": "test"
},
"release": {
"internal": false,
"name": "Dummy Product",
"short": "DP",
"type": "ga",
"version": "1.0"
},
"variants": {
"Client": {
"arches": [
"i386",
"x86_64"
],
"id": "Client",
"name": "Client",
"paths": {
"debug_packages": {
"i386": "Client/i386/debug/tree/Packages",
"x86_64": "Client/x86_64/debug/tree/Packages"
},
"debug_repository": {
"i386": "Client/i386/debug/tree",
"x86_64": "Client/x86_64/debug/tree"
},
"debug_tree": {
"i386": "Client/i386/debug/tree",
"x86_64": "Client/x86_64/debug/tree"
},
"isos": {
"i386": "Client/i386/iso",
"x86_64": "Client/x86_64/iso"
},
"os_tree": {
"i386": "Client/i386/os",
"x86_64": "Client/x86_64/os"
},
"packages": {
"i386": "Client/i386/os/Packages",
"x86_64": "Client/x86_64/os/Packages"
},
"repository": {
"i386": "Client/i386/os",
"x86_64": "Client/x86_64/os"
},
"source_isos": {
"i386": "Client/source/iso",
"x86_64": "Client/source/iso"
},
"source_packages": {
"i386": "Client/source/tree/Packages",
"x86_64": "Client/source/tree/Packages"
},
"source_repository": {
"i386": "Client/source/tree",
"x86_64": "Client/source/tree"
},
"source_tree": {
"i386": "Client/source/tree",
"x86_64": "Client/source/tree"
}
},
"type": "variant",
"uid": "Client"
},
"Server": {
"arches": [
"s390x",
"x86_64"
],
"id": "Server",
"name": "Server",
"paths": {
"debug_packages": {
"s390x": "Server/s390x/debug/tree/Packages",
"x86_64": "Server/x86_64/debug/tree/Packages"
},
"debug_repository": {
"s390x": "Server/s390x/debug/tree",
"x86_64": "Server/x86_64/debug/tree"
},
"debug_tree": {
"s390x": "Server/s390x/debug/tree",
"x86_64": "Server/x86_64/debug/tree"
},
"isos": {
"s390x": "Server/s390x/iso",
"x86_64": "Server/x86_64/iso"
},
"os_tree": {
"s390x": "Server/s390x/os",
"x86_64": "Server/x86_64/os"
},
"packages": {
"s390x": "Server/s390x/os/Packages",
"x86_64": "Server/x86_64/os/Packages"
},
"repository": {
"s390x": "Server/s390x/os",
"x86_64": "Server/x86_64/os"
},
"source_isos": {
"s390x": "Server/source/iso",
"x86_64": "Server/source/iso"
},
"source_packages": {
"s390x": "Server/source/tree/Packages",
"x86_64": "Server/source/tree/Packages"
},
"source_repository": {
"s390x": "Server/source/tree",
"x86_64": "Server/source/tree"
},
"source_tree": {
"s390x": "Server/source/tree",
"x86_64": "Server/source/tree"
}
},
"type": "variant",
"uid": "Server"
}
}
}
}

@@ -0,0 +1,180 @@
{
"header": {
"type": "productmd.images",
"version": "1.2"
},
"payload": {
"compose": {
"date": "20161013",
"id": "DP-1.0-20161013.t.4",
"respin": 4,
"type": "test"
},
"images": {
"Client": {
"i386": [
{
"arch": "i386",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "cd55df8932aa40c992392a12d867622d",
"mtime": 1476343975,
"path": "Client/i386/iso/DP-1.0-20161013.t.4-Client-i386-dvd1.iso",
"size": 505856,
"subvariant": "Client",
"type": "dvd",
"volume_id": "DP-1.0 Client.i386"
},
{
"arch": "src",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "8f14699db503e223ecce265f050bdcb2",
"mtime": 1476343975,
"path": "Client/source/iso/DP-1.0-20161013.t.4-Client-source-dvd1.iso",
"size": 473088,
"subvariant": "Client",
"type": "dvd",
"volume_id": "DP-1.0 Client.src"
}
],
"x86_64": [
{
"arch": "src",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "8f14699db503e223ecce265f050bdcb2",
"mtime": 1476343975,
"path": "Client/source/iso/DP-1.0-20161013.t.4-Client-source-dvd1.iso",
"size": 473088,
"subvariant": "Client",
"type": "dvd",
"volume_id": "DP-1.0 Client.src"
},
{
"arch": "x86_64",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "ae708b67ba4594fd57fc390057a73ee2",
"mtime": 1476343975,
"path": "Client/x86_64/iso/DP-1.0-20161013.t.4-Client-x86_64-dvd1.iso",
"size": 516096,
"subvariant": "Client",
"type": "dvd",
"volume_id": "DP-1.0 Client.x86_64"
}
]
},
"Server": {
"s390x": [
{
"arch": "s390x",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "7ca8b25c099330d3cc7e9fa0e610745a",
"mtime": 1476343975,
"path": "Server/s390x/iso/DP-1.0-20161013.t.4-Server-s390x-dvd1.iso",
"size": 493568,
"subvariant": "Server",
"type": "dvd",
"volume_id": "DP-1.0 Server.s390x"
},
{
"arch": "src",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "b03d9e01da352cbe01ba4906bc9ce3ca",
"mtime": 1476343975,
"path": "Server/source/iso/DP-1.0-20161013.t.4-Server-source-dvd1.iso",
"size": 458752,
"subvariant": "Server",
"type": "dvd",
"volume_id": "DP-1.0 Server.src"
}
],
"x86_64": [
{
"arch": "src",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "b03d9e01da352cbe01ba4906bc9ce3ca",
"mtime": 1476343975,
"path": "Server/source/iso/DP-1.0-20161013.t.4-Server-source-dvd1.iso",
"size": 458752,
"subvariant": "Server",
"type": "dvd",
"volume_id": "DP-1.0 Server.src"
},
{
"arch": "x86_64",
"bootable": false,
"checksums": {
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
"disc_count": 1,
"disc_number": 1,
"format": "iso",
"implant_md5": "4e4c4c67e158b507e1084b7bd925cee1",
"mtime": 1476343975,
"path": "Server/x86_64/iso/DP-1.0-20161013.t.4-Server-x86_64-dvd1.iso",
"size": 493568,
"subvariant": "Server",
"type": "dvd",
"volume_id": "DP-1.0 Server.x86_64"
}
]
}
}
}
}

@@ -0,0 +1,164 @@
{
"additional_packages": [
[
"^Server$",
{
"*": [
"dummy-libtool"
]
}
],
[
"^Client-optional$",
{
"*": [
"dummy-httpd"
]
}
]
],
"base_product_type": "ga",
"bootable": false,
"check_deps": false,
"comps_file": "dummy-comps.xml",
"comps_filter_environments": true,
"create_jigdo": false,
"create_optional_isos": false,
"createiso_skip": [
[
"^Server-Gluster$",
{
"*": true,
"src": true
}
]
],
"createrepo_c": true,
"createrepo_checksum": "sha256",
"createrepo_deltas": false,
"createrepo_use_xz": false,
"disc_types": {},
"extra_files": [],
"failable_deliverables": [],
"filter_packages": [
[
"^.*$",
{
"*": [
"dummy-pacemaker"
]
}
],
[
"^Client$",
{
"*": [
"dummy-httpd"
]
}
],
[
"^Server-optional$",
{
"*": [
"dummy-httpd.i686"
]
}
],
[
"^.*-ResilientStorage$",
{
"*": [
"dummy-glusterfs-resource-agents"
]
}
]
],
"filter_system_release_packages": true,
"gather_fulltree": false,
"gather_lookaside_repos": [],
"gather_method": "deps",
"gather_selfhosting": false,
"gather_source": "comps",
"greedy_method": "build",
"hashed_directories": true,
"image_volid_formats": [
"{release_short}-{version} {variant}.{arch}",
"{release_short}-{version} {arch}"
],
"image_volid_layered_product_formats": [
"{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",
"{release_short}-{version} {base_product_short}-{base_product_version} {arch}"
],
"iso_size": 4700000000,
"link_type": "hardlink-or-copy",
"live_images": [],
"live_images_no_rename": false,
"live_target": "rhel-7.0-candidate",
"lorax_options": [],
"media_checksum_base_filename": "",
"media_checksum_one_file": false,
"media_checksums": [
"md5",
"sha1",
"sha256"
],
"multilib": [
[
"^Server.*$",
{
"*": [
"devel",
"runtime"
]
}
]
],
"multilib_blacklist": {
"*": [
"kernel-devel",
"httpd-devel"
]
},
"multilib_whitelist": {
"*": [
"dummy-glibc"
]
},
"ostree": [],
"ostree_installer": [],
"pkgset_koji_inherit": true,
"pkgset_repos": {
"i386": [
"repo"
],
"s390x": [
"repo"
],
"x86_64": [
"repo"
]
},
"pkgset_source": "repos",
"product_id_allow_missing": false,
"productimg": false,
"release_is_layered": false,
"release_name": "Dummy Product",
"release_short": "DP",
"release_type": "ga",
"release_version": "1.0",
"runroot": false,
"sigkeys": [
null
],
"skip_phases": [],
"split_iso_reserve": 10485760,
"translate_paths": [],
"tree_arches": [],
"tree_variants": [
"Client",
"Server"
],
"variants_file": "dummy-variants.xml",
"volume_id_substitutions": {}
}
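
Of everything in this dumped configuration, the unified ISO tool only consumes a few keys (see read_config() and its callers in pungi_utils/unified_isos.py above). A minimal sketch of reading them from such a dump, not part of the commit; the file path is a hypothetical example:

    import json

    with open('logs/global/config-dump.global.log') as f:   # hypothetical path
        conf = json.load(f)

    print(conf['media_checksums'])               # ['md5', 'sha1', 'sha256'] in this fixture
    print(conf['media_checksum_one_file'])       # False: separate file per checksum type
    print(conf['media_checksum_base_filename'])  # '' means no common prefix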

@@ -117,6 +117,7 @@ def touch(path, content=None):
         pass
     with open(path, 'w') as f:
         f.write(content)
+    return path
 
 
 FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixtures')

@@ -1,7 +1,10 @@
 #!/bin/sh
-export PYTHONPATH=$(pwd)/../:$PYTHONPATH
-export PATH=$(pwd)/../bin:$PATH
+set -e
+
+PYTHONPATH=$(pwd)/../:$PYTHONPATH
+PATH=$(pwd)/../bin:$PATH
+export PYTHONPATH PATH
 
 mkdir -p _composes
@@ -10,3 +13,6 @@ pungi-koji \
     --old-composes=_composes \
     --config=data/dummy-pungi.conf \
     --test "$@"
+
+# Run this to create unified ISOs for the just created compose
+#pungi-create-unified-isos _composes/latest-DP-1/

tests/test_unified_isos.py (new executable file, 544 lines)

@@ -0,0 +1,544 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import mock
import os
import shutil
import sys
from ConfigParser import SafeConfigParser
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from tests.helpers import PungiTestCase, FIXTURE_DIR, touch
from pungi_utils import unified_isos
COMPOSE_ID = 'DP-1.0-20161013.t.4'
class TestUnifiedIsos(PungiTestCase):
def setUp(self):
super(TestUnifiedIsos, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
def test_can_init(self):
compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
isos = unified_isos.UnifiedISO(compose_path)
self.assertEqual(isos.compose_path, compose_path)
self.assertRegexpMatches(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
def test_can_find_compose_subdir(self):
isos = unified_isos.UnifiedISO(os.path.join(self.topdir, COMPOSE_ID))
self.assertEqual(isos.compose_path, os.path.join(self.topdir, COMPOSE_ID, 'compose'))
self.assertRegexpMatches(isos.temp_dir,
'^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))
class TestCreate(PungiTestCase):
def setUp(self):
super(TestCreate, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(compose_path)
def test_create_method(self):
methods = ('link_to_temp', 'createrepo', 'discinfo', 'createiso',
'link_to_compose', 'update_checksums')
for attr in methods:
setattr(self.isos, attr, mock.Mock())
with mock.patch('shutil.rmtree') as rmtree:
self.isos.create()
for attr in methods:
self.assertEqual(len(getattr(self.isos, attr).call_args_list), 1)
self.assertEqual(rmtree.call_args_list,
[mock.call(self.isos.temp_dir)])
def get_comps_mapping(path):
def _comps(variant, arch):
return os.path.join(path, variant, arch, 'os', 'repodata',
'comps-%s.%s.xml' % (variant, arch))
return {
'i386': {'Client': _comps('Client', 'i386')},
's390x': {'Server': _comps('Server', 's390x')},
'x86_64': {'Client': _comps('Client', 'x86_64'),
'Server': _comps('Server', 'x86_64')}
}
def get_productid_mapping(path):
def _productid(variant, arch):
return os.path.join(path, variant, arch, 'os', 'repodata', 'productid')
return {
'i386': {'Client': _productid('Client', 'i386')},
's390x': {'Server': _productid('Server', 's390x')},
'x86_64': {'Client': _productid('Client', 'x86_64'),
'Server': _productid('Server', 'x86_64')}
}
def get_repos_mapping(path):
def _repo(variant, arch):
return os.path.join(path, 'trees', arch, variant)
return {
'i386': {'Client': _repo('Client', 'i386')},
's390x': {'Server': _repo('Server', 's390x')},
'src': {'Client': _repo('Client', 'src'),
'Server': _repo('Server', 'src')},
'x86_64': {'Client': _repo('Client', 'x86_64'),
'Server': _repo('Server', 'x86_64')}
}
class TestLinkToTemp(PungiTestCase):
def setUp(self):
super(TestLinkToTemp, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.isos.linker = mock.Mock()
def _linkCall(self, variant, arch, file):
return mock.call(os.path.join(self.compose_path, variant,
arch if arch != 'src' else 'source',
'tree' if arch == 'src' else 'os',
'Packages', file[0].lower(), file),
os.path.join(self.isos.temp_dir, 'trees', arch, variant, file))
def test_link_to_temp(self):
self.isos.link_to_temp()
self.assertItemsEqual(self.isos.treeinfo.keys(),
['i386', 's390x', 'src', 'x86_64'])
self.assertEqual(self.isos.comps,
get_comps_mapping(self.compose_path))
self.assertEqual(self.isos.productid,
get_productid_mapping(self.compose_path))
self.assertEqual(self.isos.repos,
get_repos_mapping(self.isos.temp_dir))
self.assertItemsEqual(self.isos.linker.link.call_args_list,
[self._linkCall('Server', 's390x', 'dummy-filesystem-4.2.37-6.s390x.rpm'),
self._linkCall('Server', 'x86_64', 'dummy-filesystem-4.2.37-6.x86_64.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Client', 'i386', 'dummy-bash-4.2.37-6.i686.rpm'),
self._linkCall('Client', 'x86_64', 'dummy-bash-4.2.37-6.x86_64.rpm'),
self._linkCall('Client', 'src', 'dummy-bash-4.2.37-6.src.rpm'),
self._linkCall('Client', 'src', 'dummy-bash-4.2.37-6.src.rpm')])
def test_link_to_temp_without_treefile(self):
os.remove(os.path.join(self.compose_path, 'Client', 'i386', 'os', '.treeinfo'))
with mock.patch('sys.stderr'):
self.isos.link_to_temp()
self.assertItemsEqual(self.isos.treeinfo.keys(),
['s390x', 'src', 'x86_64'])
comps = get_comps_mapping(self.compose_path)
comps.pop('i386')
self.assertEqual(self.isos.comps, comps)
productid = get_productid_mapping(self.compose_path)
productid.pop('i386')
self.assertEqual(self.isos.productid, productid)
repos = get_repos_mapping(self.isos.temp_dir)
repos.pop('i386')
self.assertEqual(self.isos.repos, repos)
self.assertItemsEqual(self.isos.linker.link.call_args_list,
[self._linkCall('Server', 's390x', 'dummy-filesystem-4.2.37-6.s390x.rpm'),
self._linkCall('Server', 'x86_64', 'dummy-filesystem-4.2.37-6.x86_64.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Client', 'x86_64', 'dummy-bash-4.2.37-6.x86_64.rpm'),
self._linkCall('Client', 'src', 'dummy-bash-4.2.37-6.src.rpm')])
def test_link_to_temp_extra_file(self):
gpl_file = touch(os.path.join(self.compose_path, 'Server', 'x86_64', 'os', 'GPL'))
self.isos.link_to_temp()
self.assertItemsEqual(self.isos.treeinfo.keys(),
['i386', 's390x', 'src', 'x86_64'])
self.assertEqual(self.isos.comps,
get_comps_mapping(self.compose_path))
self.assertEqual(self.isos.productid,
get_productid_mapping(self.compose_path))
self.assertEqual(self.isos.repos,
get_repos_mapping(self.isos.temp_dir))
self.assertItemsEqual(self.isos.linker.link.call_args_list,
[self._linkCall('Server', 's390x', 'dummy-filesystem-4.2.37-6.s390x.rpm'),
self._linkCall('Server', 'x86_64', 'dummy-filesystem-4.2.37-6.x86_64.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Server', 'src', 'dummy-filesystem-4.2.37-6.src.rpm'),
self._linkCall('Client', 'i386', 'dummy-bash-4.2.37-6.i686.rpm'),
self._linkCall('Client', 'x86_64', 'dummy-bash-4.2.37-6.x86_64.rpm'),
self._linkCall('Client', 'src', 'dummy-bash-4.2.37-6.src.rpm'),
self._linkCall('Client', 'src', 'dummy-bash-4.2.37-6.src.rpm'),
mock.call(os.path.join(gpl_file),
os.path.join(self.isos.temp_dir, 'trees', 'x86_64', 'GPL'))])
class TestCreaterepo(PungiTestCase):
def setUp(self):
super(TestCreaterepo, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.isos.linker = mock.Mock()
# TODO mock treeinfo and use mappings for other data
self.isos.link_to_temp()
self.maxDiff = None
self.comps = get_comps_mapping(self.compose_path)
def mock_cr(self, path, groupfile, update):
self.assertTrue(update)
touch(os.path.join(path, 'repodata', 'repomd.xml'))
return ('/'.join(path.split('/')[-2:]), groupfile)
def mock_mr(self, path, pid, compress_type):
self.assertEqual(compress_type, 'gz')
return ('/'.join(path.split('/')[-3:-1]), pid)
@mock.patch('pungi.wrappers.createrepo.CreaterepoWrapper')
@mock.patch('pungi_utils.unified_isos.run')
def test_createrepo(self, run, cr):
cr.return_value.get_createrepo_cmd.side_effect = self.mock_cr
self.isos.createrepo()
self.assertItemsEqual(
run.call_args_list,
[mock.call(('src/Client', None), show_cmd=True),
mock.call(('src/Server', None), show_cmd=True),
mock.call(('i386/Client', self.comps['i386']['Client']), show_cmd=True),
mock.call(('s390x/Server', self.comps['s390x']['Server']), show_cmd=True),
mock.call(('x86_64/Client', self.comps['x86_64']['Client']), show_cmd=True),
mock.call(('x86_64/Server', self.comps['x86_64']['Server']), show_cmd=True)]
)
checksums = {}
# treeinfo checksums
for arch in self.isos.treeinfo.keys():
parser = SafeConfigParser()
parser.optionxform = str
parser.read(os.path.join(self.isos.temp_dir, 'trees', arch, '.treeinfo'))
checksums[arch] = [k for k, v in parser.items('checksums')]
self.assertEqual(
checksums,
{'i386': ['Client/repodata/repomd.xml'],
's390x': ['Server/repodata/repomd.xml'],
'src': ['Client/repodata/repomd.xml', 'Server/repodata/repomd.xml'],
'x86_64': ['Client/repodata/repomd.xml', 'Server/repodata/repomd.xml']}
)
@mock.patch('pungi.wrappers.createrepo.CreaterepoWrapper')
@mock.patch('pungi_utils.unified_isos.run')
def test_createrepo_with_productid(self, run, cr):
for x in self.isos.productid.values():
for f in x.values():
touch(f)
cr.return_value.get_createrepo_cmd.side_effect = self.mock_cr
cr.return_value.get_modifyrepo_cmd.side_effect = self.mock_mr
self.isos.createrepo()
self.assertItemsEqual(
run.call_args_list,
[mock.call(('src/Client', None), show_cmd=True),
mock.call(('src/Server', None), show_cmd=True),
mock.call(('i386/Client', self.comps['i386']['Client']), show_cmd=True),
mock.call(('s390x/Server', self.comps['s390x']['Server']), show_cmd=True),
mock.call(('x86_64/Client', self.comps['x86_64']['Client']), show_cmd=True),
mock.call(('x86_64/Server', self.comps['x86_64']['Server']), show_cmd=True),
mock.call(('x86_64/Server', os.path.join(self.isos.temp_dir,
'trees/x86_64/Server/repodata/productid'))),
mock.call(('x86_64/Client', os.path.join(self.isos.temp_dir,
'trees/x86_64/Client/repodata/productid'))),
mock.call(('s390x/Server', os.path.join(self.isos.temp_dir,
'trees/s390x/Server/repodata/productid'))),
mock.call(('i386/Client', os.path.join(self.isos.temp_dir,
'trees/i386/Client/repodata/productid')))]
)
checksums = {}
# treeinfo checksums
for arch in self.isos.treeinfo.keys():
parser = SafeConfigParser()
parser.optionxform = str
parser.read(os.path.join(self.isos.temp_dir, 'trees', arch, '.treeinfo'))
checksums[arch] = [k for k, v in parser.items('checksums')]
self.assertEqual(
checksums,
{'i386': ['Client/repodata/repomd.xml'],
's390x': ['Server/repodata/repomd.xml'],
'src': ['Client/repodata/repomd.xml', 'Server/repodata/repomd.xml'],
'x86_64': ['Client/repodata/repomd.xml', 'Server/repodata/repomd.xml']}
)
class TestDiscinfo(PungiTestCase):
def setUp(self):
super(TestDiscinfo, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.isos.linker = mock.Mock()
# TODO mock treeinfo and use mappings for other data
self.isos.link_to_temp()
self.maxDiff = None
@mock.patch('pungi_utils.unified_isos.create_discinfo')
def test_discinfo(self, create_discinfo):
self.isos.discinfo()
self.assertItemsEqual(
create_discinfo.call_args_list,
[mock.call(os.path.join(self.isos.temp_dir, 'trees', 'i386', '.discinfo'),
'Dummy Product 1.0', 'i386'),
mock.call(os.path.join(self.isos.temp_dir, 'trees', 's390x', '.discinfo'),
'Dummy Product 1.0', 's390x'),
mock.call(os.path.join(self.isos.temp_dir, 'trees', 'src', '.discinfo'),
'Dummy Product 1.0', 'src'),
mock.call(os.path.join(self.isos.temp_dir, 'trees', 'x86_64', '.discinfo'),
'Dummy Product 1.0', 'x86_64')]
)
CHECKSUMS = {
'MD5': 'cbc3a5767b22babfe3578a2b82d83fcb',
'SHA1': 'afaf8621bfbc22781edfc81b774a2b2f66fdc8b0',
'SHA256': '84c1c8611b287209e1e76d657e7e69e6192ad72dd2531e0fb7a43b95070fabb1',
}
class TestCreateiso(PungiTestCase):
def setUp(self):
super(TestCreateiso, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.isos.linker = mock.Mock()
# TODO mock treeinfo and use mappings for other data
self.isos.link_to_temp()
self.maxDiff = None
self.mkisofs_cmd = None
def mock_gmc(self, path, *args, **kwargs):
touch(path, 'ISO FILE\n')
self.mkisofs_cmd = self.mkisofs_cmd or mock.Mock(name='mkisofs cmd')
return self.mkisofs_cmd
def _img(self, arch, exts):
exts = ['manifest'] + exts
base_path = os.path.join(self.isos.temp_dir, 'iso', arch,
u'DP-1.0-20161013.t.4-%s-dvd.iso' % arch)
yield base_path
for ext in exts:
yield base_path + '.' + ext
def _imgs(self, arches, exts):
images = {}
exts = [e + 'SUM' for e in exts]
for arch in arches:
images[arch] = set(self._img(arch if arch != 'src' else 'source', exts))
return images
def assertResults(self, iso, run, arches, checksums):
self.assertEqual(
run.mock_calls,
[mock.call(self.mkisofs_cmd),
mock.call(iso.get_implantisomd5_cmd.return_value),
mock.call(iso.get_manifest_cmd.return_value)] * 2
)
self.assertEqual(
self.isos.images,
self._imgs(arches, checksums),
)
with open(os.path.join(self.compose_path, 'metadata', 'images.json')) as f:
manifest = json.load(f)
for v in ('Client', 'Server'):
for a in arches:
for image in manifest['payload']['images'][v]['x86_64']:
if image.get('unified', False) and image['arch'] == a:
arch = 'source' if image['arch'] == 'src' else image['arch']
self.assertEqual(image['path'],
'{0}/{1}/iso/DP-1.0-20161013.t.4-{1}-dvd.iso'.format(v, arch))
checksum_file_base = os.path.join(self.isos.temp_dir, 'iso',
arch, os.path.basename(image['path']))
for ch in checksums:
fp = '%s.%sSUM' % (checksum_file_base, ch)
with open(fp) as f:
self.assertEqual(
f.read(),
'%s (%s) = %s\n' % (ch, os.path.basename(image['path']),
CHECKSUMS[ch])
)
break
else:
self.fail('Image for %s.%s missing' % (v, a))
@mock.patch('pungi_utils.unified_isos.iso')
@mock.patch('pungi_utils.unified_isos.run')
def test_createiso(self, run, iso):
iso.get_mkisofs_cmd.side_effect = self.mock_gmc
iso.get_implanted_md5.return_value = 'beefcafebabedeadbeefcafebabedead'
iso.get_volume_id.return_value = 'VOLID'
self.isos.treeinfo = {'x86_64': self.isos.treeinfo['x86_64'],
'src': self.isos.treeinfo['src']}
self.isos.createiso()
self.assertResults(iso, run, ['src', 'x86_64'], ['MD5', 'SHA1', 'SHA256'])
@mock.patch('pungi_utils.unified_isos.iso')
@mock.patch('pungi_utils.unified_isos.run')
def test_createiso_checksum_one_file(self, run, iso):
iso.get_mkisofs_cmd.side_effect = self.mock_gmc
iso.get_implanted_md5.return_value = 'beefcafebabedeadbeefcafebabedead'
iso.get_volume_id.return_value = 'VOLID'
self.isos.conf['media_checksum_one_file'] = True
self.isos.treeinfo = {'x86_64': self.isos.treeinfo['x86_64'],
'src': self.isos.treeinfo['src']}
self.isos.createiso()
self.assertResults(iso, run, ['src', 'x86_64'], [])
@mock.patch('pungi_utils.unified_isos.iso')
@mock.patch('pungi_utils.unified_isos.run')
def test_createiso_single_checksum(self, run, iso):
iso.get_mkisofs_cmd.side_effect = self.mock_gmc
iso.get_implanted_md5.return_value = 'beefcafebabedeadbeefcafebabedead'
iso.get_volume_id.return_value = 'VOLID'
self.isos.conf['media_checksums'] = ['sha256']
self.isos.treeinfo = {'x86_64': self.isos.treeinfo['x86_64'],
'src': self.isos.treeinfo['src']}
self.isos.createiso()
self.assertResults(iso, run, ['src', 'x86_64'], ['SHA256'])
class TestLinkToCompose(PungiTestCase):
def setUp(self):
super(TestLinkToCompose, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.isos.linker = mock.Mock()
self.binary = os.path.join(self.isos.temp_dir, 'isos', 'x86_64', 'binary.iso')
self.source = os.path.join(self.isos.temp_dir, 'isos', 'src', 'source.iso')
self.isos.images = {
'x86_64': set([self.binary]),
'src': set([self.source]),
}
self.maxDiff = None
def _iso(self, variant, arch, name):
return os.path.join(self.compose_path, variant, arch, 'iso', name)
def test_link_to_compose(self):
self.isos.link_to_compose()
self.assertItemsEqual(
self.isos.linker.link.call_args_list,
[mock.call(self.binary, self._iso('Client', 'x86_64', 'binary.iso')),
mock.call(self.binary, self._iso('Server', 'x86_64', 'binary.iso')),
mock.call(self.source, self._iso('Client', 'source', 'source.iso')),
mock.call(self.source, self._iso('Server', 'source', 'source.iso'))]
)
class MockImage(mock.Mock):
def __eq__(self, another):
return self.path == another.path
class TestUpdateChecksums(PungiTestCase):
def setUp(self):
super(TestUpdateChecksums, self).setUp()
shutil.copytree(os.path.join(FIXTURE_DIR, COMPOSE_ID),
os.path.join(self.topdir, COMPOSE_ID))
self.compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')
self.isos = unified_isos.UnifiedISO(self.compose_path)
self.maxDiff = None
def _isodir(self, variant, arch):
return os.path.join(self.compose_path, variant, arch, 'iso')
def _call(self, variant, arch, source=False, basename='', one_file=False):
archdir = arch if not source else 'source'
isodir = self._isodir(variant, archdir)
filename = 'DP-1.0-20161013.t.4-%s-%s-dvd1.iso' % (variant, archdir)
return mock.call(variant, arch, isodir,
[MockImage(path=os.path.join(variant, archdir, 'iso', filename))],
['md5', 'sha1', 'sha256'], basename, one_file)
@mock.patch('pungi_utils.unified_isos.make_checksums')
def test_update_checksums(self, mmc):
self.isos.update_checksums()
self.assertItemsEqual(
mmc.call_args_list,
[self._call('Client', 'i386'),
self._call('Client', 'x86_64'),
self._call('Server', 's390x'),
self._call('Server', 'x86_64'),
self._call('Client', 'i386', source=True),
self._call('Client', 'x86_64', source=True),
self._call('Server', 's390x', source=True),
self._call('Server', 'x86_64', source=True)]
)
@mock.patch('pungi_utils.unified_isos.make_checksums')
def test_update_checksums_one_file(self, mmc):
self.isos.conf['media_checksum_one_file'] = True
self.isos.update_checksums()
self.assertItemsEqual(
mmc.call_args_list,
[self._call('Client', 'i386', one_file=True),
self._call('Client', 'x86_64', one_file=True),
self._call('Server', 's390x', one_file=True),
self._call('Server', 'x86_64', one_file=True),
self._call('Client', 'i386', source=True, one_file=True),
self._call('Client', 'x86_64', source=True, one_file=True),
self._call('Server', 's390x', source=True, one_file=True),
self._call('Server', 'x86_64', source=True, one_file=True)]
)
@mock.patch('pungi_utils.unified_isos.make_checksums')
def test_update_checksums_basename(self, mmc):
self.isos.conf['media_checksum_base_filename'] = '{variant}-{arch}'
self.isos.update_checksums()
self.assertItemsEqual(
mmc.call_args_list,
[self._call('Client', 'i386', basename='Client-i386-'),
self._call('Client', 'x86_64', basename='Client-x86_64-'),
self._call('Server', 's390x', basename='Server-s390x-'),
self._call('Server', 'x86_64', basename='Server-x86_64-'),
self._call('Client', 'i386', source=True, basename='Client-i386-'),
self._call('Client', 'x86_64', source=True, basename='Client-x86_64-'),
self._call('Server', 's390x', source=True, basename='Server-s390x-'),
self._call('Server', 'x86_64', source=True, basename='Server-x86_64-')]
)