Add JSON Schema for configuration

The schema is written in Python to reduce duplication. When
configuration is loaded, the validation checks if it's correct and fills
in default values.

There is a custom extension to the schema to report deprecated options.

The config dependencies are implemented as a separate pass. While it's
technically possible to express the dependencies in the schema itself,
the error messages are not very helpful and it makes the schema much
harder to read.

Phases no longer define `config_options`. New options should be added to
the schema. Since the default values are populated automatically during
validation, there is no need to duplicate them into the code.

The `pungi-config-validate` script is updated to use the schema and
report errors even for deeply nested fields.

The dependencies are updated: pungi now depends on `python-jsonschema`
(which is already available in Fedora).

Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
This commit is contained in:
Lubomír Sedlář 2016-08-22 16:08:25 +02:00
parent 5534fda192
commit f9a6c8418f
53 changed files with 1423 additions and 903 deletions

View File

@ -4,12 +4,12 @@
from __future__ import print_function
import argparse
import contextlib
import kobo.conf
import os
import shutil
import sys
import tempfile
import contextlib
import shutil
here = sys.path[0]
if here != '/usr/bin':
@ -17,8 +17,9 @@ if here != '/usr/bin':
sys.path[0] = os.path.dirname(here)
import pungi.compose
import pungi.phases
import pungi.checks
import pungi.paths
import pungi.phases
class ValidationCompose(pungi.compose.Compose):
@ -63,6 +64,12 @@ def run(config, topdir, has_old):
conf = kobo.conf.PyConfigParser()
conf.load_from_file(config)
errors = pungi.checks.validate(conf)
if errors:
for error in errors:
print(error)
sys.exit(1)
compose = ValidationCompose(conf, has_old, topdir)
pkgset_phase = pungi.phases.PkgsetPhase(compose)

View File

@ -173,6 +173,11 @@ def main():
if not pungi.checks.check(conf):
sys.exit(1)
pungi.checks.check_umask(logger)
errors = pungi.checks.validate(conf)
if errors:
for error in errors:
print >>sys.stderr, error
sys.exit(1)
if opts.target_dir:
compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)

View File

@ -22,6 +22,7 @@ will have to be installed:
* koji
* libselinux-python
* lorax
* python-jsonschema
* python-kickstart
* python-lockfile
* python-lxml

View File

@ -13,6 +13,7 @@ BuildRequires: python-lockfile, kobo, kobo-rpmlib, python-kickstart, createrepo
BuildRequires: python-lxml, libselinux-python, yum-utils, lorax
BuildRequires: yum => 3.4.3-28, createrepo >= 0.4.11
BuildRequires: gettext, git-core, cvs
BuildRequires: python-jsonschema
Requires: createrepo >= 0.4.11
Requires: yum => 3.4.3-28
@ -35,6 +36,7 @@ Requires: genisoimage
Requires: gettext
Requires: syslinux
Requires: git
Requires: python-jsonschema
BuildArch: noarch

View File

@ -10,13 +10,36 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module exports a couple of functions for checking configuration and
environment.
Validation of the configuration is the most complicated part here: here is the
outline of the process:
1. The configuration is checked against JSON schema. The errors encountered are
reported as string. The validator also populates default values.
2. The requirements/conflicts among options are resolved separately. This is
because expressing those relationships in JSON Schema is very verbose and
the error message is not very descriptive.
3. Extra validation can happen in ``validate()`` method of any phase.
When a new config option is added, the schema must be updated (see the
``_make_schema`` function). The dependencies should be encoded into
``CONFIG_DEPS`` mapping.
"""
import os.path
import platform
import jsonschema
from . import util
def _will_productimg_run(conf):
@ -80,6 +103,7 @@ imports = [
def check(conf):
"""Check runtime environment and report errors about missing dependencies."""
fail = False
# Check python modules
@ -112,62 +136,788 @@ def check_umask(logger):
'expect files with broken permissions.', mask)
def validate_options(conf, valid_options):
def _validate_requires(schema, conf, valid_options):
"""
Check if all requires and conflicts are ok in configuration.
:param conf: Python dict with configuration to check
:param valid_options: mapping with option dependencies
:param with_default: a set of options that have default value
:returns: list of errors
"""
errors = []
for i in valid_options:
name = i["name"]
def has_default(x):
return schema['properties'].get(x, {}).get('default') == conf[x]
for name, opt in valid_options.iteritems():
value = conf.get(name)
if i.get("deprecated", False):
if name in conf:
errors.append("Deprecated config option: %s; %s" % (name, i["comment"]))
continue
if name not in conf:
if not i.get("optional", False):
errors.append("Config option not set: %s" % name)
continue
# verify type
if "expected_types" in i:
etypes = i["expected_types"]
if not isinstance(etypes, list) and not isinstance(etypes, tuple):
raise TypeError("The 'expected_types' value must be wrapped in a list: %s" % i)
found = False
for etype in etypes:
if isinstance(value, etype):
found = True
break
if not found:
errors.append("Config option '%s' has invalid type: %s. Expected: %s." % (name, str(type(value)), etypes))
continue
# verify value
if "expected_values" in i:
evalues = i["expected_values"]
if not isinstance(evalues, list) and not isinstance(evalues, tuple):
raise TypeError("The 'expected_values' value must be wrapped in a list: %s" % i)
found = False
for evalue in evalues:
if value == evalue:
found = True
break
if not found:
errors.append("Config option '%s' has invalid value: %s. Expected: %s." % (name, value, evalues))
continue
if "requires" in i:
for func, requires in i["requires"]:
if func(value):
for req in requires:
if req not in conf:
errors.append("Config option %s=%s requires %s which is not set" % (name, value, req))
if "conflicts" in i:
for func, conflicts in i["conflicts"]:
if func(value):
for con in conflicts:
if con in conf:
errors.append("Config option %s=%s conflicts with option %s" % (name, value, con))
errors.extend(_check_dep(name, value, opt.get('conflicts', []),
lambda x: x in conf and not has_default(x), CONFLICTS))
errors.extend(_check_dep(name, value, opt.get('requires', []),
lambda x: x not in conf, REQUIRES))
return errors
def _check_dep(name, value, lst, matcher, fmt):
for deps in [deps for (func, deps) in lst if func(value)]:
for dep in [d for d in deps if matcher(d)]:
yield fmt.format(name, value, dep)
def validate(config):
    """Test the configuration against schema.

    Undefined values for which a default value exists will be filled in.

    :param config: mapping with configuration to check; defaults are
        inserted into it in place by the extended validator
    :returns: list of human-readable error messages (empty when valid)
    """
    schema = _make_schema()
    # Validator subclass that fills in defaults and reports deprecated options.
    DefaultValidator = _extend_with_default(jsonschema.Draft4Validator)
    # Accept tuples wherever the schema says "array" (configs often use tuples).
    validator = DefaultValidator(schema, {'array': (tuple, list)})
    errors = []
    for error in validator.iter_errors(config):
        if isinstance(error, ConfigDeprecation):
            errors.append(DEPRECATED.format('.'.join(error.path), error.message))
        else:
            if not error.path and error.validator == 'additionalProperties':
                # Unknown top-level option: report each unexpected key and try
                # to suggest the closest known option name.
                allowed_keys = set(error.schema['properties'].keys())
                used_keys = set(error.instance.keys())
                for key in used_keys - allowed_keys:
                    suggestion = _get_suggestion(key, allowed_keys)
                    if suggestion:
                        errors.append(UNKNOWN_SUGGEST.format(key, suggestion))
                    else:
                        errors.append(UNKNOWN.format(key))
            else:
                errors.append('Failed validation in %s: %s' % (
                    '.'.join([str(x) for x in error.path]), error.message))
    # Requires/conflicts between options are resolved in a separate pass.
    return errors + _validate_requires(schema, config, CONFIG_DEPS)
def _get_suggestion(desired, names):
    """Find the value in ``names`` that is the closest match for ``desired``.

    Only candidates whose edit distance is strictly less than half the
    length of ``desired`` are considered; returns ``None`` when nothing
    is close enough.
    """
    threshold = len(desired) // 2
    best = None
    best_distance = len(desired) + 1
    for candidate in names:
        distance = util.levenshtein(desired, candidate)
        if distance < best_distance and distance < threshold:
            best = candidate
            best_distance = distance
    return best
# Error message templates used by validation.  Placeholders: {0} = option
# name, {1} = option value, {2} = related option name (CONFLICTS/REQUIRES)
# or suggestion/explanatory text (UNKNOWN_SUGGEST/DEPRECATED).
CONFLICTS = 'Config option {0}={1} conflicts with option {2}.'
REQUIRES = 'Config option {0}={1} requires {2} which is not set.'
DEPRECATED = 'Deprecated config option: {0}; {1}.'
UNKNOWN = 'Unrecognized config option: {0}.'
UNKNOWN_SUGGEST = 'Unrecognized config option: {0}. Did you mean {1}?'
def _extend_with_default(validator_class):
    """Create a validator class derived from ``validator_class``.

    The derived class fills in the schema ``default`` for any missing
    property that declares one, and yields a ``ConfigDeprecation`` error
    for any property validated with the custom ``deprecated`` keyword.
    """
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        # Populate defaults first so the regular properties validation below
        # sees (and checks) the filled-in values.
        for property, subschema in properties.iteritems():
            if "default" in subschema and property not in instance:
                instance.setdefault(property, subschema["default"])
        for error in validate_properties(validator, properties, instance, schema):
            yield error

    def error_on_deprecated(validator, properties, instance, schema):
        # ``properties`` here is the value of the ``deprecated`` keyword,
        # i.e. the name of the replacement option.
        yield ConfigDeprecation(
            'use %s instead' % properties
        )

    return jsonschema.validators.extend(
        validator_class, {"properties": set_defaults,
                          "deprecated": error_on_deprecated},
    )
class ConfigDeprecation(jsonschema.exceptions.ValidationError):
    """Validation error reported when a deprecated config option is used."""
    pass
def _make_schema():
    """Build the JSON Schema describing the pungi configuration.

    New config options should be added here.  Defaults declared in the
    schema are filled into the configuration automatically during
    validation, so they do not need to be duplicated in code.  Reusable
    sub-schemas live under ``definitions`` and are referenced via ``$ref``.
    """
    return {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Pungi Configuration",

        "definitions": {
            # Mapping of arbitrary keys (e.g. arch names) to lists of strings.
            "multilib_list": {
                "type": "object",
                "patternProperties": {
                    "^.+$": {"$ref": "#/definitions/list_of_strings"},
                },
                "additionalProperties": False,
            },

            # List of (variant regex, {arch regex: [packages]}) pairs.
            "package_mapping": {
                "type": "array",
                "items": {
                    "type": "array",
                    "items": [
                        {
                            "type": "string",
                        },
                        {
                            "type": "object",
                            "patternProperties": {
                                ".+": {"$ref": "#/definitions/list_of_strings"},
                            },
                            "additionalProperties": False,
                        }
                    ],
                    "additionalItems": False,
                },
            },

            # Description of where to fetch a file from (SCM checkout).
            "scm_dict": {
                "type": "object",
                "properties": {
                    "scm": {
                        "type": "string",
                        "enum": ["file", "cvs", "git", "rpm"],
                    },
                    "repo": {"type": "string"},
                    "branch": {"type": "string"},
                    "file": {"type": "string"},
                    "dir": {"type": "string"},
                },
                "additionalProperties": False,
            },

            "str_or_scm_dict": {
                "anyOf": [
                    {"type": "string"},
                    {"$ref": "#/definitions/scm_dict"},
                ]
            },

            "list_of_strings": {
                "type": "array",
                "items": {"type": "string"},
            },

            # A single string or a list of strings.
            "strings": {
                "anyOf": [
                    {"type": "string"},
                    {"$ref": "#/definitions/list_of_strings"},
                ]
            },

            "optional_string": {
                "anyOf": [
                    {"type": "string"},
                    {"type": "null"},
                ],
            },

            "live_image_config": {
                "type": "object",
                "properties": {
                    "kickstart": {"type": "string"},
                    "ksurl": {"type": "string"},
                    "name": {"type": "string"},
                    "subvariant": {"type": "string"},
                    "version": {"type": "string"},
                    "additional_repos": {"$ref": "#/definitions/strings"},
                    "repo_from": {"$ref": "#/definitions/strings"},
                    "specfile": {"type": "string"},
                    "scratch": {"type": "boolean"},
                    "type": {"type": "string"},
                    "sign": {"type": "boolean"},
                    "failable": {"type": "boolean"},
                    "release": {"$ref": "#/definitions/optional_string"},
                },
                "required": ["kickstart"],
                "additionalProperties": False,
                # NOTE(review): duplicate key — "type" is already set above
                # with the same value, so this is harmless but redundant.
                "type": "object",
            },

            # List of (string, string) pairs.
            "string_tuples": {
                "type": "array",
                "items": {
                    "type": "array",
                    "items": [
                        {"type": "string"},
                        {"type": "string"},
                    ],
                    "additionalItems": False,
                }
            }
        },

        "type": "object",
        "properties": {
            "release_name": {"type": "string"},
            "release_short": {"type": "string"},
            "release_version": {"type": "string"},
            "release_type": {
                "type": "string",
                "enum": ["fast", "ga", "updates", "eus", "aus", "els"],
                "default": "ga",
            },
            "release_is_layered": {"type": "boolean"},
            "release_discinfo_description": {"type": "string"},

            "base_product_name": {"type": "string"},
            "base_product_short": {"type": "string"},
            "base_product_version": {"type": "string"},
            "base_product_type": {
                "type": "string",
                "default": "ga"
            },

            "runroot": {
                "type": "boolean",
                "default": False,
            },
            "create_jigdo": {
                "type": "boolean",
                "default": True,
            },
            "check_deps": {
                "type": "boolean",
                "default": True
            },
            "bootable": {
                "type": "boolean",
                "default": False
            },

            "gather_method": {
                "type": "string",
                "enum": ["deps", "nodeps"],
            },
            "gather_source": {
                "type": "string",
                "enum": ["json", "comps", "none"],
            },
            "gather_fulltree": {
                "type": "boolean",
                "default": False,
            },
            "gather_selfhosting": {
                "type": "boolean",
                "default": False,
            },
            "gather_prepopulate": {"$ref": "#/definitions/str_or_scm_dict"},
            "gather_source_mapping": {"type": "string"},

            "pkgset_source": {
                "type": "string",
                "enum": ["koji", "repos"],
            },

            "createrepo_c": {
                "type": "boolean",
                "default": True,
            },
            "createrepo_checksum": {
                "type": "string",
                "enum": ["sha", "sha256"],
            },

            "hashed_directories": {
                "type": "boolean",
                "default": False,
            },
            "multilib_whitelist": {
                "$ref": "#/definitions/multilib_list",
                "default": {},
            },
            "multilib_blacklist": {
                "$ref": "#/definitions/multilib_list",
                "default": {},
            },
            "greedy_method": {
                "type": "string",
                "enum": ["none", "all", "build"],
                "default": "none",
            },

            "additional_packages": {
                "$ref": "#/definitions/package_mapping",
                "default": [],
            },
            "filter_packages": {
                "$ref": "#/definitions/package_mapping",
                "default": [],
            },
            "sigkeys": {
                "type": "array",
                "items": {"$ref": "#/definitions/optional_string"},
            },

            "variants_file": {"$ref": "#/definitions/str_or_scm_dict"},
            "comps_file": {"$ref": "#/definitions/str_or_scm_dict"},
            "comps_filter_environments": {
                "type": "boolean",
                "default": True
            },

            "pkgset_repos": {
                "type": "object",
                "patternProperties": {
                    ".+": {"$ref": "#/definitions/strings"},
                },
                "additionalProperties": False,
            },

            "create_optional_isos": {
                "type": "boolean",
                "default": False
            },
            "symlink_isos_to": {"type": "string"},
            "createiso_skip": _variant_arch_mapping({"type": "boolean"}),

            "multilib": _variant_arch_mapping({
                "$ref": "#/definitions/list_of_strings"
            }),

            "runroot_tag": {"type": "string"},
            "runroot_channel": {
                "$ref": "#/definitions/optional_string",
            },
            "createrepo_deltas": {
                "type": "boolean",
                "default": False,
            },

            "buildinstall_method": {
                "type": "string",
                "enum": ["lorax", "buildinstall"],
            },
            "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},

            "global_ksurl": {"type": "string"},
            "global_version": {"type": "string"},
            "global_target": {"type": "string"},
            "global_release": {"$ref": "#/definitions/optional_string"},

            "koji_profile": {"type": "string"},

            "pkgset_koji_tag": {"type": "string"},
            "pkgset_koji_inherit": {
                "type": "boolean",
                "default": True
            },

            "disc_types": {
                "type": "object",
                "default": {},
            },

            "paths_module": {"type": "string"},
            "skip_phases": {
                "$ref": "#/definitions/list_of_strings",
                "default": [],
            },

            "image_name_format": {"type": "string"},
            "image_volid_formats": {
                "$ref": "#/definitions/list_of_strings",
                "default": [
                    "{release_short}-{version} {variant}.{arch}",
                    "{release_short}-{version} {arch}",
                ],
            },
            "image_volid_layered_product_formats": {
                "$ref": "#/definitions/list_of_strings",
                "default": [
                    "{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",
                    "{release_short}-{version} {base_product_short}-{base_product_version} {arch}",
                ],
            },
            "volume_id_substitutions": {
                "type": "object",
                "default": {},
            },

            "live_images_no_rename": {
                "type": "boolean",
                "default": False,
            },
            "live_images_ksurl": {"type": "string"},
            "live_images_release": {"$ref": "#/definitions/optional_string"},
            "live_images_version": {"type": "string"},

            "image_build_ksurl": {"type": "string"},
            "image_build_target": {"type": "string"},
            "image_build_release": {"$ref": "#/definitions/optional_string"},
            "image_build_version": {"type": "string"},

            "live_media_ksurl": {"type": "string"},
            "live_media_target": {"type": "string"},
            "live_media_release": {"$ref": "#/definitions/optional_string"},
            "live_media_version": {"type": "string"},

            "media_checksums": {
                "$ref": "#/definitions/list_of_strings",
                "default": ['md5', 'sha1', 'sha256']
            },
            "media_checksum_one_file": {
                "type": "boolean",
                "default": False
            },
            "media_checksum_base_filename": {
                "type": "string",
                "default": ""
            },

            "filter_system_release_packages": {
                "type": "boolean",
                "default": True,
            },
            "keep_original_comps": {
                "$ref": "#/definitions/list_of_strings",
                "default": []
            },

            "link_type": {
                "type": "string",
                "enum": ["hardlink", "copy", "hardlink-or-copy", "symlink", "abspath-symlink"],
                "default": "hardlink-or-copy"
            },

            "product_id": {"$ref": "#/definitions/str_or_scm_dict"},
            "product_id_allow_missing": {
                "type": "boolean",
                "default": False
            },

            "live_target": {
                "type": "string",
                "default": "rhel-7.0-candidate",
            },

            "tree_arches": {
                "$ref": "#/definitions/list_of_strings",
                "default": []
            },
            "tree_variants": {
                "$ref": "#/definitions/list_of_strings",
                "default": []
            },

            "translate_paths": {
                "$ref": "#/definitions/string_tuples",
                "default": [],
            },

            "failable_deliverables": _variant_arch_mapping({
                "$ref": "#/definitions/list_of_strings"
            }),

            "live_media": {
                "type": "object",
                "patternProperties": {
                    ".+": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "install_tree_from": {"type": "string"},
                                "kickstart": {"type": "string"},
                                "ksversion": {"type": "string"},
                                "ksurl": {"type": "string"},
                                "version": {"type": "string"},
                                "scratch": {"type": "boolean"},
                                "skip_tag": {"type": "boolean"},
                                "name": {"type": "string"},
                                "subvariant": {"type": "string"},
                                "title": {"type": "string"},
                                "repo": {"$ref": "#/definitions/strings"},
                                "repo_from": {"$ref": "#/definitions/strings"},
                                "target": {"type": "string"},
                                "arches": {"$ref": "#/definitions/list_of_strings"},
                                "failable": {"$ref": "#/definitions/list_of_strings"},
                                "release": {"$ref": "#/definitions/optional_string"},
                            },
                            "required": ["name", "kickstart"],
                            "additionalProperties": False,
                        },
                    }
                },
                "additionalProperties": False,
            },

            "ostree": _variant_arch_mapping({
                "type": "object",
                "properties": {
                    "treefile": {"type": "string"},
                    "config_url": {"type": "string"},
                    "source_repo_from": {"type": "string"},
                    "ostree_repo": {"type": "string"},
                    "failable": {"$ref": "#/definitions/list_of_strings"},
                    "config_branch": {"type": "string"},
                },
                "required": ["treefile", "config_url", "source_repo_from", "ostree_repo"],
                "additionalProperties": False,
            }),

            "ostree_installer": _variant_arch_mapping({
                "type": "object",
                "properties": {
                    "source_repo_from": {"type": "string"},
                    "release": {"$ref": "#/definitions/optional_string"},
                    "failable": {"$ref": "#/definitions/list_of_strings"},
                    "installpkgs": {"$ref": "#/definitions/list_of_strings"},
                    "add_template": {"$ref": "#/definitions/list_of_strings"},
                    "add_arch_template": {"$ref": "#/definitions/list_of_strings"},
                    "add_template_var": {"$ref": "#/definitions/list_of_strings"},
                    "add_arch_template_var": {"$ref": "#/definitions/list_of_strings"},
                    "template_repo": {"type": "string"},
                    "template_branch": {"type": "string"},
                },
                "required": ["source_repo_from"],
                "additionalProperties": False,
            }),

            "live_images": _variant_arch_mapping({
                "anyOf": [
                    {"$ref": "#/definitions/live_image_config"},
                    {
                        "type": "array",
                        "items": {
                            "$ref": "#/definitions/live_image_config"
                        }
                    }
                ]
            }),

            "image_build": {
                "type": "object",
                "patternProperties": {
                    ".+": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "image-build": {
                                    "type": "object",
                                    "properties": {
                                        "failable": {"$ref": "#/definitions/list_of_strings"},
                                        "disc_size": {"type": "number"},
                                        "distro": {"type": "string"},
                                        "name": {"type": "string"},
                                        "kickstart": {"type": "string"},
                                        "arches": {"$ref": "#/definitions/list_of_strings"},
                                        "repo_from": {"$ref": "#/definitions/strings"},
                                        "install_tree_from": {"type": "string"},
                                        "subvariant": {"type": "string"},
                                        "format": {"$ref": "#/definitions/string_tuples"},
                                    },
                                },
                                "factory-parameters": {
                                    "type": "object",
                                },
                            },
                            "required": ["image-build"],
                            "additionalProperties": False,
                        }
                    }
                },
                "additionalProperties": False,
            },

            "lorax_options": _variant_arch_mapping({
                "type": "object",
                "properties": {
                    "bugurl": {"type": "string"},
                    "nomacboot": {"type": "boolean"},
                    "noupgrade": {"type": "boolean"},
                },
                "additionalProperties": False,
            }),

            "signing_key_id": {"type": "string"},
            "signing_key_password_file": {"type": "string"},
            "signing_command": {"type": "string"},
            "productimg": {
                "type": "boolean",
                "default": False
            },
            "productimg_install_class": {"$ref": "#/definitions/str_or_scm_dict"},
            "productimg_po_files": {"$ref": "#/definitions/str_or_scm_dict"},

            "iso_size": {
                "anyOf": [
                    {"type": "string"},
                    {"type": "number"},
                ],
                "default": 4700000000,
            },
            "split_iso_reserve": {
                "anyOf": [
                    {"type": "string"},
                    {"type": "number"},
                ],
                "default": 10 * 1024 * 1024
            },

            "osbs": {
                "type": "object",
                "patternProperties": {
                    ".+": {
                        "type": "object",
                        "properties": {
                            "url": {"type": "string"},
                            "target": {"type": "string"},
                            "name": {"type": "string"},
                            "version": {"type": "string"},
                            "scratch": {"type": "boolean"},
                            "priority": {"type": "number"},
                        },
                        "required": ["url", "target"]
                    }
                },
                "additionalProperties": False,
            },

            "extra_files": _variant_arch_mapping({
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "scm": {"type": "string"},
                        "repo": {"type": "string"},
                        "branch": {"$ref": "#/definitions/optional_string"},
                        "file": {"$ref": "#/definitions/strings"},
                        "dir": {"type": "string"},
                        "target": {"type": "string"},
                    },
                    "additionalProperties": False,
                }
            }),

            "gather_lookaside_repos": _variant_arch_mapping({
                "$ref": "#/definitions/strings",
            }),

            # Deprecated options
            "multilib_arches": {
                "deprecated": "multilib"
            },
            "multilib_methods": {
                "deprecated": "multilib"
            },
            "additional_packages_multiarch": {
                "deprecated": "multilib_whitelist"
            },
            "filter_packages_multiarch": {
                "deprecated": "multilib_blacklist"
            },
            "buildinstall_upgrade_image": {
                "deprecated": "lorax_options"
            },
            "pkgset_koji_path_prefix": {
                "deprecated": "koji_profile",
            },
            "pkgset_koji_url": {
                "deprecated": "koji_profile",
            },
        },

        "required": ["release_name", "release_short", "release_version",
                     "release_is_layered",
                     "variants_file", "sigkeys", "createrepo_checksum",
                     "runroot", "pkgset_source",
                     "gather_source", "gather_method"],
        "additionalProperties": False,
    }
def _variant_arch_mapping(value):
return {
"type": "array",
"items": {
"type": "array",
"items": [
{"type": "string"},
{
"type": "object",
"patternProperties": {".+": value},
"additionalProperties": False
}
],
"additionalItems": False,
"minItems": 2,
},
"default": []
}
# This is a mapping of configuration option dependencies and conflicts.
#
# The key in this mapping is the trigger for the check. When the option is
# encountered and its value satisfies the lambda, an error is reported for each
# missing (for requires) option in the list.
CONFIG_DEPS = {
    # The mapping/comps file must be given exactly for the matching source.
    "gather_source": {
        "conflicts": [
            (lambda val: val != 'json', ['gather_source_mapping']),
            (lambda val: val != 'comps', ['comps_file']),
        ],
        "requires": [
            (lambda val: val == 'json', ['gather_source_mapping']),
            (lambda val: val == 'comps', ['comps_file']),
        ]
    },
    "productimg": {
        "requires": (
            (lambda x: bool(x), ["productimg_install_class"]),
            (lambda x: bool(x), ["productimg_po_files"]),
        ),
    },
    # A bootable compose needs to know how to build the installer images.
    "bootable": {
        "requires": (
            (lambda x: x, ["buildinstall_method"]),
        ),
        "conflicts": (
            (lambda x: not x, ["buildinstall_method"]),
        ),
    },
    "buildinstall_method": {
        "conflicts": (
            (lambda val: val == "buildinstall", ["lorax_options"]),
            (lambda val: not val, ["lorax_options", "buildinstall_kickstart"]),
        ),
    },
    # Layered products must (and only they may) describe the base product.
    "release_is_layered": {
        "requires": (
            (lambda x: x, ["base_product_name", "base_product_short",
                           "base_product_version", "base_product_type"]),
        ),
        "conflicts": (
            (lambda x: not x, ["base_product_name", "base_product_short",
                               "base_product_version", "base_product_type"]),
        ),
    },
    "runroot": {
        "requires": (
            (lambda x: x, ["koji_profile", "runroot_tag", "runroot_channel"]),
        ),
        "conflicts": (
            (lambda x: not x, ["runroot_tag", "runroot_channel"]),
        ),
    },
    "product_id": {
        "conflicts": [
            (lambda x: not x, ['product_id_allow_missing']),
        ],
    },
    # Exactly one package set source may be configured at a time.
    "pkgset_source": {
        "requires": [
            (lambda x: x == "koji", ["pkgset_koji_tag"]),
            (lambda x: x == "repos", ["pkgset_repos"]),
        ],
        "conflicts": [
            (lambda x: x == "koji", ["pkgset_repos"]),
            (lambda x: x == "repos", ["pkgset_koji_tag", "pkgset_koji_inherit"]),
        ],
    },
}

View File

@ -35,7 +35,7 @@ def get_description(compose, variant, arch):
result = "%s %s for %s %s" % (variant.release_name, variant.release_version, compose.conf["release_name"], get_major_version(compose.conf["release_version"]))
else:
result = "%s %s" % (compose.conf["release_name"], compose.conf["release_version"])
if compose.conf.get("release_is_layered", False):
if compose.conf["release_is_layered"]:
result += " for %s %s" % (compose.conf["base_product_name"], compose.conf["base_product_version"])
result = result % {"variant_name": variant.name, "arch": arch}
@ -77,20 +77,20 @@ def compose_to_composeinfo(compose):
ci.release.name = compose.conf["release_name"]
ci.release.version = compose.conf["release_version"]
ci.release.short = compose.conf["release_short"]
ci.release.is_layered = compose.conf.get("release_is_layered", False)
ci.release.type = compose.conf.get("release_type", "ga").lower()
ci.release.is_layered = compose.conf["release_is_layered"]
ci.release.type = compose.conf["release_type"].lower()
# base product
if ci.release.is_layered:
ci.base_product.name = compose.conf["base_product_name"]
ci.base_product.version = compose.conf["base_product_version"]
ci.base_product.short = compose.conf["base_product_short"]
ci.base_product.type = compose.conf.get("base_product_type", "ga").lower()
ci.base_product.type = compose.conf["base_product_type"].lower()
def dump_variant(variant, parent=None):
var = productmd.composeinfo.Variant(ci)
tree_arches = compose.conf.get("tree_arches", None)
tree_arches = compose.conf.get("tree_arches")
if tree_arches and not (set(variant.arches) & set(tree_arches)):
return None
@ -195,7 +195,7 @@ def write_tree_info(compose, arch, variant, timestamp=None):
ti.release.version = variant.release_version
ti.release.short = variant.release_short
ti.release.is_layered = True
ti.release.type = compose.conf.get("release_type", "ga").lower()
ti.release.type = compose.conf["release_type"].lower()
# base product
ti.base_product.name = compose.conf["release_name"]
@ -210,8 +210,8 @@ def write_tree_info(compose, arch, variant, timestamp=None):
ti.release.name = compose.conf["release_name"]
ti.release.version = compose.conf["release_version"]
ti.release.short = compose.conf["release_short"]
ti.release.is_layered = compose.conf.get("release_is_layered", False)
ti.release.type = compose.conf.get("release_type", "ga").lower()
ti.release.is_layered = compose.conf["release_is_layered"]
ti.release.type = compose.conf["release_type"].lower()
# base product
if ti.release.is_layered:

View File

@ -32,7 +32,7 @@ def translate_path(compose, path):
@param path
"""
normpath = os.path.normpath(path)
mapping = compose.conf.get("translate_paths", [])
mapping = compose.conf["translate_paths"]
for prefix, newvalue in mapping:
prefix = os.path.normpath(prefix)
@ -47,7 +47,7 @@ def translate_path(compose, path):
class Paths(object):
def __init__(self, compose):
paths_module_name = compose.conf.get("paths_module", None)
paths_module_name = compose.conf.get("paths_module")
if paths_module_name:
# custom paths
compose.log_info("Using custom paths from module %s" % paths_module_name)

View File

@ -14,12 +14,10 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from pungi.checks import validate_options
from pungi import util
class PhaseBase(object):
config_options = ()
def __init__(self, compose):
self.compose = compose
@ -28,9 +26,7 @@ class PhaseBase(object):
self._skipped = False
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
pass
def conf_assert_str(self, name):
missing = []
@ -48,7 +44,7 @@ class PhaseBase(object):
return True
if self.name in self.compose.skip_phases:
return True
if self.name in self.compose.conf.get("skip_phases", []):
if self.name in self.compose.conf["skip_phases"]:
return True
return False

View File

@ -40,46 +40,6 @@ from pungi.phases.base import PhaseBase
class BuildinstallPhase(PhaseBase):
name = "buildinstall"
config_options = (
{
"name": "bootable",
"expected_types": [bool],
"expected_values": [True],
},
{
"name": "buildinstall_method",
"extected_types": [str],
"expected_values": ["lorax", "buildinstall"],
"requires": (
(lambda x: bool(x) is True, ["bootable"]),
),
"conflicts": (
(lambda val: val == "buildinstall", ["lorax_options"]),
),
},
{
"name": "buildinstall_upgrade_image",
"expected_types": [bool],
"optional": True,
"deprecated": True,
"comment": "use lorax_options instead",
},
{
"name": "lorax_options",
"optional": True,
},
{
"name": "buildinstall_kickstart",
"expected_types": [str],
"optional": True,
},
{
"name": "buildinstall_symlink",
"expected_types": [bool],
"optional": True,
},
)
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -128,7 +88,7 @@ class BuildinstallPhase(PhaseBase):
version = self.compose.conf["release_version"]
release = self.compose.conf["release_version"]
buildinstall_method = self.compose.conf["buildinstall_method"]
disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')
disc_type = self.compose.conf['disc_types'].get('dvd', 'dvd')
for arch in self.compose.get_arches():
commands = []
@ -170,7 +130,7 @@ class BuildinstallPhase(PhaseBase):
def copy_files(self):
buildinstall_method = self.compose.conf["buildinstall_method"]
disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')
disc_type = self.compose.conf['disc_types'].get('dvd', 'dvd')
# copy buildinstall files to the 'os' dir
kickstart_file = get_kickstart_file(self.compose)
@ -200,7 +160,7 @@ class BuildinstallPhase(PhaseBase):
def get_kickstart_file(compose):
scm_dict = compose.conf.get("buildinstall_kickstart", None)
scm_dict = compose.conf.get("buildinstall_kickstart")
if not scm_dict:
compose.log_debug("Path to ks.cfg (buildinstall_kickstart) not specified.")
return
@ -323,9 +283,9 @@ def link_boot_iso(compose, arch, variant, can_fail):
if arch == "src":
return
disc_type = compose.conf.get('disc_types', {}).get('boot', 'boot')
disc_type = compose.conf['disc_types'].get('boot', 'boot')
symlink_isos_to = compose.conf.get("symlink_isos_to", None)
symlink_isos_to = compose.conf.get("symlink_isos_to")
os_tree = compose.paths.compose.os_tree(arch, variant)
# TODO: find in treeinfo?
boot_iso_path = os.path.join(os_tree, "images", "boot.iso")
@ -392,7 +352,7 @@ class BuildinstallThread(WorkerThread):
self.worker(compose, arch, variant, cmd, num)
def worker(self, compose, arch, variant, cmd, num):
runroot = compose.conf.get("runroot", False)
runroot = compose.conf["runroot"]
buildinstall_method = compose.conf["buildinstall_method"]
log_filename = ('buildinstall-%s' % variant.uid) if variant else 'buildinstall'
log_file = compose.paths.log.log_file(arch, log_filename)
@ -419,7 +379,7 @@ class BuildinstallThread(WorkerThread):
packages += ["lorax"]
elif buildinstall_method == "buildinstall":
packages += ["anaconda"]
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_channel = compose.conf.get("runroot_channel")
runroot_tag = compose.conf["runroot_tag"]
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])

View File

@ -41,14 +41,6 @@ from .. import createiso
class CreateisoPhase(PhaseBase):
name = "createiso"
config_options = (
{
"name": "createiso_skip",
"expected_types": [list],
"optional": True,
},
)
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -66,11 +58,11 @@ class CreateisoPhase(PhaseBase):
return False
if variant.type != "variant":
return False
return self.compose.conf.get("bootable", False)
return self.compose.conf["bootable"]
def run(self):
symlink_isos_to = self.compose.conf.get("symlink_isos_to", None)
disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')
symlink_isos_to = self.compose.conf.get("symlink_isos_to")
disc_type = self.compose.conf['disc_types'].get('dvd', 'dvd')
deliverables = []
commands = []
@ -139,7 +131,7 @@ class CreateisoPhase(PhaseBase):
if bootable:
opts = opts._replace(buildinstall_method=self.compose.conf['buildinstall_method'])
if self.compose.conf.get('create_jigdo', True):
if self.compose.conf['create_jigdo']:
jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
@ -191,7 +183,7 @@ class CreateIsoThread(WorkerThread):
if "mount" in cmd:
mounts.append(cmd["mount"])
runroot = compose.conf.get("runroot", False)
runroot = compose.conf["runroot"]
bootable = cmd['bootable']
log_file = compose.paths.log.log_file(
arch, "createiso-%s" % os.path.basename(cmd["iso_path"]))
@ -203,7 +195,7 @@ class CreateIsoThread(WorkerThread):
if runroot:
# run in a koji build root
packages = ["coreutils", "genisoimage", "isomd5sum"]
if compose.conf.get('create_jigdo', True):
if compose.conf['create_jigdo']:
packages.append('jigdo')
extra_packages = {
'lorax': ['lorax'],
@ -212,7 +204,7 @@ class CreateIsoThread(WorkerThread):
if bootable:
packages.extend(extra_packages[compose.conf["buildinstall_method"]])
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_channel = compose.conf.get("runroot_channel")
runroot_tag = compose.conf["runroot_tag"]
# get info about build arches in buildroot_tag
@ -299,8 +291,8 @@ def split_iso(compose, arch, variant):
All files from the directory are taken except for possible boot.iso image.
Files added in extra_files phase are put on all disks.
"""
media_size = compose.conf.get('iso_size', 4700000000)
media_reserve = compose.conf.get('split_iso_reserve', 10 * 1024 * 1024)
media_size = compose.conf['iso_size']
media_reserve = compose.conf['split_iso_reserve']
ms = MediaSplitter(convert_media_size(media_size) - convert_media_size(media_reserve), compose)
@ -387,7 +379,7 @@ def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_d
del ti.checksums.checksums["repodata/repomd.xml"]
# rebuild repodata
createrepo_c = compose.conf.get("createrepo_c", True)
createrepo_c = compose.conf["createrepo_c"]
createrepo_checksum = compose.conf["createrepo_checksum"]
repo = CreaterepoWrapper(createrepo_c=createrepo_c)

View File

@ -44,34 +44,6 @@ createrepo_dirs = set()
class CreaterepoPhase(PhaseBase):
name = "createrepo"
config_options = (
{
"name": "createrepo_c",
"expected_types": [bool],
"optional": True,
},
{
"name": "createrepo_checksum",
"expected_types": [str],
"expected_values": ["sha256", "sha"],
},
{
"name": "createrepo_deltas",
"expected_types": [bool],
"optional": True,
},
{
"name": "product_id",
"expected_types": [dict],
"optional": True,
},
{
"name": "product_id_allow_missing",
"expected_types": [bool],
"optional": True,
},
)
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -83,7 +55,7 @@ class CreaterepoPhase(PhaseBase):
except ValueError as exc:
errors = exc.message.split('\n')
if not self.compose.old_composes and self.compose.conf.get('createrepo_deltas', False):
if not self.compose.old_composes and self.compose.conf['createrepo_deltas']:
errors.append('Can not generate deltas without old compose')
if errors:
@ -119,9 +91,9 @@ def create_variant_repo(compose, arch, variant, pkg_type):
compose.log_info("[SKIP ] Creating repo (arch: %s, variant: %s): %s" % (arch, variant))
return
createrepo_c = compose.conf.get("createrepo_c", True)
createrepo_c = compose.conf["createrepo_c"]
createrepo_checksum = compose.conf["createrepo_checksum"]
createrepo_deltas = compose.conf.get("createrepo_deltas", False)
createrepo_deltas = compose.conf["createrepo_deltas"]
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
repo_dir_arch = compose.paths.work.arch_repo(arch='global' if pkg_type == 'srpm' else arch)
@ -227,7 +199,7 @@ def get_productids_from_scm(compose):
compose.log_info("No product certificates specified")
return
product_id_allow_missing = compose.conf.get("product_id_allow_missing", False)
product_id_allow_missing = compose.conf["product_id_allow_missing"]
msg = "Getting product certificates from SCM..."
compose.log_info("[BEGIN] %s" % msg)

View File

@ -29,14 +29,6 @@ class ExtraFilesPhase(ConfigGuardedPhase):
"""EXTRA_FILES"""
name = "extra_files"
config_options = (
{
"name": "extra_files",
"expected_types": [list],
"optional": True
},
)
def __init__(self, compose, pkgset_phase):
super(ExtraFilesPhase, self).__init__(compose)
# pkgset_phase provides package_sets

View File

@ -51,65 +51,6 @@ class GatherPhase(PhaseBase):
"""GATHER"""
name = "gather"
config_options = (
{
"name": "gather_lookaside_repos",
"expected_types": [list],
"optional": True,
},
{
"name": "greedy_method",
"expected_values": ["none", "all", "build"],
"optional": True,
},
{
"name": "gather_fulltree",
"expected_types": [bool],
"optional": True,
},
{
"name": "gather_prepopulate",
"expected_types": [str, dict],
"optional": True,
},
{
"name": "hashed_directories",
"expected_types": [bool],
"optional": True,
},
{
"name": "filter_system_release_packages",
"expected_types": [bool],
"optional": True,
},
{
"name": "multilib",
"expected_types": [list],
"optional": True,
},
# DEPRECATED OPTIONS
{
"name": "multilib_arches",
"deprecated": True,
"comment": "Use multilib instead",
},
{
"name": "multilib_methods",
"deprecated": True,
"comment": "Use multilib instead",
},
{
"name": "additional_packages_multiarch",
"deprecated": True,
"comment": "Use multilib_whitelist instead",
},
{
"name": "filter_packages_multiarch",
"deprecated": True,
"comment": "Use multilib_blacklist instead",
},
)
def __init__(self, compose, pkgset_phase):
PhaseBase.__init__(self, compose)
# pkgset_phase provides package_sets and path_prefix
@ -126,11 +67,6 @@ class GatherPhase(PhaseBase):
def check_deps():
pass
def check_config(self):
errors = []
for i in ["release_name", "release_short", "release_version"]:
errors.append(self.conf_assert_str(i))
def _write_manifest(self):
self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file)
self.manifest.dump(self.manifest_file)
@ -380,7 +316,7 @@ def gather_wrapper(compose, package_sets, path_prefix):
def write_prepopulate_file(compose):
if not compose.conf.get("gather_prepopulate", None):
if 'gather_prepopulate' not in compose.conf:
return
prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
@ -479,7 +415,7 @@ def get_variant_packages(compose, arch, variant, package_sets=None):
packages |= get_additional_packages(compose, arch, variant)
filter_packages |= get_filter_packages(compose, arch, variant)
if compose.conf.get('filter_system_release_packages', True):
if compose.conf['filter_system_release_packages']:
system_release_packages, system_release_filter_packages = get_system_release_packages(
compose, arch, variant, package_sets)
packages |= system_release_packages

View File

@ -60,13 +60,13 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
msg = "Linking packages (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
link_type = compose.conf.get("link_type", "hardlink-or-copy")
link_type = compose.conf["link_type"]
pool = LinkerPool(link_type, logger=compose._logger)
for i in range(10):
pool.add(LinkerThread(pool))
hashed_directories = compose.conf.get("hashed_directories", False)
hashed_directories = compose.conf["hashed_directories"]
packages_dir = compose.paths.compose.packages("src", variant)
packages_dir_relpath = compose.paths.compose.packages("src", variant, relative=True)

View File

@ -17,20 +17,12 @@
import kobo.plugins
from pungi.checks import validate_options
class GatherMethodBase(kobo.plugins.Plugin):
config_options = ()
def __init__(self, compose):
self.compose = compose
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
class GatherMethodContainer(kobo.plugins.PluginContainer):
@classmethod

View File

@ -31,27 +31,6 @@ import pungi.phases.gather.method
class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
enabled = True
config_options = (
{
"name": "gather_method",
"expected_types": [str],
"expected_values": ["deps"],
},
{
"name": "check_deps",
"expected_types": [bool],
},
{
"name": "gather_fulltree",
"expected_types": [bool],
"optional": True,
},
{
"name": "gather_selfhosting",
"expected_types": [bool],
"optional": True,
},
)
def __call__(self, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_sets, path_prefix=None, fulltree_excludes=None, prepopulate=None):
# result = {
@ -117,11 +96,11 @@ def resolve_deps(compose, arch, variant):
multilib_methods = get_arch_variant_data(compose.conf, 'multilib', arch, variant)
greedy_method = compose.conf.get("greedy_method", "none")
greedy_method = compose.conf["greedy_method"]
# variant
fulltree = compose.conf.get("gather_fulltree", False)
selfhosting = compose.conf.get("gather_selfhosting", False)
fulltree = compose.conf["gather_fulltree"]
selfhosting = compose.conf["gather_selfhosting"]
# optional
if variant.type == "optional":
@ -156,8 +135,7 @@ def resolve_deps(compose, arch, variant):
def check_deps(compose, arch, variant):
check_deps = compose.conf.get("check_deps", True)
if not check_deps:
if not compose.conf["check_deps"]:
return
pungi_wrapper = PungiWrapper()

View File

@ -23,13 +23,6 @@ import pungi.phases.gather.method
class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
enabled = True
config_options = (
{
"name": "gather_method",
"expected_types": [str],
"expected_values": ["nodeps"],
},
)
def __call__(self, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_sets, path_prefix=None, fulltree_excludes=None, prepopulate=None):
global_pkgset = package_sets["global"]

View File

@ -17,20 +17,12 @@
import kobo.plugins
from pungi.checks import validate_options
class GatherSourceBase(kobo.plugins.Plugin):
config_options = ()
def __init__(self, compose):
self.compose = compose
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
class GatherSourceContainer(kobo.plugins.PluginContainer):
@classmethod

View File

@ -32,17 +32,6 @@ import pungi.phases.gather.source
class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
enabled = True
config_options = (
{
"name": "gather_source",
"expected_types": [str],
"expected_values": ["comps"],
},
{
"name": "comps_file",
"expected_types": [str, dict],
},
)
def __call__(self, arch, variant):
groups = set()

View File

@ -39,17 +39,6 @@ import pungi.phases.gather.source
class GatherSourceJson(pungi.phases.gather.source.GatherSourceBase):
enabled = True
config_options = (
{
"name": "gather_source",
"expected_types": [str],
"expected_values": ["json"],
},
{
"name": "gather_source_mapping",
"expected_types": [str],
},
)
def __call__(self, arch, variant):
json_path = self.compose.conf["gather_source_mapping"]

View File

@ -32,13 +32,5 @@ import pungi.phases.gather.source
class GatherSourceNone(pungi.phases.gather.source.GatherSourceBase):
enabled = True
config_options = (
{
"name": "gather_source",
"expected_types": [str],
"expected_values": ["none"],
},
)
def __call__(self, arch, variant):
return set(), set()

View File

@ -18,34 +18,6 @@ class ImageBuildPhase(base.ImageConfigMixin, base.ConfigGuardedPhase):
"""class for wrapping up koji image-build"""
name = "image_build"
config_options = [
{
"name": "image_build",
"expected_types": [dict],
"optional": True,
},
{
"name": "image_build_ksurl",
"expected_types": [str],
"optional": True,
},
{
"name": "image_build_target",
"expected_types": [str],
"optional": True,
},
{
"name": "image_build_release",
"expected_types": [str, type(None)],
"optional": True,
},
{
"name": "image_build_version",
"expected_types": [str],
"optional": True,
},
]
def __init__(self, compose):
super(ImageBuildPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -155,7 +127,7 @@ class ImageBuildPhase(base.ImageConfigMixin, base.ConfigGuardedPhase):
"relative_image_dir": self.compose.paths.compose.image_dir(
variant, relative=True
),
"link_type": self.compose.conf.get("link_type", "hardlink-or-copy"),
"link_type": self.compose.conf["link_type"],
"scratch": image_conf['image-build'].pop('scratch', False),
"failable_arches": image_conf['image-build'].pop('failable', []),
}

View File

@ -20,28 +20,10 @@ class ImageChecksumPhase(PhaseBase):
name = 'image_checksum'
config_options = (
{
"name": "media_checksums",
"expected_types": [list],
"optional": True,
},
{
"name": "media_checksum_one_file",
"expected_types": [bool],
"optional": True,
},
{
"name": "media_checksum_base_filename",
"expected_types": [str],
"optional": True,
}
)
def __init__(self, compose):
super(ImageChecksumPhase, self).__init__(compose)
self.checksums = self.compose.conf.get('media_checksums', ['md5', 'sha1', 'sha256'])
self.one_file = self.compose.conf.get('media_checksum_one_file', False)
self.checksums = self.compose.conf['media_checksums']
self.one_file = self.compose.conf['media_checksum_one_file']
def validate(self):
errors = []
@ -71,7 +53,7 @@ class ImageChecksumPhase(PhaseBase):
return images
def _get_base_filename(self, variant, arch):
base_checksum_name = self.compose.conf.get('media_checksum_base_filename', '')
base_checksum_name = self.compose.conf['media_checksum_base_filename']
if base_checksum_name:
substs = get_format_substs(self.compose, variant=variant, arch=arch)
base_checksum_name = (base_checksum_name % substs).format(**substs)

View File

@ -32,148 +32,6 @@ class InitPhase(PhaseBase):
"""INIT is a mandatory phase"""
name = "init"
config_options = (
# PRODUCT INFO
{
"name": "release_name",
"expected_types": [str],
},
{
"name": "release_short",
"expected_types": [str],
},
{
"name": "release_version",
"expected_types": [str],
},
{
# override description in .discinfo; accepts %(variant_name)s and %(arch)s variables
"name": "release_discinfo_description",
"expected_types": [str],
"optional": True,
},
{
"name": "release_is_layered",
"expected_types": [bool],
"requires": (
(lambda x: bool(x), ["base_product_name", "base_product_short", "base_product_version"]),
),
"conflicts": (
(lambda x: not bool(x), ["base_product_name", "base_product_short", "base_product_version"]),
),
},
# BASE PRODUCT INFO (FOR A LAYERED PRODUCT ONLY)
{
"name": "base_product_name",
"expected_types": [str],
"optional": True,
},
{
"name": "base_product_short",
"expected_types": [str],
"optional": True,
},
{
"name": "base_product_version",
"expected_types": [str],
"optional": True,
},
{
"name": "comps_file",
"expected_types": [str, dict],
"optional": True,
},
{
"name": "comps_filter_environments", # !!! default is True !!!
"expected_types": [bool],
"optional": True,
},
{
"name": "variants_file",
"expected_types": [str, dict],
},
{
"name": "sigkeys",
"expected_types": [list],
},
{
"name": "tree_arches",
"expected_types": [list],
"optional": True,
},
{
"name": "tree_variants",
"expected_types": [list],
"optional": True,
},
# CREATEREPO SETTINGS
{
"name": "createrepo_c",
"expected_types": [bool],
"optional": True,
},
{
"name": "createrepo_checksum",
"expected_types": [str],
"expected_values": ["sha256", "sha"],
},
# RUNROOT SETTINGS
{
"name": "runroot",
"expected_types": [bool],
"requires": (
(lambda x: bool(x), ["koji_profile", "runroot_tag", "runroot_channel"]),
),
"conflicts": (
(lambda x: not bool(x), ["runroot_tag", "runroot_channel"]),
),
},
{
"name": "runroot_tag",
"expected_types": [str],
"optional": True,
},
{
"name": "runroot_channel",
"expected_types": [str],
"optional": True,
},
{
"name": "keep_original_comps",
"expected_types": [list],
"optional": True,
},
# Configuration shared by all image building phases.
{
"name": "global_ksurl",
"expected_types": [str],
"optional": True,
},
{
"name": "global_target",
"expected_types": [str],
"optional": True,
},
{
"name": "global_release",
"expected_types": [str, type(None)],
"optional": True,
},
{
"name": "global_version",
"expected_types": [str],
"optional": True,
},
)
def skip(self):
# INIT must never be skipped,
# because it generates data for LIVEIMAGES
@ -189,7 +47,7 @@ class InitPhase(PhaseBase):
# write variant comps
for variant in self.compose.get_variants():
should_preserve = variant.uid in self.compose.conf.get('keep_original_comps', [])
should_preserve = variant.uid in self.compose.conf['keep_original_comps']
for arch in variant.arches:
if should_preserve:
copy_variant_comps(self.compose, arch, variant)
@ -248,7 +106,7 @@ def write_variant_comps(compose, arch, variant):
comps = CompsWrapper(comps_file)
# groups = variant.groups
comps.filter_groups(variant.groups)
if compose.conf.get("comps_filter_environments", True):
if compose.conf["comps_filter_environments"]:
comps.filter_environments(variant.environments)
compose.log_warning("[SKIP ] %s" % msg)
@ -261,7 +119,7 @@ def write_variant_comps(compose, arch, variant):
comps = CompsWrapper(comps_file)
comps.filter_groups(variant.groups)
if compose.conf.get("comps_filter_environments", True):
if compose.conf["comps_filter_environments"]:
comps.filter_environments(variant.environments)
comps.write_comps()
@ -273,7 +131,7 @@ def copy_variant_comps(compose, arch, variant):
def create_comps_repo(compose, arch):
createrepo_c = compose.conf.get("createrepo_c", True)
createrepo_c = compose.conf["createrepo_c"]
createrepo_checksum = compose.conf["createrepo_checksum"]
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
comps_repo = compose.paths.work.comps_repo(arch=arch)

View File

@ -41,54 +41,6 @@ if sys.version_info[0] == 3:
class LiveImagesPhase(base.ImageConfigMixin, base.ConfigGuardedPhase):
name = "live_images"
config_options = (
{
"name": "live_target",
"expected_types": [str],
"optional": True,
},
{
"name": "live_images",
"expected_types": [list],
"optional": True,
},
{
"name": "signing_key_id",
"expected_types": [str],
"optional": True,
},
{
"name": "signing_key_password_file",
"expected_types": [str],
"optional": True,
},
{
"name": "signing_command",
"expected_types": [str],
"optional": True,
},
{
"name": "live_images_no_rename",
"expected_types": [bool],
"optional": True,
},
{
"name": "live_images_ksurl",
"expected_types": [str],
"optional": True,
},
{
"name": "live_images_release",
"expected_types": [str, type(None)],
"optional": True,
},
{
"name": "live_images_version",
"expected_types": [str],
"optional": True,
},
)
def __init__(self, compose):
super(LiveImagesPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -118,7 +70,7 @@ class LiveImagesPhase(base.ImageConfigMixin, base.ConfigGuardedPhase):
return repos
def run(self):
symlink_isos_to = self.compose.conf.get("symlink_isos_to", None)
symlink_isos_to = self.compose.conf.get("symlink_isos_to")
commands = []
for variant in self.compose.variants.values():
@ -177,10 +129,10 @@ class LiveImagesPhase(base.ImageConfigMixin, base.ConfigGuardedPhase):
self.pool.start()
def _get_file_name(self, arch, variant, name=None, version=None):
if self.compose.conf.get('live_images_no_rename', False):
if self.compose.conf['live_images_no_rename']:
return None
disc_type = self.compose.conf.get('disc_types', {}).get('live', 'live')
disc_type = self.compose.conf['disc_types'].get('live', 'live')
format = "%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
# Custom name (prefix)
@ -227,7 +179,7 @@ class CreateLiveImageThread(WorkerThread):
if cmd["specfile"] and not cmd["scratch"]:
# Non scratch build are allowed only for rpm wrapped images
archive = True
target = compose.conf.get("live_target", "rhel-7.0-candidate") # compatability for hardcoded target
target = compose.conf["live_target"]
koji_cmd = koji_wrapper.get_create_image_cmd(name, version, target,
cmd["build_arch"],
cmd["ks_file"],

View File

@ -17,34 +17,6 @@ class LiveMediaPhase(ImageConfigMixin, ConfigGuardedPhase):
"""class for wrapping up koji spin-livemedia"""
name = 'live_media'
config_options = (
{
"name": "live_media",
"expected_types": [dict],
"optional": True,
},
{
"name": "live_media_ksurl",
"expected_types": [str],
"optional": True,
},
{
"name": "live_media_target",
"expected_types": [str],
"optional": True,
},
{
"name": "live_media_release",
"expected_types": [str, type(None)],
"optional": True,
},
{
"name": "live_media_version",
"expected_types": [str],
"optional": True,
},
)
def __init__(self, compose):
super(LiveMediaPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -183,7 +155,7 @@ class LiveMediaThread(WorkerThread):
raise RuntimeError('Image count mismatch in task %s.' % output['task_id'])
linker = Linker(logger=compose._logger)
link_type = compose.conf.get("link_type", "hardlink-or-copy")
link_type = compose.conf["link_type"]
for image_info in image_infos:
image_dir = compose.paths.compose.iso_dir(image_info['arch'], variant)
makedirs(image_dir)

View File

@ -13,14 +13,6 @@ from ..paths import translate_path
class OSBSPhase(ConfigGuardedPhase):
name = 'osbs'
config_options = [
{
"name": "osbs",
"expected_types": [dict],
"optional": True,
}
]
def __init__(self, compose):
super(OSBSPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)

View File

@ -13,14 +13,6 @@ from ..wrappers import kojiwrapper, scm
class OSTreePhase(ConfigGuardedPhase):
name = 'ostree'
config_options = [
{
"name": "ostree",
"expected_types": [list],
"optional": True,
}
]
def __init__(self, compose):
super(OSTreePhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -74,7 +66,7 @@ class OSTreeThread(WorkerThread):
config['ostree_repo']
]
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_channel = compose.conf.get("runroot_channel")
runroot_tag = compose.conf["runroot_tag"]
packages = ['pungi', 'ostree', 'rpm-ostree']

View File

@ -16,14 +16,6 @@ from ..wrappers import kojiwrapper, iso, lorax, scm
class OstreeInstallerPhase(ConfigGuardedPhase):
name = 'ostree_installer'
config_options = [
{
"name": "ostree_installer",
"expected_types": [list],
"optional": True,
}
]
def __init__(self, compose):
super(OstreeInstallerPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.compose._logger)
@ -61,7 +53,7 @@ class OstreeInstallerThread(WorkerThread):
self._run_ostree_cmd(compose, variant, arch, config, source_repo, output_dir)
disc_type = compose.conf.get('disc_types', {}).get('dvd', 'dvd')
disc_type = compose.conf['disc_types'].get('dvd', 'dvd')
filename = compose.get_image_name(arch, variant, disc_type=disc_type)
self._copy_image(compose, variant, arch, filename, output_dir)
self._add_to_manifest(compose, variant, arch, filename)
@ -160,7 +152,7 @@ class OstreeInstallerThread(WorkerThread):
add_arch_template_var=config.get('add_arch_template_var')
)
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_channel = compose.conf.get("runroot_channel")
runroot_tag = compose.conf["runroot_tag"]
packages = ['pungi', 'lorax', 'ostree']

View File

@ -22,13 +22,6 @@ class PkgsetPhase(PhaseBase):
"""PKGSET"""
name = "pkgset"
config_options = (
{
"name": "pkgset_source",
"expected_types": [str],
},
)
def run(self):
pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
from source import PkgsetSourceContainer

View File

@ -40,7 +40,7 @@ def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
def create_global_repo(compose, path_prefix):
createrepo_c = compose.conf.get("createrepo_c", True)
createrepo_c = compose.conf["createrepo_c"]
createrepo_checksum = compose.conf["createrepo_checksum"]
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
repo_dir_global = compose.paths.work.arch_repo(arch="global")
@ -56,7 +56,7 @@ def create_global_repo(compose, path_prefix):
old_compose_path = None
update_md_path = None
if compose.old_composes:
old_compose_path = find_old_compose(compose.old_composes, compose.conf["release_short"], compose.conf["release_version"], compose.conf.get("base_product_short", None), compose.conf.get("base_product_version", None))
old_compose_path = find_old_compose(compose.old_composes, compose.conf["release_short"], compose.conf["release_version"], compose.conf.get("base_product_short"), compose.conf.get("base_product_version"))
if old_compose_path is None:
compose.log_info("No suitable old compose found in: %s" % compose.old_composes)
else:
@ -74,7 +74,7 @@ def create_global_repo(compose, path_prefix):
def create_arch_repos(compose, arch, path_prefix):
createrepo_c = compose.conf.get("createrepo_c", True)
createrepo_c = compose.conf["createrepo_c"]
createrepo_checksum = compose.conf["createrepo_checksum"]
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
repo_dir_global = compose.paths.work.arch_repo(arch="global")

View File

@ -17,20 +17,11 @@
import kobo.plugins
from pungi.checks import validate_options
class PkgsetSourceBase(kobo.plugins.Plugin):
config_options = ()
def __init__(self, compose):
self.compose = compose
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
class PkgsetSourceContainer(kobo.plugins.PluginContainer):
@classmethod

View File

@ -32,26 +32,6 @@ import pungi.phases.pkgset.source
class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
enabled = True
config_options = (
{
"name": "koji_profile",
"expected_types": [str],
},
{
"name": "pkgset_source",
"expected_types": [str],
"expected_values": "koji",
},
{
"name": "pkgset_koji_tag",
"expected_types": [str],
},
{
"name": "pkgset_koji_inherit",
"expected_types": [bool],
"optional": True,
},
)
def __call__(self):
compose = self.compose
@ -87,7 +67,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, compose_tag, even
all_arches.update(arches)
compose_tag = compose.conf["pkgset_koji_tag"]
inherit = compose.conf.get("pkgset_koji_inherit", True)
inherit = compose.conf["pkgset_koji_inherit"]
msg = "Populating the global package set from tag '%s'" % compose_tag
global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "pkgset_global.pickle")
if compose.DEBUG and os.path.isfile(global_pkgset_path):

View File

@ -35,17 +35,6 @@ import pungi.phases.pkgset.source
class PkgsetSourceRepos(pungi.phases.pkgset.source.PkgsetSourceBase):
enabled = True
config_options = (
{
"name": "pkgset_source",
"expected_types": [str],
"expected_values": "repos",
},
{
"name": "pkgset_repos",
"expected_types": [dict],
},
)
def __call__(self):
package_sets, path_prefix = get_pkgset_from_repos(self.compose)
@ -57,7 +46,7 @@ def get_pkgset_from_repos(compose):
# TODO: noarch hack - secondary arches, use x86_64 noarch where possible
flist = []
link_type = compose.conf.get("link_type", "hardlink-or-copy")
link_type = compose.conf["link_type"]
pool = LinkerPool(link_type, logger=compose._logger)
for i in range(10):
pool.add(LinkerThread(pool))

View File

@ -56,27 +56,6 @@ class ProductimgPhase(PhaseBase):
"""PRODUCTIMG"""
name = "productimg"
config_options = (
{
"name": "productimg",
"expected_types": [bool],
"requires": (
(lambda x: bool(x) is True, ["productimg_install_class"]),
(lambda x: bool(x) is True, ["productimg_po_files"]),
),
},
{
"name": "productimg_install_class",
"expected_types": [dict],
"optional": True,
},
{
"name": "productimg_po_files",
"expected_types": [dict],
"optional": True,
},
)
def __init__(self, compose, pkgset_phase):
PhaseBase.__init__(self, compose)
# pkgset_phase provides package_sets and path_prefix
@ -85,11 +64,11 @@ class ProductimgPhase(PhaseBase):
def skip(self):
if PhaseBase.skip(self):
return True
if not self.compose.conf.get("productimg", False):
if not self.compose.conf["productimg"]:
msg = "Config option 'productimg' not set. Skipping creating product images."
self.compose.log_debug(msg)
return True
if not self.compose.conf.get("bootable", False):
if not self.compose.conf["bootable"]:
msg = "Not a bootable product. Skipping creating product images."
self.compose.log_debug(msg)
return True

View File

@ -64,7 +64,7 @@ def run_repoclosure(compose):
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)
repos[repo_id] = repo_dir
if compose.conf.get("release_is_layered", False):
if compose.conf["release_is_layered"]:
for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, variant)):
lookaside["lookaside-%s.%s-%s" % (variant.uid, arch, i)] = lookaside_url

View File

@ -327,7 +327,7 @@ def get_buildroot_rpms(compose, task_id):
def _apply_substitutions(compose, volid):
for k, v in compose.conf.get('volume_id_substitutions', {}).iteritems():
for k, v in compose.conf['volume_id_substitutions'].iteritems():
volid = volid.replace(k, v)
return volid
@ -353,16 +353,8 @@ def get_volid(compose, arch, variant=None, escape_spaces=False, disc_type=False)
base_product_version = compose.conf.get("base_product_version", "")
variant_uid = variant and variant.uid or None
products = [
"{release_short}-{version} {variant}.{arch}",
"{release_short}-{version} {arch}",
]
products = compose.conf.get('image_volid_formats', products)
layered_products = [
"{release_short}-{version} {base_product_short}-{base_product_version} {variant}.{arch}",
"{release_short}-{version} {base_product_short}-{base_product_version} {arch}",
]
layered_products = compose.conf.get('image_volid_layered_product_formats', layered_products)
products = compose.conf['image_volid_formats']
layered_products = compose.conf['image_volid_layered_product_formats']
volid = None
if release_is_layered:
@ -526,3 +518,23 @@ def copy_all(src, dest):
shutil.copytree(source, destination)
else:
shutil.copy2(source, destination)
def levenshtein(a, b):
"""Compute Levenshtein edit distance between two strings."""
mat = [[0 for _ in xrange(len(a) + 1)] for _ in xrange(len(b) + 1)]
for i in xrange(len(a) + 1):
mat[0][i] = i
for j in xrange(len(b) + 1):
mat[j][0] = j
for j in xrange(1, len(b) + 1):
for i in xrange(1, len(a) + 1):
cost = 0 if a[i - 1] == b[j - 1] else 1
mat[j][i] = min(mat[j - 1][i] + 1,
mat[j][i - 1] + 1,
mat[j - 1][i - 1] + cost)
return mat[len(b)][len(a)]

View File

@ -53,6 +53,7 @@ setup(
"lockfile",
"lxml",
"productmd",
"jsonschema",
],
tests_require = [
"mock",

View File

@ -11,7 +11,7 @@ import shutil
import errno
from pungi.util import get_arch_variant_data
from pungi import paths
from pungi import paths, checks
class PungiTestCase(unittest.TestCase):
@ -46,7 +46,8 @@ class DummyCompose(object):
),
)
self.topdir = topdir
self.conf = config
self.conf = load_config(PKGSET_REPOS, **config)
checks.validate(self.conf)
self.paths = paths.Paths(self)
self._logger = mock.Mock()
self.variants = {
@ -106,13 +107,32 @@ def copy_fixture(fixture_name, dest):
shutil.copy2(src, dest)
def union(*args):
    """Merge all given dicts into a fresh dict.

    Later arguments take precedence over earlier ones for duplicate keys.
    The inputs are never modified.
    """
    merged = {}
    for mapping in args:
        for key, value in mapping.items():
            merged[key] = value
    return merged
def boom(*args, **kwargs):
    """Raise an exception unconditionally.

    Intended as a ``side_effect`` callback in tests; accepts and ignores
    any arguments so it can stand in for any callable.
    """
    del args, kwargs  # the inputs are deliberately irrelevant
    raise Exception('BOOM')
# Minimal pkgset configuration: source packages from plain repos, with no
# repositories actually listed. Merge into test configs via load_config().
PKGSET_REPOS = dict(
    pkgset_source='repos',
    pkgset_repos={},
)

# Smallest configuration that passes schema validation; individual tests
# override or extend these values through load_config().
BASE_CONFIG = dict(
    release_short='test',
    release_name='Test',
    release_version='1.0',
    release_is_layered=False,
    variants_file='variants.xml',
    runroot=False,
    createrepo_checksum='sha256',
    gather_method='deps',
    gather_source='none',
    sigkeys=[],
)


def load_config(data=None, **kwargs):
    """Build a test configuration dict on top of BASE_CONFIG.

    :param data: optional dict of overrides applied after BASE_CONFIG
    :param kwargs: further overrides applied last (highest precedence)
    :return: a new dict; BASE_CONFIG and the inputs are left untouched
    """
    # Use None as the default instead of a mutable ``{}`` literal so the
    # default object can never be shared (or accidentally mutated) between
    # calls; behavior for all existing callers is unchanged.
    conf = dict()
    conf.update(BASE_CONFIG)
    if data:
        conf.update(data)
    conf.update(kwargs)
    return conf

View File

@ -183,36 +183,6 @@ class TestBuildinstallPhase(PungiTestCase):
[mock.call(compose, 'x86_64', disc_type='DVD'),
mock.call(compose, 'amd64', disc_type='DVD')])
def test_global_upgrade_with_lorax(self):
compose = BuildInstallCompose(self.topdir, {
'bootable': True,
'buildinstall_method': 'lorax',
'buildinstall_upgrade_image': True,
})
phase = BuildinstallPhase(compose)
with self.assertRaises(ValueError) as ctx:
phase.validate()
self.assertIn('Deprecated config option: buildinstall_upgrade_image',
str(ctx.exception))
def test_lorax_options_with_buildinstall(self):
compose = BuildInstallCompose(self.topdir, {
'bootable': True,
'buildinstall_method': 'buildinstall',
'lorax_options': [],
})
phase = BuildinstallPhase(compose)
with self.assertRaises(ValueError) as ctx:
phase.validate()
self.assertIn('buildinstall', str(ctx.exception))
self.assertIn('lorax_options', str(ctx.exception))
@mock.patch('pungi.phases.buildinstall.ThreadPool')
@mock.patch('pungi.phases.buildinstall.LoraxWrapper')
@mock.patch('pungi.phases.buildinstall.get_volid')
@ -669,7 +639,7 @@ class TestSymlinkIso(PungiTestCase):
@mock.patch('pungi.phases.buildinstall.IsoWrapper')
@mock.patch('pungi.phases.buildinstall.run')
def test_hardlink(self, run, IsoWrapperCls, get_file_size, get_mtime, ImageCls):
self.compose.conf = {'buildinstall_symlink': False}
self.compose.conf = {'buildinstall_symlink': False, 'disc_types': {}}
IsoWrapper = IsoWrapperCls.return_value
get_file_size.return_value = 1024
get_mtime.return_value = 13579

View File

@ -156,6 +156,8 @@ class ComposeTestCase(unittest.TestCase):
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
release_is_layered=False,
)
compose = Compose(conf, self.tmp_dir)
@ -194,6 +196,8 @@ class ComposeTestCase(unittest.TestCase):
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
release_is_layered=False,
tree_arches=['x86_64'],
)
@ -236,6 +240,8 @@ class ComposeTestCase(unittest.TestCase):
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
release_is_layered=False,
tree_variants=['Server', 'Client', 'Server-Gluster'],
)
@ -271,6 +277,8 @@ class ComposeTestCase(unittest.TestCase):
release_name='Test',
release_version='1.0',
release_short='test',
release_type='ga',
release_is_layered=False,
tree_variants=['Server', 'Client', 'Server-optional'],
tree_arches=['x86_64'],
)

394
tests/test_config.py Normal file
View File

@ -0,0 +1,394 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from pungi import checks
from tests.helpers import load_config, PKGSET_REPOS
class PkgsetConfigTestCase(unittest.TestCase):
    """Schema validation of the pkgset_* option group."""

    def test_validate_minimal_pkgset_koji(self):
        config = load_config(
            pkgset_source='koji',
            pkgset_koji_tag='f25',
        )
        self.assertEqual(checks.validate(config), [])

    def test_validate_minimal_pkgset_repos(self):
        config = load_config(
            pkgset_source='repos',
            pkgset_repos={'x86_64': '/first', 'ppc64': '/second'},
        )
        self.assertEqual(checks.validate(config), [])

    def test_pkgset_mismatch_repos(self):
        # Options belonging to the koji source must be reported as
        # conflicts when the repos source is selected.
        config = load_config(
            pkgset_source='repos',
            pkgset_koji_tag='f25',
            pkgset_koji_inherit=False,
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('pkgset_source', 'repos', 'pkgset_repos'),
             checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_tag'),
             checks.CONFLICTS.format('pkgset_source', 'repos', 'pkgset_koji_inherit')])

    def test_pkgset_mismatch_koji(self):
        config = load_config(
            pkgset_source='koji',
            pkgset_repos={'whatever': '/foo'},
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('pkgset_source', 'koji', 'pkgset_koji_tag'),
             checks.CONFLICTS.format('pkgset_source', 'koji', 'pkgset_repos')])
class ReleaseConfigTestCase(unittest.TestCase):
    """Dependencies between release_is_layered and base_product_* options."""

    def test_layered_without_base_product(self):
        config = load_config(
            PKGSET_REPOS,
            release_is_layered=True
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('release_is_layered', 'True', 'base_product_name'),
             checks.REQUIRES.format('release_is_layered', 'True', 'base_product_short'),
             checks.REQUIRES.format('release_is_layered', 'True', 'base_product_version')])

    def test_not_layered_with_base_product(self):
        # base_product_* settings are conflicts when the release is not
        # layered (release_is_layered defaults to False in BASE_CONFIG).
        config = load_config(
            PKGSET_REPOS,
            base_product_name='Prod',
            base_product_short='bp',
            base_product_version='1.0',
            base_product_type='updates',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('release_is_layered', 'False', 'base_product_name'),
             checks.CONFLICTS.format('release_is_layered', 'False', 'base_product_short'),
             checks.CONFLICTS.format('release_is_layered', 'False', 'base_product_type'),
             checks.CONFLICTS.format('release_is_layered', 'False', 'base_product_version')])
class RunrootConfigTestCase(unittest.TestCase):
    """Koji connection options are tied to the runroot flag."""

    def test_runroot_without_deps(self):
        config = load_config(
            PKGSET_REPOS,
            runroot=True,
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('runroot', 'True', 'koji_profile'),
             checks.REQUIRES.format('runroot', 'True', 'runroot_tag'),
             checks.REQUIRES.format('runroot', 'True', 'runroot_channel')])

    def test_koji_settings_without_runroot(self):
        # koji_profile alone is fine, but runroot_tag/channel conflict
        # with runroot=False.
        config = load_config(
            PKGSET_REPOS,
            runroot=False,
            koji_profile='koji',
            runroot_tag='f25',
            runroot_channel='compose',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('runroot', 'False', 'runroot_tag'),
             checks.CONFLICTS.format('runroot', 'False', 'runroot_channel')])
class BuildinstallConfigTestCase(unittest.TestCase):
    """Interplay of bootable, buildinstall_method and lorax options."""

    def test_bootable_without_method(self):
        config = load_config(
            PKGSET_REPOS,
            bootable=True,
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('bootable', 'True', 'buildinstall_method')]
        )

    def test_non_bootable_with_method(self):
        config = load_config(
            PKGSET_REPOS,
            bootable=False,
            buildinstall_method='lorax',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('bootable', 'False', 'buildinstall_method')]
        )

    def test_buildinstall_method_without_bootable(self):
        # bootable defaults to False, so the method alone is a conflict.
        config = load_config(
            PKGSET_REPOS,
            buildinstall_method='lorax',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('bootable', 'False', 'buildinstall_method')]
        )

    def test_buildinstall_with_lorax_options(self):
        # lorax_options are only meaningful with the lorax method.
        config = load_config(
            PKGSET_REPOS,
            bootable=True,
            buildinstall_method='buildinstall',
            lorax_options=[('^Server$', {})]
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('buildinstall_method', 'buildinstall', 'lorax_options')]
        )

    def test_lorax_with_lorax_options(self):
        config = load_config(
            PKGSET_REPOS,
            bootable=True,
            buildinstall_method='lorax',
            lorax_options=[]
        )
        self.assertItemsEqual(checks.validate(config), [])

    def test_lorax_options_without_bootable_and_method(self):
        config = load_config(
            PKGSET_REPOS,
            lorax_options=[('^Server$', {})],
            buildinstall_kickstart='foo',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('buildinstall_method', 'None', 'lorax_options'),
             checks.CONFLICTS.format('buildinstall_method', 'None', 'buildinstall_kickstart')]
        )

    def test_deprecated(self):
        # The schema's deprecation extension should point at the
        # replacement option.
        config = load_config(
            PKGSET_REPOS,
            buildinstall_upgrade_image=True,
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.DEPRECATED.format('buildinstall_upgrade_image', 'use lorax_options instead')]
        )
class CreaterepoConfigTestCase(unittest.TestCase):
    """product_id_allow_missing makes no sense without product_id."""

    def test_validate_minimal_pkgset_koji(self):
        config = load_config(
            pkgset_source='koji',
            pkgset_koji_tag='f25',
            product_id_allow_missing=True,
        )
        self.assertEqual(
            checks.validate(config),
            [checks.CONFLICTS.format('product_id', 'None', 'product_id_allow_missing')]
        )
class GatherConfigTestCase(unittest.TestCase):
    """gather_source selects which companion option is required."""

    def test_source_comps_requires_comps(self):
        # comps source needs comps_file and must not get a mapping.
        config = load_config(
            pkgset_source='koji',
            pkgset_koji_tag='f25',
            gather_source='comps',
            gather_source_mapping='foo'
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('gather_source', 'comps', 'comps_file'),
             checks.CONFLICTS.format('gather_source', 'comps', 'gather_source_mapping')]
        )

    def test_source_json_requires_mapping(self):
        # json source needs a mapping and must not get a comps file.
        config = load_config(
            pkgset_source='koji',
            pkgset_koji_tag='f25',
            gather_source='json',
            comps_file='comps',
        )
        self.assertItemsEqual(
            checks.validate(config),
            [checks.REQUIRES.format('gather_source', 'json', 'gather_source_mapping'),
             checks.CONFLICTS.format('gather_source', 'json', 'comps_file')]
        )
class OSBSConfigTestCase(unittest.TestCase):
    """Schema validation of the osbs phase configuration."""

    def test_validate(self):
        config = load_config(
            PKGSET_REPOS,
            osbs={"^Server$": {
                'url': 'http://example.com',
                'target': 'f25-build',
            }}
        )
        self.assertItemsEqual(checks.validate(config), [])

    def test_validate_bad_conf(self):
        # A bare string is not a valid variant-regex mapping.
        config = load_config(
            PKGSET_REPOS,
            osbs='yes please'
        )
        self.assertNotEqual(checks.validate(config), [])
class OstreeConfigTestCase(unittest.TestCase):
    """Schema validation of the ostree phase configuration."""

    def test_validate(self):
        config = load_config(
            PKGSET_REPOS,
            ostree=[
                ("^Atomic$", {
                    "x86_64": {
                        "treefile": "fedora-atomic-docker-host.json",
                        "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
                        "source_repo_from": "Everything",
                        "ostree_repo": "/mnt/koji/compose/atomic/Rawhide/"
                    }
                })
            ]
        )
        self.assertEqual(checks.validate(config), [])

    def test_validate_bad_conf(self):
        # A bare string is not a valid list of (variant regex, config) pairs.
        config = load_config(
            PKGSET_REPOS,
            ostree='yes please'
        )
        self.assertNotEqual(checks.validate(config), [])
class OstreeInstallerConfigTestCase(unittest.TestCase):
    """Schema validation of the ostree_installer phase configuration."""

    def test_validate(self):
        config = load_config(
            PKGSET_REPOS,
            ostree_installer=[
                ("^Atomic$", {
                    "x86_64": {
                        "source_repo_from": "Everything",
                        "release": None,
                        "installpkgs": ["fedora-productimg-atomic"],
                        "add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],
                        "add_template_var": [
                            "ostree_osname=fedora-atomic",
                            "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
                        ],
                        "add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"],
                        "add_arch_template_var": [
                            "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
                            "ostree_osname=fedora-atomic",
                            "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
                        ]
                    }
                })
            ]
        )
        self.assertEqual(checks.validate(config), [])

    def test_validate_bad_conf(self):
        # add_arch_template must be a list of strings; an int (15) has
        # to be rejected by the schema.
        config = load_config(
            PKGSET_REPOS,
            ostree_installer=[
                ("^Atomic$", {
                    "x86_64": {
                        "source_repo_from": "Everything",
                        "release": None,
                        "installpkgs": ["fedora-productimg-atomic"],
                        "add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],
                        "add_template_var": [
                            "ostree_osname=fedora-atomic",
                            "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
                        ],
                        "add_arch_template": 15,
                        "add_arch_template_var": [
                            "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
                            "ostree_osname=fedora-atomic",
                            "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
                        ]
                    }
                })
            ]
        )
        self.assertNotEqual(checks.validate(config), [])
class LiveMediaConfigTestCase(unittest.TestCase):
    """Global live_media_* option validation."""

    def test_global_config_validation(self):
        config = load_config(
            PKGSET_REPOS,
            live_media_ksurl='git://example.com/repo.git#HEAD',
            live_media_target='f24',
            live_media_release='RRR',
            live_media_version='Rawhide',
        )
        self.assertEqual(checks.validate(config), [])

    def test_global_config_null_release(self):
        # An explicit None for the release must be accepted.
        config = load_config(
            PKGSET_REPOS,
            live_media_release=None,
        )
        self.assertEqual(checks.validate(config), [])
class InitConfigTestCase(unittest.TestCase):
    """keep_original_comps accepts any list of variant names."""

    def test_validate_keep_original_comps_empty(self):
        config = load_config(PKGSET_REPOS,
                             keep_original_comps=[])
        self.assertEqual(checks.validate(config), [])

    def test_validate_keep_original_comps_filled_in(self):
        config = load_config(PKGSET_REPOS,
                             keep_original_comps=['Everything'])
        self.assertEqual(checks.validate(config), [])
class TestSuggestions(unittest.TestCase):
    """Unknown config options should be reported with a close-match hint."""

    def test_unknown_option_reported_with_suggestion(self):
        # Renamed from `test_validate_keep_original_comps_empty`, which
        # was copy-pasted from InitConfigTestCase and did not describe
        # what this test checks: a typo option (product_pid) must be
        # flagged with the nearest known option (product_id).
        cfg = load_config(PKGSET_REPOS,
                          product_pid=None)
        self.assertEqual(
            checks.validate(cfg),
            [checks.UNKNOWN_SUGGEST.format('product_pid', 'product_id')])
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()

View File

@ -14,6 +14,7 @@ import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from pungi.phases.image_build import ImageBuildPhase, CreateImageBuildThread
from pungi.checks import validate
from tests.helpers import DummyCompose, PungiTestCase, boom
@ -33,7 +34,8 @@ class TestImageBuildPhase(PungiTestCase):
'ksurl': 'git://git.fedorahosted.org/git/spin-kickstarts.git',
'kickstart': "fedora-docker-base.ks",
'distro': 'Fedora-20',
'disk_size': 3
'disk_size': 3,
'failable': ['x86_64'],
}
}
]
@ -41,6 +43,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -71,7 +75,7 @@ class TestImageBuildPhase(PungiTestCase):
"relative_image_dir": 'Client/%(arch)s/images',
"link_type": 'hardlink-or-copy',
"scratch": False,
"failable_arches": [],
"failable_arches": ['x86_64'],
}
server_args = {
"format": [('docker', 'tar.xz')],
@ -96,7 +100,7 @@ class TestImageBuildPhase(PungiTestCase):
"relative_image_dir": 'Server/%(arch)s/images',
"link_type": 'hardlink-or-copy',
"scratch": False,
"failable_arches": [],
"failable_arches": ['x86_64'],
}
self.maxDiff = None
self.assertItemsEqual(phase.pool.queue_put.mock_calls,
@ -126,6 +130,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -186,6 +192,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -218,6 +226,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -279,6 +289,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -341,6 +353,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -402,6 +416,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -438,6 +454,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = ImageBuildPhase(compose)
phase.run()
@ -474,6 +492,8 @@ class TestImageBuildPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'git://git.fedorahosted.org/git/spin-kickstarts.git?#BEEFCAFE'
phase = ImageBuildPhase(compose)

View File

@ -26,9 +26,10 @@ class TestImageChecksumPhase(PungiTestCase):
'media_checksum_one_file': True
})
phase = ImageChecksumPhase(compose)
with self.assertRaises(ValueError) as err:
with self.assertRaises(ValueError) as ctx:
phase.validate()
self.assertIn('media_checksum_one_file', err.message)
self.assertIn('media_checksum_one_file', str(ctx.exception))
@mock.patch('os.path.exists')
@mock.patch('kobo.shortcuts.compute_file_checksums')

View File

@ -11,18 +11,7 @@ import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from pungi.phases import init
from tests.helpers import DummyCompose, PungiTestCase, touch, union
MIN_CONFIG = {
'release_short': 'Fedora',
'release_name': 'Fedora',
'release_version': 'Rawhide',
'release_is_layered': False,
'variants_file': 'does-not-exist.xml',
'sigkeys': [],
'createrepo_checksum': 'sha256',
'runroot': False,
}
from tests.helpers import DummyCompose, PungiTestCase, touch
class TestInitPhase(PungiTestCase):
@ -100,23 +89,6 @@ class TestInitPhase(PungiTestCase):
self.assertItemsEqual(write_variant.mock_calls, [])
self.assertItemsEqual(copy_comps.mock_calls, [])
def test_validate_keep_original_comps_missing(self):
compose = DummyCompose(self.topdir, MIN_CONFIG)
phase = init.InitPhase(compose)
phase.validate()
def test_validate_keep_original_comps_empty(self):
config = union(MIN_CONFIG, {'keep_original_comps': []})
compose = DummyCompose(self.topdir, config)
phase = init.InitPhase(compose)
phase.validate()
def test_validate_keep_original_comps_filled_in(self):
config = union(MIN_CONFIG, {'keep_original_comps': ['Everything']})
compose = DummyCompose(self.topdir, config)
phase = init.InitPhase(compose)
phase.validate()
class TestWriteArchComps(PungiTestCase):

View File

@ -11,6 +11,7 @@ import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from pungi.phases.live_images import LiveImagesPhase, CreateLiveImageThread
from pungi.checks import validate
from tests.helpers import DummyCompose, PungiTestCase, boom
@ -31,6 +32,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
phase = LiveImagesPhase(compose)
phase.run()
@ -80,6 +83,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
phase = LiveImagesPhase(compose)
phase.run()
@ -126,6 +131,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
phase = LiveImagesPhase(compose)
phase.run()
@ -174,6 +181,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
phase = LiveImagesPhase(compose)
phase.run()
@ -242,6 +251,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'https://git.example.com/kickstarts.git?#CAFEBABE'
phase = LiveImagesPhase(compose)
@ -295,6 +306,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'https://git.example.com/kickstarts.git?#CAFEBABE'
phase = LiveImagesPhase(compose)
@ -348,6 +361,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'https://git.example.com/kickstarts.git?#CAFEBABE'
phase = LiveImagesPhase(compose)
@ -398,6 +413,8 @@ class TestLiveImagesPhase(PungiTestCase):
],
})
self.assertEqual(validate(compose.conf), [])
phase = LiveImagesPhase(compose)
phase.run()

View File

@ -10,30 +10,12 @@ import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from pungi.phases.livemedia_phase import LiveMediaPhase, LiveMediaThread
from pungi.checks import validate
from tests.helpers import DummyCompose, PungiTestCase, boom
class TestLiveMediaPhase(PungiTestCase):
def test_global_config_validation(self):
compose = DummyCompose(self.topdir, {
'live_media_ksurl': 'git://example.com/repo.git#HEAD',
'live_media_target': 'f24',
'live_media_release': 'RRR',
'live_media_version': 'Rawhide',
})
phase = LiveMediaPhase(compose)
phase.validate()
def test_global_config_null_release(self):
compose = DummyCompose(self.topdir, {
'live_media_release': None,
})
phase = LiveMediaPhase(compose)
phase.validate()
@mock.patch('pungi.phases.livemedia_phase.ThreadPool')
def test_live_media_minimal(self, ThreadPool):
compose = DummyCompose(self.topdir, {
@ -51,6 +33,8 @@ class TestLiveMediaPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = LiveMediaPhase(compose)
phase.run()
@ -107,6 +91,8 @@ class TestLiveMediaPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'git://example.com/repo.git#BEEFCAFE'
phase = LiveMediaPhase(compose)
@ -206,6 +192,8 @@ class TestLiveMediaPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'git://example.com/repo.git#BEEFCAFE'
phase = LiveMediaPhase(compose)
@ -292,6 +280,8 @@ class TestLiveMediaPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = LiveMediaPhase(compose)
with self.assertRaisesRegexp(RuntimeError, r'no.+Missing.+when building.+Server'):
@ -315,6 +305,8 @@ class TestLiveMediaPhase(PungiTestCase):
'koji_profile': 'koji',
})
self.assertEqual(validate(compose.conf), [])
phase = LiveMediaPhase(compose)
with self.assertRaisesRegexp(RuntimeError, r'no.+Missing.+when building.+Server'):
@ -348,6 +340,8 @@ class TestLiveMediaPhase(PungiTestCase):
}
})
self.assertEqual(validate(compose.conf), [])
resolve_git_url.return_value = 'resolved'
phase = LiveMediaPhase(compose)

View File

@ -29,6 +29,7 @@ class DiscInfoTestCase(helpers.PungiTestCase):
compose = helpers.DummyCompose(self.topdir, {
'release_name': 'Test',
'release_version': '1.0',
'release_is_layered': False,
})
metadata.write_discinfo(compose, 'x86_64', compose.variants['Server'])

View File

@ -19,26 +19,6 @@ from pungi.phases import osbs
class OSBSPhaseTest(helpers.PungiTestCase):
def test_validate(self):
compose = helpers.DummyCompose(self.topdir, {
'osbs': {"^Server$": {}}
})
phase = osbs.OSBSPhase(compose)
try:
phase.validate()
except:
self.fail('Correct config must validate')
def test_validate_bad_conf(self):
compose = helpers.DummyCompose(self.topdir, {
'osbs': 'yes please'
})
phase = osbs.OSBSPhase(compose)
with self.assertRaises(ValueError):
phase.validate()
@mock.patch('pungi.phases.osbs.ThreadPool')
def test_run(self, ThreadPool):
cfg = mock.Mock()

View File

@ -16,45 +16,6 @@ from pungi.phases import ostree_installer as ostree
class OstreeInstallerPhaseTest(helpers.PungiTestCase):
def test_validate(self):
compose = helpers.DummyCompose(self.topdir, {
'ostree_installer': [
("^Atomic$", {
"x86_64": {
"source_repo_from": "Everything",
"release": None,
"installpkgs": ["fedora-productimg-atomic"],
"add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],
"add_template_var": [
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
],
"add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"],
"add_arch_template_var": [
"ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",
"ostree_osname=fedora-atomic",
"ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",
]
}
})
]
})
phase = ostree.OstreeInstallerPhase(compose)
try:
phase.validate()
except:
self.fail('Correct config must validate')
def test_validate_bad_conf(self):
compose = helpers.DummyCompose(self.topdir, {
'ostree_installer': 'yes please'
})
phase = ostree.OstreeInstallerPhase(compose)
with self.assertRaises(ValueError):
phase.validate()
@mock.patch('pungi.phases.ostree_installer.ThreadPool')
def test_run(self, ThreadPool):
cfg = mock.Mock()

View File

@ -16,35 +16,6 @@ from pungi.phases import ostree
class OSTreePhaseTest(helpers.PungiTestCase):
def test_validate(self):
compose = helpers.DummyCompose(self.topdir, {
'ostree': [
("^Atomic$", {
"x86_64": {
"treefile": "fedora-atomic-docker-host.json",
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
"source_repo_from": "Everything",
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/"
}
})
]
})
phase = ostree.OSTreePhase(compose)
try:
phase.validate()
except:
self.fail('Correct config must validate')
def test_validate_bad_conf(self):
compose = helpers.DummyCompose(self.topdir, {
'ostree': 'yes please'
})
phase = ostree.OSTreePhase(compose)
with self.assertRaises(ValueError):
phase.validate()
@mock.patch('pungi.phases.ostree.ThreadPool')
def test_run(self, ThreadPool):
cfg = mock.Mock()

View File

@ -13,7 +13,7 @@ from pungi import paths
class TranslatePathTestCase(unittest.TestCase):
def test_does_nothing_without_config(self):
compose = mock.Mock(conf={})
compose = mock.Mock(conf={'translate_paths': []})
ret = paths.translate_path(compose, '/mnt/koji/compose/rawhide/XYZ')
self.assertEqual(ret, '/mnt/koji/compose/rawhide/XYZ')

View File

@ -135,7 +135,9 @@ class TestVolumeIdGenerator(unittest.TestCase):
'release_short': 'rel_short2',
'release_version': '6.0',
'release_is_layered': False,
'image_volid_formats': [format]
'image_volid_formats': [format],
'image_volid_layered_product_formats': [],
'volume_id_substitutions': {},
}
variant = mock.Mock(uid='Server', type='variant')
ci.return_value.compose.respin = 2
@ -363,5 +365,19 @@ class TestGetBuildrootRPMs(unittest.TestCase):
])
class TestLevenshtein(unittest.TestCase):
    """Sanity checks for the util.levenshtein edit-distance helper."""

    def test_edit_dist_empty_str(self):
        self.assertEqual(util.levenshtein('', ''), 0)

    def test_edit_dist_same_str(self):
        distance = util.levenshtein('aaa', 'aaa')
        self.assertEqual(distance, 0)

    def test_edit_dist_one_change(self):
        distance = util.levenshtein('aab', 'aaa')
        self.assertEqual(distance, 1)

    def test_edit_dist_different_words(self):
        # Classic textbook example: kitten -> sitting needs 3 edits.
        distance = util.levenshtein('kitten', 'sitting')
        self.assertEqual(distance, 3)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()