Merge branch 'master' into a8_updated

# Conflicts:
#	pungi.spec
#	pungi/wrappers/
#	tests/
#	tests/
soksanichenko 11 months ago
commit 323d31df2b

.gitignore

@ -11,5 +11,9 @@ tests/data/repo-krb5-lookaside

@ -34,4 +34,6 @@ also moves the artifacts to correct locations.
- Documentation:
- Upstream GIT:
- Issue tracker:
- Questions can be asked on *#fedora-releng* IRC channel on FreeNode
- Questions can be asked in the *#fedora-releng* IRC channel on
or in the matrix room

@ -12,7 +12,7 @@
viewBox="0 0 610.46457 301.1662"
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
@ -24,9 +24,9 @@
@ -70,7 +70,7 @@
rdf:resource="" />
<dc:title />
@ -303,15 +303,15 @@
y="486.55563" />
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
@ -319,7 +319,7 @@
@ -518,5 +518,24 @@
y="486.55563" />
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"



@ -51,9 +51,9 @@ copyright = u'2016, Red Hat, Inc.'
# built documents.
# The short X.Y version.
version = '4.2'
version = '4.3'
# The full version, including alpha/beta/rc tags.
release = '4.2.7'
release = '4.3.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

@ -182,6 +182,8 @@ Options
Please note that when ``dnf`` is used, the build dependencies check is
skipped. On Python 3, only ``dnf`` backend is available.
See also: the ``gather_backend`` setting for Pungi's gather phase.
(*str*) -- URL to Compose Tracking Service. If defined, Pungi will add
the compose to Compose Tracking Service and get the compose ID from it.
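As an illustration (the URL is a placeholder; note that ``cts_url`` also
requires ``translate_paths``, per the config dependencies later in this
change)::

    cts_url = ""
    translate_paths = [
        ("/mnt/koji/compose/", ""),
    ]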
@ -457,6 +459,12 @@ Options
cloned files should be split into subdirectories for each architecture of
the variant.
**createrepo_enable_cache** = True
(*bool*) -- whether to use ``--cachedir`` option of ``createrepo``. It will
cache and reuse checksum values to speed up the createrepo phase.
The cache dir is located at ``/var/cache/pungi/createrepo_c/$release_short-$uid``
e.g. /var/cache/pungi/createrepo_c/Fedora-1000
**product_id** = None
(:ref:`scm_dict <scm_support>`) -- If specified, it should point to a
directory with certificates ``*<variant_uid>-<arch>-*.pem``. Pungi will
@ -581,6 +589,18 @@ Options
(for example) between composes, then Pungi may not respect those changes
in your new compose.
**signed_packages_retries** = 0
(*int*) -- In automated workflows, you might start a compose before Koji
has completely written all signed packages to disk. In this case you may
want Pungi to wait for the package to appear in Koji's storage. This
option controls how many times Pungi will retry looking for the signed
package.
**signed_packages_wait** = 30
(*int*) -- Interval in seconds for how long to wait between attempts to
find signed packages. This option only makes sense when
``signed_packages_retries`` is set higher than 0.
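For example, to tolerate up to 5 × 30 = 150 seconds of signing delay
(values illustrative)::

    signed_packages_retries = 5
    signed_packages_wait = 30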
@ -652,6 +672,11 @@ Options
**buildinstall_allow_reuse** = False
(*bool*) -- When set to ``True``, *Pungi* will try to reuse buildinstall
results from old compose specified by ``--old-composes``.
(*list*) -- Additional packages to be installed in the runroot environment
where lorax will run to create the installer. Format: ``[(variant_uid_regex,
{arch|*: [package_globs]})]``.
@ -686,6 +711,13 @@ Example
# Additional packages to be installed in the Koji runroot environment where
# lorax will run.
buildinstall_packages = [
    ('^Simple$', {
        '*': ['dummy-package'],
    })
]
.. note::
@ -728,7 +760,7 @@ Options
(*bool*) -- When set to ``True``, *Pungi* will try to reuse gather results
from old compose specified by ``--old-composes``.
**greedy_method** = none
(*str*) -- This option controls how package requirements are satisfied in
case a particular ``Requires`` has multiple candidates.
@ -749,7 +781,7 @@ Options
pulled in.
* With ``greedy_method = "all"`` all three packages will be
pulled in.
* With ``greedy_method = "build" ``pkg-b-provider-1`` and
* With ``greedy_method = "build"`` ``pkg-b-provider-1`` and
``pkg-b-provider-2`` will be pulled in.
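For example, to pull in all binary packages built from the same source
package as the matched provider::

    greedy_method = "build"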
@ -763,6 +795,9 @@ Options
``python-multilib`` library. Please refer to ``multilib`` option to see the
See also: the ``repoclosure_backend`` setting for Pungi's repoclosure
(*list*) -- mapping of variant regexes and arches to list of multilib
@ -787,8 +822,14 @@ Options
(*list*) -- additional packages to be included in a variant and
architecture; format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
In contrast to the ``comps_file`` setting, the ``additional_packages``
setting merely adds the list of packages to the compose. When a package
is in a comps group, it is visible to users via ``dnf groupinstall`` and
Anaconda's Groups selection, but ``additional_packages`` does not affect
DNF groups.
The packages specified here are matched against RPM names, not any other
provides in the package not the name of source package. Shell globbing is
provides in the package nor the name of source package. Shell globbing is
used, so wildcards are possible. The package can be specified as name only
or ``name.arch``.
@ -797,6 +838,21 @@ Options
it. If you add a debuginfo package that does not have anything else from
the same build included in the compose, the sources will not be pulled in.
If you list a package in ``additional_packages`` but Pungi cannot find
it (for example, it's not available in the Koji tag), Pungi will log a
warning in the "work" or "logs" directories and continue without aborting.
*Example*: This configuration will add all packages in a Koji tag to an
"Everything" variant::

    additional_packages = [
        ('^Everything$', {
            '*': [
                '*',
            ],
        })
    ]
(*list*) -- packages to be excluded from a variant and architecture;
format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
@ -867,7 +923,8 @@ Options
(*str*) -- JSON mapping with initial packages for the compose. The value
should be a path to JSON file with following mapping: ``{variant: {arch:
{rpm_name: [rpm_arch|None]}}}``.
{rpm_name: [rpm_arch|None]}}}``. Relative paths are interpreted relative to
the location of the main config file.
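A minimal sketch of such a JSON file, with hypothetical variant and package
names (``null`` stands for *None*)::

    {
        "Server": {
            "x86_64": {
                "dummy-bash": ["x86_64"],
                "dummy-filesystem": [null]
            }
        }
    }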
**gather_profiler** = False
(*bool*) -- When set to ``True`` the gather tool will produce additional
@ -1201,7 +1258,7 @@ Options
Format: ``[(variant_uid_regex, {arch|*: bool})]``
**create_jigdo** = True
**create_jigdo** = False
(*bool*) -- controls the creation of jigdo from ISO
**create_optional_isos** = False
@ -1228,6 +1285,11 @@ Options
meaning size in bytes, or it can be a string with ``k``, ``M``, ``G``
suffix (using multiples of 1024).
(*int|list*) [optional] -- Set the ISO9660 conformance level. This is
either a global single value (a number from 1 to 4), or a variant/arch
mapping.
**split_iso_reserve** = 10MiB
(*int|str*) -- how much free space should be left on each disk. The format
is the same as for ``iso_size`` option.
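For instance, both of these forms are valid (values illustrative)::

    split_iso_reserve = "50MiB"
    # or as a plain number of bytes:
    # split_iso_reserve = 52428800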
@ -1391,6 +1453,7 @@ Live Media Settings
* ``repo`` (*str|[str]*) -- repos specified by URL or variant UID
* ``title`` (*str*)
* ``install_tree_from`` (*str*) -- variant to take install tree from
* ``nomacboot`` (*bool*)
Image Build Settings
@ -1531,7 +1594,9 @@ OSBuild Composer for building images
* ``name`` -- name of the Koji package
* ``distro`` -- distribution for which the image should be built (TODO: examples)
* ``image_type`` -- a list of image types to build (e.g. ``qcow2``)
* ``image_types`` -- a list with a single image type string or just a
string representing the image type to build (e.g. ``qcow2``). In any
case, only a single image type can be provided as an argument.
Optional keys:
@ -1547,6 +1612,50 @@ OSBuild Composer for building images
* ``arches`` -- list of architectures for which to build the image. By
default, the variant arches are used. This option can only restrict it,
not add a new one.
* ``ostree_url`` -- URL of the repository that's used to fetch the parent
commit from.
* ``ostree_ref`` -- name of the ostree branch
* ``ostree_parent`` -- commit hash or a branch-like reference to the
parent commit.
* ``upload_options`` -- a dictionary with upload options specific to the
target cloud environment. If provided, the image will be uploaded to the
cloud environment, in addition to the Koji server. One can't combine
arbitrary image types with arbitrary upload options.
The dictionary keys differ based on the target cloud environment. The
following keys are supported:
* **AWS EC2 upload options** -- upload to Amazon Web Services.
* ``region`` -- AWS region to upload the image to
* ``share_with_accounts`` -- list of AWS account IDs to share the image with
* ``snapshot_name`` -- Snapshot name of the uploaded EC2 image
* **AWS S3 upload options** -- upload to Amazon Web Services S3.
* ``region`` -- AWS region to upload the image to
* **Azure upload options** -- upload to Microsoft Azure.
* ``tenant_id`` -- Azure tenant ID to upload the image to
* ``subscription_id`` -- Azure subscription ID to upload the image to
* ``resource_group`` -- Azure resource group to upload the image to
* ``location`` -- Azure location to upload the image to
* ``image_name`` -- Image name of the uploaded Azure image (optional)
* **GCP upload options** -- upload to Google Cloud Platform.
* ``region`` -- GCP region to upload the image to
* ``bucket`` -- GCP bucket to upload the image to
* ``share_with_accounts`` -- list of GCP accounts to share the image with
* ``image_name`` -- Image name of the uploaded GCP image (optional)
* **Container upload options** -- upload to a container registry.
* ``name`` -- name of the container image (optional)
* ``tag`` -- container tag to upload the image to (optional)
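Putting it together, a hedged sketch of one osbuild image configuration
(package name, distro, target cloud and all values are hypothetical)::

    osbuild = {
        "^Server$": [
            {
                "name": "example-guest-image",
                "distro": "fedora-36",
                "image_types": ["qcow2"],  # or simply "qcow2"
                # AWS S3 upload options; the keys differ per target cloud.
                "upload_options": {"region": "us-east-1"},
            }
        ]
    }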
.. note::
There is initial support for having this task as failable without aborting
the whole compose.
@ -1555,6 +1664,56 @@ OSBuild Composer for building images
Image container
This phase supports building containers in OSBS that embed an image created in
the same compose. This can be useful for delivering the image to users running
in containerized environments.
Pungi will start a ``buildContainer`` task in Koji with the configured source
repository. The ``Dockerfile`` can expect that a repo file will be injected
into the container that defines a repo named ``image-to-include``, and its
``baseurl`` will point to the image to include. It is possible to extract the
URL with a command like ``dnf config-manager --dump image-to-include | awk
'/baseurl =/{print $3}'``.
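The same extraction sketched in Python, for a script running inside the
container (the repo file location is an assumption)::

    import configparser

    cp = configparser.ConfigParser()
    cp.read("/etc/yum.repos.d/image-to-include.repo")  # assumed path
    print(cp["image-to-include"]["baseurl"])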
(*dict*) -- configuration for building containers embedding an image.
Format: ``{variant_uid_regex: [{...}]}``.
The inner object will define a single container. These keys are required:
* ``url``, ``target``, ``git_branch``. See OSBS section for definition of
these keys.
* ``image_spec`` -- (*object*) A string mapping of filters used to select
the image to embed. All images listed in metadata for the variant will be
processed. The keys of this filter are used to select metadata fields for
the image, and values are regular expressions that need to match the
metadata value.
The filter should match exactly one image.
Example config
image_container = {
    "^Server$": [{
        "url": "git://",
        "target": "f24-container-candidate",
        "git_branch": "f24",
        "image_spec": {
            "format": "qcow2",
            "arch": "x86_64",
            "path": ".*/guest-image-.*$",
        }
    }]
}
OSTree Settings
@ -1594,6 +1753,8 @@ repository with a new commit.
* ``force_new_commit`` -- (*bool*) Do not use rpm-ostree's built-in change
detection.
Defaults to ``False``.
* ``unified_core`` -- (*bool*) Use rpm-ostree in unified core mode for composes.
Defaults to ``False``.
* ``version`` -- (*str*) Version string to be added as versioning metadata.
a value will be generated automatically as ``$VERSION.$RELEASE``.
@ -1675,6 +1836,8 @@ an OSTree repository. This always runs in Koji as a ``runroot`` task.
with the optional key:
* ``extra_runroot_pkgs`` -- (*[str]*)
* ``skip_branding`` -- (*bool*) Stops lorax from installing packages with branding.
Defaults to ``False``.
**ostree_installer_overwrite** = False
(*bool*) -- by default if a variant including OSTree installer also creates
@ -1754,24 +1917,34 @@ they are not scratch builds).
to create the image will not abort the whole compose.
The configuration will pass other attributes directly to the Koji task.
This includes ``scratch`` and ``priority``.
This includes ``scratch`` and ``priority``. See ``koji list-api
buildContainer`` for more details about these options.
A value for ``yum_repourls`` will be created automatically and point at a
repository in the current compose. You can add extra repositories with
``repo`` key having a list of urls pointing to ``.repo`` files or just
variant uid, Pungi will create the .repo file for that variant. ``gpgkey``
can be specified to enable gpgcheck in repo files for variants.
variant uid, Pungi will create the .repo file for that variant. If a
specific URL is used in the ``repo``, the ``$COMPOSE_ID`` variable in
the ``repo`` string will be replaced with the real compose ID.
``gpgkey`` can be specified to enable gpgcheck in repo files for variants.
(*dict*) -- It is possible to configure extra information about where to
push the image (unless it is a scratch build). For each finished build,
Pungi will try to match NVR against a key in this mapping (using shell-style
globbing) and take the corresponding value and collect them across all built
images. The data will be saved into ``logs/global/osbs-registries.json`` as
a mapping from Koji NVR to the registry data. The same data is also sent to
the message bus on ``osbs-request-push`` topic once the compose finishes
successfully. Handling the message and performing the actual push is outside
of scope for Pungi.
(*dict*) -- Use this optional setting to emit ``osbs-request-push``
messages for each non-scratch container build. These messages can guide
other tools on how to push the images to other registries. For example, an
external tool might trigger on these messages and copy the images from
OSBS's registry to a staging or production registry.
For each completed container build, Pungi will try to match the NVR against
a key in ``osbs_registries`` mapping (using shell-style globbing) and take
the corresponding value and collect them across all built images. Pungi
will save this data into ``logs/global/osbs-registries.json``, mapping each
Koji NVR to the registry data. Pungi will also send this data to the
message bus on the ``osbs-request-push`` topic once the compose finishes
successfully.
Pungi simply logs the mapped data and emits the messages. It does not
handle the messages or push images. A separate tool must do that.
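A hypothetical consumer sketch (file name per the text above; the actual
push mechanism is out of Pungi's scope)::

    import json

    with open("logs/global/osbs-registries.json") as f:
        mapping = json.load(f)

    for nvr, registry_data in mapping.items():
        # An external tool would copy/push the image here.
        print(nvr, registry_data)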
Example config

@ -30,9 +30,17 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
module_defaults_dir = {
'scm': 'git',
'repo': '',
'branch': 'master',
'branch': 'main',
'dir': '.'
}
# Optional module obsoletes configuration which is merged
# into the module index and gets resolved
module_obsoletes_dir = {
'scm': 'git',
'repo': '',
'branch': 'main',
'dir': 'obsoletes'
}
sigkeys = ['12C944D0']
@ -83,7 +91,6 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
iso_hfs_ppc64le_compatible = False
create_jigdo = False
buildinstall_method = 'lorax'
@ -325,6 +332,8 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
"tag_ref": False,
# Don't use change detection in ostree.
"force_new_commit": True,
# Use unified core mode for rpm-ostree composes
"unified_core": True,
# This is the location for the repo where new commit will be
# created. Note that this is outside of the compose dir.
"ostree_repo": "/mnt/koji/compose/ostree/repo/",

@ -12,8 +12,9 @@ happened. A JSON-encoded object will be passed to standard input to provide
more information about the event. At the very least, the object will contain a
``compose_id`` key.
The script is invoked in compose directory and can read other information
The notification script inherits the working directory from the parent process
and it can be called from the same directory ``pungi-koji`` is called from. The
working directory is listed at the start of the main log.
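A minimal notification script sketch (only the ``compose_id`` key is
guaranteed by the description above)::

    #!/usr/bin/env python3
    import json
    import sys

    data = json.load(sys.stdin)  # event details arrive on standard input
    print("notification for compose:", data["compose_id"])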
Currently these messages are sent:

@ -115,16 +115,30 @@ ImageBuild
This phase wraps up ``koji image-build``. It also updates the metadata
ultimately responsible for ``images.json`` manifest.
Similarly to image build, this phase creates a Koji ``osbuild`` task. In the
background it uses OSBuild Composer to create images.
This phase builds docker base images in `OSBS
This phase builds container base images in `OSBS
The finished images are available in a registry provided by OSBS, but not
downloaded directly into the compose. There is metadata about the created image
in ``compose/metadata/osbs.json``.
This phase builds a container image in OSBS, and stores the metadata in the
same file as the OSBS phase. The container produced here wraps a different
image, created in the ImageBuild or OSBuild phase. It can be useful to deliver
a VM image to containerized environments.

@ -131,8 +131,8 @@ def getArchList(thisarch=None): # pragma: no cover
def _try_read_cpuinfo(): # pragma: no cover
""" Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
mounted). """
"""Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
with open("/proc/cpuinfo", "r") as f:
return f.readlines()
@ -141,8 +141,8 @@ def _try_read_cpuinfo(): # pragma: no cover
def _parse_auxv(): # pragma: no cover
""" Read /proc/self/auxv and parse it into global dict for easier access
later on, very similar to what rpm does. """
"""Read /proc/self/auxv and parse it into global dict for easier access
later on, very similar to what rpm does."""
# In case we can't open and read /proc/self/auxv, just return
with open("/proc/self/auxv", "rb") as f:
@ -326,8 +326,8 @@ def getMultiArchInfo(arch=canonArch): # pragma: no cover
def getBaseArch(myarch=None): # pragma: no cover
"""returns 'base' arch for myarch, if specified, or canonArch if not.
base arch is the arch before noarch in the arches dict if myarch is not
a key in the multilibArches."""
base arch is the arch before noarch in the arches dict if myarch is not
a key in the multilibArches."""
if not myarch:
myarch = canonArch

@ -53,7 +53,7 @@ from . import util
def is_jigdo_needed(conf):
return conf.get("create_jigdo", True)
return conf.get("create_jigdo")
def is_isohybrid_needed(conf):
@ -75,8 +75,7 @@ def is_isohybrid_needed(conf):
def is_genisoimage_needed(conf):
"""This is only needed locally for createiso without runroot.
"""This is only needed locally for createiso without runroot."""
runroot_tag = conf.get("runroot_tag", "")
if runroot_tag or conf.get("createiso_use_xorrisofs"):
return False
@ -94,7 +93,7 @@ def is_xorrisofs_needed(conf):
def is_createrepo_c_needed(conf):
return conf.get("createrepo_c", True)
return conf.get("createrepo_c")
# The first element in the tuple is package name expected to have the
@ -230,7 +229,6 @@ def validate(config, offline=False, schema=None):
validator = DefaultValidator(
{"array": (tuple, list), "regex": six.string_types, "url": six.string_types},
errors = []
warnings = []
@ -446,6 +444,16 @@ def _extend_with_default_and_alias(validator_class, offline=False):
def is_array(checker, instance):
return isinstance(instance, (tuple, list))
def is_string_type(checker, instance):
return isinstance(instance, six.string_types)
type_checker = validator_class.TYPE_CHECKER.redefine_many(
{"array": is_array, "regex": is_string_type, "url": is_string_type}
return jsonschema.validators.extend(
@ -456,6 +464,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
"additionalProperties": _validate_additional_properties,
"anyOf": _validate_any_of,
@ -610,7 +619,7 @@ def make_schema():
"runroot_ssh_init_template": {"type": "string"},
"runroot_ssh_install_packages_template": {"type": "string"},
"runroot_ssh_run_template": {"type": "string"},
"create_jigdo": {"type": "boolean", "default": True},
"create_jigdo": {"type": "boolean", "default": False},
"check_deps": {"type": "boolean", "default": True},
"require_all_comps_packages": {"type": "boolean", "default": False},
"bootable": {
@ -654,6 +663,8 @@ def make_schema():
"gather_profiler": {"type": "boolean", "default": False},
"gather_allow_reuse": {"type": "boolean", "default": False},
"pkgset_allow_reuse": {"type": "boolean", "default": True},
"createiso_allow_reuse": {"type": "boolean", "default": True},
"extraiso_allow_reuse": {"type": "boolean", "default": True},
"pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
"createrepo_c": {"type": "boolean", "default": True},
"createrepo_checksum": {
@ -661,6 +672,7 @@ def make_schema():
"default": "sha256",
"enum": ["sha1", "sha256", "sha512"],
"createrepo_enable_cache": {"type": "boolean", "default": True},
"createrepo_use_xz": {"type": "boolean", "default": False},
"createrepo_num_threads": {"type": "number", "default": get_num_cpus()},
"createrepo_num_workers": {"type": "number", "default": 3},
@ -722,6 +734,8 @@ def make_schema():
"minItems": 1,
"default": [None],
"signed_packages_retries": {"type": "number", "default": 0},
"signed_packages_wait": {"type": "number", "default": 30},
"variants_file": {"$ref": "#/definitions/str_or_scm_dict"},
"comps_file": {"$ref": "#/definitions/str_or_scm_dict"},
"comps_filter_environments": {"type": "boolean", "default": True},
@ -732,6 +746,7 @@ def make_schema():
"patternProperties": {".+": {"$ref": "#/definitions/strings"}},
"additionalProperties": False,
"module_obsoletes_dir": {"$ref": "#/definitions/str_or_scm_dict"},
"create_optional_isos": {"type": "boolean", "default": False},
"symlink_isos_to": {"type": "string"},
"dogpile_cache_backend": {"type": "string"},
@ -744,6 +759,12 @@ def make_schema():
"createiso_break_hardlinks": {"type": "boolean", "default": False},
"createiso_use_xorrisofs": {"type": "boolean", "default": False},
"iso_level": {
"anyOf": [
{"type": "number", "enum": [1, 2, 3, 4]},
_variant_arch_mapping({"type": "number", "enum": [1, 2, 3, 4]}),
"iso_hfs_ppc64le_compatible": {"type": "boolean", "default": True},
"multilib": _variant_arch_mapping(
{"$ref": "#/definitions/list_of_strings"}
@ -785,6 +806,10 @@ def make_schema():
"buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
"buildinstall_use_guestmount": {"type": "boolean", "default": True},
"buildinstall_skip": _variant_arch_mapping({"type": "boolean"}),
"buildinstall_packages": {
"$ref": "#/definitions/package_mapping",
"default": [],
"global_ksurl": {"type": "url"},
"global_version": {"type": "string"},
"global_target": {"type": "string"},
@ -976,6 +1001,7 @@ def make_schema():
"arches": {"$ref": "#/definitions/list_of_strings"},
"failable": {"$ref": "#/definitions/list_of_strings"},
"release": {"$ref": "#/definitions/optional_string"},
"nomacboot": {"type": "boolean"},
"required": ["name", "kickstart"],
"additionalProperties": False,
@ -1009,6 +1035,7 @@ def make_schema():
"update_summary": {"type": "boolean"},
"force_new_commit": {"type": "boolean"},
"unified_core": {"type": "boolean"},
"version": {"type": "string"},
"config_branch": {"type": "string"},
"tag_ref": {"type": "boolean"},
@ -1043,6 +1070,7 @@ def make_schema():
"failable": {"$ref": "#/definitions/list_of_strings"},
"update_summary": {"type": "boolean"},
"force_new_commit": {"type": "boolean"},
"unified_core": {"type": "boolean"},
"version": {"type": "string"},
"config_branch": {"type": "string"},
"tag_ref": {"type": "boolean"},
@ -1072,6 +1100,7 @@ def make_schema():
"template_repo": {"type": "string"},
"template_branch": {"type": "string"},
"extra_runroot_pkgs": {"$ref": "#/definitions/list_of_strings"},
"skip_branding": {"type": "boolean"},
"additionalProperties": False,
@ -1082,6 +1111,7 @@ def make_schema():
"live_images": _variant_arch_mapping(
_one_or_list({"$ref": "#/definitions/live_image_config"})
"image_build_allow_reuse": {"type": "boolean", "default": False},
"image_build": {
"type": "object",
"patternProperties": {
@ -1149,12 +1179,109 @@ def make_schema():
"version": {"type": "string"},
"distro": {"type": "string"},
"target": {"type": "string"},
"image_types": {"$ref": "#/definitions/strings"},
# Only a single image_type can be specified
"image_types": {
    "oneOf": [
        {
            "type": "array",
            "items": {"type": "string"},
            "description": "Deprecated variant",
            "minItems": 1,
            "maxItems": 1,
        },
        {"type": "string"},
    ]
},
"arches": {"$ref": "#/definitions/list_of_strings"},
"release": {"type": "string"},
"repo": {"$ref": "#/definitions/list_of_strings"},
"failable": {"$ref": "#/definitions/list_of_strings"},
"subvariant": {"type": "string"},
"ostree_url": {"type": "string"},
"ostree_ref": {"type": "string"},
"ostree_parent": {"type": "string"},
"upload_options": {
"oneOf": [
# AWSEC2UploadOptions
"type": "object",
"additionalProperties": False,
"required": [
"properties": {
"region": {
"type": "string",
"snapshot_name": {
"type": "string",
"share_with_accounts": {
"type": "array",
"items": {"type": "string"},
# AWSS3UploadOptions
"type": "object",
"additionalProperties": False,
"required": ["region"],
"properties": {
"region": {"type": "string"}
# AzureUploadOptions
"type": "object",
"additionalProperties": False,
"required": [
"properties": {
"tenant_id": {"type": "string"},
"subscription_id": {"type": "string"},
"resource_group": {"type": "string"},
"location": {"type": "string"},
"image_name": {
"type": "string",
# GCPUploadOptions
"type": "object",
"additionalProperties": False,
"required": ["region", "bucket"],
"properties": {
"region": {"type": "string"},
"bucket": {"type": "string"},
"image_name": {
"type": "string",
"share_with_accounts": {
"type": "array",
"items": {"type": "string"},
# ContainerUploadOptions
"type": "object",
"additionalProperties": False,
"properties": {
"name": {"type": "string"},
"tag": {"type": "string"},
"required": ["name", "distro", "image_types"],
"additionalProperties": False,
@ -1203,6 +1330,7 @@ def make_schema():
"anyOf": [{"type": "string"}, {"type": "number"}],
"default": 10 * 1024 * 1024,
"osbs_allow_reuse": {"type": "boolean", "default": False},
"osbs": {
"type": "object",
"patternProperties": {
@ -1221,6 +1349,26 @@ def make_schema():
"additionalProperties": False,
"image_container": {
"type": "object",
"patternProperties": {
".+": _one_or_list(
"type": "object",
"properties": {
"url": {"type": "url"},
"target": {"type": "string"},
"priority": {"type": "number"},
"failable": {"type": "boolean"},
"git_branch": {"type": "string"},
"image_spec": {"type": "object"},
"required": ["url", "target", "git_branch", "image_spec"],
"additionalProperties": False,
"extra_files": _variant_arch_mapping(
"type": "array",
@ -1325,6 +1473,7 @@ CONFIG_DEPS = {
"requires": ((lambda x: x, ["base_product_name", "base_product_short"]),),
"conflicts": ((lambda x: not x, ["base_product_name", "base_product_short"]),),
"cts_url": {"requires": ((lambda x: x, ["translate_paths"]),)},
"product_id": {"conflicts": [(lambda x: not x, ["product_id_allow_missing"])]},
"pkgset_scratch_modules": {"requires": ((lambda x: x, ["mbs_api_url"]),)},
"pkgset_source": {

@ -24,8 +24,12 @@ import time
import tempfile
import shutil
import json
import socket
import kobo.log
import kobo.tback
import requests
from requests.exceptions import RequestException
from productmd.composeinfo import ComposeInfo
from productmd.images import Images
from dogpile.cache import make_region
@ -40,6 +44,8 @@ from pungi.util import (
from pungi.metadata import compose_to_composeinfo
@ -51,6 +57,14 @@ except ImportError:
SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
def retry_request(method, url, data=None, auth=None):
request_method = getattr(requests, method)
rv = request_method(url, json=data, auth=auth)
return rv
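# Note: the retry logic itself is elided in this hunk. A minimal sketch of a
# retrying variant (retry count, delay and error handling are assumptions, not
# the verbatim Pungi implementation; requests, time and RequestException are
# imported above):
def retry_request_sketch(method, url, data=None, auth=None, retries=5, wait=30):
    request_method = getattr(requests, method)
    for attempt in range(retries):
        try:
            rv = request_method(url, json=data, auth=auth)
            rv.raise_for_status()  # treat HTTP errors as retryable too
            return rv
        except RequestException:
            if attempt == retries - 1:
                raise
            time.sleep(wait)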
def get_compose_info(
@ -83,20 +97,19 @@ def get_compose_info(
cts_url = conf.get("cts_url", None)
if cts_url:
# Import requests and requests-kerberos here so it is not needed
# if running without Compose Tracking Service.
import requests
from requests_kerberos import HTTPKerberosAuth
# Requests-kerberos cannot accept custom keytab, we need to use
# environment variable for this. But we need to change environment
# only temporarily just for this single call.
# So at first backup the current environment and revert to it
# after the call.
cts_keytab = conf.get("cts_keytab", None)
authentication = get_authentication(conf)
if cts_keytab:
environ_copy = dict(os.environ)
if "$HOSTNAME" in cts_keytab:
cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
# Create compose in CTS and get the reserved compose ID.
@ -107,10 +120,10 @@ def get_compose_info(
"parent_compose_ids": parent_compose_ids,
"respin_of": respin_of,
rv =, json=data, auth=HTTPKerberosAuth())
rv = retry_request("post", url, data=data, auth=authentication)
if cts_keytab:
shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
@ -119,12 +132,23 @@ def get_compose_info(
ci.compose.respin = cts_ci.compose.respin =
else: = ci.create_compose_id()

return ci
def get_authentication(conf):
authentication = None
cts_keytab = conf.get("cts_keytab", None)
if cts_keytab:
from requests_kerberos import HTTPKerberosAuth
authentication = HTTPKerberosAuth()
return authentication
def write_compose_info(compose_dir, ci):
Write ComposeInfo `ci` to `compose_dir` subdirectories.
@ -137,6 +161,20 @@ def write_compose_info(compose_dir, ci):
ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
def update_compose_url(compose_id, compose_dir, conf):
authentication = get_authentication(conf)
cts_url = conf.get("cts_url", None)
if cts_url:
url = os.path.join(cts_url, "api/1/composes", compose_id)
tp = conf.get("translate_paths", None)
compose_url = translate_path_raw(tp, compose_dir)
data = {
    "action": "set_url",
    "compose_url": compose_url,
}
return retry_request("patch", url, data=data, auth=authentication)
def get_compose_dir(
@ -222,6 +260,8 @@ class Compose(kobo.log.LoggingBase):
self.koji_event = koji_event or conf.get("koji_event")
self.notifier = notifier
self._old_config = None
# path definitions
self.paths = Paths(self)
@ -284,6 +324,8 @@ class Compose(kobo.log.LoggingBase): = self.compose_respin = self.paths.compose.metadata()
self.containers_metadata = {}
# Stores list of deliverables that failed, but did not abort the
# compose.
# {deliverable: [(Variant.uid, arch, subvariant)]}
@ -303,6 +345,7 @@ class Compose(kobo.log.LoggingBase):
get_compose_info = staticmethod(get_compose_info)
write_compose_info = staticmethod(write_compose_info)
get_compose_dir = staticmethod(get_compose_dir)
update_compose_url = staticmethod(update_compose_url)
def __getitem__(self, name):
return self.variants[name]
@ -343,6 +386,10 @@ class Compose(kobo.log.LoggingBase):
def has_module_defaults(self):
return bool(self.conf.get("module_defaults_dir", False))
def has_module_obsoletes(self):
return bool(self.conf.get("module_obsoletes_dir", False))
def config_dir(self):
return os.path.dirname(self.conf._open_file or "")
@ -370,7 +417,7 @@ class Compose(kobo.log.LoggingBase):
file_name = os.path.basename(scm_dict)
scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
scm_dict = os.path.join(self.config_dir, scm_dict)
self.log_debug("Writing variants file: %s", variants_file)
tmp_dir = self.mkdtemp(prefix="variants_file_")
@ -573,7 +620,52 @@ class Compose(kobo.log.LoggingBase):
path = os.path.join(, variant=variant))
return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
tmpdir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
os.chmod(tmpdir, 0o755)
return tmpdir
def dump_containers_metadata(self):
    """Create a file with container metadata if there are any containers."""
    if not self.containers_metadata:
        return
    with open(self.paths.compose.metadata("osbs.json"), "w") as f:
        json.dump(
            self.containers_metadata, f, indent=4, separators=(",", ": ")
        )
def traceback(self, detail=None):
    """Store an extended traceback. This method should only be called when
    handling an exception.

    :param str detail: Extra information appended to the filename
    """
    basename = "traceback"
    if detail:
        basename += "-" + detail
    tb_path = self.paths.log.log_file("global", basename)
    self.log_error("Extended traceback in: %s", tb_path)
    with open(tb_path, "wb") as f:
        f.write(kobo.tback.Traceback().get_traceback())
def load_old_compose_config(self):
    """
    Helper method to load Pungi config dump from old compose.
    """
    if not self._old_config:
        config_dump_full = self.paths.log.log_file("global", "config-dump")
        config_dump_full = self.paths.old_compose_path(config_dump_full)
        if not config_dump_full:
            return None

        self.log_info("Loading old config file: %s", config_dump_full)
        with open(config_dump_full, "r") as f:
            self._old_config = json.load(f)

    return self._old_config
def get_ordered_variant_uids(compose):

@ -15,6 +15,7 @@ CreateIsoOpts = namedtuple(
@ -25,6 +26,8 @@ CreateIsoOpts = namedtuple(
CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)
@ -76,6 +79,8 @@ def make_image(f, opts):
emit(f, cmd)
@ -97,7 +102,7 @@ def run_isohybrid(f, opts):
def make_manifest(f, opts):
emit(f, iso.get_manifest_cmd(opts.iso_name))
emit(f, iso.get_manifest_cmd(opts.iso_name, opts.use_xorrisofs))
def make_jigdo(f, opts):
@ -113,6 +118,27 @@ def make_jigdo(f, opts):
emit(f, cmd)
def write_xorriso_commands(opts):
    script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
    with open(script, "w") as f:
        emit(f, "-indev %s" % opts.boot_iso)
        emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
        emit(f, "-boot_image any replay")
        emit(f, "-volid %s" % opts.volid)

        with open(opts.graft_points) as gp:
            for line in gp:
                iso_path, fs_path = line.strip().split("=", 1)
                emit(f, "-map %s %s" % (fs_path, iso_path))

        if opts.arch == "ppc64le":
            # This is needed for the image to be bootable.
            emit(f, "-as mkisofs -U --")

        emit(f, "-end")
    return script
def write_script(opts, f):
if bool(opts.jigdo_dir) != bool(opts.os_tree):
raise RuntimeError("jigdo_dir must be used together with os_tree")
@ -120,8 +146,14 @@ def write_script(opts, f):
emit(f, "#!/bin/bash")
emit(f, "set -ex")
emit(f, "cd %s" % opts.output_dir)
make_image(f, opts)
run_isohybrid(f, opts)
if opts.use_xorrisofs and opts.buildinstall_method:
script = write_xorriso_commands(opts)
emit(f, "xorriso -dialog on <%s" % script)
make_image(f, opts)
run_isohybrid(f, opts)
implant_md5(f, opts)
make_manifest(f, opts)
if opts.jigdo_dir:

@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# GNU Library General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <>.
class UnsignedPackagesError(RuntimeError):
    """Raised when package set fails to find a properly signed copy of an
    important package.
    """

@ -35,7 +35,7 @@ from pungi.wrappers.createrepo import CreaterepoWrapper
class ReentrantYumLock(object):
""" A lock that can be acquired multiple times by the same process. """
"""A lock that can be acquired multiple times by the same process."""
def __init__(self, lock, log):
self.lock = lock
@ -60,7 +60,7 @@ class ReentrantYumLock(object):
def yumlocked(method):
""" A locking decorator. """
"""A locking decorator."""
def wrapper(self, *args, **kwargs):
with self.yumlock:
@ -519,7 +519,7 @@ class Pungi(PungiBase):
def verifyCachePkg(self, po, path): # Stolen from yum
"""check the package checksum vs the cache
return True if pkg is good, False if not"""
return True if pkg is good, False if not"""
(csum_type, csum) = po.returnIdSum()
@ -682,7 +682,7 @@ class Pungi(PungiBase):
def get_package_deps(self, po):
"""Add the dependencies for a given package to the
transaction info"""
transaction info"""
added = set()