diff --git a/.gitignore b/.gitignore
index 07fb4417..fe291c82 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,5 +11,9 @@ tests/data/repo-krb5-lookaside
tests/_composes
htmlcov/
.coverage
+.eggs
.idea/
.tox
+.venv
+.kdev4/
+pungi.kdev4
diff --git a/README.md b/README.md
index 5828077b..ae7d9a48 100644
--- a/README.md
+++ b/README.md
@@ -34,4 +34,6 @@ also moves the artifacts to correct locations.
- Documentation: https://docs.pagure.org/pungi/
- Upstream GIT: https://pagure.io/pungi/
- Issue tracker: https://pagure.io/pungi/issues
-- Questions can be asked on *#fedora-releng* IRC channel on FreeNode
+- Questions can be asked in the *#fedora-releng* IRC channel on irc.libera.chat
+ or in the matrix room
+ [`#releng:fedoraproject.org`](https://matrix.to/#/#releng:fedoraproject.org)
diff --git a/doc/_static/phases.svg b/doc/_static/phases.svg
index 5083e3b7..b973798a 100644
--- a/doc/_static/phases.svg
+++ b/doc/_static/phases.svg
@@ -12,7 +12,7 @@
viewBox="0 0 610.46457 301.1662"
id="svg2"
version="1.1"
- inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+ inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
sodipodi:docname="phases.svg"
inkscape:export-filename="/home/lsedlar/repos/pungi/doc/_static/phases.png"
inkscape:export-xdpi="90"
@@ -24,9 +24,9 @@
borderopacity="1.0"
inkscape:pageopacity="1"
inkscape:pageshadow="2"
- inkscape:zoom="2.1213203"
- inkscape:cx="276.65806"
- inkscape:cy="189.24198"
+ inkscape:zoom="1.5"
+ inkscape:cx="9.4746397"
+ inkscape:cy="58.833855"
inkscape:document-units="px"
inkscape:current-layer="layer1"
showgrid="false"
@@ -70,7 +70,7 @@
image/svg+xml
-
+
@@ -303,15 +303,15 @@
ImageChecksum
+ y="921.73846">ImageChecksum
@@ -518,5 +518,24 @@
id="tspan301-5"
style="font-size:12px;line-height:0">OSBuild
+
+ ImageContainer
diff --git a/doc/conf.py b/doc/conf.py
index 6aa7e512..d5cd50b5 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -51,9 +51,9 @@ copyright = u'2016, Red Hat, Inc.'
# built documents.
#
# The short X.Y version.
-version = '4.2'
+version = '4.3'
# The full version, including alpha/beta/rc tags.
-release = '4.2.7'
+release = '4.3.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/configuration.rst b/doc/configuration.rst
index d1175baa..907f72f4 100644
--- a/doc/configuration.rst
+++ b/doc/configuration.rst
@@ -182,6 +182,8 @@ Options
Please note that when ``dnf`` is used, the build dependencies check is
skipped. On Python 3, only ``dnf`` backend is available.
+ See also: the ``gather_backend`` setting for Pungi's gather phase.
+
**cts_url**
(*str*) -- URL to Compose Tracking Service. If defined, Pungi will add
the compose to Compose Tracking Service and get the compose ID from it.
@@ -457,6 +459,12 @@ Options
cloned files should be split into subdirectories for each architecture of
the variant.
+**createrepo_enable_cache** = True
+    (*bool*) -- whether to use the ``--cachedir`` option of ``createrepo``. It
+    will cache and reuse checksum values to speed up the createrepo phase. The
+    cache directory is located at
+    ``/var/cache/pungi/createrepo_c/$release_short-$uid``, e.g.
+    ``/var/cache/pungi/createrepo_c/Fedora-1000``.
+
**product_id** = None
(:ref:`scm_dict `) -- If specified, it should point to a
directory with certificates ``*--*.pem``. Pungi will
@@ -581,6 +589,18 @@ Options
(for example) between composes, then Pungi may not respect those changes
in your new compose.
+**signed_packages_retries** = 0
+ (*int*) -- In automated workflows, you might start a compose before Koji
+ has completely written all signed packages to disk. In this case you may
+ want Pungi to wait for the package to appear in Koji's storage. This
+ option controls how many times Pungi will retry looking for the signed
+ copy.
+
+**signed_packages_wait** = 30
+ (*int*) -- Interval in seconds for how long to wait between attempts to
+ find signed packages. This option only makes sense when
+ ``signed_packages_retries`` is set higher than 0.
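+
+    *Example* (an illustrative combination that waits up to five minutes in
+    total, i.e. 10 retries × 30 seconds)::
+
+        signed_packages_retries = 10
+        signed_packages_wait = 30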
+
Example
-------
@@ -652,6 +672,11 @@ Options
**buildinstall_allow_reuse** = False
(*bool*) -- When set to ``True``, *Pungi* will try to reuse buildinstall
results from old compose specified by ``--old-composes``.
+**buildinstall_packages**
+    (*list*) -- Additional packages to be installed in the runroot environment
+    where lorax will run to create the installer. Format:
+    ``[(variant_uid_regex, {arch|*: [package_globs]})]``.
+
Example
-------
@@ -686,6 +711,13 @@ Example
})
]
+ # Additional packages to be installed in the Koji runroot environment where
+ # lorax will run.
+ buildinstall_packages = [
+ ('^Simple$', {
+ '*': ['dummy-package'],
+ })
+ ]
.. note::
@@ -728,7 +760,7 @@ Options
(*bool*) -- When set to ``True``, *Pungi* will try to reuse gather results
from old compose specified by ``--old-composes``.
-**greedy_method**
+**greedy_method** = none
(*str*) -- This option controls how package requirements are satisfied in
case a particular ``Requires`` has multiple candidates.
@@ -749,7 +781,7 @@ Options
pulled in.
* With ``greedy_method = "all"`` all three packages will be
pulled in.
- * With ``greedy_method = "build" ``pkg-b-provider-1`` and
+ * With ``greedy_method = "build"`` ``pkg-b-provider-1`` and
``pkg-b-provider-2`` will be pulled in.
**gather_backend**
@@ -763,6 +795,9 @@ Options
``python-multilib`` library. Please refer to ``multilib`` option to see the
differences.
+ See also: the ``repoclosure_backend`` setting for Pungi's repoclosure
+ phase.
+
**multilib**
(*list*) -- mapping of variant regexes and arches to list of multilib
methods
@@ -787,8 +822,14 @@ Options
(*list*) -- additional packages to be included in a variant and
architecture; format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
+ In contrast to the ``comps_file`` setting, the ``additional_packages``
+ setting merely adds the list of packages to the compose. When a package
+ is in a comps group, it is visible to users via ``dnf groupinstall`` and
+ Anaconda's Groups selection, but ``additional_packages`` does not affect
+ DNF groups.
+
The packages specified here are matched against RPM names, not any other
- provides in the package not the name of source package. Shell globbing is
+    provides in the package nor the name of the source package. Shell globbing is
used, so wildcards are possible. The package can be specified as name only
or ``name.arch``.
@@ -797,6 +838,21 @@ Options
it. If you add a debuginfo package that does not have anything else from
the same build included in the compose, the sources will not be pulled in.
+ If you list a package in ``additional_packages`` but Pungi cannot find
+ it (for example, it's not available in the Koji tag), Pungi will log a
+ warning in the "work" or "logs" directories and continue without aborting.
+
+ *Example*: This configuration will add all packages in a Koji tag to an
+ "Everything" variant::
+
+ additional_packages = [
+ ('^Everything$', {
+ '*': [
+ '*',
+ ],
+ })
+ ]
+
**filter_packages**
(*list*) -- packages to be excluded from a variant and architecture;
format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
@@ -867,7 +923,8 @@ Options
**gather_source_mapping**
(*str*) -- JSON mapping with initial packages for the compose. The value
should be a path to JSON file with following mapping: ``{variant: {arch:
- {rpm_name: [rpm_arch|None]}}}``.
+ {rpm_name: [rpm_arch|None]}}}``. Relative paths are interpreted relative to
+    the location of the main config file.
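+
+    *Example* (a minimal sketch of the expected JSON structure; the variant,
+    arch, and package names are illustrative)::
+
+        {
+            "Server": {
+                "x86_64": {
+                    "bash": ["x86_64"],
+                    "glibc": [null]
+                }
+            }
+        }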
**gather_profiler** = False
(*bool*) -- When set to ``True`` the gather tool will produce additional
@@ -1201,7 +1258,7 @@ Options
Format: ``[(variant_uid_regex, {arch|*: bool})]``
-**create_jigdo** = True
+**create_jigdo** = False
(*bool*) -- controls the creation of jigdo from ISO
**create_optional_isos** = False
@@ -1228,6 +1285,11 @@ Options
meaning size in bytes, or it can be a string with ``k``, ``M``, ``G``
suffix (using multiples of 1024).
+**iso_level**
+ (*int|list*) [optional] -- Set the ISO9660 conformance level. This is
+ either a global single value (a number from 1 to 4), or a variant/arch
+ mapping.
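+
+    *Example* (illustrative; a variant/arch mapping that uses level 3 for
+    source ISOs and level 4 everywhere else)::
+
+        iso_level = [
+            (".*", {
+                "src": 3,
+                "*": 4,
+            }),
+        ]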
+
**split_iso_reserve** = 10MiB
(*int|str*) -- how much free space should be left on each disk. The format
is the same as for ``iso_size`` option.
@@ -1391,6 +1453,7 @@ Live Media Settings
* ``repo`` (*str|[str]*) -- repos specified by URL or variant UID
* ``title`` (*str*)
* ``install_tree_from`` (*str*) -- variant to take install tree from
+ * ``nomacboot`` (*bool*)
Image Build Settings
@@ -1531,7 +1594,9 @@ OSBuild Composer for building images
* ``name`` -- name of the Koji package
    * ``distro`` -- image for which distribution should be built TODO examples
- * ``image_type`` -- a list of image types to build (e.g. ``qcow2``)
+ * ``image_types`` -- a list with a single image type string or just a
+ string representing the image type to build (e.g. ``qcow2``). In any
+ case, only a single image type can be provided as an argument.
Optional keys:
@@ -1547,6 +1612,50 @@ OSBuild Composer for building images
* ``arches`` -- list of architectures for which to build the image. By
default, the variant arches are used. This option can only restrict it,
not add a new one.
+ * ``ostree_url`` -- URL of the repository that's used to fetch the parent
+ commit from.
+ * ``ostree_ref`` -- name of the ostree branch
+    * ``ostree_parent`` -- commit hash or a branch-like reference to the
+ parent commit.
+ * ``upload_options`` -- a dictionary with upload options specific to the
+ target cloud environment. If provided, the image will be uploaded to the
+ cloud environment, in addition to the Koji server. One can't combine
+ arbitrary image types with arbitrary upload options.
+ The dictionary keys differ based on the target cloud environment. The
+ following keys are supported:
+
+ * **AWS EC2 upload options** -- upload to Amazon Web Services.
+
+ * ``region`` -- AWS region to upload the image to
+ * ``share_with_accounts`` -- list of AWS account IDs to share the image
+ with
+ * ``snapshot_name`` -- Snapshot name of the uploaded EC2 image
+ (optional)
+
+ * **AWS S3 upload options** -- upload to Amazon Web Services S3.
+
+ * ``region`` -- AWS region to upload the image to
+
+ * **Azure upload options** -- upload to Microsoft Azure.
+
+ * ``tenant_id`` -- Azure tenant ID to upload the image to
+ * ``subscription_id`` -- Azure subscription ID to upload the image to
+ * ``resource_group`` -- Azure resource group to upload the image to
+ * ``location`` -- Azure location to upload the image to
+ * ``image_name`` -- Image name of the uploaded Azure image (optional)
+
+ * **GCP upload options** -- upload to Google Cloud Platform.
+
+ * ``region`` -- GCP region to upload the image to
+ * ``bucket`` -- GCP bucket to upload the image to
+ * ``share_with_accounts`` -- list of GCP accounts to share the image
+ with
+ * ``image_name`` -- Image name of the uploaded GCP image (optional)
+
+ * **Container upload options** -- upload to a container registry.
+
+ * ``name`` -- name of the container image (optional)
+ * ``tag`` -- container tag to upload the image to (optional)
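+
+      *Example* (an illustrative ``upload_options`` entry for AWS EC2; the
+      region and account ID are placeholders)::
+
+          "upload_options": {
+              "region": "us-east-1",
+              "share_with_accounts": ["123456789012"],
+          }
+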
.. note::
There is initial support for having this task as failable without aborting
@@ -1555,6 +1664,56 @@ OSBuild Composer for building images
arch.
+Image container
+===============
+
+This phase supports building containers in OSBS that embed an image created in
+the same compose. This can be useful for delivering the image to users running
+in containerized environments.
+
+Pungi will start a ``buildContainer`` task in Koji with the configured source
+repository. The ``Dockerfile`` can expect that a repo file will be injected
+into the container that defines a repo named ``image-to-include``, and its
+``baseurl`` will point to the image to include. It is possible to extract the
+URL with a command like ``dnf config-manager --dump image-to-include | awk
+'/baseurl =/{print $3}'``.
+
+**image_container**
+ (*dict*) -- configuration for building containers embedding an image.
+
+ Format: ``{variant_uid_regex: [{...}]}``.
+
+ The inner object will define a single container. These keys are required:
+
+ * ``url``, ``target``, ``git_branch``. See OSBS section for definition of
+ these.
+ * ``image_spec`` -- (*object*) A string mapping of filters used to select
+ the image to embed. All images listed in metadata for the variant will be
+ processed. The keys of this filter are used to select metadata fields for
+      the image, and values are regular expressions that need to match the
+ metadata value.
+
+ The filter should match exactly one image.
+
+
+Example config
+--------------
+::
+
+ image_container = {
+ "^Server$": [{
+ "url": "git://example.com/dockerfiles.git?#HEAD",
+ "target": "f24-container-candidate",
+ "git_branch": "f24",
+ "image_spec": {
+ "format": "qcow2",
+ "arch": "x86_64",
+ "path": ".*/guest-image-.*$",
+ }
+ }]
+ }
+
+
OSTree Settings
===============
@@ -1594,6 +1753,8 @@ repository with a new commit.
* ``force_new_commit`` -- (*bool*) Do not use rpm-ostree's built-in change
detection.
Defaults to ``False``.
+ * ``unified_core`` -- (*bool*) Use rpm-ostree in unified core mode for composes.
+ Defaults to ``False``.
* ``version`` -- (*str*) Version string to be added as versioning metadata.
If this option is set to ``!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN``,
a value will be generated automatically as ``$VERSION.$RELEASE``.
@@ -1675,6 +1836,8 @@ an OSTree repository. This always runs in Koji as a ``runroot`` task.
with the optional key:
* ``extra_runroot_pkgs`` -- (*[str]*)
+    * ``skip_branding`` -- (*bool*) Stops lorax from installing branding packages.
+ Defaults to ``False``.
**ostree_installer_overwrite** = False
(*bool*) -- by default if a variant including OSTree installer also creates
@@ -1754,24 +1917,34 @@ they are not scratch builds).
to create the image will not abort the whole compose.
The configuration will pass other attributes directly to the Koji task.
- This includes ``scratch`` and ``priority``.
+ This includes ``scratch`` and ``priority``. See ``koji list-api
+ buildContainer`` for more details about these options.
A value for ``yum_repourls`` will be created automatically and point at a
repository in the current compose. You can add extra repositories with
``repo`` key having a list of urls pointing to ``.repo`` files or just
- variant uid, Pungi will create the .repo file for that variant. ``gpgkey``
- can be specified to enable gpgcheck in repo files for variants.
+    variant uid, Pungi will create the .repo file for that variant. If a
+    specific URL is used in the ``repo``, the ``$COMPOSE_ID`` variable in
+ the ``repo`` string will be replaced with the real compose ID.
+ ``gpgkey`` can be specified to enable gpgcheck in repo files for variants.
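+
+    *Example* (an illustrative ``repo`` value; the URL is a placeholder)::
+
+        "repo": [
+            "Everything",
+            "https://example.com/repos/$COMPOSE_ID/extra.repo",
+        ]
+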
**osbs_registries**
- (*dict*) -- It is possible to configure extra information about where to
- push the image (unless it is a scratch build). For each finished build,
- Pungi will try to match NVR against a key in this mapping (using shell-style
- globbing) and take the corresponding value and collect them across all built
- images. The data will be saved into ``logs/global/osbs-registries.json`` as
- a mapping from Koji NVR to the registry data. The same data is also sent to
- the message bus on ``osbs-request-push`` topic once the compose finishes
- successfully. Handling the message and performing the actual push is outside
- of scope for Pungi.
+ (*dict*) -- Use this optional setting to emit ``osbs-request-push``
+    messages for each non-scratch container build. These messages can tell
+    other tools how to push the images to other registries. For example, an
+ external tool might trigger on these messages and copy the images from
+ OSBS's registry to a staging or production registry.
+
+    For each completed container build, Pungi will try to match the NVR
+    against a key in the ``osbs_registries`` mapping (using shell-style
+    globbing) and collect the corresponding values across all built images.
+    Pungi will save this data into ``logs/global/osbs-registries.json``,
+    mapping each Koji NVR to the registry data. Pungi will also send this
+    data to the
+ message bus on the ``osbs-request-push`` topic once the compose finishes
+ successfully.
+
+ Pungi simply logs the mapped data and emits the messages. It does not
+ handle the messages or push images. A separate tool must do that.
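+
+    *Example* (illustrative; Pungi passes the value through unmodified, so
+    its structure is up to the consuming tool)::
+
+        osbs_registries = {
+            "my-container-*": {"registry": "registry.example.com"},
+        }
+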
Example config
diff --git a/doc/examples.rst b/doc/examples.rst
index 956383da..aef18e34 100644
--- a/doc/examples.rst
+++ b/doc/examples.rst
@@ -30,9 +30,17 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
module_defaults_dir = {
'scm': 'git',
'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
- 'branch': 'master',
+ 'branch': 'main',
'dir': '.'
}
+ # Optional module obsoletes configuration which is merged
+ # into the module index and gets resolved
+ module_obsoletes_dir = {
+ 'scm': 'git',
+ 'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
+ 'branch': 'main',
+ 'dir': 'obsoletes'
+ }
variants_file='variants-fedora.xml'
sigkeys = ['12C944D0']
@@ -83,7 +91,6 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
# CREATEISO
iso_hfs_ppc64le_compatible = False
- create_jigdo = False
# BUILDINSTALL
buildinstall_method = 'lorax'
@@ -325,6 +332,8 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
"tag_ref": False,
# Don't use change detection in ostree.
"force_new_commit": True,
+ # Use unified core mode for rpm-ostree composes
+ "unified_core": True,
# This is the location for the repo where new commit will be
# created. Note that this is outside of the compose dir.
"ostree_repo": "/mnt/koji/compose/ostree/repo/",
diff --git a/doc/messaging.rst b/doc/messaging.rst
index 291206f2..c94ef40b 100644
--- a/doc/messaging.rst
+++ b/doc/messaging.rst
@@ -12,8 +12,9 @@ happened. A JSON-encoded object will be passed to standard input to provide
more information about the event. At the very least, the object will contain a
``compose_id`` key.
-The script is invoked in compose directory and can read other information
-there.
+The notification script inherits the working directory from the parent
+process, so it runs in the same directory that ``pungi-koji`` was called
+from. The working directory is listed at the start of the main log.
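+
+For example, a minimal notification script might look like this (a sketch;
+the JSON object may carry more keys, but ``compose_id`` is always present)::
+
+    #!/usr/bin/python3
+    import json
+    import sys
+
+    # Pungi passes event details as a JSON object on standard input.
+    data = json.load(sys.stdin)
+    print("Got event for compose:", data["compose_id"])
+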
Currently these messages are sent:
diff --git a/doc/phases.rst b/doc/phases.rst
index 2cb810a8..7ae5bcdc 100644
--- a/doc/phases.rst
+++ b/doc/phases.rst
@@ -115,16 +115,30 @@ ImageBuild
This phase wraps up ``koji image-build``. It also updates the metadata
ultimately responsible for ``images.json`` manifest.
+OSBuild
+-------
+
+Similarly to image build, this phase creates a Koji ``osbuild`` task. In the
+background it uses OSBuild Composer to create images.
+
OSBS
----
-This phase builds docker base images in `OSBS
+This phase builds container base images in `OSBS
`_.
The finished images are available in a registry provided by OSBS, but not
downloaded directly into the compose. There is metadata about the created image
in ``compose/metadata/osbs.json``.
+ImageContainer
+--------------
+
+This phase builds a container image in OSBS, and stores the metadata in the
+same file as the OSBS phase. The container produced here wraps a different
+image, created in the ImageBuild or OSBuild phase. It can be useful to
+deliver a VM image to containerized environments.
+
OSTreeInstaller
---------------
diff --git a/pungi.spec b/pungi.spec
index a26a4a75..760f021b 100644
--- a/pungi.spec
+++ b/pungi.spec
@@ -1,14 +1,15 @@
%{?python_enable_dependency_generator}
Name: pungi
-Version: 4.2.17
-Release: 1%{?dist}
+Version: 4.3.6
+Release: 2%{?dist}.alma
Summary: Distribution compose tool
License: GPLv2
URL: https://pagure.io/pungi
Source0: %{name}-%{version}.tar.bz2
+BuildRequires: make
BuildRequires: python3-nose
BuildRequires: python3-pytest
BuildRequires: python3-mock
@@ -33,13 +34,14 @@ BuildRequires: python3-kobo
BuildRequires: python3-koji
BuildRequires: lorax
BuildRequires: python3-PyYAML
-BuildRequires: libmodulemd >= 2.8.0
+BuildRequires: python3-libmodulemd >= 2.8.0
BuildRequires: python3-gobject
BuildRequires: python3-createrepo_c >= 0.20.1
BuildRequires: python3-dogpile-cache
BuildRequires: python3-parameterized
BuildRequires: python3-gobject-base
BuildRequires: python3-dataclasses
+BuildRequires: python3-pgpy
#deps for doc building
BuildRequires: python3-sphinx
@@ -59,12 +61,14 @@ Requires: python3-dnf
Requires: python3-multilib
Requires: python3-libcomps
Requires: python3-koji
-Requires: libmodulemd >= 2.8.0
+Requires: python3-libmodulemd >= 2.8.0
Requires: python3-gobject
Requires: python3-createrepo_c >= 0.20.1
Requires: python3-PyYAML
+Requires: python3-productmd >= 1.28
Requires: python3-gobject-base
Requires: lorax
+Requires: python3-pgpy
# This package is not available on i686, hence we cannot require it
# See https://bugzilla.redhat.com/show_bug.cgi?id=1743421
@@ -80,7 +84,6 @@ A tool to create anaconda based installation trees/isos of a set of rpms.
%package utils
Summary: Utilities for working with finished composes
Requires: pungi = %{version}-%{release}
-# Requires: python3-fedmsg
%description utils
These utilities work with finished composes produced by Pungi. They can be used
@@ -110,21 +113,14 @@ gzip _build/man/pungi.1
%install
%py3_install
-%{__install} -d %{buildroot}/var/cache/pungi
+%{__install} -d %{buildroot}/var/cache/pungi/createrepo_c
%{__install} -d %{buildroot}%{_mandir}/man1
%{__install} -m 0644 doc/_build/man/pungi.1.gz %{buildroot}%{_mandir}/man1
rm %{buildroot}%{_bindir}/pungi
-# CLOUDLINUX: We don't need fedmsg stuff
-rm %{buildroot}%{_bindir}/%{name}-fedmsg-notification
-
%check
-python3 -m pytest
-# master branch part of %check segment. Currently it doesn't work
-# because of pungi-koji requirement in bash tests
-#./tests/data/specs/build.sh
-#cd tests && ./test_compose.sh
+%pytest
%files
%license COPYING GPL
@@ -151,7 +147,6 @@ python3 -m pytest
%{_bindir}/%{name}-create-unified-isos
%{_bindir}/%{name}-config-dump
%{_bindir}/%{name}-config-validate
-# %{_bindir}/%{name}-fedmsg-notification
%{_bindir}/%{name}-notification-report-progress
%{_bindir}/%{name}-orchestrate
%{_bindir}/%{name}-patch-iso
@@ -161,13 +156,96 @@ python3 -m pytest
%changelog
+* Mon Nov 07 2022 Lubomír Sedlář - 4.3.6-2
+- Stop including comps in modular repos
+
* Wed Oct 19 2022 stepan_oksanichenko - 4.2.17-1
- Replace list of cr.packages by cr.PackageIterator in package JSON generator
- Do not lose a module from koji if we have more than one arch (e.g. x86_64 + i686)
+* Fri Aug 26 2022 Lubomír Sedlář - 4.3.6-1
+- pkgset: Report better error when module is missing an arch (lsedlar)
+- osbuild: add support for building ostree artifacts (ondrej)
+- ostree: Add unified core mode for compose in rpm-ostree (tim)
+- createiso: Make ISO level more granular (lsedlar)
+- Create DVDs with xorriso (lsedlar)
+- Fix compatibility with jsonschema >= 4.0.0 (lsedlar)
+- Fix black complaint (lsedlar)
+- doc: fix osbuild's image_types field name (ondrej)
+- Convert _ssh_run output to str for python3 (hlin)
+- Print more logs for git_ls_remote (hlin)
+- Log time taken of each phase (hlin)
+- Avoid crash when loading pickle file failed (hlin)
+- extra_isos: Fix detection of changed packages (lsedlar)
+
+* Thu Aug 11 2022 Lubomír Sedlář - 4.3.5-8
+- Backport jsonschema compatibility patch (rhbz#2113607)
+
+* Mon Jul 25 2022 Lubomír Sedlář - 4.3.5-7
+- Update xorriso patch
+
+* Fri Jul 22 2022 Fedora Release Engineering - 4.3.5-6
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_37_Mass_Rebuild
+
+* Mon Jun 20 2022 Python Maint - 4.3.5-5
+- Rebuilt for Python 3.11
+
+* Thu Jun 16 2022 Adam Williamson - 4.3.5-4
+- Don't try and run isohybrid when using xorriso
+
+* Wed Jun 15 2022 Python Maint - 4.3.5-3
+- Rebuilt for Python 3.11
+
+* Wed Jun 15 2022 Lubomír Sedlář - 4.3.5-2
+- Backport patch for building DVDs with xorriso command again
+
+* Wed Jun 15 2022 Lubomír Sedlář - 4.3.5-1
+- Fix module defaults and obsoletes validation (mkulik)
+- Update the cts_keytab field in order to get the hostname of the server
+ (ounsal)
+- Add skip_branding to ostree_installer. (lzhuang)
+- kojiwrapper: Ignore warnings before task id (lsedlar)
+- Restrict jsonschema version (lsedlar)
+- Revert "Do not clone the same repository multiple times, re-use already
+ cloned repository" (hlin)
+- Involve bandit (hlin)
+
+* Wed Jun 08 2022 Lubomír Sedlář - 4.3.4-2
+- Backport patch for building DVDs with xorriso command
+
* Wed May 4 2022 stepan_oksanichenko - 4.2.16-1
- ALBS-334: Make the ability of Pungi to give module_defaults from remote sources
+* Mon Apr 04 2022 Ondřej Nosek - 4.3.4-1
+- kojiwrapper: Add retries to login call (lsedlar)
+- Variants file in config can contain path (onosek)
+- nomacboot option for livemedia koji tasks (cobrien)
+- doc: improve osbs_registries explanation (kdreyer)
+- osbs: only handle archives of type "image" (kdreyer)
+- Update the default greedy_method value in doc (ounsal)
+- Fix the wrong working directory for the progress_notification script (ounsal)
+- Filter out environment groups unmatch given arch (hlin)
+- profiler: Respect provided output stream (lsedlar)
+- modules: Correct a typo in loading obsoletes (ppisar)
+- Do not clone the same repository multiple times, re-use already cloned
+ repository (ounsal)
+
+* Fri Feb 04 2022 Lubomír Sedlář - 4.3.3-3
+- Backport typo fix
+
+* Fri Jan 21 2022 Fedora Release Engineering - 4.3.3-2
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_36_Mass_Rebuild
+
+* Fri Jan 14 2022 Haibo Lin - 4.3.3-1
+- hybrid: Explicitly pull in debugsource packages (lsedlar)
+- Add module obsoletes feature (fvalder)
+- buildinstall: Add ability to install extra packages in runroot (ounsal)
+- Ignore osbs/osbuild config when reusing iso images (hlin)
+- compose: Make sure temporary dirs are world readable (lsedlar)
+- Pass compose parameter for debugging git issue (hlin)
+- Generate images.json for extra_isos phase (hlin)
+- Fix tests for python 2.6 (hlin)
+
* Thu Dec 30 2021 stepan_oksanichenko - 4.2.15-1
- ALBS-97: The scripts `gather_modules` and `generate_packages_json` support LZMA compression
- ALBS-97: The script `generate_packages_json` can use repos with different architectures
@@ -175,21 +253,116 @@ python3 -m pytest
* Mon Dec 20 2021 stepan_oksanichenko - 4.2.14-1
- ALBS-66: The generator of packages JSON can process the same packages with different versions
+* Mon Nov 15 2021 Haibo Lin - 4.3.2-2
+- Backport patch for generating images.json
+
+* Thu Nov 11 2021 Haibo Lin - 4.3.2-1
+- gather: Load JSON mapping relative to config dir (lsedlar)
+- gather: Stop requiring all variants/arches in JSON (lsedlar)
+- doc: make dnf "backend" settings easier to discover (kdreyer)
+- Remove with_jigdo argument (lsedlar)
+- Check dependencies after config validation (lsedlar)
+- default "with_jigdo" to False (kdreyer)
+- Stop trying to validate non-existent metadata (lsedlar)
+- test images for metadata deserialization error (fdipretre)
+- repoclosure: Use --forcearch for dnf repoclosure (lsedlar)
+- extra_isos: Allow reusing old images (lsedlar)
+- createiso: Allow reusing old images (lsedlar)
+- Remove default runroot channel (lsedlar)
+
+* Tue Oct 26 2021 Lubomír Sedlář - 4.3.1-1
+- Correct irc network name & add matrix room (dan.cermak)
+- Add missing mock to osbs tests (lsedlar)
+- osbs: Reuse images from old compose (hlin)
+- image_build: Allow reusing old image_build results (hlin)
+- Allow ISO-Level configuration within the config file (ounsal)
+- Work around ODCS creating COMPOSE_ID later (lsedlar)
+- When `cts_url` is configured, use CTS `/repo` API for buildContainer
+ yum_repourls. (jkaluza)
+- Add COMPOSE_ID into the pungi log file (ounsal)
+- buildinstall: Add easy way to check if previous result was reused (lsedlar)
+
+* Mon Oct 04 2021 Lubomír Sedlář - 4.3.0-2
+- Backport patch to avoid crash on missing COMPOSE_ID
+
+* Wed Sep 15 2021 Lubomír Sedlář - 4.3.0-1
+- Only build CTS url when configured (lsedlar)
+- Require requests_kerberos only when needed (lsedlar)
+- Allow specifying $COMPOSE_ID in the `repo` value for osbs phase. (jkaluza)
+- Make getting old compose config reusable (lsedlar)
+- paths: Allow customizing log file extension (lsedlar)
+- Add authentication for updating the compose URL in CTS. (ounsal)
+- Fix type detection for osbuild images (lsedlar)
+- Enable pungi to send compose_url patches to CTS (ounsal)
+- Use xorriso instead of isoinfo when createiso_use_xorrisofs is enabled
+ (ounsal)
+- Fix tests for createrepo (drumian)
+- Formatted files according to flake8 and black feedback (drumian)
+- Handle the pungi failures to ensure creation of log files (ounsal)
+- Add createrepo_enable_cache to configuration doc (hlin)
+- Fix formatting (hlin)
+- Install missing deps in ci image (hlin)
+- Use pytest directly incl. support for posargs, e.g.: tox -- -s -vvv
+ tests/path/to/a/single/test_something.py (fvalder)
+- Supersede ModuleStream loading with ModuleIndex (fvalder)
+- Better error message than 'KeyError' in pungi (drumian)
+- Adding multithreading support for pungi/phases/image_checksum.py (jkunstle)
+- doc: more additional_packages documentation (kdreyer)
+- doc: fix typo in additional_packages description (kdreyer)
+- doc: improve signed packages retry docs (kdreyer)
+- Better error message than 'KeyError' in pungi (drumian)
+- doc: explain buildContainer API (kdreyer)
+
+* Wed Aug 04 2021 Haibo Lin - 4.2.10-1
+- Show and log command when using the run_blocking_cmd() method (fdipretre)
+- Use cachedir when createrepo (hlin)
+- gather: Add all srpms to variant lookaside repo (lsedlar)
+- Add task URL to watch task log (hlin)
+- Log warning when module defined in variants.xml not found (hlin)
+- pkgset: Compare future events correctly (lsedlar)
+- util: Strip file:// from local urls (lsedlar)
+- Clean up temporary yumroot dir (hlin)
+
+* Fri Jul 23 2021 Fedora Release Engineering - 4.2.9-3
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild
+
* Fri Jun 18 2021 stepan_oksanichenko - 4.2.13-1
- LNX-326: Add the ability to include any package by mask in packages.json to the generator
- LNX-318: Modify build scripts for building CloudLinux OS 8.4
+* Fri Jun 04 2021 Python Maint - 4.2.9-2
+- Rebuilt for Python 3.10
+
* Tue May 25 2021 Stepan Oksanichenko - 4.2.12-1
- LNX-108: Add multiarch support to pungi
* Thu Apr 29 2021 Stepan Oksanichenko - 4.2.11-1
- LNX-311: Add ability to productmd set a main variant while dumping TreeInfo
+* Thu Apr 29 2021 onosek - 4.2.9-1
+- New upstream release 4.2.9
+- Fix can't link XDEV using repos as pkgset_sources (romain.forlot)
+- Updated the deprecated ks argument name (to the current inst.ks) (lveyde)
+- gather: Adjust reusing with lookaside (hlin)
+- hybrid: Optimize getting lookaside packages (lsedlar)
+- gather: Copy old logs when reusing gather result (hlin)
+- Cancel koji tasks when pungi terminated (hlin)
+- Add Dockerfile for building testing image (hlin)
+- image_container: Fix incorrect arch processing (lsedlar)
+- runroot: Adjust permissions always (hlin)
+- Format code (hlin)
+- pkgset: Fix meaning of retries (lsedlar)
+- pkgset: Store module tag only if module is used (lsedlar)
+- Store extended traceback for gather errors (lsedlar)
+
* Wed Feb 24 2021 Danylo Kuropiatnyk , Stepan Oksanichenko - 4.2.10-1
- LU-2186 .treeinfo file in AlmaLinux public kickstart repo should contain AppStream variant
- LU-2195 Change path to sources and iso when generating repositories
- LU-2202: Start unittests during installation or build of pungi
+* Fri Feb 12 2021 Ondrej Nosek - 4.2.8-1
+- New upstream version
+
* Thu Feb 11 2021 Stepan Oksanichenko - 4.2.9-1
- LNX-133: Create a server for building nightly builds of AlmaLinux
- LU-2133: Prepare CI for iso builds of CLOSS 8
@@ -202,6 +375,18 @@ python3 -m pytest
- LNX-102: Add tool that collects information about modules
- LNX-103 Update .spec file for AlmaLinux
+* Wed Jan 27 2021 Fedora Release Engineering - 4.2.7-3
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_34_Mass_Rebuild
+
+* Fri Jan 22 2021 Lubomír Sedlář - 4.2.7-2
+- Backport patch for preserving default attribute in comps
+
+* Tue Dec 8 09:01:52 CET 2020 Lubomír Sedlář - 4.2.7-1
+- New upstream version
+
+* Thu Nov 05 2020 Lubomír Sedlář - 4.2.6-1
+- New upstream release
+
* Fri Sep 25 2020 Lubomír Sedlář - 4.2.5-1
- New upstream release
diff --git a/pungi/arch_utils.py b/pungi/arch_utils.py
index c78082f0..d01eccd2 100644
--- a/pungi/arch_utils.py
+++ b/pungi/arch_utils.py
@@ -131,8 +131,8 @@ def getArchList(thisarch=None): # pragma: no cover
def _try_read_cpuinfo(): # pragma: no cover
- """ Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
- mounted). """
+ """Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
+ mounted)."""
try:
with open("/proc/cpuinfo", "r") as f:
return f.readlines()
@@ -141,8 +141,8 @@ def _try_read_cpuinfo(): # pragma: no cover
def _parse_auxv(): # pragma: no cover
- """ Read /proc/self/auxv and parse it into global dict for easier access
- later on, very similar to what rpm does. """
+ """Read /proc/self/auxv and parse it into global dict for easier access
+ later on, very similar to what rpm does."""
# In case we can't open and read /proc/self/auxv, just return
try:
with open("/proc/self/auxv", "rb") as f:
@@ -326,8 +326,8 @@ def getMultiArchInfo(arch=canonArch): # pragma: no cover
def getBaseArch(myarch=None): # pragma: no cover
"""returns 'base' arch for myarch, if specified, or canonArch if not.
- base arch is the arch before noarch in the arches dict if myarch is not
- a key in the multilibArches."""
+ base arch is the arch before noarch in the arches dict if myarch is not
+ a key in the multilibArches."""
if not myarch:
myarch = canonArch
diff --git a/pungi/checks.py b/pungi/checks.py
index 9cbddd20..25a343fb 100644
--- a/pungi/checks.py
+++ b/pungi/checks.py
@@ -53,7 +53,7 @@ from . import util
def is_jigdo_needed(conf):
- return conf.get("create_jigdo", True)
+ return conf.get("create_jigdo")
def is_isohybrid_needed(conf):
@@ -75,8 +75,7 @@ def is_isohybrid_needed(conf):
def is_genisoimage_needed(conf):
- """This is only needed locally for createiso without runroot.
- """
+ """This is only needed locally for createiso without runroot."""
runroot_tag = conf.get("runroot_tag", "")
if runroot_tag or conf.get("createiso_use_xorrisofs"):
return False
@@ -94,7 +93,7 @@ def is_xorrisofs_needed(conf):
def is_createrepo_c_needed(conf):
- return conf.get("createrepo_c", True)
+ return conf.get("createrepo_c")
# The first element in the tuple is package name expected to have the
@@ -230,7 +229,6 @@ def validate(config, offline=False, schema=None):
)
validator = DefaultValidator(
schema,
- {"array": (tuple, list), "regex": six.string_types, "url": six.string_types},
)
errors = []
warnings = []
@@ -446,7 +444,13 @@ def _extend_with_default_and_alias(validator_class, offline=False):
context=all_errors,
)
- return jsonschema.validators.extend(
+ def is_array(checker, instance):
+ return isinstance(instance, (tuple, list))
+
+ def is_string_type(checker, instance):
+ return isinstance(instance, six.string_types)
+
+    # jsonschema >= 4.0 removed DEFAULT_TYPES; custom types have to be
+    # registered through the TypeChecker API instead.
+    type_checker = validator_class.TYPE_CHECKER.redefine_many(
+        {"array": is_array, "regex": is_string_type, "url": is_string_type}
+    )
+
+    return jsonschema.validators.extend(
validator_class,
{
"properties": properties_validator,
@@ -457,6 +461,12 @@ def _extend_with_default_and_alias(validator_class, offline=False):
"anyOf": _validate_any_of,
},
+        type_checker=type_checker,
    )
class ConfigDeprecation(jsonschema.exceptions.ValidationError):
@@ -610,7 +620,7 @@ def make_schema():
"runroot_ssh_init_template": {"type": "string"},
"runroot_ssh_install_packages_template": {"type": "string"},
"runroot_ssh_run_template": {"type": "string"},
- "create_jigdo": {"type": "boolean", "default": True},
+ "create_jigdo": {"type": "boolean", "default": False},
"check_deps": {"type": "boolean", "default": True},
"require_all_comps_packages": {"type": "boolean", "default": False},
"bootable": {
@@ -654,13 +664,20 @@ def make_schema():
"gather_profiler": {"type": "boolean", "default": False},
"gather_allow_reuse": {"type": "boolean", "default": False},
"pkgset_allow_reuse": {"type": "boolean", "default": True},
- "pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
+ "createiso_allow_reuse": {"type": "boolean", "default": True},
+ "extraiso_allow_reuse": {"type": "boolean", "default": True},
+ "pkgset_source": {"type": "string", "enum": [
+ "koji",
+ "repos",
+ "kojimock",
+ ]},
"createrepo_c": {"type": "boolean", "default": True},
"createrepo_checksum": {
"type": "string",
"default": "sha256",
"enum": ["sha1", "sha256", "sha512"],
},
+ "createrepo_enable_cache": {"type": "boolean", "default": True},
"createrepo_use_xz": {"type": "boolean", "default": False},
"createrepo_num_threads": {"type": "number", "default": get_num_cpus()},
"createrepo_num_workers": {"type": "number", "default": 3},
@@ -722,6 +739,8 @@ def make_schema():
"minItems": 1,
"default": [None],
},
+ "signed_packages_retries": {"type": "number", "default": 0},
+ "signed_packages_wait": {"type": "number", "default": 30},
"variants_file": {"$ref": "#/definitions/str_or_scm_dict"},
"comps_file": {"$ref": "#/definitions/str_or_scm_dict"},
"comps_filter_environments": {"type": "boolean", "default": True},
@@ -732,6 +751,7 @@ def make_schema():
"patternProperties": {".+": {"$ref": "#/definitions/strings"}},
"additionalProperties": False,
},
+ "module_obsoletes_dir": {"$ref": "#/definitions/str_or_scm_dict"},
"create_optional_isos": {"type": "boolean", "default": False},
"symlink_isos_to": {"type": "string"},
"dogpile_cache_backend": {"type": "string"},
@@ -744,6 +764,12 @@ def make_schema():
),
"createiso_break_hardlinks": {"type": "boolean", "default": False},
"createiso_use_xorrisofs": {"type": "boolean", "default": False},
+ "iso_level": {
+ "anyOf": [
+ {"type": "number", "enum": [1, 2, 3, 4]},
+ _variant_arch_mapping({"type": "number", "enum": [1, 2, 3, 4]}),
+ ],
+ },
"iso_hfs_ppc64le_compatible": {"type": "boolean", "default": True},
"multilib": _variant_arch_mapping(
{"$ref": "#/definitions/list_of_strings"}
@@ -785,6 +811,10 @@ def make_schema():
"buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
"buildinstall_use_guestmount": {"type": "boolean", "default": True},
"buildinstall_skip": _variant_arch_mapping({"type": "boolean"}),
+ "buildinstall_packages": {
+ "$ref": "#/definitions/package_mapping",
+ "default": [],
+ },
"global_ksurl": {"type": "url"},
"global_version": {"type": "string"},
"global_target": {"type": "string"},
@@ -976,6 +1006,7 @@ def make_schema():
"arches": {"$ref": "#/definitions/list_of_strings"},
"failable": {"$ref": "#/definitions/list_of_strings"},
"release": {"$ref": "#/definitions/optional_string"},
+ "nomacboot": {"type": "boolean"},
},
"required": ["name", "kickstart"],
"additionalProperties": False,
@@ -1009,6 +1040,7 @@ def make_schema():
},
"update_summary": {"type": "boolean"},
"force_new_commit": {"type": "boolean"},
+ "unified_core": {"type": "boolean"},
"version": {"type": "string"},
"config_branch": {"type": "string"},
"tag_ref": {"type": "boolean"},
@@ -1043,6 +1075,7 @@ def make_schema():
"failable": {"$ref": "#/definitions/list_of_strings"},
"update_summary": {"type": "boolean"},
"force_new_commit": {"type": "boolean"},
+ "unified_core": {"type": "boolean"},
"version": {"type": "string"},
"config_branch": {"type": "string"},
"tag_ref": {"type": "boolean"},
@@ -1072,6 +1105,7 @@ def make_schema():
"template_repo": {"type": "string"},
"template_branch": {"type": "string"},
"extra_runroot_pkgs": {"$ref": "#/definitions/list_of_strings"},
+ "skip_branding": {"type": "boolean"},
},
"additionalProperties": False,
}
@@ -1082,6 +1116,7 @@ def make_schema():
"live_images": _variant_arch_mapping(
_one_or_list({"$ref": "#/definitions/live_image_config"})
),
+ "image_build_allow_reuse": {"type": "boolean", "default": False},
"image_build": {
"type": "object",
"patternProperties": {
@@ -1149,12 +1184,109 @@ def make_schema():
"version": {"type": "string"},
"distro": {"type": "string"},
"target": {"type": "string"},
- "image_types": {"$ref": "#/definitions/strings"},
+ # Only a single image_type can be specified
+ # https://github.com/osbuild/koji-osbuild/commit/c7252650814f82281ee57b598cb2ad970b580451
+ # https://github.com/osbuild/koji-osbuild/commit/f21a2de39b145eb94f3d49cb4d8775a33ba56752
+ "image_types": {
+ "oneOf": [
+ {
+ "type": "array",
+ "items": {"type": "string"},
+ "description": "Deprecated variant",
+ "minItems": 1,
+ "maxItems": 1,
+ },
+ {"type": "string"},
+ ]
+ },
"arches": {"$ref": "#/definitions/list_of_strings"},
"release": {"type": "string"},
"repo": {"$ref": "#/definitions/list_of_strings"},
"failable": {"$ref": "#/definitions/list_of_strings"},
"subvariant": {"type": "string"},
+ "ostree_url": {"type": "string"},
+ "ostree_ref": {"type": "string"},
+ "ostree_parent": {"type": "string"},
+ "upload_options": {
+ "oneOf": [
+ # AWSEC2UploadOptions
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "required": [
+ "region",
+ "share_with_accounts",
+ ],
+ "properties": {
+ "region": {
+ "type": "string",
+ },
+ "snapshot_name": {
+ "type": "string",
+ },
+ "share_with_accounts": {
+ "type": "array",
+ "items": {"type": "string"},
+ },
+ },
+ },
+ # AWSS3UploadOptions
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["region"],
+ "properties": {
+ "region": {"type": "string"}
+ },
+ },
+ # AzureUploadOptions
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "required": [
+ "tenant_id",
+ "subscription_id",
+ "resource_group",
+ "location",
+ ],
+ "properties": {
+ "tenant_id": {"type": "string"},
+ "subscription_id": {"type": "string"},
+ "resource_group": {"type": "string"},
+ "location": {"type": "string"},
+ "image_name": {
+ "type": "string",
+ },
+ },
+ },
+ # GCPUploadOptions
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["region", "bucket"],
+ "properties": {
+ "region": {"type": "string"},
+ "bucket": {"type": "string"},
+ "image_name": {
+ "type": "string",
+ },
+ "share_with_accounts": {
+ "type": "array",
+ "items": {"type": "string"},
+ },
+ },
+ },
+ # ContainerUploadOptions
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "name": {"type": "string"},
+ "tag": {"type": "string"},
+ },
+ },
+ ]
+ },
},
"required": ["name", "distro", "image_types"],
"additionalProperties": False,
@@ -1203,6 +1335,7 @@ def make_schema():
"anyOf": [{"type": "string"}, {"type": "number"}],
"default": 10 * 1024 * 1024,
},
+ "osbs_allow_reuse": {"type": "boolean", "default": False},
"osbs": {
"type": "object",
"patternProperties": {
@@ -1221,6 +1354,26 @@ def make_schema():
},
"additionalProperties": False,
},
+ "image_container": {
+ "type": "object",
+ "patternProperties": {
+ ".+": _one_or_list(
+ {
+ "type": "object",
+ "properties": {
+ "url": {"type": "url"},
+ "target": {"type": "string"},
+ "priority": {"type": "number"},
+ "failable": {"type": "boolean"},
+ "git_branch": {"type": "string"},
+ "image_spec": {"type": "object"},
+ },
+ "required": ["url", "target", "git_branch", "image_spec"],
+ }
+ ),
+ },
+ "additionalProperties": False,
+ },
"extra_files": _variant_arch_mapping(
{
"type": "array",
@@ -1325,6 +1478,7 @@ CONFIG_DEPS = {
"requires": ((lambda x: x, ["base_product_name", "base_product_short"]),),
"conflicts": ((lambda x: not x, ["base_product_name", "base_product_short"]),),
},
+ "cts_url": {"requires": ((lambda x: x, ["translate_paths"]),)},
"product_id": {"conflicts": [(lambda x: not x, ["product_id_allow_missing"])]},
"pkgset_scratch_modules": {"requires": ((lambda x: x, ["mbs_api_url"]),)},
"pkgset_source": {
diff --git a/pungi/compose.py b/pungi/compose.py
index 2121a69f..ab62bbfa 100644
--- a/pungi/compose.py
+++ b/pungi/compose.py
@@ -24,8 +24,12 @@ import time
import tempfile
import shutil
import json
+import socket
import kobo.log
+import kobo.tback
+import requests
+from requests.exceptions import RequestException
from productmd.composeinfo import ComposeInfo
from productmd.images import Images
from dogpile.cache import make_region
@@ -40,6 +44,8 @@ from pungi.util import (
get_arch_variant_data,
get_format_substs,
get_variant_data,
+ retry,
+ translate_path_raw,
)
from pungi.metadata import compose_to_composeinfo
@@ -51,6 +57,14 @@ except ImportError:
SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
+@retry(wait_on=RequestException)
+def retry_request(method, url, data=None, auth=None):
+ request_method = getattr(requests, method)
+ rv = request_method(url, json=data, auth=auth)
+ rv.raise_for_status()
+ return rv
+
+
def get_compose_info(
conf,
compose_type="production",
@@ -83,20 +97,19 @@ def get_compose_info(
cts_url = conf.get("cts_url", None)
if cts_url:
- # Import requests and requests-kerberos here so it is not needed
- # if running without Compose Tracking Service.
- import requests
- from requests_kerberos import HTTPKerberosAuth
-
# Requests-kerberos cannot accept custom keytab, we need to use
# environment variable for this. But we need to change environment
# only temporarily just for this single requests.post.
# So at first backup the current environment and revert to it
# after the requests.post call.
cts_keytab = conf.get("cts_keytab", None)
+ authentication = get_authentication(conf)
if cts_keytab:
environ_copy = dict(os.environ)
+ if "$HOSTNAME" in cts_keytab:
+ cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+ os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
try:
# Create compose in CTS and get the reserved compose ID.
@@ -107,10 +120,10 @@ def get_compose_info(
"parent_compose_ids": parent_compose_ids,
"respin_of": respin_of,
}
- rv = requests.post(url, json=data, auth=HTTPKerberosAuth())
- rv.raise_for_status()
+ rv = retry_request("post", url, data=data, auth=authentication)
finally:
if cts_keytab:
+ shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
os.environ.clear()
os.environ.update(environ_copy)
@@ -119,12 +132,23 @@ def get_compose_info(
cts_ci.loads(rv.text)
ci.compose.respin = cts_ci.compose.respin
ci.compose.id = cts_ci.compose.id
+
else:
ci.compose.id = ci.create_compose_id()
return ci
+def get_authentication(conf):
+ authentication = None
+ cts_keytab = conf.get("cts_keytab", None)
+ if cts_keytab:
+ from requests_kerberos import HTTPKerberosAuth
+
+ authentication = HTTPKerberosAuth()
+ return authentication
+
+
def write_compose_info(compose_dir, ci):
"""
Write ComposeInfo `ci` to `compose_dir` subdirectories.
@@ -137,6 +161,20 @@ def write_compose_info(compose_dir, ci):
ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
+def update_compose_url(compose_id, compose_dir, conf):
+ authentication = get_authentication(conf)
+ cts_url = conf.get("cts_url", None)
+ if cts_url:
+ url = os.path.join(cts_url, "api/1/composes", compose_id)
+ tp = conf.get("translate_paths", None)
+ compose_url = translate_path_raw(tp, compose_dir)
+ data = {
+ "action": "set_url",
+ "compose_url": compose_url,
+ }
+ return retry_request("patch", url, data=data, auth=authentication)
+
+
def get_compose_dir(
topdir,
conf,
@@ -222,6 +260,8 @@ class Compose(kobo.log.LoggingBase):
self.koji_event = koji_event or conf.get("koji_event")
self.notifier = notifier
+ self._old_config = None
+
# path definitions
self.paths = Paths(self)
@@ -284,6 +324,8 @@ class Compose(kobo.log.LoggingBase):
self.im.compose.respin = self.compose_respin
self.im.metadata_path = self.paths.compose.metadata()
+ self.containers_metadata = {}
+
# Stores list of deliverables that failed, but did not abort the
# compose.
# {deliverable: [(Variant.uid, arch, subvariant)]}
@@ -303,6 +345,7 @@ class Compose(kobo.log.LoggingBase):
get_compose_info = staticmethod(get_compose_info)
write_compose_info = staticmethod(write_compose_info)
get_compose_dir = staticmethod(get_compose_dir)
+ update_compose_url = staticmethod(update_compose_url)
def __getitem__(self, name):
return self.variants[name]
@@ -343,6 +386,10 @@ class Compose(kobo.log.LoggingBase):
def has_module_defaults(self):
return bool(self.conf.get("module_defaults_dir", False))
+ @property
+ def has_module_obsoletes(self):
+ return bool(self.conf.get("module_obsoletes_dir", False))
+
@property
def config_dir(self):
return os.path.dirname(self.conf._open_file or "")
@@ -370,7 +417,7 @@ class Compose(kobo.log.LoggingBase):
)
else:
file_name = os.path.basename(scm_dict)
- scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
+ scm_dict = os.path.join(self.config_dir, scm_dict)
self.log_debug("Writing variants file: %s", variants_file)
tmp_dir = self.mkdtemp(prefix="variants_file_")
@@ -573,7 +620,52 @@ class Compose(kobo.log.LoggingBase):
/work/{global,}/tmp[-]/
"""
path = os.path.join(self.paths.work.tmp_dir(arch=arch, variant=variant))
- return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
+ tmpdir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
+ os.chmod(tmpdir, 0o755)
+ return tmpdir
+
+ def dump_containers_metadata(self):
+ """Create a file with container metadata if there are any containers."""
+ if not self.containers_metadata:
+ return
+ with open(self.paths.compose.metadata("osbs.json"), "w") as f:
+ json.dump(
+ self.containers_metadata,
+ f,
+ indent=4,
+ sort_keys=True,
+ separators=(",", ": "),
+ )
+
+ def traceback(self, detail=None):
+ """Store an extended traceback. This method should only be called when
+ handling an exception.
+
+ :param str detail: Extra information appended to the filename
+ """
+ basename = "traceback"
+ if detail:
+ basename += "-" + detail
+ tb_path = self.paths.log.log_file("global", basename)
+ self.log_error("Extended traceback in: %s", tb_path)
+ with open(tb_path, "wb") as f:
+ f.write(kobo.tback.Traceback().get_traceback())
+
+ def load_old_compose_config(self):
+ """
+ Helper method to load Pungi config dump from old compose.
+ """
+ if not self._old_config:
+ config_dump_full = self.paths.log.log_file("global", "config-dump")
+ config_dump_full = self.paths.old_compose_path(config_dump_full)
+ if not config_dump_full:
+ return None
+
+ self.log_info("Loading old config file: %s", config_dump_full)
+ with open(config_dump_full, "r") as f:
+ self._old_config = json.load(f)
+
+ return self._old_config
def get_ordered_variant_uids(compose):
diff --git a/pungi/createiso.py b/pungi/createiso.py
index 1f7990db..4d80678c 100644
--- a/pungi/createiso.py
+++ b/pungi/createiso.py
@@ -15,6 +15,7 @@ CreateIsoOpts = namedtuple(
"CreateIsoOpts",
[
"buildinstall_method",
+ "boot_iso",
"arch",
"output_dir",
"jigdo_dir",
@@ -25,6 +26,8 @@ CreateIsoOpts = namedtuple(
"os_tree",
"hfs_compat",
"use_xorrisofs",
+ "iso_level",
+ "script_dir",
],
)
CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)
@@ -76,6 +79,8 @@ def make_image(f, opts):
volid=opts.volid,
exclude=["./lost+found"],
graft_points=opts.graft_points,
+ use_xorrisofs=opts.use_xorrisofs,
+ iso_level=opts.iso_level,
**mkisofs_kwargs
)
emit(f, cmd)
@@ -97,7 +102,7 @@ def run_isohybrid(f, opts):
def make_manifest(f, opts):
- emit(f, iso.get_manifest_cmd(opts.iso_name))
+ emit(f, iso.get_manifest_cmd(opts.iso_name, opts.use_xorrisofs))
def make_jigdo(f, opts):
@@ -113,6 +118,27 @@ def make_jigdo(f, opts):
emit(f, cmd)
+def write_xorriso_commands(opts):
+ script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
+ with open(script, "w") as f:
+ emit(f, "-indev %s" % opts.boot_iso)
+ emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
+ emit(f, "-boot_image any replay")
+ emit(f, "-volid %s" % opts.volid)
+
+ with open(opts.graft_points) as gp:
+ for line in gp:
+ iso_path, fs_path = line.strip().split("=", 1)
+ emit(f, "-map %s %s" % (fs_path, iso_path))
+
+ if opts.arch == "ppc64le":
+ # This is needed for the image to be bootable.
+ emit(f, "-as mkisofs -U --")
+
+ emit(f, "-end")
+ return script
+
+
def write_script(opts, f):
if bool(opts.jigdo_dir) != bool(opts.os_tree):
raise RuntimeError("jigdo_dir must be used together with os_tree")
@@ -120,8 +146,14 @@ def write_script(opts, f):
emit(f, "#!/bin/bash")
emit(f, "set -ex")
emit(f, "cd %s" % opts.output_dir)
- make_image(f, opts)
- run_isohybrid(f, opts)
+
+ if opts.use_xorrisofs and opts.buildinstall_method:
+ script = write_xorriso_commands(opts)
+ emit(f, "xorriso -dialog on <%s" % script)
+ else:
+ make_image(f, opts)
+ run_isohybrid(f, opts)
+
implant_md5(f, opts)
make_manifest(f, opts)
if opts.jigdo_dir:
diff --git a/pungi/errors.py b/pungi/errors.py
new file mode 100644
index 00000000..093c2c83
--- /dev/null
+++ b/pungi/errors.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see .
+
+
+class UnsignedPackagesError(RuntimeError):
+ """Raised when package set fails to find a properly signed copy of an
+ RPM."""
+
+ pass
diff --git a/pungi/gather.py b/pungi/gather.py
index 2c4f7a80..a77ed6bd 100644
--- a/pungi/gather.py
+++ b/pungi/gather.py
@@ -35,7 +35,7 @@ from pungi.wrappers.createrepo import CreaterepoWrapper
class ReentrantYumLock(object):
- """ A lock that can be acquired multiple times by the same process. """
+ """A lock that can be acquired multiple times by the same process."""
def __init__(self, lock, log):
self.lock = lock
@@ -60,7 +60,7 @@ class ReentrantYumLock(object):
def yumlocked(method):
- """ A locking decorator. """
+ """A locking decorator."""
def wrapper(self, *args, **kwargs):
with self.yumlock:
@@ -519,7 +519,7 @@ class Pungi(PungiBase):
def verifyCachePkg(self, po, path): # Stolen from yum
"""check the package checksum vs the cache
- return True if pkg is good, False if not"""
+ return True if pkg is good, False if not"""
(csum_type, csum) = po.returnIdSum()
@@ -682,7 +682,7 @@ class Pungi(PungiBase):
def get_package_deps(self, po):
"""Add the dependencies for a given package to the
- transaction info"""
+ transaction info"""
added = set()
if po.repoid in self.lookaside_repos:
# Don't resolve deps for stuff in lookaside.
@@ -911,7 +911,7 @@ class Pungi(PungiBase):
def getPackagesFromGroup(self, group):
"""Get a list of package names from a ksparser group object
- Returns a list of package names"""
+ Returns a list of package names"""
packages = []
@@ -951,7 +951,7 @@ class Pungi(PungiBase):
def _addDefaultGroups(self, excludeGroups=None):
"""Cycle through the groups and return at list of the ones that ara
- default."""
+ default."""
excludeGroups = excludeGroups or []
# This is mostly stolen from anaconda.
@@ -1217,8 +1217,8 @@ class Pungi(PungiBase):
def createSourceHashes(self):
"""Create two dicts - one that maps binary POs to source POs, and
- one that maps a single source PO to all binary POs it produces.
- Requires yum still configured."""
+ one that maps a single source PO to all binary POs it produces.
+ Requires yum still configured."""
self.src_by_bin = {}
self.bin_by_src = {}
self.logger.info("Generating source <-> binary package mappings")
@@ -1232,8 +1232,8 @@ class Pungi(PungiBase):
def add_srpms(self, po_list=None):
"""Cycle through the list of package objects and
- find the sourcerpm for them. Requires yum still
- configured and a list of package objects"""
+ find the sourcerpm for them. Requires yum still
+ configured and a list of package objects"""
srpms = set()
po_list = po_list or self.po_list
@@ -1275,9 +1275,9 @@ class Pungi(PungiBase):
def add_fulltree(self, srpm_po_list=None):
"""Cycle through all package objects, and add any
- that correspond to a source rpm that we are including.
- Requires yum still configured and a list of package
- objects."""
+ that correspond to a source rpm that we are including.
+ Requires yum still configured and a list of package
+ objects."""
self.logger.info("Completing package set")
@@ -1357,8 +1357,8 @@ class Pungi(PungiBase):
def getDebuginfoList(self):
"""Cycle through the list of package objects and find
- debuginfo rpms for them. Requires yum still
- configured and a list of package objects"""
+ debuginfo rpms for them. Requires yum still
+ configured and a list of package objects"""
added = set()
for po in self.all_pkgs:
@@ -1398,7 +1398,7 @@ class Pungi(PungiBase):
def _downloadPackageList(self, polist, relpkgdir):
"""Cycle through the list of package objects and
- download them from their respective repos."""
+ download them from their respective repos."""
for pkg in sorted(polist):
repo = self.ayum.repos.getRepo(pkg.repoid)
@@ -1533,7 +1533,7 @@ class Pungi(PungiBase):
@yumlocked
def downloadSRPMs(self):
"""Cycle through the list of srpms and
- find the package objects for them, Then download them."""
+        find the package objects for them, then download them."""
# do the downloads
self._downloadPackageList(self.srpm_po_list, os.path.join("source", "SRPMS"))
@@ -1541,7 +1541,7 @@ class Pungi(PungiBase):
@yumlocked
def downloadDebuginfo(self):
"""Cycle through the list of debuginfo rpms and
- download them."""
+ download them."""
# do the downloads
self._downloadPackageList(
@@ -1980,7 +1980,7 @@ class Pungi(PungiBase):
def doGetRelnotes(self):
"""Get extra files from packages in the tree to put in the topdir of
- the tree."""
+ the tree."""
docsdir = os.path.join(self.workdir, "docs")
relnoterpms = self.config.get("pungi", "relnotepkgs").split()
diff --git a/pungi/gather_dnf.py b/pungi/gather_dnf.py
index 5fecc7f0..97ebf48e 100644
--- a/pungi/gather_dnf.py
+++ b/pungi/gather_dnf.py
@@ -1029,7 +1029,7 @@ class Gather(GatherBase):
# Link downloaded package in (or link package from file repo)
try:
- linker.hardlink(pkg.localPkg(), target)
+ linker.link(pkg.localPkg(), target)
except Exception:
self.logger.error("Unable to link %s from the yum cache." % pkg.name)
raise
diff --git a/pungi/graph.py b/pungi/graph.py
index 4e946f1b..03112951 100755
--- a/pungi/graph.py
+++ b/pungi/graph.py
@@ -54,8 +54,7 @@ class SimpleAcyclicOrientedGraph(object):
return False if node in self._graph else True
def remove_final_endpoint(self, node):
- """
- """
+ """"""
remove_start_points = []
for start, ends in self._graph.items():
if node in ends:
diff --git a/pungi/media_split.py b/pungi/media_split.py
index 74be8fab..708ebd5d 100644
--- a/pungi/media_split.py
+++ b/pungi/media_split.py
@@ -20,8 +20,8 @@ import os
SIZE_UNITS = {
"b": 1,
"k": 1024,
- "M": 1024 ** 2,
- "G": 1024 ** 3,
+ "M": 1024**2,
+ "G": 1024**3,
}
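
For context, `SIZE_UNITS` feeds Pungi's media size parsing. A minimal sketch of how such a lookup converts a human-readable size to bytes (the helper below is illustrative, not Pungi's actual `convert_media_size`):

```python
SIZE_UNITS = {"b": 1, "k": 1024, "M": 1024**2, "G": 1024**3}

def parse_media_size(size):
    """Convert e.g. "4700M" or "8G" to bytes; plain integers pass through."""
    size = str(size).strip()
    if size[-1] in SIZE_UNITS:
        return int(float(size[:-1]) * SIZE_UNITS[size[-1]])
    return int(size)

assert parse_media_size("8G") == 8 * 1024**3
assert parse_media_size("4700M") == 4700 * 1024**2
assert parse_media_size(4096) == 4096
```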
diff --git a/pungi/module_util.py b/pungi/module_util.py
index ab29a67c..ba97590f 100644
--- a/pungi/module_util.py
+++ b/pungi/module_util.py
@@ -44,6 +44,30 @@ def iter_module_defaults(path):
yield module_name, index.get_module(module_name).get_defaults()
+def get_module_obsoletes_idx(path, mod_list):
+ """Given a path to a directory with yaml files, return Index with
+ merged all obsoletes.
+ """
+
+ merger = Modulemd.ModuleIndexMerger.new()
+ md_idxs = []
+
+    # associate_index does NOT copy its argument (nor increase the
+    # reference count on the object). It only stores a pointer.
+ for file in glob.glob(os.path.join(path, "*.yaml")):
+ index = Modulemd.ModuleIndex()
+ index.update_from_file(file, strict=False)
+ mod_name = index.get_module_names()[0]
+
+ if mod_name and (mod_name in mod_list or not mod_list):
+ md_idxs.append(index)
+ merger.associate_index(md_idxs[-1], 0)
+
+ merged_idx = merger.resolve()
+
+ return merged_idx
+
+
def collect_module_defaults(
defaults_dir, modules_to_load=None, mod_index=None, overrides_dir=None
):
@@ -69,3 +93,26 @@ def collect_module_defaults(
mod_index.add_defaults(defaults)
return mod_index
+
+
+def collect_module_obsoletes(obsoletes_dir, modules_to_load, mod_index=None):
+ """Load module obsoletes into index.
+
+    This works in a similar fashion to collect_module_defaults, except it
+    merges indexes together instead of adding them during iteration.
+
+    Additionally, if modules_to_load is not empty, the returned Index will
+    include only obsoletes for those modules.
+ """
+
+ obsoletes_index = get_module_obsoletes_idx(obsoletes_dir, modules_to_load)
+
+ # Merge Obsoletes with Modules Index.
+ if mod_index:
+ merger = Modulemd.ModuleIndexMerger.new()
+ merger.associate_index(mod_index, 0)
+ merger.associate_index(obsoletes_index, 0)
+ merged_idx = merger.resolve()
+ obsoletes_index = merged_idx
+
+ return obsoletes_index
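
A rough usage sketch for the helper above, assuming libmodulemd is available via GObject introspection and an obsoletes directory exists (the path here is hypothetical):

```python
import gi

gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd  # noqa: E402

from pungi.module_util import collect_module_obsoletes  # assumes this patch

# An empty index for demonstration; in the compose this would already
# hold module defaults collected earlier.
mod_index = Modulemd.ModuleIndex.new()

# Merge in any obsoletes found under a hypothetical work directory,
# restricted to the modules we actually load.
merged = collect_module_obsoletes(
    "/tmp/compose/work/global/module_obsoletes",  # hypothetical path
    ["nodejs", "perl"],
    mod_index=mod_index,
)
print(merged.get_module_names())
```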
diff --git a/pungi/notifier.py b/pungi/notifier.py
index 5eed865c..bef2ae63 100644
--- a/pungi/notifier.py
+++ b/pungi/notifier.py
@@ -81,9 +81,6 @@ class PungiNotifier(object):
self._update_args(kwargs)
- if self.compose:
- workdir = self.compose.paths.compose.topdir()
-
with self.lock:
for cmd in self.cmds:
self._run_script(cmd, msg, workdir, kwargs)
diff --git a/pungi/ostree/__init__.py b/pungi/ostree/__init__.py
index 03a02a73..49162692 100644
--- a/pungi/ostree/__init__.py
+++ b/pungi/ostree/__init__.py
@@ -65,6 +65,11 @@ def main(args=None):
action="store_true",
help="do not use rpm-ostree's built-in change detection",
)
+ treep.add_argument(
+ "--unified-core",
+ action="store_true",
+ help="use unified core mode in rpm-ostree",
+ )
installerp = subparser.add_parser(
"installer", help="Create an OSTree installer image"
diff --git a/pungi/ostree/tree.py b/pungi/ostree/tree.py
index a2ee379d..1ba138b3 100644
--- a/pungi/ostree/tree.py
+++ b/pungi/ostree/tree.py
@@ -43,6 +43,9 @@ class Tree(OSTree):
# because something went wrong.
"--touch-if-changed=%s.stamp" % self.commitid_file,
]
+ if self.unified_core:
+ # See https://github.com/coreos/rpm-ostree/issues/729
+ cmd.append("--unified-core")
if self.version:
# Add versioning metadata
cmd.append("--add-metadata-string=version=%s" % self.version)
@@ -121,6 +124,7 @@ class Tree(OSTree):
self.extra_config = self.args.extra_config
self.ostree_ref = self.args.ostree_ref
self.force_new_commit = self.args.force_new_commit
+ self.unified_core = self.args.unified_core
if self.extra_config or self.ostree_ref:
if self.extra_config:
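
The effect of the new flag is one extra argument on the rpm-ostree invocation. A minimal sketch of the resulting command list (tree file and repo paths are hypothetical):

```python
def build_compose_cmd(treefile, repo, unified_core=False, version=None):
    """Sketch of assembling an rpm-ostree compose command like the code above."""
    cmd = ["rpm-ostree", "compose", "tree", "--repo=%s" % repo]
    if unified_core:
        # See https://github.com/coreos/rpm-ostree/issues/729
        cmd.append("--unified-core")
    if version:
        cmd.append("--add-metadata-string=version=%s" % version)
    cmd.append(treefile)
    return cmd

print(build_compose_cmd("fedora.json", "/srv/ostree/repo", unified_core=True))
```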
diff --git a/pungi/paths.py b/pungi/paths.py
index 43b084c7..aff17a93 100644
--- a/pungi/paths.py
+++ b/pungi/paths.py
@@ -103,12 +103,23 @@ class LogPaths(object):
makedirs(path)
return path
- def log_file(self, arch, log_name, create_dir=True):
+ def koji_tasks_dir(self, create_dir=True):
+ """
+ Examples:
+ logs/global/koji-tasks
+ """
+ path = os.path.join(self.topdir(create_dir=create_dir), "koji-tasks")
+ if create_dir:
+ makedirs(path)
+ return path
+
+ def log_file(self, arch, log_name, create_dir=True, ext=None):
+ ext = ext or "log"
arch = arch or "global"
if log_name.endswith(".log"):
log_name = log_name[:-4]
return os.path.join(
- self.topdir(arch, create_dir=create_dir), "%s.%s.log" % (log_name, arch)
+ self.topdir(arch, create_dir=create_dir), "%s.%s.%s" % (log_name, arch, ext)
)
@@ -498,10 +509,23 @@ class WorkPaths(object):
makedirs(path)
return path
+ def module_obsoletes_dir(self, create_dir=True):
+ """
+ Example:
+ work/global/module_obsoletes
+ """
+ path = os.path.join(self.topdir(create_dir=create_dir), "module_obsoletes")
+ if create_dir:
+ makedirs(path)
+ return path
+
def pkgset_file_cache(self, pkgset_name):
"""
Returns the path to file in which the cached version of
PackageSetBase.file_cache should be stored.
+
+ Example:
+ work/global/pkgset_f33-compose_file_cache.pickle
"""
filename = "pkgset_%s_file_cache.pickle" % pkgset_name
return os.path.join(self.topdir(arch="global"), filename)
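
With the new `ext` parameter, JSON metadata can live next to regular logs. A small sketch of the naming behaviour under the patched `log_file` (the topdir is hard-coded here purely for illustration):

```python
import os

def log_file(topdir, arch, log_name, ext=None):
    """Mirror of the naming logic above: <log_name>.<arch>.<ext>."""
    ext = ext or "log"
    arch = arch or "global"
    if log_name.endswith(".log"):
        log_name = log_name[:-4]
    return os.path.join(topdir, "%s.%s.%s" % (log_name, arch, ext))

assert log_file("/logs", "x86_64", "createiso-Server-1-1", ext="json") == \
    "/logs/createiso-Server-1-1.x86_64.json"
assert log_file("/logs", None, "config-dump.log") == "/logs/config-dump.global.log"
```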
diff --git a/pungi/phases/__init__.py b/pungi/phases/__init__.py
index 7b28e4e5..3e124548 100644
--- a/pungi/phases/__init__.py
+++ b/pungi/phases/__init__.py
@@ -27,6 +27,7 @@ from .createiso import CreateisoPhase # noqa
from .extra_isos import ExtraIsosPhase # noqa
from .live_images import LiveImagesPhase # noqa
from .image_build import ImageBuildPhase # noqa
+from .image_container import ImageContainerPhase # noqa
from .osbuild import OSBuildPhase # noqa
from .repoclosure import RepoclosurePhase # noqa
from .test import TestPhase # noqa
diff --git a/pungi/phases/base.py b/pungi/phases/base.py
index 917dce26..bf306f1a 100644
--- a/pungi/phases/base.py
+++ b/pungi/phases/base.py
@@ -14,6 +14,8 @@
# along with this program; if not, see .
import logging
+import math
+import time
from pungi import util
@@ -58,6 +60,7 @@ class PhaseBase(object):
self.compose.log_warning("[SKIP ] %s" % self.msg)
self.finished = True
return
+ self._start_time = time.time()
self.compose.log_info("[BEGIN] %s" % self.msg)
self.compose.notifier.send("phase-start", phase_name=self.name)
self.run()
@@ -108,6 +111,13 @@ class PhaseBase(object):
self.pool.stop()
self.finished = True
self.compose.log_info("[DONE ] %s" % self.msg)
+
+ if hasattr(self, "_start_time"):
+ self.compose.log_info(
+ "PHASE %s took %d seconds"
+ % (self.name.upper(), math.ceil(time.time() - self._start_time))
+ )
+
if self.used_patterns is not None:
# We only want to report this if the config was actually queried.
self.report_unused_patterns()
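
The timing hook amounts to remembering a start timestamp and logging the ceiling of the elapsed seconds. A self-contained sketch of the same pattern:

```python
import math
import time

class TimedPhase(object):
    """Toy phase that reports its duration like PhaseBase.stop() above."""

    def __init__(self, name):
        self.name = name

    def start(self):
        self._start_time = time.time()

    def stop(self):
        # Only report when start() actually ran, matching the hasattr guard.
        if hasattr(self, "_start_time"):
            elapsed = math.ceil(time.time() - self._start_time)
            print("PHASE %s took %d seconds" % (self.name.upper(), elapsed))

phase = TimedPhase("createiso")
phase.start()
time.sleep(0.2)
phase.stop()  # prints: PHASE CREATEISO took 1 seconds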
diff --git a/pungi/phases/buildinstall.py b/pungi/phases/buildinstall.py
index 8d833364..d2119987 100644
--- a/pungi/phases/buildinstall.py
+++ b/pungi/phases/buildinstall.py
@@ -50,6 +50,9 @@ class BuildinstallPhase(PhaseBase):
# A set of (variant_uid, arch) pairs that completed successfully. This
# is needed to skip copying files for failed tasks.
self.pool.finished_tasks = set()
+        # A set of (variant_uid, arch) pairs that were reused from the
+        # previous compose.
+ self.pool.reused_tasks = set()
self.buildinstall_method = self.compose.conf.get("buildinstall_method")
self.lorax_use_koji_plugin = self.compose.conf.get("lorax_use_koji_plugin")
self.used_lorax = self.buildinstall_method == "lorax"
@@ -312,6 +315,18 @@ class BuildinstallPhase(PhaseBase):
in self.pool.finished_tasks
)
+ def reused(self, variant, arch):
+ """
+        Check if the buildinstall phase reused previous results for the given
+        variant and arch. If the phase is skipped, the results are considered
+        reused as well.
+ """
+ return (
+ super(BuildinstallPhase, self).skip()
+ or (variant.uid if self.used_lorax else None, arch)
+ in self.pool.reused_tasks
+ )
+
def get_kickstart_file(compose):
scm_dict = compose.conf.get("buildinstall_kickstart")
@@ -368,7 +383,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
# double-escape volid in yaboot.conf
new_volid = volid_escaped_2 if "yaboot" in config else volid_escaped
- ks = (" ks=hd:LABEL=%s:/ks.cfg" % new_volid) if ks_file else ""
+ ks = (" inst.ks=hd:LABEL=%s:/ks.cfg" % new_volid) if ks_file else ""
# pre-f18
data = re.sub(r":CDLABEL=[^ \n]*", r":CDLABEL=%s%s" % (new_volid, ks), data)
@@ -661,9 +676,16 @@ class BuildinstallThread(WorkerThread):
return None
compose.log_info("Loading old BUILDINSTALL phase metadata: %s", old_metadata)
- with open(old_metadata, "rb") as f:
- old_result = pickle.load(f)
- return old_result
+ try:
+ with open(old_metadata, "rb") as f:
+ old_result = pickle.load(f)
+ return old_result
+ except Exception as e:
+ compose.log_debug(
+ "Failed to load old BUILDINSTALL phase metadata %s : %s"
+ % (old_metadata, str(e))
+ )
+ return None
def _reuse_old_buildinstall_result(self, compose, arch, variant, cmd, pkgset_phase):
"""
@@ -729,7 +751,7 @@ class BuildinstallThread(WorkerThread):
# Ask Koji for all the RPMs in the `runroot_tag` and check that
# those installed in the old buildinstall buildroot are still in the
# very same versions/releases.
- koji_wrapper = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+ koji_wrapper = kojiwrapper.KojiWrapper(compose)
rpms = koji_wrapper.koji_proxy.listTaggedRPMS(
compose.conf.get("runroot_tag"), inherit=True, latest=True
)[0]
@@ -801,12 +823,15 @@ class BuildinstallThread(WorkerThread):
chown_paths.append(_get_log_dir(compose, variant, arch))
elif buildinstall_method == "buildinstall":
packages += ["anaconda"]
-
+ packages += get_arch_variant_data(
+ compose.conf, "buildinstall_packages", arch, variant
+ )
if self._reuse_old_buildinstall_result(
compose, arch, variant, cmd, pkgset_phase
):
self.copy_files(compose, variant, arch)
self.pool.finished_tasks.add((variant.uid if variant else None, arch))
+ self.pool.reused_tasks.add((variant.uid if variant else None, arch))
self.pool.log_info("[DONE ] %s" % msg)
return
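
The new `buildinstall_packages` lookup goes through the standard arch/variant matching, so extra runroot packages can be scoped per variant. A hypothetical configuration snippet (package names invented):

```python
# Hypothetical pungi.conf snippet: install extra packages into the
# buildinstall runroot for matching variants/arches.
buildinstall_packages = [
    ("^Server$", {"x86_64": ["hypothetical-branding-pkg"]}),
    ("^.*$", {"*": ["another-hypothetical-pkg"]}),
]
```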
diff --git a/pungi/phases/createiso.py b/pungi/phases/createiso.py
index cd20080f..481d38fb 100644
--- a/pungi/phases/createiso.py
+++ b/pungi/phases/createiso.py
@@ -18,6 +18,7 @@ import os
import random
import shutil
import stat
+import json
import productmd.treeinfo
from productmd.images import Image
@@ -36,6 +37,7 @@ from pungi.util import (
failable,
get_file_size,
get_mtime,
+ read_json_file,
)
from pungi.media_split import MediaSplitter, convert_media_size
from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
@@ -73,6 +75,170 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
return False
return bool(self.compose.conf.get("buildinstall_method", ""))
+ def _metadata_path(self, variant, arch, disc_num, disc_count):
+ return self.compose.paths.log.log_file(
+ arch,
+ "createiso-%s-%d-%d" % (variant.uid, disc_num, disc_count),
+ ext="json",
+ )
+
+ def save_reuse_metadata(self, cmd, variant, arch, opts):
+ """Save metadata for future composes to verify if the compose can be reused."""
+ metadata = {
+ "cmd": cmd,
+ "opts": opts._asdict(),
+ }
+
+ metadata_path = self._metadata_path(
+ variant, arch, cmd["disc_num"], cmd["disc_count"]
+ )
+ with open(metadata_path, "w") as f:
+ json.dump(metadata, f, indent=2)
+ return metadata
+
+ def _load_old_metadata(self, cmd, variant, arch):
+ metadata_path = self._metadata_path(
+ variant, arch, cmd["disc_num"], cmd["disc_count"]
+ )
+ old_path = self.compose.paths.old_compose_path(metadata_path)
+ self.logger.info(
+ "Loading old metadata for %s.%s from: %s", variant, arch, old_path
+ )
+ try:
+ return read_json_file(old_path)
+ except Exception:
+ return None
+
+ def perform_reuse(self, cmd, variant, arch, opts, iso_path):
+ """
+ Copy all related files from old compose to the new one. As a last step
+ add the new image to metadata.
+ """
+ linker = OldFileLinker(self.logger)
+ old_file_name = os.path.basename(iso_path)
+ current_file_name = os.path.basename(cmd["iso_path"])
+ try:
+ # Hardlink ISO and manifest
+ for suffix in ("", ".manifest"):
+ linker.link(iso_path + suffix, cmd["iso_path"] + suffix)
+ # Copy log files
+            # The log file name includes the filename of the image, so we
+            # need to find the old file under its old name and link it to
+            # the new name.
+ log_file = self.compose.paths.log.log_file(
+ arch, "createiso-%s" % current_file_name
+ )
+ old_log_file = self.compose.paths.old_compose_path(
+ self.compose.paths.log.log_file(arch, "createiso-%s" % old_file_name)
+ )
+ linker.link(old_log_file, log_file)
+ # Copy jigdo files
+ if opts.jigdo_dir:
+ old_jigdo_dir = self.compose.paths.old_compose_path(opts.jigdo_dir)
+ for suffix in (".template", ".jigdo"):
+ linker.link(
+ os.path.join(old_jigdo_dir, old_file_name) + suffix,
+ os.path.join(opts.jigdo_dir, current_file_name) + suffix,
+ )
+ except Exception:
+ # A problem happened while linking some file, let's clean up
+ # everything.
+ linker.abort()
+ raise
+ # Add image to manifest
+ add_iso_to_metadata(
+ self.compose,
+ variant,
+ arch,
+ cmd["iso_path"],
+ bootable=cmd["bootable"],
+ disc_num=cmd["disc_num"],
+ disc_count=cmd["disc_count"],
+ )
+
+ def try_reuse(self, cmd, variant, arch, opts):
+ """Try to reuse image from previous compose.
+
+ :returns bool: True if reuse was successful, False otherwise
+ """
+ if not self.compose.conf["createiso_allow_reuse"]:
+ return
+
+ log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)
+ current_metadata = self.save_reuse_metadata(cmd, variant, arch, opts)
+
+ if opts.buildinstall_method and not self.bi.reused(variant, arch):
+ # If buildinstall phase was not reused for some reason, we can not
+ # reuse any bootable image. If a package change caused rebuild of
+ # boot.iso, we would catch it here too, but there could be a
+ # configuration change in lorax template which would remain
+ # undetected.
+ self.logger.info("%s - boot configuration changed", log_msg)
+ return False
+
+ # Check old compose configuration: extra_files and product_ids can be
+ # reflected on ISO.
+ old_config = self.compose.load_old_compose_config()
+ if not old_config:
+ self.logger.info("%s - no config for old compose", log_msg)
+ return False
+ # Convert current configuration to JSON and back to encode it similarly
+ # to the old one
+ config = json.loads(json.dumps(self.compose.conf))
+ for opt in self.compose.conf:
+ # Skip a selection of options: these affect what packages can be
+ # included, which we explicitly check later on.
+ config_whitelist = set(
+ [
+ "gather_lookaside_repos",
+ "pkgset_koji_builds",
+ "pkgset_koji_scratch_tasks",
+ "pkgset_koji_module_builds",
+ ]
+ )
+ # Skip irrelevant options
+ config_whitelist.update(["osbs", "osbuild"])
+ if opt in config_whitelist:
+ continue
+
+ if old_config.get(opt) != config.get(opt):
+ self.logger.info("%s - option %s differs", log_msg, opt)
+ return False
+
+ old_metadata = self._load_old_metadata(cmd, variant, arch)
+ if not old_metadata:
+ self.logger.info("%s - no old metadata found", log_msg)
+ return False
+
+ # Test if volume ID matches - volid can be generated dynamically based on
+ # other values, and could change even if nothing else is different.
+ if current_metadata["opts"]["volid"] != old_metadata["opts"]["volid"]:
+ self.logger.info("%s - volume ID differs", log_msg)
+ return False
+
+ # Compare packages on the ISO.
+ if compare_packages(
+ old_metadata["opts"]["graft_points"],
+ current_metadata["opts"]["graft_points"],
+ ):
+ self.logger.info("%s - packages differ", log_msg)
+ return False
+
+ try:
+ self.perform_reuse(
+ cmd,
+ variant,
+ arch,
+ opts,
+ old_metadata["cmd"]["iso_path"],
+ )
+ return True
+ except Exception as exc:
+ self.compose.log_error(
+ "Error while reusing ISO for %s.%s: %s", variant, arch, exc
+ )
+ self.compose.traceback("createiso-reuse-%s-%s" % (variant, arch))
+ return False
+
def run(self):
symlink_isos_to = self.compose.conf.get("symlink_isos_to")
disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
@@ -172,21 +338,29 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
supported=self.compose.supported,
hfs_compat=self.compose.conf["iso_hfs_ppc64le_compatible"],
use_xorrisofs=self.compose.conf.get("createiso_use_xorrisofs"),
+ iso_level=get_iso_level_config(self.compose, variant, arch),
)
if bootable:
opts = opts._replace(
- buildinstall_method=self.compose.conf["buildinstall_method"]
+ buildinstall_method=self.compose.conf[
+ "buildinstall_method"
+ ],
+ boot_iso=os.path.join(os_tree, "images", "boot.iso"),
)
if self.compose.conf["create_jigdo"]:
jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
- script_file = os.path.join(
- self.compose.paths.work.tmp_dir(arch, variant),
- "createiso-%s.sh" % filename,
- )
+ # Try to reuse
+ if self.try_reuse(cmd, variant, arch, opts):
+ # Reuse was successful, go to next ISO
+ continue
+
+ script_dir = self.compose.paths.work.tmp_dir(arch, variant)
+ opts = opts._replace(script_dir=script_dir)
+ script_file = os.path.join(script_dir, "createiso-%s.sh" % filename)
with open(script_file, "w") as f:
createiso.write_script(opts, f)
cmd["cmd"] = ["bash", script_file]
@@ -202,6 +376,36 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
self.pool.start()
+def read_packages(graft_points):
+ """Read packages that were listed in given graft points file.
+
+    Only files under the Packages directory are considered. In particular,
+    this excludes .discinfo, .treeinfo and media.repo, as well as repodata
+    and any extra files.
+
+    Extra files are easier to check via configuration (the same name does
+    not imply the same content). Repodata depend entirely on the included
+    packages (and possibly the product id certificate), but are affected by
+    the current time, which can change checksums even when the data is the
+    same.
+ """
+ with open(graft_points) as f:
+ return set(
+ line.split("=", 1)[0]
+ for line in f
+ if line.startswith("Packages/") or "/Packages/" in line
+ )
+
+
+def compare_packages(old_graft_points, new_graft_points):
+ """Read packages from the two files and compare them.
+
+ :returns bool: True if there are differences, False otherwise
+ """
+ old_files = read_packages(old_graft_points)
+ new_files = read_packages(new_graft_points)
+ return old_files != new_files
+
+
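
A quick sketch of what the two helpers above operate on: graft-point files map target paths to source paths, and only entries under a Packages directory count. The file contents here are invented; the import assumes this patch is applied:

```python
import tempfile

from pungi.phases.createiso import compare_packages  # assumes this patch

def write_graft_points(lines):
    f = tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False)
    f.write("\n".join(lines) + "\n")
    f.close()
    return f.name

old = write_graft_points([
    "Packages/b/bash-5.1-1.x86_64.rpm=/mnt/koji/bash-5.1-1.x86_64.rpm",
    "repodata/repomd.xml=/tmp/work/repomd.xml",  # ignored by read_packages
])
new = write_graft_points([
    "Packages/b/bash-5.1-2.x86_64.rpm=/mnt/koji/bash-5.1-2.x86_64.rpm",
    "repodata/repomd.xml=/tmp/work/repomd.xml",
])

# True: the bash NVR changed, and only "Packages/..." keys are compared.
print(compare_packages(old, new))
```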
class CreateIsoThread(WorkerThread):
def fail(self, compose, cmd, variant, arch):
self.pool.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])
@@ -324,15 +528,13 @@ def add_iso_to_metadata(
return img
-def run_createiso_command(
- num, compose, bootable, arch, cmd, mounts, log_file, with_jigdo=True
-):
+def run_createiso_command(num, compose, bootable, arch, cmd, mounts, log_file):
packages = [
"coreutils",
"xorriso" if compose.conf.get("createiso_use_xorrisofs") else "genisoimage",
"isomd5sum",
]
- if with_jigdo and compose.conf["create_jigdo"]:
+ if compose.conf["create_jigdo"]:
packages.append("jigdo")
if bootable:
extra_packages = {
@@ -346,7 +548,7 @@ def run_createiso_command(
build_arch = arch
if runroot.runroot_method == "koji" and not bootable:
runroot_tag = compose.conf["runroot_tag"]
- koji_wrapper = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+ koji_wrapper = kojiwrapper.KojiWrapper(compose)
koji_proxy = koji_wrapper.koji_proxy
tag_info = koji_proxy.getTag(runroot_tag)
if not tag_info:
@@ -598,3 +800,36 @@ def create_hardlinks(staging_dir, log_file):
"""
cmd = ["/usr/sbin/hardlink", "-c", "-vv", staging_dir]
run(cmd, logfile=log_file, show_cmd=True)
+
+
+class OldFileLinker(object):
+ """
+ A wrapper around os.link that remembers which files were linked and can
+ clean them up.
+ """
+
+ def __init__(self, logger):
+ self.logger = logger
+ self.linked_files = []
+
+ def link(self, src, dst):
+ self.logger.debug("Hardlinking %s to %s", src, dst)
+ os.link(src, dst)
+ self.linked_files.append(dst)
+
+ def abort(self):
+ """Clean up all files created by this instance."""
+ for f in self.linked_files:
+ os.unlink(f)
+
+
+def get_iso_level_config(compose, variant, arch):
+ """
+ Get configured ISO level for this variant and architecture.
+ """
+ level = compose.conf.get("iso_level")
+ if isinstance(level, list):
+ level = None
+ for c in get_arch_variant_data(compose.conf, "iso_level", arch, variant):
+ level = c
+ return level
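
The `iso_level` option can therefore be a plain integer applied everywhere, or an arch/variant-scoped list. A hypothetical configuration snippet showing both forms:

```python
# Global form: use ISO9660 level 3 for every image.
iso_level = 3

# Scoped form: only a hypothetical Server variant on x86_64 gets level 4;
# everything else falls back to the tool default (get_iso_level_config
# returns None when no entry matches).
iso_level = [
    ("^Server$", {"x86_64": 4}),
]
```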
diff --git a/pungi/phases/createrepo.py b/pungi/phases/createrepo.py
index 0ad8b3f9..d784f023 100644
--- a/pungi/phases/createrepo.py
+++ b/pungi/phases/createrepo.py
@@ -16,7 +16,6 @@
__all__ = ("create_variant_repo",)
-
import copy
import errno
import glob
@@ -25,19 +24,22 @@ import shutil
import threading
import xml.dom.minidom
-from kobo.threads import ThreadPool, WorkerThread
-from kobo.shortcuts import run, relative_path
-
-from ..wrappers.scm import get_dir_from_scm
-from ..wrappers.createrepo import CreaterepoWrapper
-from .base import PhaseBase
-from ..util import get_arch_variant_data, temp_dir
-from ..module_util import Modulemd, collect_module_defaults
-
-import productmd.rpms
import productmd.modules
+import productmd.rpms
+from kobo.shortcuts import relative_path, run
+from kobo.threads import ThreadPool, WorkerThread
+from ..module_util import Modulemd, collect_module_defaults, collect_module_obsoletes
+from ..util import (
+ get_arch_variant_data,
+ read_single_module_stream_from_file,
+ temp_dir,
+)
+from ..wrappers.createrepo import CreaterepoWrapper
+from ..wrappers.scm import get_dir_from_scm
+from .base import PhaseBase
+CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"
createrepo_lock = threading.Lock()
createrepo_dirs = set()
@@ -79,6 +81,7 @@ class CreaterepoPhase(PhaseBase):
get_dir_from_scm(
self.compose.conf["createrepo_extra_modulemd"][variant.uid],
self.compose.paths.work.tmp_dir(variant=variant, create_dir=False),
+ compose=self.compose,
)
self.pool.queue_put((self.compose, None, variant, "srpm"))
@@ -188,6 +191,23 @@ def create_variant_repo(
comps_path = None
if compose.has_comps and pkg_type == "rpm":
comps_path = compose.paths.work.comps(arch=arch, variant=variant)
+
+ if compose.conf["createrepo_enable_cache"]:
+ cachedir = os.path.join(
+ CACHE_TOPDIR,
+ "%s-%s" % (compose.conf["release_short"], os.getuid()),
+ )
+ if not os.path.exists(cachedir):
+ try:
+ os.makedirs(cachedir)
+ except Exception as e:
+ compose.log_warning(
+ "Cache disabled because cannot create cache dir %s %s"
+ % (cachedir, str(e))
+ )
+ cachedir = None
+ else:
+ cachedir = None
cmd = repo.get_createrepo_cmd(
repo_dir,
update=True,
@@ -203,6 +223,7 @@ def create_variant_repo(
oldpackagedirs=old_package_dirs,
use_xz=compose.conf["createrepo_use_xz"],
extra_args=compose.conf["createrepo_extra_args"],
+ cachedir=cachedir,
)
log_file = compose.paths.log.log_file(
arch, "createrepo-%s.%s" % (variant, pkg_type)
@@ -245,12 +266,15 @@ def create_variant_repo(
defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
)
+ obsoletes_dir = compose.paths.work.module_obsoletes_dir()
+ mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
+
# Add extra modulemd files
if variant.uid in compose.conf.get("createrepo_extra_modulemd", {}):
compose.log_debug("Adding extra modulemd for %s.%s", variant.uid, arch)
dirname = compose.paths.work.tmp_dir(variant=variant, create_dir=False)
for filepath in glob.glob(os.path.join(dirname, arch) + "/*.yaml"):
- module_stream = Modulemd.ModuleStream.read_file(filepath, strict=True)
+ module_stream = read_single_module_stream_from_file(filepath)
if not mod_index.add_module_stream(module_stream):
raise RuntimeError(
"Failed parsing modulemd data from %s" % filepath
@@ -343,7 +367,7 @@ def get_productids_from_scm(compose):
tmp_dir = compose.mkdtemp(prefix="pungi_")
try:
- get_dir_from_scm(product_id, tmp_dir)
+ get_dir_from_scm(product_id, tmp_dir, compose=compose)
except OSError as e:
if e.errno == errno.ENOENT and product_id_allow_missing:
compose.log_warning("No product IDs in %s" % product_id)
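
The cache directory is keyed on the release short name and UID, matching the documented `/var/cache/pungi/createrepo_c/$release_short-$uid` layout. A small sketch of the path computation:

```python
import os

CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"

def get_cachedir(release_short, uid=None):
    """Compute the createrepo_c cache dir, e.g. .../Fedora-1000."""
    uid = os.getuid() if uid is None else uid
    return os.path.join(CACHE_TOPDIR, "%s-%s" % (release_short, uid))

assert get_cachedir("Fedora", 1000) == "/var/cache/pungi/createrepo_c/Fedora-1000"
```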
diff --git a/pungi/phases/extra_isos.py b/pungi/phases/extra_isos.py
index 4f561953..5dee590d 100644
--- a/pungi/phases/extra_isos.py
+++ b/pungi/phases/extra_isos.py
@@ -14,6 +14,8 @@
# along with this program; if not, see .
import os
+import hashlib
+import json
from kobo.shortcuts import force_list
from kobo.threads import ThreadPool, WorkerThread
@@ -28,8 +30,17 @@ from pungi.phases.createiso import (
copy_boot_images,
run_createiso_command,
load_and_tweak_treeinfo,
+ compare_packages,
+ OldFileLinker,
+ get_iso_level_config,
+)
+from pungi.util import (
+ failable,
+ get_format_substs,
+ get_variant_data,
+ get_volid,
+ read_json_file,
)
-from pungi.util import failable, get_format_substs, get_variant_data, get_volid
from pungi.wrappers import iso
from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
@@ -37,9 +48,10 @@ from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
name = "extra_isos"
- def __init__(self, compose):
+ def __init__(self, compose, buildinstall_phase):
super(ExtraIsosPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.logger)
+ self.bi = buildinstall_phase
def validate(self):
for variant in self.compose.get_variants(types=["variant"]):
@@ -65,13 +77,17 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
commands.append((config, variant, arch))
for (config, variant, arch) in commands:
- self.pool.add(ExtraIsosThread(self.pool))
+ self.pool.add(ExtraIsosThread(self.pool, self.bi))
self.pool.queue_put((self.compose, config, variant, arch))
self.pool.start()
class ExtraIsosThread(WorkerThread):
+ def __init__(self, pool, buildinstall_phase):
+ super(ExtraIsosThread, self).__init__(pool)
+ self.bi = buildinstall_phase
+
def process(self, item, num):
self.num = num
compose, config, variant, arch = item
@@ -115,35 +131,42 @@ class ExtraIsosThread(WorkerThread):
supported=compose.supported,
hfs_compat=compose.conf["iso_hfs_ppc64le_compatible"],
use_xorrisofs=compose.conf.get("createiso_use_xorrisofs"),
+ iso_level=get_iso_level_config(compose, variant, arch),
)
+ os_tree = compose.paths.compose.os_tree(arch, variant)
if compose.conf["create_jigdo"]:
jigdo_dir = compose.paths.compose.jigdo_dir(arch, variant)
- os_tree = compose.paths.compose.os_tree(arch, variant)
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
if bootable:
opts = opts._replace(
- buildinstall_method=compose.conf["buildinstall_method"]
+ buildinstall_method=compose.conf["buildinstall_method"],
+ boot_iso=os.path.join(os_tree, "images", "boot.iso"),
)
- script_file = os.path.join(
- compose.paths.work.tmp_dir(arch, variant), "extraiso-%s.sh" % filename
- )
- with open(script_file, "w") as f:
- createiso.write_script(opts, f)
+ # Check if it can be reused.
+ hash = hashlib.sha256()
+ hash.update(json.dumps(config, sort_keys=True).encode("utf-8"))
+ config_hash = hash.hexdigest()
- run_createiso_command(
- self.num,
- compose,
- bootable,
- arch,
- ["bash", script_file],
- [compose.topdir],
- log_file=compose.paths.log.log_file(
- arch, "extraiso-%s" % os.path.basename(iso_path)
- ),
- with_jigdo=compose.conf["create_jigdo"],
- )
+ if not self.try_reuse(compose, variant, arch, config_hash, opts):
+ script_dir = compose.paths.work.tmp_dir(arch, variant)
+ opts = opts._replace(script_dir=script_dir)
+ script_file = os.path.join(script_dir, "extraiso-%s.sh" % filename)
+ with open(script_file, "w") as f:
+ createiso.write_script(opts, f)
+
+ run_createiso_command(
+ self.num,
+ compose,
+ bootable,
+ arch,
+ ["bash", script_file],
+ [compose.topdir],
+ log_file=compose.paths.log.log_file(
+ arch, "extraiso-%s" % os.path.basename(iso_path)
+ ),
+ )
img = add_iso_to_metadata(
compose,
@@ -155,8 +178,155 @@ class ExtraIsosThread(WorkerThread):
)
img._max_size = config.get("max_size")
+ save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path)
+
self.pool.log_info("[DONE ] %s" % msg)
+ def try_reuse(self, compose, variant, arch, config_hash, opts):
+ # Check explicit config
+ if not compose.conf["extraiso_allow_reuse"]:
+ return
+
+ log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)
+
+ if opts.buildinstall_method and not self.bi.reused(variant, arch):
+ # If buildinstall phase was not reused for some reason, we can not
+ # reuse any bootable image. If a package change caused rebuild of
+ # boot.iso, we would catch it here too, but there could be a
+ # configuration change in lorax template which would remain
+ # undetected.
+ self.pool.log_info("%s - boot configuration changed", log_msg)
+ return False
+
+ # Check old compose configuration: extra_files and product_ids can be
+ # reflected on ISO.
+ old_config = compose.load_old_compose_config()
+ if not old_config:
+ self.pool.log_info("%s - no config for old compose", log_msg)
+ return False
+ # Convert current configuration to JSON and back to encode it similarly
+ # to the old one
+ config = json.loads(json.dumps(compose.conf))
+ for opt in compose.conf:
+ # Skip a selection of options: these affect what packages can be
+ # included, which we explicitly check later on.
+ config_whitelist = set(
+ [
+ "gather_lookaside_repos",
+ "pkgset_koji_builds",
+ "pkgset_koji_scratch_tasks",
+ "pkgset_koji_module_builds",
+ ]
+ )
+ # Skip irrelevant options
+ config_whitelist.update(["osbs", "osbuild"])
+ if opt in config_whitelist:
+ continue
+
+ if old_config.get(opt) != config.get(opt):
+ self.pool.log_info("%s - option %s differs", log_msg, opt)
+ return False
+
+ old_metadata = load_old_metadata(compose, variant, arch, config_hash)
+ if not old_metadata:
+ self.pool.log_info("%s - no old metadata found", log_msg)
+ return False
+
+ # Test if volume ID matches - volid can be generated dynamically based on
+ # other values, and could change even if nothing else is different.
+ if opts.volid != old_metadata["opts"]["volid"]:
+ self.pool.log_info("%s - volume ID differs", log_msg)
+ return False
+
+ # Compare packages on the ISO.
+ if compare_packages(
+ old_metadata["opts"]["graft_points"],
+ opts.graft_points,
+ ):
+ self.pool.log_info("%s - packages differ", log_msg)
+ return False
+
+ try:
+ self.perform_reuse(
+ compose,
+ variant,
+ arch,
+ opts,
+ old_metadata["opts"]["output_dir"],
+ old_metadata["opts"]["iso_name"],
+ )
+ return True
+ except Exception as exc:
+ self.pool.log_error(
+ "Error while reusing ISO for %s.%s: %s", variant, arch, exc
+ )
+ compose.traceback("extraiso-reuse-%s-%s-%s" % (variant, arch, config_hash))
+ return False
+
+ def perform_reuse(self, compose, variant, arch, opts, old_iso_dir, old_file_name):
+ """
+ Copy all related files from old compose to the new one. As a last step
+ add the new image to metadata.
+ """
+ linker = OldFileLinker(self.pool._logger)
+ old_iso_path = os.path.join(old_iso_dir, old_file_name)
+ iso_path = os.path.join(opts.output_dir, opts.iso_name)
+ try:
+ # Hardlink ISO and manifest
+ for suffix in ("", ".manifest"):
+ linker.link(old_iso_path + suffix, iso_path + suffix)
+ # Copy log files
+            # The log file name includes the filename of the image, so we
+            # need to find the old file under its old name and link it to
+            # the new name.
+ log_file = compose.paths.log.log_file(arch, "extraiso-%s" % opts.iso_name)
+ old_log_file = compose.paths.old_compose_path(
+ compose.paths.log.log_file(arch, "extraiso-%s" % old_file_name)
+ )
+ linker.link(old_log_file, log_file)
+ # Copy jigdo files
+ if opts.jigdo_dir:
+ old_jigdo_dir = compose.paths.old_compose_path(opts.jigdo_dir)
+ for suffix in (".template", ".jigdo"):
+ linker.link(
+ os.path.join(old_jigdo_dir, old_file_name) + suffix,
+ os.path.join(opts.jigdo_dir, opts.iso_name) + suffix,
+ )
+ except Exception:
+ # A problem happened while linking some file, let's clean up
+ # everything.
+ linker.abort()
+ raise
+
+
+def save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path):
+ """
+ Save metadata for possible reuse of this image. The file name is determined
+ from the hash of a configuration snippet for this image. Any change in that
+    configuration in the next compose will change the hash, and reuse will be
+ blocked.
+ """
+ metadata = {"opts": opts._asdict()}
+ metadata_path = compose.paths.log.log_file(
+ arch,
+ "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
+ ext="json",
+ )
+ with open(metadata_path, "w") as f:
+ json.dump(metadata, f, indent=2)
+
+
+def load_old_metadata(compose, variant, arch, config_hash):
+ metadata_path = compose.paths.log.log_file(
+ arch,
+ "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
+ ext="json",
+ )
+ old_path = compose.paths.old_compose_path(metadata_path)
+ try:
+ return read_json_file(old_path)
+ except Exception:
+ return None
+
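
The reuse metadata file name embeds a SHA-256 of the JSON-serialized config snippet, so any configuration change produces a different file name and silently blocks reuse. A sketch of the naming scheme (the config content is invented):

```python
import hashlib
import json

config = {"include_variants": ["Client"], "max_size": 4700000000}  # invented

config_hash = hashlib.sha256(
    json.dumps(config, sort_keys=True).encode("utf-8")
).hexdigest()

# Mirrors the log-file naming used above for a hypothetical variant/arch.
print("extraiso-reuse-%s-%s-%s.%s.json" % ("Server", "x86_64", config_hash, "x86_64"))
```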
def get_extra_files(compose, variant, arch, extra_files):
"""Clone the configured files into a directory from where they can be
diff --git a/pungi/phases/gather/__init__.py b/pungi/phases/gather/__init__.py
index bf190335..32e85eef 100644
--- a/pungi/phases/gather/__init__.py
+++ b/pungi/phases/gather/__init__.py
@@ -14,51 +14,49 @@
# along with this program; if not, see .
+import glob
import json
import os
import shutil
import threading
-import six
-from six.moves import cPickle as pickle
from kobo.rpmlib import parse_nvra
from kobo.shortcuts import run
from productmd.rpms import Rpms
+from six.moves import cPickle as pickle
try:
from queue import Queue
except ImportError:
from Queue import Queue
-from pungi.wrappers.scm import get_file_from_scm
-from .link import link_files
-from ...wrappers.createrepo import CreaterepoWrapper
import pungi.wrappers.kojiwrapper
-
-from pungi.compose import get_ordered_variant_uids
from pungi.arch import get_compatible_arches, split_name_arch
+from pungi.compose import get_ordered_variant_uids
+from pungi.module_util import (
+ Modulemd,
+ collect_module_defaults,
+ collect_module_obsoletes,
+)
from pungi.phases.base import PhaseBase
-from pungi.util import get_arch_data, get_arch_variant_data, get_variant_data, makedirs
-from pungi.module_util import Modulemd, collect_module_defaults
from pungi.phases.createrepo import add_modular_metadata
+from pungi.util import get_arch_data, get_arch_variant_data, get_variant_data, makedirs
+from pungi.wrappers.scm import get_file_from_scm
+
+from ...wrappers.createrepo import CreaterepoWrapper
+from .link import link_files
def get_gather_source(name):
import pungi.phases.gather.sources
- from .source import GatherSourceContainer
- GatherSourceContainer.register_module(pungi.phases.gather.sources)
- container = GatherSourceContainer()
- return container["GatherSource%s" % name]
+ return pungi.phases.gather.sources.ALL_SOURCES[name.lower()]
def get_gather_method(name):
import pungi.phases.gather.methods
- from .method import GatherMethodContainer
- GatherMethodContainer.register_module(pungi.phases.gather.methods)
- container = GatherMethodContainer()
- return container["GatherMethod%s" % name]
+ return pungi.phases.gather.methods.ALL_METHODS[name.lower()]
class GatherPhase(PhaseBase):
@@ -87,10 +85,11 @@ class GatherPhase(PhaseBase):
if variant.modules:
errors.append("Modular compose requires libmodulemd package.")
- # check whether variants from configuration value
- # 'variant_as_lookaside' are correct
variant_as_lookaside = self.compose.conf.get("variant_as_lookaside", [])
all_variants = self.compose.all_variants
+
+ # check whether variants from configuration value
+ # 'variant_as_lookaside' are correct
for (requiring, required) in variant_as_lookaside:
if requiring in all_variants and required not in all_variants:
errors.append(
@@ -98,6 +97,22 @@ class GatherPhase(PhaseBase):
"required by %r" % (required, requiring)
)
+ # check whether variants from configuration value
+ # 'variant_as_lookaside' have same architectures
+ for (requiring, required) in variant_as_lookaside:
+ if (
+ requiring in all_variants
+ and required in all_variants
+ and not set(all_variants[requiring].arches).issubset(
+ set(all_variants[required].arches)
+ )
+ ):
+ errors.append(
+ "variant_as_lookaside: architectures of variant '%s' "
+ "aren't subset of architectures of variant '%s'"
+ % (requiring, required)
+ )
+
if errors:
raise ValueError("\n".join(errors))
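
The new validation boils down to a set-subset test between the two variants' architecture lists. A minimal standalone sketch (variant names and arches invented):

```python
def check_lookaside_arches(variant_as_lookaside, variant_arches):
    """Yield errors when a requiring variant has arches its lookaside lacks."""
    for requiring, required in variant_as_lookaside:
        if requiring not in variant_arches or required not in variant_arches:
            continue
        if not set(variant_arches[requiring]).issubset(variant_arches[required]):
            yield (
                "variant_as_lookaside: architectures of variant '%s' "
                "aren't a subset of architectures of variant '%s'"
                % (requiring, required)
            )

errors = list(
    check_lookaside_arches(
        [("Server", "Everything")],
        {"Server": ["x86_64", "s390x"], "Everything": ["x86_64"]},
    )
)
print(errors)  # one error: Server needs s390x, which Everything lacks
```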
@@ -178,27 +193,19 @@ def load_old_gather_result(compose, arch, variant):
return None
compose.log_info("Loading old GATHER phase results: %s", old_gather_result)
- with open(old_gather_result, "rb") as f:
- old_result = pickle.load(f)
- return old_result
-
-
-def load_old_compose_config(compose):
- """
- Helper method to load Pungi config dump from old compose.
- """
- config_dump_full = compose.paths.log.log_file("global", "config-dump")
- config_dump_full = compose.paths.old_compose_path(config_dump_full)
- if not config_dump_full:
+ try:
+ with open(old_gather_result, "rb") as f:
+ old_result = pickle.load(f)
+ return old_result
+ except Exception as e:
+ compose.log_debug(
+ "Failed to load old GATHER phase results %s : %s"
+ % (old_gather_result, str(e))
+ )
return None
- compose.log_info("Loading old config file: %s", config_dump_full)
- with open(config_dump_full, "r") as f:
- old_config = json.load(f)
- return old_config
-
-def reuse_old_gather_packages(compose, arch, variant, package_sets):
+def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
"""
Tries to reuse `gather_packages` result from older compose.
@@ -206,6 +213,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
:param str arch: Architecture to reuse old gather data for.
:param str variant: Variant to reuse old gather data for.
:param list package_sets: List of package sets to gather packages from.
+ :param str methods: Gather method.
:return: Old `gather_packages` result or None if old result cannot be used.
"""
log_msg = "Cannot reuse old GATHER phase results - %s"
@@ -218,38 +226,38 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
compose.log_info(log_msg % "no old gather results.")
return
- old_config = load_old_compose_config(compose)
+ old_config = compose.load_old_compose_config()
if old_config is None:
compose.log_info(log_msg % "no old compose config dump.")
return
+ # Do not reuse when required variant is not reused.
+ if not hasattr(compose, "_gather_reused_variant_arch"):
+ setattr(compose, "_gather_reused_variant_arch", [])
+ variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
+ for (requiring, required) in variant_as_lookaside:
+ if (
+ requiring == variant.uid
+ and (required, arch) not in compose._gather_reused_variant_arch
+ ):
+ compose.log_info(
+ log_msg % "variant %s as lookaside is not reused." % required
+ )
+ return
+
+ # Do not reuse if there's external lookaside repo.
+ with open(compose.paths.log.log_file("global", "config-dump"), "r") as f:
+ config_dump = json.load(f)
+ if config_dump.get("gather_lookaside_repos") or old_config.get(
+ "gather_lookaside_repos"
+ ):
+ compose.log_info(log_msg % "there's external lookaside repo.")
+ return
+
# The dumps/loads is needed to convert all unicode strings to non-unicode ones.
config = json.loads(json.dumps(compose.conf))
for opt, value in old_config.items():
- # Gather lookaside repos are updated during the gather phase. Check that
- # the gather_lookaside_repos except the ones added are the same.
- if opt == "gather_lookaside_repos" and opt in config:
- value_to_compare = []
- # Filter out repourls which starts with `compose.topdir` and also remove
- # their parent list in case it would be empty.
- for variant, per_arch_repos in config[opt]:
- per_arch_repos_to_compare = {}
- for arch, repourl in per_arch_repos.items():
- # The gather_lookaside_repos config allows setting multiple repourls
- # using list, but `_update_config` always uses strings. Therefore we
- # only try to filter out string_types.
- if not isinstance(repourl, six.string_types):
- continue
- if not repourl.startswith(compose.topdir):
- per_arch_repos_to_compare[arch] = repourl
- if per_arch_repos_to_compare:
- value_to_compare.append([variant, per_arch_repos_to_compare])
- if value != value_to_compare:
- compose.log_info(
- log_msg
- % ("compose configuration option gather_lookaside_repos changed.")
- )
- return
+ if opt == "gather_lookaside_repos":
continue
# Skip checking for frequently changing configuration options which do *not*
@@ -378,6 +386,30 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
compose.log_info(log_msg % "some RPMs have been removed.")
return
+ compose._gather_reused_variant_arch.append((variant.uid, arch))
+
+ # Copy old gather log for debugging
+ try:
+ if methods == "hybrid":
+ log_dir = compose.paths.log.topdir(arch, create_dir=False)
+ old_log_dir = compose.paths.old_compose_path(log_dir)
+ for log_file in glob.glob(
+ os.path.join(old_log_dir, "hybrid-depsolver-%s-iter-*" % variant)
+ ):
+ compose.log_info(
+ "Copying old gather log %s to %s" % (log_file, log_dir)
+ )
+ shutil.copy2(log_file, log_dir)
+ else:
+ log_dir = os.path.dirname(
+ compose.paths.work.pungi_log(arch, variant, create_dir=False)
+ )
+ old_log_dir = compose.paths.old_compose_path(log_dir)
+ compose.log_info("Copying old gather log %s to %s" % (old_log_dir, log_dir))
+ shutil.copytree(old_log_dir, log_dir)
+ except Exception as e:
+ compose.log_warning("Copying old gather log failed: %s" % str(e))
+
return result
@@ -404,7 +436,9 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
prepopulate = get_prepopulate_packages(compose, arch, variant)
fulltree_excludes = fulltree_excludes or set()
- reused_result = reuse_old_gather_packages(compose, arch, variant, package_sets)
+ reused_result = reuse_old_gather_packages(
+ compose, arch, variant, package_sets, methods
+ )
if reused_result:
result = reused_result
elif methods == "hybrid":
@@ -507,7 +541,8 @@ def write_packages(compose, arch, variant, pkg_map, path_prefix):
def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs=None):
- """Remove parent variant's packages from pkg_map <-- it gets modified in this function
+ """Remove parent variant's packages from pkg_map <-- it gets modified in
+ this function
There are three cases where changes may happen:
@@ -613,19 +648,37 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
)
+ "/",
"koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
- compose.conf["koji_profile"]
+ compose
+ ).koji_module.config.topdir.rstrip("/")
+ + "/",
+ "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
+ compose
).koji_module.config.topdir.rstrip("/")
+ "/",
}
path_prefix = prefixes[compose.conf["pkgset_source"]]()
+ package_list = set()
+ for pkg_arch in pkg_map.keys():
+ try:
+ for pkg_type, packages in pkg_map[pkg_arch][variant.uid].items():
+ # We want all packages for current arch, and SRPMs for any
+ # arch. Ultimately there will only be one source repository, so
+ # we need a union of all SRPMs.
+ if pkg_type == "srpm" or pkg_arch == arch:
+ for pkg in packages:
+ pkg = pkg["path"]
+ if path_prefix and pkg.startswith(path_prefix):
+ pkg = pkg[len(path_prefix) :]
+ package_list.add(pkg)
+ except KeyError:
+ raise RuntimeError(
+ "Variant '%s' does not have architecture " "'%s'!" % (variant, pkg_arch)
+ )
+
pkglist = compose.paths.work.lookaside_package_list(arch=arch, variant=variant)
with open(pkglist, "w") as f:
- for packages in pkg_map[arch][variant.uid].values():
- for pkg in packages:
- pkg = pkg["path"]
- if path_prefix and pkg.startswith(path_prefix):
- pkg = pkg[len(path_prefix) :]
- f.write("%s\n" % pkg)
+ for pkg in sorted(package_list):
+ f.write("%s\n" % pkg)
cr = CreaterepoWrapper(compose.conf["createrepo_c"])
update_metadata = None
@@ -661,6 +714,8 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
collect_module_defaults(
defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
)
+ obsoletes_dir = compose.paths.work.module_obsoletes_dir()
+ mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
log_file = compose.paths.log.log_file(
arch, "lookaside_repo_modules_%s" % (variant.uid)
@@ -736,6 +791,10 @@ def _gather_variants(
try:
que.put((arch, gather_packages(*args, **kwargs)))
except Exception as exc:
+ compose.log_error(
+ "Error in gathering for %s.%s: %s", variant, arch, exc
+ )
+ compose.traceback("gather-%s-%s" % (variant, arch))
errors.put(exc)
# Run gather_packages() in parallel with multi threads and store
diff --git a/pungi/phases/gather/method.py b/pungi/phases/gather/method.py
index 7feb835f..94e5460b 100644
--- a/pungi/phases/gather/method.py
+++ b/pungi/phases/gather/method.py
@@ -14,15 +14,6 @@
# along with this program; if not, see .
-import kobo.plugins
-
-
-class GatherMethodBase(kobo.plugins.Plugin):
+class GatherMethodBase(object):
def __init__(self, compose):
self.compose = compose
-
-
-class GatherMethodContainer(kobo.plugins.PluginContainer):
- @classmethod
- def normalize_name(cls, name):
- return name.lower()
diff --git a/pungi/phases/gather/methods/__init__.py b/pungi/phases/gather/methods/__init__.py
index e69de29b..905edf70 100644
--- a/pungi/phases/gather/methods/__init__.py
+++ b/pungi/phases/gather/methods/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see .
+
+from .method_deps import GatherMethodDeps
+from .method_nodeps import GatherMethodNodeps
+from .method_hybrid import GatherMethodHybrid
+
+ALL_METHODS = {
+ "deps": GatherMethodDeps,
+ "nodeps": GatherMethodNodeps,
+ "hybrid": GatherMethodHybrid,
+}
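
Dropping kobo's plugin container means method lookup is now a plain dict access. Under this patch, resolution reduces to something like:

```python
from pungi.phases.gather import get_gather_method  # assumes this patch

# "Hybrid", "hybrid" and "HYBRID" all resolve to the same class, since the
# lookup lower-cases the configured name before indexing ALL_METHODS.
method_cls = get_gather_method("Hybrid")
print(method_cls.__name__)  # GatherMethodHybrid
```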
diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py
index a0e0bee6..4b785629 100644
--- a/pungi/phases/gather/methods/method_deps.py
+++ b/pungi/phases/gather/methods/method_deps.py
@@ -15,6 +15,7 @@
import os
+import shutil
from kobo.shortcuts import run
from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
@@ -31,8 +32,6 @@ import pungi.phases.gather.method
class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
- enabled = True
-
def __call__(
self,
arch,
@@ -243,8 +242,19 @@ def resolve_deps(compose, arch, variant, source_name=None):
)
     # Use temp working directory as workaround for
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
- with temp_dir(prefix="pungi_") as tmp_dir:
- run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir, env=os.environ)
+ with temp_dir(prefix="pungi_") as work_dir:
+ run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)
+
+ # Clean up tmp dir
+ # Workaround for rpm not honoring sgid bit which only appears when yum is used.
+ yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
+ if os.path.isdir(yumroot_dir):
+ try:
+ shutil.rmtree(yumroot_dir)
+ except Exception as e:
+ compose.log_warning(
+ "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
+ )
with open(pungi_log, "r") as f:
packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)
diff --git a/pungi/phases/gather/methods/method_hybrid.py b/pungi/phases/gather/methods/method_hybrid.py
index 5d143199..32fa85e8 100644
--- a/pungi/phases/gather/methods/method_hybrid.py
+++ b/pungi/phases/gather/methods/method_hybrid.py
@@ -60,8 +60,6 @@ class FakePackage(object):
class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
- enabled = True
-
def __init__(self, *args, **kwargs):
super(GatherMethodHybrid, self).__init__(*args, **kwargs)
self.package_maps = {}
@@ -351,8 +349,11 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
         # There are two ways the debuginfo package can be named. We
# want to get them all.
- for pattern in ["%s-debuginfo", "%s-debugsource"]:
- debuginfo_name = pattern % pkg.name
+ source_name = kobo.rpmlib.parse_nvra(pkg.rpm_sourcerpm)["name"]
+ for debuginfo_name in [
+ "%s-debuginfo" % pkg.name,
+ "%s-debugsource" % source_name,
+ ]:
debuginfo = self._get_debuginfo(debuginfo_name, pkg_arch)
for dbg in debuginfo:
# For each debuginfo package that matches on name and
@@ -501,6 +502,27 @@ def _make_result(paths):
return [{"path": path, "flags": []} for path in sorted(paths)]
+def get_repo_packages(path):
+ """Extract file names of all packages in the given repository."""
+
+ packages = set()
+
+ def callback(pkg):
+ packages.add(os.path.basename(pkg.location_href))
+
+ repomd = os.path.join(path, "repodata/repomd.xml")
+ with as_local_file(repomd) as url_:
+ repomd = cr.Repomd(url_)
+ for rec in repomd.records:
+ if rec.type != "primary":
+ continue
+ record_url = os.path.join(path, rec.location_href)
+ with as_local_file(record_url) as url_:
+ cr.xml_parse_primary(url_, pkgcb=callback, do_files=False)
+
+ return packages
+
+
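
A rough usage sketch for the helper above, assuming a local repository created with createrepo_c (the path is hypothetical; the import assumes this patch is applied):

```python
from pungi.phases.gather.methods.method_hybrid import (  # assumes this patch
    get_repo_packages,
)

# Returns plain file names, e.g. {"bash-5.1-1.fc35.x86_64.rpm", ...}, which
# makes the lookaside comparison independent of where the repo lives.
names = get_repo_packages("/mnt/lookaside/repo")  # hypothetical path
print(sorted(names)[:5])
```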
def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
"""For each package add source RPM."""
# This will serve as the final result. We collect sets of paths to the
@@ -511,25 +533,16 @@ def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
filters = set(filter_packages)
- # Collect list of all packages in lookaside. These will not be added to the
- # result. Fus handles this in part: if a package is explicitly mentioned as
- # input (which can happen with comps group expansion), it will be in the
- # output even if it's in lookaside.
lookaside_packages = set()
for repo in lookasides:
- md = cr.Metadata()
- md.locate_and_load_xml(repo)
- for key in md.keys():
- pkg = md.get(key)
- url = os.path.join(pkg.location_base or repo, pkg.location_href)
- # Strip file:// prefix
- lookaside_packages.add(url[7:])
+ lookaside_packages.update(get_repo_packages(repo))
for nvr, pkg_arch, flags in nvrs:
pkg = nevra_to_pkg["%s.%s" % (nvr, pkg_arch)]
- if pkg.file_path in lookaside_packages:
- # Package is in lookaside, don't add it and ignore sources and
- # debuginfo too.
+ if os.path.basename(pkg.file_path) in lookaside_packages:
+            # Fus can return a lookaside package in the output if the package
+            # is explicitly listed as input. This can happen during comps
+            # expansion.
continue
if pkg_is_debug(pkg):
debuginfo.add(pkg.file_path)
@@ -542,7 +555,7 @@ def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
if (srpm.name, "src") in filters:
# Filtered package, skipping
continue
- if srpm.file_path not in lookaside_packages:
+ if os.path.basename(srpm.file_path) not in lookaside_packages:
srpms.add(srpm.file_path)
except KeyError:
# Didn't find source RPM.. this should be logged
diff --git a/pungi/phases/gather/methods/method_nodeps.py b/pungi/phases/gather/methods/method_nodeps.py
index cd625047..062a386b 100644
--- a/pungi/phases/gather/methods/method_nodeps.py
+++ b/pungi/phases/gather/methods/method_nodeps.py
@@ -28,8 +28,6 @@ from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
- enabled = True
-
def __call__(self, arch, variant, *args, **kwargs):
fname = "gather-nodeps-%s" % variant.uid
if self.source_name:
diff --git a/pungi/phases/gather/source.py b/pungi/phases/gather/source.py
index c1d7c9c5..92c15df1 100644
--- a/pungi/phases/gather/source.py
+++ b/pungi/phases/gather/source.py
@@ -14,15 +14,6 @@
# along with this program; if not, see .
-import kobo.plugins
-
-
-class GatherSourceBase(kobo.plugins.Plugin):
+class GatherSourceBase(object):
def __init__(self, compose):
self.compose = compose
-
-
-class GatherSourceContainer(kobo.plugins.PluginContainer):
- @classmethod
- def normalize_name(cls, name):
- return name.lower()
diff --git a/pungi/phases/gather/sources/__init__.py b/pungi/phases/gather/sources/__init__.py
index e69de29b..00ff61e8 100644
--- a/pungi/phases/gather/sources/__init__.py
+++ b/pungi/phases/gather/sources/__init__.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see .
+
+from .source_comps import GatherSourceComps
+from .source_json import GatherSourceJson
+from .source_module import GatherSourceModule
+from .source_none import GatherSourceNone
+
+ALL_SOURCES = {
+ "comps": GatherSourceComps,
+ "json": GatherSourceJson,
+ "module": GatherSourceModule,
+ "none": GatherSourceNone,
+}
diff --git a/pungi/phases/gather/sources/source_comps.py b/pungi/phases/gather/sources/source_comps.py
index e9987dfe..e1247770 100644
--- a/pungi/phases/gather/sources/source_comps.py
+++ b/pungi/phases/gather/sources/source_comps.py
@@ -30,8 +30,6 @@ import pungi.phases.gather.source
class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
- enabled = True
-
def __call__(self, arch, variant):
groups = set()
if not self.compose.conf.get("comps_file"):
diff --git a/pungi/phases/gather/sources/source_json.py b/pungi/phases/gather/sources/source_json.py
index 073935d8..b336f9b5 100644
--- a/pungi/phases/gather/sources/source_json.py
+++ b/pungi/phases/gather/sources/source_json.py
@@ -32,30 +32,31 @@ set([(rpm_name, rpm_arch or None)])
import json
+import os
import pungi.phases.gather.source
class GatherSourceJson(pungi.phases.gather.source.GatherSourceBase):
- enabled = True
-
def __call__(self, arch, variant):
json_path = self.compose.conf.get("gather_source_mapping")
if not json_path:
return set(), set()
- with open(json_path, "r") as f:
+ with open(os.path.join(self.compose.config_dir, json_path), "r") as f:
mapping = json.load(f)
packages = set()
if variant is None:
# get all packages for all variants
for variant_uid in mapping:
- for pkg_name, pkg_arches in mapping[variant_uid][arch].items():
+ for pkg_name, pkg_arches in mapping[variant_uid].get(arch, {}).items():
for pkg_arch in pkg_arches:
packages.add((pkg_name, pkg_arch))
else:
# get packages for a particular variant
- for pkg_name, pkg_arches in mapping[variant.uid][arch].items():
+ for pkg_name, pkg_arches in (
+ mapping.get(variant.uid, {}).get(arch, {}).items()
+ ):
for pkg_arch in pkg_arches:
packages.add((pkg_name, pkg_arch))
return packages, set()
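
With this change, `gather_source_mapping` is resolved relative to the compose configuration directory, and missing variant/arch keys are tolerated. A hypothetical mapping file and the corresponding lookup, sketched standalone:

```python
import json

# Hypothetical contents of a gather_source_mapping file: variant -> arch ->
# package name -> list of arches to pull.
mapping = json.loads("""
{
    "Server": {
        "x86_64": {"bash": ["x86_64"], "glibc": ["x86_64", "i686"]}
    }
}
""")

packages = set()
for pkg_name, pkg_arches in mapping.get("Server", {}).get("x86_64", {}).items():
    for pkg_arch in pkg_arches:
        packages.add((pkg_name, pkg_arch))
print(sorted(packages))
# [('bash', 'x86_64'), ('glibc', 'i686'), ('glibc', 'x86_64')]
```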
diff --git a/pungi/phases/gather/sources/source_module.py b/pungi/phases/gather/sources/source_module.py
index beb108d2..be636bf0 100644
--- a/pungi/phases/gather/sources/source_module.py
+++ b/pungi/phases/gather/sources/source_module.py
@@ -26,8 +26,6 @@ import pungi.phases.gather.source
class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
- enabled = True
-
def __call__(self, arch, variant):
groups = set()
packages = set()
diff --git a/pungi/phases/gather/sources/source_none.py b/pungi/phases/gather/sources/source_none.py
index 35801e9f..a78b198a 100644
--- a/pungi/phases/gather/sources/source_none.py
+++ b/pungi/phases/gather/sources/source_none.py
@@ -29,7 +29,5 @@ import pungi.phases.gather.source
class GatherSourceNone(pungi.phases.gather.source.GatherSourceBase):
- enabled = True
-
def __call__(self, arch, variant):
return set(), set()
diff --git a/pungi/phases/image_build.py b/pungi/phases/image_build.py
index 9818b2d6..e0dcb02b 100644
--- a/pungi/phases/image_build.py
+++ b/pungi/phases/image_build.py
@@ -1,18 +1,22 @@
# -*- coding: utf-8 -*-
import copy
+import hashlib
+import json
import os
+import shutil
import time
from kobo import shortcuts
from pungi.util import makedirs, get_mtime, get_file_size, failable, log_failed_task
-from pungi.util import translate_path, get_repo_urls, version_generator
+from pungi.util import as_local_file, translate_path, get_repo_urls, version_generator
from pungi.phases import base
from pungi.linker import Linker
from pungi.wrappers.kojiwrapper import KojiWrapper
from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import force_list
from productmd.images import Image
+from productmd.rpms import Rpms
# This is a mapping from formats to file extensions. The format is what koji
@@ -46,9 +50,10 @@ class ImageBuildPhase(
name = "image_build"
- def __init__(self, compose):
+ def __init__(self, compose, buildinstall_phase=None):
super(ImageBuildPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.logger)
+ self.buildinstall_phase = buildinstall_phase
def _get_install_tree(self, image_conf, variant):
"""
@@ -117,6 +122,7 @@ class ImageBuildPhase(
# prevent problems in next iteration where the original
# value is needed.
image_conf = copy.deepcopy(image_conf)
+ original_image_conf = copy.deepcopy(image_conf)
# image_conf is passed to get_image_build_cmd as dict
@@ -167,6 +173,7 @@ class ImageBuildPhase(
image_conf["image-build"]["can_fail"] = sorted(can_fail)
cmd = {
+ "original_image_conf": original_image_conf,
"image_conf": image_conf,
"conf_file": self.compose.paths.work.image_build_conf(
image_conf["image-build"]["variant"],
@@ -182,7 +189,7 @@ class ImageBuildPhase(
"scratch": image_conf["image-build"].pop("scratch", False),
}
self.pool.add(CreateImageBuildThread(self.pool))
- self.pool.queue_put((self.compose, cmd))
+ self.pool.queue_put((self.compose, cmd, self.buildinstall_phase))
self.pool.start()
@@ -192,7 +199,7 @@ class CreateImageBuildThread(WorkerThread):
self.pool.log_error("CreateImageBuild failed.")
def process(self, item, num):
- compose, cmd = item
+ compose, cmd, buildinstall_phase = item
variant = cmd["image_conf"]["image-build"]["variant"]
subvariant = cmd["image_conf"]["image-build"].get("subvariant", variant.uid)
self.failable_arches = cmd["image_conf"]["image-build"].get("can_fail", "")
@@ -208,22 +215,54 @@ class CreateImageBuildThread(WorkerThread):
subvariant,
logger=self.pool._logger,
):
- self.worker(num, compose, variant, subvariant, cmd)
+ self.worker(num, compose, variant, subvariant, cmd, buildinstall_phase)
- def worker(self, num, compose, variant, subvariant, cmd):
+ def worker(self, num, compose, variant, subvariant, cmd, buildinstall_phase):
arches = cmd["image_conf"]["image-build"]["arches"]
formats = "-".join(cmd["image_conf"]["image-build"]["format"])
dash_arches = "-".join(arches)
log_file = compose.paths.log.log_file(
dash_arches, "imagebuild-%s-%s-%s" % (variant.uid, subvariant, formats)
)
+ metadata_file = log_file[:-4] + ".reuse.json"
+
+ external_repo_checksum = {}
+ try:
+ for repo in cmd["original_image_conf"]["image-build"]["repo"]:
+ if repo in compose.all_variants:
+ continue
+ with as_local_file(
+ os.path.join(repo, "repodata/repomd.xml")
+ ) as filename:
+ with open(filename, "rb") as f:
+ external_repo_checksum[repo] = hashlib.sha256(
+ f.read()
+ ).hexdigest()
+ except Exception as e:
+ external_repo_checksum = None
+ self.pool.log_info(
+ "Can't calculate checksum of repomd.xml of external repo - %s" % str(e)
+ )
+
+ if self._try_to_reuse(
+ compose,
+ variant,
+ subvariant,
+ metadata_file,
+ log_file,
+ cmd,
+ external_repo_checksum,
+ buildinstall_phase,
+ ):
+ return
+
msg = (
"Creating image (formats: %s, arches: %s, variant: %s, subvariant: %s)"
% (formats, dash_arches, variant, subvariant)
)
self.pool.log_info("[BEGIN] %s" % msg)
- koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
+ koji_wrapper = KojiWrapper(compose)
# writes conf file for koji image-build
self.pool.log_info(
@@ -275,6 +314,22 @@ class CreateImageBuildThread(WorkerThread):
)
break
+ self._link_images(compose, variant, subvariant, cmd, image_infos)
+ self._write_reuse_metadata(
+ compose, metadata_file, cmd, image_infos, external_repo_checksum
+ )
+
+ self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
+
+ def _link_images(self, compose, variant, subvariant, cmd, image_infos):
+ """Link images to compose and update image manifest.
+
+ :param Compose compose: Current compose.
+ :param Variant variant: Current variant.
+        :param str subvariant: Current subvariant.
+        :param dict cmd: Dict of params for image-build.
+        :param list image_infos: List of dicts with image info.
+ """
# The usecase here is that you can run koji image-build with multiple --format
# It's ok to do it serialized since we're talking about max 2 images per single
# image_build record
@@ -308,4 +363,160 @@ class CreateImageBuildThread(WorkerThread):
setattr(img, "deliverable", "image-build")
compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)
- self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
+ def _try_to_reuse(
+ self,
+ compose,
+ variant,
+ subvariant,
+ metadata_file,
+ log_file,
+ cmd,
+ external_repo_checksum,
+ buildinstall_phase,
+ ):
+ """Try to reuse images from old compose.
+
+ :param Compose compose: Current compose.
+ :param Variant variant: Current variant.
+        :param str subvariant: Current subvariant.
+ :param str metadata_file: Path to reuse metadata file.
+ :param str log_file: Path to log file.
+ :param dict cmd: Dict of params for image-build.
+        :param dict external_repo_checksum: Mapping of external repo URL to the
+            sha256 digest of its repomd.xml, or None if the checksum could not
+            be computed.
+ :param BuildinstallPhase buildinstall_phase: buildinstall phase of
+ current compose.
+ """
+ log_msg = "Cannot reuse old image_build phase results - %s"
+ if not compose.conf["image_build_allow_reuse"]:
+ self.pool.log_info(
+ log_msg % "reuse of old image_build results is disabled."
+ )
+ return False
+
+ if external_repo_checksum is None:
+ self.pool.log_info(
+ log_msg % "Can't ensure that external repo is not changed."
+ )
+ return False
+
+ old_metadata_file = compose.paths.old_compose_path(metadata_file)
+ if not old_metadata_file:
+ self.pool.log_info(log_msg % "Can't find old reuse metadata file")
+ return False
+
+ try:
+ old_metadata = self._load_reuse_metadata(old_metadata_file)
+ except Exception as e:
+ self.pool.log_info(
+ log_msg % "Can't load old reuse metadata file: %s" % str(e)
+ )
+ return False
+
+ if old_metadata["cmd"]["original_image_conf"] != cmd["original_image_conf"]:
+ self.pool.log_info(log_msg % "image_build config changed")
+ return False
+
+        # Make sure the external repo has not changed
+ if (
+ old_metadata["external_repo_checksum"] is None
+ or old_metadata["external_repo_checksum"] != external_repo_checksum
+ ):
+ self.pool.log_info(log_msg % "External repo may be changed")
+ return False
+
+ # Make sure buildinstall phase is reused
+ for arch in cmd["image_conf"]["image-build"]["arches"]:
+ if buildinstall_phase and not buildinstall_phase.reused(variant, arch):
+ self.pool.log_info(log_msg % "buildinstall phase changed")
+ return False
+
+        # Make sure the packages in the variant have not changed
+ rpm_manifest_file = compose.paths.compose.metadata("rpms.json")
+ rpm_manifest = Rpms()
+ rpm_manifest.load(rpm_manifest_file)
+
+ old_rpm_manifest_file = compose.paths.old_compose_path(rpm_manifest_file)
+ old_rpm_manifest = Rpms()
+ old_rpm_manifest.load(old_rpm_manifest_file)
+
+ for repo in cmd["original_image_conf"]["image-build"]["repo"]:
+ if repo not in compose.all_variants:
+ # External repos are checked using other logic.
+ continue
+ for arch in cmd["image_conf"]["image-build"]["arches"]:
+ if (
+ rpm_manifest.rpms[variant.uid][arch]
+ != old_rpm_manifest.rpms[variant.uid][arch]
+ ):
+ self.pool.log_info(
+ log_msg % "Packages in %s.%s changed." % (variant.uid, arch)
+ )
+ return False
+
+ self.pool.log_info(
+ "Reusing images from old compose for variant %s" % variant.uid
+ )
+ try:
+ self._link_images(
+ compose, variant, subvariant, cmd, old_metadata["image_infos"]
+ )
+ except Exception as e:
+ self.pool.log_info(log_msg % "Can't link images %s" % str(e))
+ return False
+
+ old_log_file = compose.paths.old_compose_path(log_file)
+ try:
+ shutil.copy2(old_log_file, log_file)
+ except Exception as e:
+ self.pool.log_info(
+ log_msg % "Can't copy old log_file: %s %s" % (old_log_file, str(e))
+ )
+ return False
+
+ self._write_reuse_metadata(
+ compose,
+ metadata_file,
+ cmd,
+ old_metadata["image_infos"],
+ external_repo_checksum,
+ )
+
+ return True
+
+ def _write_reuse_metadata(
+ self, compose, metadata_file, cmd, image_infos, external_repo_checksum
+ ):
+ """Write metadata file.
+
+ :param Compose compose: Current compose.
+ :param str metadata_file: Path to reuse metadata file.
+ :param dict cmd: Dict of params for image-build.
+        :param list image_infos: List of dicts with image info.
+        :param dict external_repo_checksum: Mapping of external repo URL to the
+            sha256 digest of its repomd.xml, or None if the checksum could not
+            be computed.
+ """
+ msg = "Writing reuse metadata file: %s" % metadata_file
+ self.pool.log_info(msg)
+
+ cmd_copy = copy.deepcopy(cmd)
+ del cmd_copy["image_conf"]["image-build"]["variant"]
+
+ data = {
+ "cmd": cmd_copy,
+ "image_infos": image_infos,
+ "external_repo_checksum": external_repo_checksum,
+ }
+ try:
+ with open(metadata_file, "w") as f:
+ json.dump(data, f, indent=4)
+ except Exception as e:
+ self.pool.log_info("%s Failed: %s" % (msg, str(e)))
+
+ def _load_reuse_metadata(self, metadata_file):
+ """Load metadata file.
+
+ :param str metadata_file: Path to reuse metadata file.
+ """
+ with open(metadata_file, "r") as f:
+ return json.load(f)
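Note: the reuse file written next to the image-build log by _write_reuse_metadata carries just enough state to decide whether the old images can be linked again. Assuming hypothetical values and eliding details, its contents look roughly like:

    {
        "cmd": {"original_image_conf": {"image-build": {"...": "..."}}},
        "image_infos": [{"path": "...", "arch": "...", "...": "..."}],
        "external_repo_checksum": {
            "<external repo url>": "<sha256 of its repodata/repomd.xml>"
        }
    }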
diff --git a/pungi/phases/image_checksum.py b/pungi/phases/image_checksum.py
index 5b980597..0277a97c 100644
--- a/pungi/phases/image_checksum.py
+++ b/pungi/phases/image_checksum.py
@@ -3,6 +3,7 @@
import os
from kobo import shortcuts
from collections import defaultdict
+import threading
from .base import PhaseBase
from ..util import get_format_substs, get_file_size
@@ -68,6 +69,7 @@ class ImageChecksumPhase(PhaseBase):
def run(self):
topdir = self.compose.paths.compose.topdir()
+
make_checksums(
topdir,
self.compose.im,
@@ -87,6 +89,8 @@ def _compute_checksums(
checksum_types,
base_checksum_name_gen,
one_file,
+ results_lock,
+ cache_lock,
):
for image in images:
filename = os.path.basename(image.path)
@@ -96,14 +100,21 @@ def _compute_checksums(
filesize = image.size or get_file_size(full_path)
+ cache_lock.acquire()
if full_path not in cache:
+ cache_lock.release()
# Source ISO is listed under each binary architecture. There's no
# point in checksumming it twice, so we can just remember the
             # digest from first run.
- cache[full_path] = shortcuts.compute_file_checksums(
- full_path, checksum_types
- )
- digests = cache[full_path]
+ checksum_value = shortcuts.compute_file_checksums(full_path, checksum_types)
+ with cache_lock:
+ cache[full_path] = checksum_value
+ else:
+ cache_lock.release()
+
+ with cache_lock:
+ digests = cache[full_path]
+
for checksum, digest in digests.items():
# Update metadata with the checksum
image.add_checksum(None, checksum, digest)
@@ -112,7 +123,10 @@ def _compute_checksums(
checksum_filename = os.path.join(
path, "%s.%sSUM" % (filename, checksum.upper())
)
- results[checksum_filename].add((filename, filesize, checksum, digest))
+ with results_lock:
+ results[checksum_filename].add(
+ (filename, filesize, checksum, digest)
+ )
if one_file:
dirname = os.path.basename(path)
@@ -125,24 +139,42 @@ def _compute_checksums(
checksum_filename = "%s%sSUM" % (base_checksum_name, checksum.upper())
checksum_path = os.path.join(path, checksum_filename)
- results[checksum_path].add((filename, filesize, checksum, digest))
+ with results_lock:
+ results[checksum_path].add((filename, filesize, checksum, digest))
def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen):
results = defaultdict(set)
cache = {}
+ threads = []
+ results_lock = threading.Lock() # lock to synchronize access to the results dict.
+ cache_lock = threading.Lock() # lock to synchronize access to the cache dict.
+
+ # create all worker threads
for (variant, arch, path), images in get_images(topdir, im).items():
- _compute_checksums(
- results,
- cache,
- variant,
- arch,
- path,
- images,
- checksum_types,
- base_checksum_name_gen,
- one_file,
+ threads.append(
+ threading.Thread(
+ target=_compute_checksums,
+ args=[
+ results,
+ cache,
+ variant,
+ arch,
+ path,
+ images,
+ checksum_types,
+ base_checksum_name_gen,
+ one_file,
+ results_lock,
+ cache_lock,
+ ],
+ )
)
+ threads[-1].start()
+
+ # wait for all worker threads to finish
+ for thread in threads:
+ thread.join()
for file in results:
dump_checksums(file, results[file])
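Note: each (variant, arch, path) group is now checksummed in its own thread, so both shared dicts need locks. The explicit acquire/release dance around the cache is equivalent to this double-checked pattern (a sketch using only context managers; as in the original, two threads may both compute a digest for the same path, which costs time but is harmless):

    with cache_lock:
        cached = full_path in cache
    if not cached:
        # The slow part runs without holding the lock.
        value = shortcuts.compute_file_checksums(full_path, checksum_types)
        with cache_lock:
            cache[full_path] = value
    with cache_lock:
        digests = cache[full_path]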
diff --git a/pungi/phases/image_container.py b/pungi/phases/image_container.py
new file mode 100644
index 00000000..f7139263
--- /dev/null
+++ b/pungi/phases/image_container.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+from kobo.threads import ThreadPool, WorkerThread
+
+from .base import ConfigGuardedPhase, PhaseLoggerMixin
+from .. import util
+from ..wrappers import kojiwrapper
+from ..phases.osbs import add_metadata
+
+
+class ImageContainerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
+ name = "image_container"
+
+ def __init__(self, compose):
+ super(ImageContainerPhase, self).__init__(compose)
+ self.pool = ThreadPool(logger=self.logger)
+ self.pool.metadata = {}
+
+ def run(self):
+ for variant in self.compose.get_variants():
+ for conf in self.get_config_block(variant):
+ self.pool.add(ImageContainerThread(self.pool))
+ self.pool.queue_put((self.compose, variant, conf))
+
+ self.pool.start()
+
+
+class ImageContainerThread(WorkerThread):
+ def process(self, item, num):
+ compose, variant, config = item
+ self.num = num
+ with util.failable(
+ compose,
+ bool(config.pop("failable", None)),
+ variant,
+ "*",
+ "osbs",
+ logger=self.pool._logger,
+ ):
+ self.worker(compose, variant, config)
+
+ def worker(self, compose, variant, config):
+ msg = "Image container task for variant %s" % variant.uid
+ self.pool.log_info("[BEGIN] %s" % msg)
+
+ source = config.pop("url")
+ target = config.pop("target")
+ priority = config.pop("priority", None)
+
+ config["yum_repourls"] = [
+ self._get_repo(
+ compose,
+ variant,
+ config.get("arch_override", "").split(),
+ config.pop("image_spec"),
+ )
+ ]
+
+ # Start task
+ koji = kojiwrapper.KojiWrapper(compose)
+ koji.login()
+ task_id = koji.koji_proxy.buildContainer(
+ source, target, config, priority=priority
+ )
+
+ koji.save_task_id(task_id)
+
+ # Wait for it to finish and capture the output into log file (even
+ # though there is not much there).
+ log_dir = os.path.join(compose.paths.log.topdir(), "image_container")
+ util.makedirs(log_dir)
+ log_file = os.path.join(
+ log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
+ )
+ if koji.watch_task(task_id, log_file) != 0:
+ raise RuntimeError(
+ "ImageContainer: task %s failed: see %s for details"
+ % (task_id, log_file)
+ )
+
+ add_metadata(variant, task_id, compose, config.get("scratch", False))
+
+ self.pool.log_info("[DONE ] %s" % msg)
+
+ def _get_repo(self, compose, variant, arches, image_spec):
+ """
+ Return a repo file that points baseurl to the image specified by
+ image_spec.
+ """
+ image_paths = set()
+
+ for arch in arches or compose.im.images[variant.uid].keys():
+ for image in compose.im.images[variant.uid].get(arch, []):
+ for key, value in image_spec.items():
+ if not re.match(value, getattr(image, key)):
+ break
+ else:
+ image_paths.add(image.path.replace(arch, "$basearch"))
+
+ if len(image_paths) != 1:
+ raise RuntimeError(
+ "%d images matched specification. Only one was expected."
+ % len(image_paths)
+ )
+
+ image_path = image_paths.pop()
+ absolute_path = os.path.join(compose.paths.compose.topdir(), image_path)
+
+ repo_file = os.path.join(
+ compose.paths.work.tmp_dir(None, variant),
+ "image-container-%s-%s.repo" % (variant, self.num),
+ )
+ with open(repo_file, "w") as f:
+ f.write("[image-to-include]\n")
+ f.write("name=Location of image to embed\n")
+ f.write("baseurl=%s\n" % util.translate_path(compose, absolute_path))
+ f.write("enabled=0\n")
+ f.write("gpgcheck=0\n")
+
+ return util.translate_path(compose, repo_file)
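Note: given the writes above, the repo file handed back to the caller renders like this (baseurl hypothetical; it is the translated compose path of the matched image with the arch replaced by $basearch):

    [image-to-include]
    name=Location of image to embed
    baseurl=https://example.com/compose/Server/$basearch/images/disk.qcow2
    enabled=0
    gpgcheck=0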
diff --git a/pungi/phases/init.py b/pungi/phases/init.py
index f0590128..a99bc595 100644
--- a/pungi/phases/init.py
+++ b/pungi/phases/init.py
@@ -16,6 +16,7 @@
import collections
import os
+import glob
import shutil
from kobo.shortcuts import run
@@ -72,6 +73,10 @@ class InitPhase(PhaseBase):
self.compose.paths.work.module_defaults_dir(create_dir=False)
)
+ # download module obsoletes
+ if self.compose.has_module_obsoletes:
+ write_module_obsoletes(self.compose)
+
# write prepopulate file
write_prepopulate_file(self.compose)
@@ -160,12 +165,18 @@ def write_variant_comps(compose, arch, variant):
run(cmd)
comps = CompsWrapper(comps_file)
- if variant.groups or variant.modules is not None or variant.type != "variant":
- # Filter groups if the variant has some, or it's a modular variant, or
- # is not a base variant.
+ # Filter groups if the variant has some, or it's a modular variant, or
+ # is not a base variant.
+ if (
+ variant.groups
+ or variant.modules is not None
+ or variant.modular_koji_tags is not None
+ or variant.type != "variant"
+ ):
unmatched = comps.filter_groups(variant.groups)
for grp in unmatched:
compose.log_warning(UNMATCHED_GROUP_MSG % (variant.uid, arch, grp))
+
contains_all = not variant.groups and not variant.environments
if compose.conf["comps_filter_environments"] and not contains_all:
# We only want to filter environments if it's enabled by configuration
@@ -218,12 +229,33 @@ def write_module_defaults(compose):
)
+def write_module_obsoletes(compose):
+ scm_dict = compose.conf["module_obsoletes_dir"]
+ if isinstance(scm_dict, dict):
+ if scm_dict["scm"] == "file":
+ scm_dict["dir"] = os.path.join(compose.config_dir, scm_dict["dir"])
+ else:
+ scm_dict = os.path.join(compose.config_dir, scm_dict)
+
+ with temp_dir(prefix="moduleobsoletes_") as tmp_dir:
+ get_dir_from_scm(scm_dict, tmp_dir, compose=compose)
+ compose.log_debug("Writing module obsoletes")
+ shutil.copytree(
+ tmp_dir,
+ compose.paths.work.module_obsoletes_dir(create_dir=False),
+ ignore=shutil.ignore_patterns(".git"),
+ )
+
+
def validate_module_defaults(path):
- """Make sure there are no conflicting defaults. Each module name can only
- have one default stream.
+ """Make sure there are no conflicting defaults and every default can be loaded.
+    Each module name can only have one default stream.
:param str path: directory with cloned module defaults
"""
+
+ defaults_num = len(glob.glob(os.path.join(path, "*.yaml")))
+
seen_defaults = collections.defaultdict(set)
for module_name, defaults in iter_module_defaults(path):
@@ -242,6 +274,11 @@ def validate_module_defaults(path):
"There are duplicated module defaults:\n%s" % "\n".join(errors)
)
+    # Make sure all defaults are valid, otherwise update_from_defaults_directory
+    # would return an empty object.
+    if defaults_num != len(seen_defaults):
+        raise RuntimeError("Defaults directory contains an invalid default file")
+
def validate_comps(path):
"""Check that there are whitespace issues in comps."""
diff --git a/pungi/phases/live_images.py b/pungi/phases/live_images.py
index d2767aa2..0a4c4108 100644
--- a/pungi/phases/live_images.py
+++ b/pungi/phases/live_images.py
@@ -186,7 +186,7 @@ class CreateLiveImageThread(WorkerThread):
)
self.pool.log_info("[BEGIN] %s" % msg)
- koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
+ koji_wrapper = KojiWrapper(compose)
_, version = compose.compose_id.rsplit("-", 1)
name = cmd["name"] or imgname
version = cmd["version"] or version
diff --git a/pungi/phases/livemedia_phase.py b/pungi/phases/livemedia_phase.py
index 50fdb0b8..f28c68a3 100644
--- a/pungi/phases/livemedia_phase.py
+++ b/pungi/phases/livemedia_phase.py
@@ -71,6 +71,7 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
"ksurl": self.get_ksurl(image_conf),
"ksversion": image_conf.get("ksversion"),
"scratch": image_conf.get("scratch", False),
+ "nomacboot": image_conf.get("nomacboot", False),
"release": self.get_release(image_conf),
"skip_tag": image_conf.get("skip_tag"),
"name": name,
@@ -140,7 +141,7 @@ class LiveMediaThread(WorkerThread):
)
self.pool.log_info("[BEGIN] %s" % msg)
- koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
+ koji_wrapper = KojiWrapper(compose)
cmd = self._get_cmd(koji_wrapper, config)
log_file = self._get_log_file(compose, variant, subvariant, config)
diff --git a/pungi/phases/osbs.py b/pungi/phases/osbs.py
index 148b3c3e..e5ddcb60 100644
--- a/pungi/phases/osbs.py
+++ b/pungi/phases/osbs.py
@@ -1,24 +1,29 @@
# -*- coding: utf-8 -*-
+import copy
import fnmatch
import json
import os
from kobo.threads import ThreadPool, WorkerThread
from kobo import shortcuts
+from productmd.rpms import Rpms
+from six.moves import configparser
from .base import ConfigGuardedPhase, PhaseLoggerMixin
from .. import util
from ..wrappers import kojiwrapper
+from ..wrappers.scm import get_file_from_scm
class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
name = "osbs"
- def __init__(self, compose):
+ def __init__(self, compose, pkgset_phase, buildinstall_phase):
super(OSBSPhase, self).__init__(compose)
self.pool = ThreadPool(logger=self.logger)
- self.pool.metadata = {}
self.pool.registries = {}
+ self.pool.pkgset_phase = pkgset_phase
+ self.pool.buildinstall_phase = buildinstall_phase
def run(self):
for variant in self.compose.get_variants():
@@ -28,15 +33,6 @@ class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
self.pool.start()
- def dump_metadata(self):
- """Create a file with image metadata if the phase actually ran."""
- if self._skipped:
- return
- with open(self.compose.paths.compose.metadata("osbs.json"), "w") as f:
- json.dump(
- self.pool.metadata, f, indent=4, sort_keys=True, separators=(",", ": ")
- )
-
def request_push(self):
"""Store configuration data about where to push the created images and
then send the same data to message bus.
@@ -87,8 +83,8 @@ class OSBSThread(WorkerThread):
def worker(self, compose, variant, config):
msg = "OSBS task for variant %s" % variant.uid
self.pool.log_info("[BEGIN] %s" % msg)
- koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
- koji.login()
+
+ original_config = copy.deepcopy(config)
# Start task
source = config.pop("url")
@@ -104,86 +100,98 @@ class OSBSThread(WorkerThread):
config["yum_repourls"] = repos
- task_id = koji.koji_proxy.buildContainer(
- source, target, config, priority=priority
- )
-
- # Wait for it to finish and capture the output into log file (even
- # though there is not much there).
log_dir = os.path.join(compose.paths.log.topdir(), "osbs")
util.makedirs(log_dir)
log_file = os.path.join(
log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
)
+ reuse_file = log_file[:-4] + ".reuse.json"
+
+ try:
+ image_conf = self._get_image_conf(compose, original_config)
+ except Exception as e:
+ image_conf = None
+ self.pool.log_info(
+ "Can't get image-build.conf for variant: %s source: %s - %s"
+ % (variant.uid, source, str(e))
+ )
+
+ koji = kojiwrapper.KojiWrapper(compose)
+ koji.login()
+
+ task_id = self._try_to_reuse(
+ compose, variant, original_config, image_conf, reuse_file
+ )
+
+ if not task_id:
+ task_id = koji.koji_proxy.buildContainer(
+ source, target, config, priority=priority
+ )
+
+ koji.save_task_id(task_id)
+
+ # Wait for it to finish and capture the output into log file (even
+ # though there is not much there).
if koji.watch_task(task_id, log_file) != 0:
raise RuntimeError(
"OSBS: task %s failed: see %s for details" % (task_id, log_file)
)
scratch = config.get("scratch", False)
- nvr = self._add_metadata(variant, task_id, compose, scratch)
+ nvr, archive_ids = add_metadata(variant, task_id, compose, scratch)
if nvr:
registry = get_registry(compose, nvr, registry)
if registry:
self.pool.registries[nvr] = registry
+ self._write_reuse_metadata(
+ compose,
+ variant,
+ original_config,
+ image_conf,
+ task_id,
+ archive_ids,
+ reuse_file,
+ )
+
self.pool.log_info("[DONE ] %s" % msg)
- def _add_metadata(self, variant, task_id, compose, is_scratch):
- # Create new Koji session. The task could take so long to finish that
- # our session will expire. This second session does not need to be
- # authenticated since it will only do reading operations.
- koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+ def _get_image_conf(self, compose, config):
+ """Get image-build.conf from git repo.
- # Create metadata
- metadata = {
- "compose_id": compose.compose_id,
- "koji_task": task_id,
- }
+ :param Compose compose: Current compose.
+ :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+ """
+ tmp_dir = compose.mkdtemp(prefix="osbs_")
- result = koji.koji_proxy.getTaskResult(task_id)
- if is_scratch:
- metadata.update({"repositories": result["repositories"]})
- # add a fake arch of 'scratch', so we can construct the metadata
- # in same data structure as real builds.
- self.pool.metadata.setdefault(variant.uid, {}).setdefault(
- "scratch", []
- ).append(metadata)
- return None
+ url = config["url"].split("#")
+ if len(url) == 1:
+ url.append(config["git_branch"])
+ filename = "image-build.conf"
+ get_file_from_scm(
+ {
+ "scm": "git",
+ "repo": url[0],
+ "branch": url[1],
+ "file": [filename],
+ },
+ tmp_dir,
+ )
+
+ c = configparser.ConfigParser()
+ c.read(os.path.join(tmp_dir, filename))
+ return c
+
+ def _get_ksurl(self, image_conf):
+ """Get ksurl from image-build.conf"""
+ ksurl = image_conf.get("image-build", "ksurl")
+
+ if ksurl:
+ resolver = util.GitUrlResolver(offline=False)
+ return resolver(ksurl)
else:
- build_id = int(result["koji_builds"][0])
- buildinfo = koji.koji_proxy.getBuild(build_id)
- archives = koji.koji_proxy.listArchives(build_id)
-
- nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
-
- metadata.update(
- {
- "name": buildinfo["name"],
- "version": buildinfo["version"],
- "release": buildinfo["release"],
- "nvr": nvr,
- "creation_time": buildinfo["creation_time"],
- }
- )
- for archive in archives:
- data = {
- "filename": archive["filename"],
- "size": archive["size"],
- "checksum": archive["checksum"],
- }
- data.update(archive["extra"])
- data.update(metadata)
- arch = archive["extra"]["image"]["arch"]
- self.pool.log_debug(
- "Created Docker base image %s-%s-%s.%s"
- % (metadata["name"], metadata["version"], metadata["release"], arch)
- )
- self.pool.metadata.setdefault(variant.uid, {}).setdefault(
- arch, []
- ).append(data)
- return nvr
+ return None
def _get_repo(self, compose, repo, gpgkey=None):
"""
@@ -192,7 +200,7 @@ class OSBSThread(WorkerThread):
file pointing to that location and return the URL to .repo file.
"""
if "://" in repo:
- return repo
+ return repo.replace("$COMPOSE_ID", compose.compose_id)
if repo.startswith("/"):
# The repo is an absolute path on the filesystem
@@ -211,6 +219,15 @@ class OSBSThread(WorkerThread):
raise RuntimeError(
"There is no variant %s to get repo from to pass to OSBS." % repo
)
+ cts_url = compose.conf.get("cts_url", None)
+ if cts_url:
+ return os.path.join(
+ cts_url,
+ "api/1/composes",
+ compose.compose_id,
+ "repo/?variant=%s" % variant,
+ )
+
repo_path = compose.paths.compose.repository(
"$basearch", variant, create_dir=False
)
@@ -231,3 +248,209 @@ class OSBSThread(WorkerThread):
f.write("gpgkey=%s\n" % gpgkey)
return util.translate_path(compose, repo_file)
+
+ def _try_to_reuse(self, compose, variant, config, image_conf, reuse_file):
+ """Try to reuse results of old compose.
+
+ :param Compose compose: Current compose.
+ :param Variant variant: Current variant.
+ :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+ :param ConfigParser image_conf: ConfigParser obj of image-build.conf.
+ :param str reuse_file: Path to reuse metadata file
+ """
+ log_msg = "Cannot reuse old osbs phase results - %s"
+
+ if not compose.conf["osbs_allow_reuse"]:
+ self.pool.log_info(log_msg % "reuse of old osbs results is disabled.")
+ return False
+
+ old_reuse_file = compose.paths.old_compose_path(reuse_file)
+ if not old_reuse_file:
+ self.pool.log_info(log_msg % "Can't find old reuse metadata file")
+ return False
+
+ try:
+ with open(old_reuse_file) as f:
+ old_reuse_metadata = json.load(f)
+ except Exception as e:
+ self.pool.log_info(
+ log_msg % "Can't load old reuse metadata file: %s" % str(e)
+ )
+ return False
+
+ if old_reuse_metadata["config"] != config:
+ self.pool.log_info(log_msg % "osbs config changed")
+ return False
+
+ if not image_conf:
+ self.pool.log_info(log_msg % "Can't get image-build.conf")
+ return False
+
+ # Make sure ksurl not change
+ try:
+ ksurl = self._get_ksurl(image_conf)
+ except Exception as e:
+ self.pool.log_info(
+ log_msg % "Can't get ksurl from image-build.conf - %s" % str(e)
+ )
+ return False
+
+ if not old_reuse_metadata["ksurl"]:
+ self.pool.log_info(
+ log_msg % "Can't get ksurl from old compose reuse metadata."
+ )
+ return False
+
+ if ksurl != old_reuse_metadata["ksurl"]:
+ self.pool.log_info(log_msg % "ksurl changed")
+ return False
+
+ # Make sure buildinstall phase is reused
+ try:
+ arches = image_conf.get("image-build", "arches").split(",")
+ except Exception as e:
+ self.pool.log_info(
+ log_msg % "Can't get arches from image-build.conf - %s" % str(e)
+            )
+            return False
+ for arch in arches:
+ if not self.pool.buildinstall_phase.reused(variant, arch):
+ self.pool.log_info(
+ log_msg % "buildinstall phase changed %s.%s" % (variant, arch)
+ )
+ return False
+
+ # Make sure rpms installed in image exists in current compose
+ rpm_manifest_file = compose.paths.compose.metadata("rpms.json")
+ rpm_manifest = Rpms()
+ rpm_manifest.load(rpm_manifest_file)
+ rpms = set()
+ for variant in rpm_manifest.rpms:
+ for arch in rpm_manifest.rpms[variant]:
+ for src in rpm_manifest.rpms[variant][arch]:
+ for nevra in rpm_manifest.rpms[variant][arch][src]:
+ rpms.add(nevra)
+
+ for nevra in old_reuse_metadata["rpmlist"]:
+ if nevra not in rpms:
+ self.pool.log_info(
+ log_msg % "%s does not exist in current compose" % nevra
+ )
+ return False
+
+ self.pool.log_info(
+ "Reusing old OSBS task %d result" % old_reuse_file["task_id"]
+ )
+ return old_reuse_file["task_id"]
+
+ def _write_reuse_metadata(
+ self, compose, variant, config, image_conf, task_id, archive_ids, reuse_file
+ ):
+ """Write metadata to file for reusing.
+
+ :param Compose compose: Current compose.
+ :param Variant variant: Current variant.
+ :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+ :param ConfigParser image_conf: ConfigParser obj of image-build.conf.
+ :param int task_id: Koji task id of osbs task.
+ :param list archive_ids: List of koji archive id
+ :param str reuse_file: Path to reuse metadata file.
+ """
+ msg = "Writing reuse metadata file %s" % reuse_file
+ compose.log_info(msg)
+
+ rpmlist = set()
+ koji = kojiwrapper.KojiWrapper(compose)
+ for archive_id in archive_ids:
+ rpms = koji.koji_proxy.listRPMs(imageID=archive_id)
+ for item in rpms:
+ if item["epoch"]:
+ rpmlist.add(
+ "%s:%s-%s-%s.%s"
+ % (
+ item["name"],
+ item["epoch"],
+ item["version"],
+ item["release"],
+ item["arch"],
+ )
+ )
+ else:
+ rpmlist.add("%s.%s" % (item["nvr"], item["arch"]))
+
+ try:
+ ksurl = self._get_ksurl(image_conf)
+ except Exception:
+ ksurl = None
+
+ data = {
+ "config": config,
+ "ksurl": ksurl,
+ "rpmlist": sorted(rpmlist),
+ "task_id": task_id,
+ }
+ try:
+ with open(reuse_file, "w") as f:
+ json.dump(data, f, indent=4)
+ except Exception as e:
+ compose.log_info(msg + " failed - %s" % str(e))
+
+
+def add_metadata(variant, task_id, compose, is_scratch):
+ """Given a task ID, find details about the container and add it to global
+ metadata."""
+ # Create new Koji session. The task could take so long to finish that
+ # our session will expire. This second session does not need to be
+ # authenticated since it will only do reading operations.
+ koji = kojiwrapper.KojiWrapper(compose)
+
+ # Create metadata
+ metadata = {
+ "compose_id": compose.compose_id,
+ "koji_task": task_id,
+ }
+
+ result = koji.koji_proxy.getTaskResult(task_id)
+ if is_scratch:
+ metadata.update({"repositories": result["repositories"]})
+ # add a fake arch of 'scratch', so we can construct the metadata
+ # in same data structure as real builds.
+ compose.containers_metadata.setdefault(variant.uid, {}).setdefault(
+ "scratch", []
+ ).append(metadata)
+ return None, []
+
+ else:
+ build_id = int(result["koji_builds"][0])
+ buildinfo = koji.koji_proxy.getBuild(build_id)
+ archives = koji.koji_proxy.listArchives(build_id, type="image")
+
+ nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
+
+ metadata.update(
+ {
+ "name": buildinfo["name"],
+ "version": buildinfo["version"],
+ "release": buildinfo["release"],
+ "nvr": nvr,
+ "creation_time": buildinfo["creation_time"],
+ }
+ )
+ archive_ids = []
+ for archive in archives:
+ data = {
+ "filename": archive["filename"],
+ "size": archive["size"],
+ "checksum": archive["checksum"],
+ }
+ data.update(archive["extra"])
+ data.update(metadata)
+ arch = archive["extra"]["image"]["arch"]
+ compose.log_debug(
+ "Created Docker base image %s-%s-%s.%s"
+ % (metadata["name"], metadata["version"], metadata["release"], arch)
+ )
+ compose.containers_metadata.setdefault(variant.uid, {}).setdefault(
+ arch, []
+ ).append(data)
+ archive_ids.append(archive["id"])
+ return nvr, archive_ids
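Note: as with image_build, the OSBS reuse file stores the inputs that must stay stable plus the task to reuse. The dict dumped by _write_reuse_metadata, shown with hypothetical values:

    {
        "config": {"url": "git+https://...#branch", "target": "..."},
        "ksurl": "git+https://...#<resolved commit hash>",
        "rpmlist": ["bash-5.1.8-2.el9.x86_64"],
        "task_id": 123456
    }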
diff --git a/pungi/phases/osbuild.py b/pungi/phases/osbuild.py
index b81afc9e..6e52e9c5 100644
--- a/pungi/phases/osbuild.py
+++ b/pungi/phases/osbuild.py
@@ -96,7 +96,12 @@ class RunOSBuildThread(WorkerThread):
self.can_fail = can_fail
self.num = num
with util.failable(
- compose, can_fail, variant, "*", "osbuild", logger=self.pool._logger,
+ compose,
+ can_fail,
+ variant,
+ "*",
+ "osbuild",
+ logger=self.pool._logger,
):
self.worker(
compose, variant, config, arches, version, release, target, repo
@@ -105,11 +110,26 @@ class RunOSBuildThread(WorkerThread):
def worker(self, compose, variant, config, arches, version, release, target, repo):
msg = "OSBuild task for variant %s" % variant.uid
self.pool.log_info("[BEGIN] %s" % msg)
- koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+ koji = kojiwrapper.KojiWrapper(compose)
koji.login()
+ ostree = {}
+ if config.get("ostree_url"):
+ ostree["url"] = config["ostree_url"]
+ if config.get("ostree_ref"):
+ ostree["ref"] = config["ostree_ref"]
+ if config.get("ostree_parent"):
+ ostree["parent"] = config["ostree_parent"]
+
# Start task
opts = {"repo": repo}
+ if ostree:
+ opts["ostree"] = ostree
+
+ upload_options = config.get("upload_options")
+ if upload_options:
+ opts["upload_options"] = upload_options
+
if release:
opts["release"] = release
task_id = koji.koji_proxy.osbuildImage(
@@ -122,6 +142,8 @@ class RunOSBuildThread(WorkerThread):
opts=opts,
)
+ koji.save_task_id(task_id)
+
# Wait for it to finish and capture the output into log file.
log_dir = os.path.join(compose.paths.log.topdir(), "osbuild")
util.makedirs(log_dir)
@@ -136,7 +158,7 @@ class RunOSBuildThread(WorkerThread):
# Refresh koji session which may have timed out while the task was
# running. Watching is done via a subprocess, so the session is
# inactive.
- koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+ koji = kojiwrapper.KojiWrapper(compose)
# Get build id via the task's result json data
result = koji.koji_proxy.getTaskResult(task_id)
@@ -148,7 +170,7 @@ class RunOSBuildThread(WorkerThread):
# architecture, but we don't verify that.
build_info = koji.koji_proxy.getBuild(build_id)
for archive in koji.koji_proxy.listArchives(buildID=build_id):
- if archive["type_name"] not in config["image_types"]:
+ if archive["type_name"] not in EXTENSIONS:
# Ignore values that are not of required types.
continue
@@ -175,8 +197,11 @@ class RunOSBuildThread(WorkerThread):
linker.link(src_file, image_dest, link_type=compose.conf["link_type"])
- suffix = archive["filename"].rsplit(".", 1)[-1]
- if suffix not in EXTENSIONS[archive["type_name"]]:
+ for suffix in EXTENSIONS[archive["type_name"]]:
+ if archive["filename"].endswith(suffix):
+ break
+ else:
+ # No suffix matched.
raise RuntimeError(
"Failed to generate metadata. Format %s doesn't match type %s"
% (suffix, archive["type_name"])
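Note: pulling the new pieces together, a config that sets the ostree_* and upload_options keys would produce opts for osbuildImage roughly like this (values hypothetical; upload_options is forwarded verbatim from the config):

    opts = {
        "repo": repo,
        "ostree": {
            "url": "https://example.com/ostree/repo",
            "ref": "fedora/stable/x86_64/iot",
            "parent": "fedora/stable/x86_64/iot",
        },
        "upload_options": {"...": "..."},
        "release": "20220101.0",
    }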
diff --git a/pungi/phases/ostree.py b/pungi/phases/ostree.py
index 2fcfce6c..cbfcd76e 100644
--- a/pungi/phases/ostree.py
+++ b/pungi/phases/ostree.py
@@ -165,6 +165,7 @@ class OSTreeThread(WorkerThread):
("update-summary", config.get("update_summary", False)),
("ostree-ref", config.get("ostree_ref")),
("force-new-commit", config.get("force_new_commit", False)),
+ ("unified-core", config.get("unified_core", False)),
]
)
packages = ["pungi", "ostree", "rpm-ostree"]
diff --git a/pungi/phases/ostree_installer.py b/pungi/phases/ostree_installer.py
index 3424ea8c..8e9a1f6e 100644
--- a/pungi/phases/ostree_installer.py
+++ b/pungi/phases/ostree_installer.py
@@ -272,6 +272,7 @@ class OstreeInstallerThread(WorkerThread):
rootfs_size=config.get("rootfs_size"),
is_final=compose.supported,
log_dir=self.logdir,
+ skip_branding=config.get("skip_branding"),
)
cmd = "rm -rf %s && %s" % (
shlex_quote(output_dir),
diff --git a/pungi/phases/pkgset/__init__.py b/pungi/phases/pkgset/__init__.py
index 66fa4952..684f6e95 100644
--- a/pungi/phases/pkgset/__init__.py
+++ b/pungi/phases/pkgset/__init__.py
@@ -29,13 +29,10 @@ class PkgsetPhase(PhaseBase):
self.path_prefix = None
def run(self):
- pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
- from .source import PkgsetSourceContainer
from . import sources
- PkgsetSourceContainer.register_module(sources)
- container = PkgsetSourceContainer()
- SourceClass = container[pkgset_source]
+ SourceClass = sources.ALL_SOURCES[self.compose.conf["pkgset_source"].lower()]
+
self.package_sets, self.path_prefix = SourceClass(self.compose)()
def validate(self):
diff --git a/pungi/phases/pkgset/common.py b/pungi/phases/pkgset/common.py
index f2a38457..14dad789 100644
--- a/pungi/phases/pkgset/common.py
+++ b/pungi/phases/pkgset/common.py
@@ -28,7 +28,11 @@ from pungi.util import (
PartialFuncWorkerThread,
PartialFuncThreadPool,
)
-from pungi.module_util import Modulemd, collect_module_defaults
+from pungi.module_util import (
+ Modulemd,
+ collect_module_defaults,
+ collect_module_obsoletes,
+)
from pungi.phases.createrepo import add_modular_metadata
@@ -159,6 +163,9 @@ def _create_arch_repo(worker_thread, args, task_num):
mod_index = collect_module_defaults(
compose.paths.work.module_defaults_dir(), names, overrides_dir=overrides_dir
)
+ mod_index = collect_module_obsoletes(
+ compose.paths.work.module_obsoletes_dir(), names, mod_index
+ )
for x in mmd:
mod_index.add_module_stream(x)
add_modular_metadata(
diff --git a/pungi/phases/pkgset/pkgsets.py b/pungi/phases/pkgset/pkgsets.py
index 56e96aa5..3decf961 100644
--- a/pungi/phases/pkgset/pkgsets.py
+++ b/pungi/phases/pkgset/pkgsets.py
@@ -22,6 +22,9 @@ It automatically finds a signed copies according to *sigkey_ordering*.
import itertools
import json
import os
+import time
+import pgpy
+import rpm
from six.moves import cPickle as pickle
import kobo.log
@@ -30,9 +33,9 @@ import kobo.rpmlib
from kobo.threads import WorkerThread, ThreadPool
-import pungi.wrappers.kojiwrapper
from pungi.util import pkg_is_srpm, copy_all
from pungi.arch import get_valid_arches, is_excluded
+from pungi.errors import UnsignedPackagesError
class ExtendedRpmWrapper(kobo.pkgset.SimpleRpmWrapper):
@@ -144,7 +147,7 @@ class PackageSetBase(kobo.log.LoggingBase):
def raise_invalid_sigkeys_exception(self, rpminfos):
"""
- Raises RuntimeError containing details of RPMs with invalid
+ Raises UnsignedPackagesError containing details of RPMs with invalid
sigkeys defined in `rpminfos`.
"""
@@ -166,7 +169,9 @@ class PackageSetBase(kobo.log.LoggingBase):
if not isinstance(rpminfos, dict):
rpminfos = {self.sigkey_ordering: rpminfos}
- raise RuntimeError("\n".join(get_error(k, v) for k, v in rpminfos.items()))
+ raise UnsignedPackagesError(
+ "\n".join(get_error(k, v) for k, v in rpminfos.items())
+ )
def read_packages(self, rpms, srpms):
srpm_pool = ReaderPool(self, self._logger)
@@ -329,6 +334,8 @@ class KojiPackageSet(PackageSetBase):
cache_region=None,
extra_builds=None,
extra_tasks=None,
+ signed_packages_retries=0,
+ signed_packages_wait=30,
):
"""
Creates new KojiPackageSet.
@@ -361,6 +368,9 @@ class KojiPackageSet(PackageSetBase):
:param list extra_tasks: Extra RPMs defined as Koji task IDs to get from Koji
and include in the package set. Useful when building testing compose
with RPM scratch builds.
+    :param int signed_packages_retries: How many times the search for a
+        signed package should be repeated.
+    :param int signed_packages_wait: How long to wait between search attempts.
"""
super(KojiPackageSet, self).__init__(
name,
@@ -377,10 +387,11 @@ class KojiPackageSet(PackageSetBase):
self.extra_builds = extra_builds or []
self.extra_tasks = extra_tasks or []
self.reuse = None
+ self.signed_packages_retries = signed_packages_retries
+ self.signed_packages_wait = signed_packages_wait
def __getstate__(self):
result = self.__dict__.copy()
- result["koji_profile"] = self.koji_wrapper.profile
del result["koji_wrapper"]
del result["_logger"]
if "cache_region" in result:
@@ -388,8 +399,6 @@ class KojiPackageSet(PackageSetBase):
return result
def __setstate__(self, data):
- koji_profile = data.pop("koji_profile")
- self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
self._logger = None
self.__dict__.update(data)
@@ -505,17 +514,28 @@ class KojiPackageSet(PackageSetBase):
pathinfo = self.koji_wrapper.koji_module.pathinfo
paths = []
- for sigkey in self.sigkey_ordering:
- if not sigkey:
- # we're looking for *signed* copies here
- continue
- sigkey = sigkey.lower()
- rpm_path = os.path.join(
- pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey)
- )
- paths.append(rpm_path)
- if os.path.isfile(rpm_path):
- return rpm_path
+
+ attempts_left = self.signed_packages_retries + 1
+ while attempts_left > 0:
+ for sigkey in self.sigkey_ordering:
+ if not sigkey:
+ # we're looking for *signed* copies here
+ continue
+ sigkey = sigkey.lower()
+ rpm_path = os.path.join(
+ pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey)
+ )
+ if rpm_path not in paths:
+ paths.append(rpm_path)
+ if os.path.isfile(rpm_path):
+ return rpm_path
+
+ # No signed copy was found, wait a little and try again.
+ attempts_left -= 1
+ if attempts_left > 0:
+ nvr = "%(name)s-%(version)s-%(release)s" % rpm_info
+ self.log_debug("Waiting for signed package to appear for %s", nvr)
+ time.sleep(self.signed_packages_wait)
if None in self.sigkey_ordering or "" in self.sigkey_ordering:
# use an unsigned copy (if allowed)
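Note: the retry logic above reduces to this shape (look_for_signed_copy is a hypothetical stand-in for the sigkey loop); with signed_packages_retries=2 the signed locations are probed three times, sleeping signed_packages_wait seconds between rounds:

    attempts_left = signed_packages_retries + 1
    while attempts_left > 0:
        path = look_for_signed_copy()
        if path:
            return path
        attempts_left -= 1
        if attempts_left > 0:
            time.sleep(signed_packages_wait)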
@@ -727,20 +747,26 @@ class KojiPackageSet(PackageSetBase):
% (old_koji_event, koji_event)
)
changed = self.koji_proxy.queryHistory(
- tables=["tag_listing"], tag=tag, afterEvent=old_koji_event
+ tables=["tag_listing", "tag_inheritance"],
+ tag=tag,
+ afterEvent=min(koji_event, old_koji_event),
+ beforeEvent=max(koji_event, old_koji_event) + 1,
)
if changed["tag_listing"]:
self.log_debug("Builds under tag %s changed. Can't reuse." % tag)
return False
+ if changed["tag_inheritance"]:
+ self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+ return False
if inherit:
inherit_tags = self.koji_proxy.getFullInheritance(tag, koji_event)
for t in inherit_tags:
changed = self.koji_proxy.queryHistory(
- tables=["tag_listing"],
+ tables=["tag_listing", "tag_inheritance"],
tag=t["name"],
- afterEvent=old_koji_event,
- beforeEvent=koji_event + 1,
+ afterEvent=min(koji_event, old_koji_event),
+ beforeEvent=max(koji_event, old_koji_event) + 1,
)
if changed["tag_listing"]:
self.log_debug(
@@ -748,6 +774,9 @@ class KojiPackageSet(PackageSetBase):
% t["name"]
)
return False
+ if changed["tag_inheritance"]:
+ self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+ return False
repo_dir = compose.paths.work.pkgset_repo(tag, create_dir=False)
old_repo_dir = compose.paths.old_compose_path(repo_dir)
@@ -801,6 +830,8 @@ class KojiMockPackageSet(PackageSetBase):
cache_region=None,
extra_builds=None,
extra_tasks=None,
+ signed_packages_retries=0,
+ signed_packages_wait=30,
):
"""
Creates new KojiPackageSet.
@@ -849,10 +880,11 @@ class KojiMockPackageSet(PackageSetBase):
self.extra_builds = extra_builds or []
self.extra_tasks = extra_tasks or []
self.reuse = None
+ self.signed_packages_retries = signed_packages_retries
+ self.signed_packages_wait = signed_packages_wait
def __getstate__(self):
result = self.__dict__.copy()
- result["koji_profile"] = self.koji_wrapper.profile
del result["koji_wrapper"]
del result["_logger"]
if "cache_region" in result:
@@ -860,8 +892,6 @@ class KojiMockPackageSet(PackageSetBase):
return result
def __setstate__(self, data):
- koji_profile = data.pop("koji_profile")
- self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
self._logger = None
self.__dict__.update(data)
@@ -965,6 +995,24 @@ class KojiMockPackageSet(PackageSetBase):
return response
+ def _is_rpm_signed(self, rpm_path) -> bool:
+ ts = rpm.TransactionSet()
+ ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
+ sigkeys = [
+ sigkey.lower() for sigkey in self.sigkey_ordering
+ if sigkey is not None
+ ]
+ with open(rpm_path, 'rb') as fd:
+ header = ts.hdrFromFdno(fd)
+ signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
+ if signature is None:
+ return False
+ pgp_msg = pgpy.PGPMessage.from_blob(signature)
+        return any(
+            sig.signer.lower() in sigkeys
+            for sig in pgp_msg.signatures
+        )
+
def get_package_path(self, queue_item):
rpm_info, build_info = queue_item
@@ -982,6 +1030,13 @@ class KojiMockPackageSet(PackageSetBase):
rpm_path = os.path.join(pathinfo.topdir, pathinfo.rpm(rpm_info))
if os.path.isfile(rpm_path):
+ if not self._is_rpm_signed(rpm_path):
+ self._invalid_sigkey_rpms.append(rpm_info)
+ self.log_error(
+ 'RPM "%s" not found for sigs: "%s". Path checked: "%s"',
+ rpm_info, self.sigkey_ordering, rpm_path
+ )
+ return
return rpm_path
else:
self.log_warning("RPM %s not found" % rpm_path)
@@ -1175,20 +1230,26 @@ class KojiMockPackageSet(PackageSetBase):
% (old_koji_event, koji_event)
)
changed = self.koji_proxy.queryHistory(
- tables=["tag_listing"], tag=tag, afterEvent=old_koji_event
+ tables=["tag_listing", "tag_inheritance"],
+ tag=tag,
+ afterEvent=min(koji_event, old_koji_event),
+ beforeEvent=max(koji_event, old_koji_event) + 1,
)
if changed["tag_listing"]:
self.log_debug("Builds under tag %s changed. Can't reuse." % tag)
return False
+ if changed["tag_inheritance"]:
+ self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+ return False
if inherit:
inherit_tags = self.koji_proxy.getFullInheritance(tag, koji_event)
for t in inherit_tags:
changed = self.koji_proxy.queryHistory(
- tables=["tag_listing"],
+ tables=["tag_listing", "tag_inheritance"],
tag=t["name"],
- afterEvent=old_koji_event,
- beforeEvent=koji_event + 1,
+ afterEvent=min(koji_event, old_koji_event),
+ beforeEvent=max(koji_event, old_koji_event) + 1,
)
if changed["tag_listing"]:
self.log_debug(
@@ -1196,6 +1257,9 @@ class KojiMockPackageSet(PackageSetBase):
% t["name"]
)
return False
+ if changed["tag_inheritance"]:
+ self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+ return False
repo_dir = compose.paths.work.pkgset_repo(tag, create_dir=False)
old_repo_dir = compose.paths.old_compose_path(repo_dir)
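Note: both queryHistory calls now use a window that is symmetric in the two events, so the reuse check also works when the current compose pins an older Koji event than the previous one did. A hypothetical helper expressing the same bounds:

    def event_window(old_koji_event, koji_event):
        # beforeEvent is treated as an exclusive upper bound, hence the +1.
        return {
            "afterEvent": min(koji_event, old_koji_event),
            "beforeEvent": max(koji_event, old_koji_event) + 1,
        }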
diff --git a/pungi/phases/pkgset/source.py b/pungi/phases/pkgset/source.py
index 472b4400..297d499e 100644
--- a/pungi/phases/pkgset/source.py
+++ b/pungi/phases/pkgset/source.py
@@ -14,15 +14,6 @@
# along with this program; if not, see <https://gnu.org/licenses/>.
-import kobo.plugins
-
-
-class PkgsetSourceBase(kobo.plugins.Plugin):
+class PkgsetSourceBase(object):
def __init__(self, compose):
self.compose = compose
-
-
-class PkgsetSourceContainer(kobo.plugins.PluginContainer):
- @classmethod
- def normalize_name(cls, name):
- return name.lower()
diff --git a/pungi/phases/pkgset/sources/__init__.py b/pungi/phases/pkgset/sources/__init__.py
index e69de29b..2241b30e 100644
--- a/pungi/phases/pkgset/sources/__init__.py
+++ b/pungi/phases/pkgset/sources/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+from .source_koji import PkgsetSourceKoji
+from .source_repos import PkgsetSourceRepos
+from .source_kojimock import PkgsetSourceKojiMock
+
+ALL_SOURCES = {
+ "koji": PkgsetSourceKoji,
+ "repos": PkgsetSourceRepos,
+ "kojimock": PkgsetSourceKojiMock,
+}
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index a557f604..26ec770e 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -29,7 +29,13 @@ from pungi.wrappers.comps import CompsWrapper
from pungi.wrappers.mbs import MBSWrapper
import pungi.phases.pkgset.pkgsets
from pungi.arch import getBaseArch
-from pungi.util import retry, get_arch_variant_data, get_variant_data
+from pungi.util import (
+ retry,
+ get_arch_variant_data,
+ get_variant_data,
+ read_single_module_stream_from_file,
+ read_single_module_stream_from_string,
+)
from pungi.module_util import Modulemd
from pungi.phases.pkgset.common import MaterializedPackageSet, get_all_arches
@@ -184,12 +190,9 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
- enabled = True
-
def __call__(self):
compose = self.compose
- koji_profile = compose.conf["koji_profile"]
- self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
+ self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
# path prefix must contain trailing '/'
path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
package_sets = get_pkgset_from_koji(
@@ -204,7 +207,12 @@ def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
def _add_module_to_variant(
- koji_wrapper, variant, build, add_to_variant_modules=False, compose=None
+ koji_wrapper,
+ variant,
+ build,
+ add_to_variant_modules=False,
+ compose=None,
+ exclude_module_ns=None,
):
"""
Adds module defined by Koji build info to variant.
@@ -214,6 +222,7 @@ def _add_module_to_variant(
:param bool add_to_variant_modules: Adds the modules also to
variant.modules.
:param compose: Compose object to get filters from
+ :param list exclude_module_ns: Module name:stream which will be excluded.
"""
mmds = {}
archives = koji_wrapper.koji_proxy.listArchives(build["id"])
@@ -243,6 +252,10 @@ def _add_module_to_variant(
info = build["extra"]["typeinfo"]["module"]
nsvc = "%(name)s:%(stream)s:%(version)s:%(context)s" % info
+ ns = "%(name)s:%(stream)s" % info
+
+ if exclude_module_ns and ns in exclude_module_ns:
+ return
added = False
@@ -251,17 +264,18 @@ def _add_module_to_variant(
compose.log_debug("Module %s is filtered from %s.%s", nsvc, variant, arch)
continue
- try:
- mmd = Modulemd.ModuleStream.read_file(
- mmds["modulemd.%s.txt" % arch], strict=True
+ filename = "modulemd.%s.txt" % arch
+ if filename not in mmds:
+ raise RuntimeError(
+ "Module %s does not have metadata for arch %s and is not filtered "
+ "out via filter_modules option." % (nsvc, arch)
)
- variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
+ mod_stream = read_single_module_stream_from_file(
+ mmds[filename], compose, arch, build
+ )
+ if mod_stream:
added = True
- except KeyError:
- # There is no modulemd for this arch. This could mean an arch was
- # added to the compose after the module was built. We don't want to
- # process this, let's skip this module.
- pass
+ variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream
if not added:
# The module is filtered on all arches of this variant.
@@ -341,9 +355,7 @@ def _add_scratch_modules_to_variant(
tag_to_mmd.setdefault(tag, {})
for arch in variant.arches:
try:
- mmd = Modulemd.ModuleStream.read_string(
- final_modulemd[arch], strict=True
- )
+ mmd = read_single_module_stream_from_string(final_modulemd[arch])
variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
except KeyError:
continue
@@ -383,7 +395,7 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):
def _get_modules_from_koji(
- compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+ compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
):
"""
Loads modules for given `variant` from koji `session`, adds them to
@@ -394,6 +406,7 @@ def _get_modules_from_koji(
:param Variant variant: Variant with modules to find.
:param dict variant_tags: Dict populated by this method. Key is `variant`
and value is list of Koji tags to get the RPMs from.
+ :param list exclude_module_ns: Module name:stream which will be excluded.
"""
# Find out all modules in every variant and add their Koji tags
@@ -402,7 +415,11 @@ def _get_modules_from_koji(
koji_modules = get_koji_modules(compose, koji_wrapper, event, module["name"])
for koji_module in koji_modules:
nsvc = _add_module_to_variant(
- koji_wrapper, variant, koji_module, compose=compose
+ koji_wrapper,
+ variant,
+ koji_module,
+ compose=compose,
+ exclude_module_ns=exclude_module_ns,
)
if not nsvc:
continue
@@ -517,7 +534,13 @@ def filter_by_whitelist(compose, module_builds, input_modules, expected_modules)
def _get_modules_from_koji_tags(
- compose, koji_wrapper, event_id, variant, variant_tags, tag_to_mmd
+ compose,
+ koji_wrapper,
+ event_id,
+ variant,
+ variant_tags,
+ tag_to_mmd,
+ exclude_module_ns,
):
"""
Loads modules for given `variant` from Koji, adds them to
@@ -529,6 +552,7 @@ def _get_modules_from_koji_tags(
:param Variant variant: Variant with modules to find.
:param dict variant_tags: Dict populated by this method. Key is `variant`
and value is list of Koji tags to get the RPMs from.
+ :param list exclude_module_ns: Module name:stream which will be excluded.
"""
# Compose tags from configuration
compose_tags = [
@@ -595,21 +619,26 @@ def _get_modules_from_koji_tags(
for build in latest_builds:
# Get the Build from Koji to get modulemd and module_tag.
build = koji_proxy.getBuild(build["build_id"])
+
+ nsvc = _add_module_to_variant(
+ koji_wrapper,
+ variant,
+ build,
+ True,
+ compose=compose,
+ exclude_module_ns=exclude_module_ns,
+ )
+ if not nsvc:
+ continue
+
module_tag = (
build.get("extra", {})
.get("typeinfo", {})
.get("module", {})
.get("content_koji_tag", "")
)
-
variant_tags[variant].append(module_tag)
- nsvc = _add_module_to_variant(
- koji_wrapper, variant, build, True, compose=compose
- )
- if not nsvc:
- continue
-
tag_to_mmd.setdefault(module_tag, {})
for arch in variant.arch_mmds:
try:
@@ -635,7 +664,7 @@ def _get_modules_from_koji_tags(
if expected_modules:
# There are some module names that were listed in configuration and not
# found in any tag...
- raise RuntimeError(
+ compose.log_warning(
"Configuration specified patterns (%s) that don't match "
"any modules in the configured tags." % ", ".join(expected_modules)
)
@@ -695,23 +724,44 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
"modules."
)
+ extra_modules = get_variant_data(
+ compose.conf, "pkgset_koji_module_builds", variant
+ )
+
+ # When adding extra modules, other modules of the same name:stream available
+ # in brew tag should be excluded.
+ exclude_module_ns = []
+ if extra_modules:
+ exclude_module_ns = [
+ ":".join(nsvc.split(":")[:2]) for nsvc in extra_modules
+ ]
+
if modular_koji_tags or (
compose.conf["pkgset_koji_module_tag"] and variant.modules
):
# List modules tagged in particular tags.
_get_modules_from_koji_tags(
- compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+ compose,
+ koji_wrapper,
+ event,
+ variant,
+ variant_tags,
+ tag_to_mmd,
+ exclude_module_ns,
)
elif variant.modules:
# Search each module in Koji separately. Tagging does not come into
# play here.
_get_modules_from_koji(
- compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+ compose,
+ koji_wrapper,
+ event,
+ variant,
+ variant_tags,
+ tag_to_mmd,
+ exclude_module_ns,
)
- extra_modules = get_variant_data(
- compose.conf, "pkgset_koji_module_builds", variant
- )
if extra_modules:
_add_extra_modules_to_variant(
compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
@@ -765,6 +815,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
cache_region=compose.cache_region,
extra_builds=extra_builds,
extra_tasks=extra_tasks,
+ signed_packages_retries=compose.conf["signed_packages_retries"],
+ signed_packages_wait=compose.conf["signed_packages_wait"],
)
# Check if we have cache for this tag from previous compose. If so, use
@@ -773,11 +825,16 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
compose.paths.work.pkgset_file_cache(compose_tag)
)
if old_cache_path:
- pkgset.set_old_file_cache(
- pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
- old_cache_path
+ try:
+ pkgset.set_old_file_cache(
+ pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
+ old_cache_path
+ )
+ )
+ except Exception as e:
+ compose.log_debug(
+ "Failed to load old cache file %s : %s" % (old_cache_path, str(e))
)
- )
is_traditional = compose_tag in compose.conf.get("pkgset_koji_tag", [])
should_inherit = inherit if is_traditional else inherit_modules
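
The exclusion above works by trimming each configured N:S:V:C identifier down to its first two fields and comparing that against the name:stream of every module found in the tag. A minimal standalone sketch of the trimming and the membership test (the sample identifiers are illustrative):

def nsvc_to_ns(nsvc):
    # "httpd:2.4:20230101:abcd1234" -> "httpd:2.4"
    return ":".join(nsvc.split(":")[:2])

extra_modules = ["httpd:2.4:20230101:abcd1234"]
exclude_module_ns = [nsvc_to_ns(nsvc) for nsvc in extra_modules]

# A module coming from the Koji tag is skipped when its name:stream
# collides with one of the explicitly configured extra modules.
info = {"name": "httpd", "stream": "2.4"}
assert "%(name)s:%(stream)s" % info in exclude_module_ns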
diff --git a/pungi/phases/pkgset/sources/source_kojimock.py b/pungi/phases/pkgset/sources/source_kojimock.py
index b3fcd7f0..8c2e3701 100644
--- a/pungi/phases/pkgset/sources/source_kojimock.py
+++ b/pungi/phases/pkgset/sources/source_kojimock.py
@@ -35,7 +35,13 @@ import pungi.wrappers.kojiwrapper
from pungi.wrappers.comps import CompsWrapper
from pungi.wrappers.mbs import MBSWrapper
import pungi.phases.pkgset.pkgsets
-from pungi.util import retry, get_arch_variant_data, get_variant_data
+from pungi.util import (
+ retry,
+ get_arch_variant_data,
+ get_variant_data,
+ read_single_module_stream_from_string,
+ read_single_module_stream_from_file,
+)
from pungi.module_util import Modulemd
from pungi.phases.pkgset.common import MaterializedPackageSet, get_all_arches
@@ -194,16 +200,13 @@ class PkgsetSourceKojiMock(pungi.phases.pkgset.source.PkgsetSourceBase):
def __call__(self):
compose = self.compose
- koji_profile = compose.conf["koji_profile"]
- self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiMockWrapper(
- koji_profile
- )
+ self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiMockWrapper(compose)
# path prefix must contain trailing '/'
path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
package_sets = get_pkgset_from_koji(
self.compose, self.koji_wrapper, path_prefix
)
- return (package_sets, path_prefix)
+ return package_sets, path_prefix
def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
@@ -212,7 +215,12 @@ def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
def _add_module_to_variant(
- koji_wrapper, variant, build, add_to_variant_modules=False, compose=None
+ koji_wrapper,
+ variant,
+ build,
+ add_to_variant_modules=False,
+ compose=None,
+ exclude_module_ns=None,
):
"""
Adds module defined by Koji build info to variant.
@@ -222,6 +230,7 @@ def _add_module_to_variant(
:param bool add_to_variant_modules: Adds the modules also to
variant.modules.
:param compose: Compose object to get filters from
+ :param list exclude_module_ns: Module name:stream pairs to exclude.
"""
mmds = {}
archives = koji_wrapper.koji_proxy.listArchives(build["id"])
@@ -246,6 +255,10 @@ def _add_module_to_variant(
info = build["extra"]["typeinfo"]["module"]
nsvc = "%(name)s:%(stream)s:%(version)s:%(context)s" % info
+ ns = "%(name)s:%(stream)s" % info
+
+ if exclude_module_ns and ns in exclude_module_ns:
+ return
added = False
@@ -253,12 +266,14 @@ def _add_module_to_variant(
if _is_filtered_out(compose, variant, arch, info["name"], info["stream"]):
compose.log_debug("Module %s is filtered from %s.%s", nsvc, variant, arch)
continue
-
+ filename = "modulemd.%s.txt" % arch
try:
- mmd = Modulemd.ModuleStream.read_file(
- mmds["modulemd.%s.txt" % arch], strict=True
+ mod_stream = read_single_module_stream_from_file(
+ mmds[filename], compose, arch, build
)
- variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
+ if mod_stream:
+ added = True
+ variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream
- added = True
except KeyError:
# There is no modulemd for this arch. This could mean an arch was
@@ -344,8 +359,8 @@ def _add_scratch_modules_to_variant(
tag_to_mmd.setdefault(tag, {})
for arch in variant.arches:
try:
- mmd = Modulemd.ModuleStream.read_string(
- final_modulemd[arch], strict=True
+ mmd = read_single_module_stream_from_string(
+ final_modulemd[arch]
)
variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
except KeyError:
diff --git a/pungi/phases/pkgset/sources/source_repos.py b/pungi/phases/pkgset/sources/source_repos.py
index 3d3701bb..716f6336 100644
--- a/pungi/phases/pkgset/sources/source_repos.py
+++ b/pungi/phases/pkgset/sources/source_repos.py
@@ -15,6 +15,7 @@
import os
+import shutil
from kobo.shortcuts import run
@@ -31,8 +32,6 @@ import pungi.phases.pkgset.source
class PkgsetSourceRepos(pungi.phases.pkgset.source.PkgsetSourceBase):
- enabled = True
-
def __call__(self):
package_sets, path_prefix = get_pkgset_from_repos(self.compose)
return (package_sets, path_prefix)
@@ -112,6 +111,17 @@ def get_pkgset_from_repos(compose):
flist.append(dst)
pool.queue_put((src, dst))
+ # Clean up tmp dir
+ # Workaround for rpm not honoring the sgid bit; the problem only appears when yum is used.
+ yumroot_dir = os.path.join(pungi_dir, "work", arch, "yumroot")
+ if os.path.isdir(yumroot_dir):
+ try:
+ shutil.rmtree(yumroot_dir)
+ except Exception as e:
+ compose.log_warning(
+ "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
+ )
+
msg = "Linking downloaded pkgset packages"
compose.log_info("[BEGIN] %s" % msg)
pool.start()
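
The cleanup added above is best-effort: failing to remove the scratch directory only logs a warning instead of failing the phase. A self-contained sketch of that pattern (the path and logger are placeholders):

import os
import shutil

def cleanup_tmp_dir(path, log_warning=print):
    # Leftover files are only a disk-space problem, so the compose
    # is allowed to continue if removal fails.
    if os.path.isdir(path):
        try:
            shutil.rmtree(path)
        except Exception as e:
            log_warning("Failed to clean up tmp dir: %s %s" % (path, e))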
diff --git a/pungi/phases/test.py b/pungi/phases/test.py
index 5d3a483e..099af558 100644
--- a/pungi/phases/test.py
+++ b/pungi/phases/test.py
@@ -18,6 +18,7 @@ import os
from pungi.phases.base import PhaseBase
from pungi.util import failable, get_arch_variant_data
+import productmd.compose
class TestPhase(PhaseBase):
@@ -25,6 +26,7 @@ class TestPhase(PhaseBase):
def run(self):
check_image_sanity(self.compose)
+ check_image_metadata(self.compose)
def check_image_sanity(compose):
@@ -45,6 +47,17 @@ def check_image_sanity(compose):
check_size_limit(compose, variant, arch, img)
+def check_image_metadata(compose):
+ """
+ Check the images metadata for entries that cannot be serialized,
+ which is often caused by ISOs with duplicate metadata.
+ Accessing the `images` attribute will raise an exception if there is a problem.
+ """
+ if compose.im.images:
+ compose = productmd.compose.Compose(compose.paths.compose.topdir())
+ return compose.images
+
+
def check_sanity(compose, variant, arch, image):
path = os.path.join(compose.paths.compose.topdir(), image.path)
deliverable = getattr(image, "deliverable")
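
The new check leans on productmd to re-parse the metadata that was just written. A sketch of the same validation against an on-disk compose (topdir is a placeholder path):

import productmd.compose

def validate_images_metadata(topdir):
    # Reading the `images` property forces deserialization of
    # images.json and raises if the data is inconsistent, e.g. when
    # two ISOs produced duplicate entries.
    compose = productmd.compose.Compose(topdir)
    return compose.images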
diff --git a/pungi/profiler.py b/pungi/profiler.py
index cb7488b3..ed05c54f 100644
--- a/pungi/profiler.py
+++ b/pungi/profiler.py
@@ -69,10 +69,13 @@ class Profiler(object):
@classmethod
def print_results(cls, stream=sys.stdout):
- print("Profiling results:", file=sys.stdout)
+ # Ensure all data that was printed to stdout was already flushed. If
+ # the caller is redirecting stderr to stdout, and there's buffered
+ # data, we may end up in a situation where the stderr output printed
+ # below ends up mixed with the stdout lines.
+ sys.stdout.flush()
+ print("Profiling results:", file=stream)
results = cls._data.items()
results = sorted(results, key=lambda x: x[1]["time"], reverse=True)
for name, data in results:
- print(
- " %6.2f %5d %s" % (data["time"], data["calls"], name), file=sys.stdout
- )
+ print(" %6.2f %5d %s" % (data["time"], data["calls"], name), file=stream)
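
The flush matters when a caller merges the streams, e.g. `pungi-koji ... 2>&1 | tee out.log`: without it, stdout data still sitting in the buffer could surface after the stderr report. A minimal reproduction of the pattern:

import sys

def print_report(stream=sys.stderr):
    # Drain the stdout buffer before writing to another stream, so the
    # two cannot interleave when they share a file descriptor.
    sys.stdout.flush()
    print("Profiling results:", file=stream)

sys.stdout.write("regular output\n")  # may be buffered when piped
print_report()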
diff --git a/pungi/runroot.py b/pungi/runroot.py
index 9e5929d6..166e3988 100644
--- a/pungi/runroot.py
+++ b/pungi/runroot.py
@@ -15,6 +15,7 @@
import os
import re
+import six
from six.moves import shlex_quote
import kobo.log
from kobo.shortcuts import run
@@ -110,7 +111,7 @@ class Runroot(kobo.log.LoggingBase):
runroot_tag = self.compose.conf["runroot_tag"]
log_dir = kwargs.pop("log_dir", None)
- koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+ koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
koji_cmd = koji_wrapper.get_runroot_cmd(
runroot_tag,
arch,
@@ -149,7 +150,11 @@ class Runroot(kobo.log.LoggingBase):
"""
formatted_cmd = command.format(**fmt_dict) if fmt_dict else command
ssh_cmd = ["ssh", "-oBatchMode=yes", "-n", "-l", user, hostname, formatted_cmd]
- return run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
+ output = run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
+ if six.PY3 and isinstance(output, bytes):
+ return output.decode()
+ else:
+ return output
def _log_file(self, base, suffix):
return base.replace(".log", "." + suffix + ".log")
@@ -174,10 +179,13 @@ class Runroot(kobo.log.LoggingBase):
# by the runroot task, so the Pungi user can access them.
if chown_paths:
paths = " ".join(shlex_quote(pth) for pth in chown_paths)
+ command += " ; EXIT_CODE=$?"
# Make the files world readable
- command += " && chmod -R a+r %s" % paths
+ command += " ; chmod -R a+r %s" % paths
# and owned by the same user that is running the process
- command += " && chown -R %d %s" % (os.getuid(), paths)
+ command += " ; chown -R %d %s" % (os.getuid(), paths)
+ # Exit with code of main command
+ command += " ; exit $EXIT_CODE"
hostname = runroot_ssh_hostnames[arch]
user = self.compose.conf.get("runroot_ssh_username", "root")
@@ -300,7 +308,7 @@ class Runroot(kobo.log.LoggingBase):
runroot_channel = self.compose.conf.get("runroot_channel")
runroot_tag = self.compose.conf["runroot_tag"]
- koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+ koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
koji_cmd = koji_wrapper.get_pungi_buildinstall_cmd(
runroot_tag,
arch,
@@ -334,7 +342,7 @@ class Runroot(kobo.log.LoggingBase):
runroot_channel = self.compose.conf.get("runroot_channel")
runroot_tag = self.compose.conf["runroot_tag"]
- koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+ koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
koji_cmd = koji_wrapper.get_pungi_ostree_cmd(
runroot_tag, arch, args, channel=runroot_channel, **kwargs
)
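
Switching from `&&` to `;` means chmod/chown run even when the main command fails, and stashing `$?` first keeps the caller seeing the real exit status. A sketch of the assembled shell line (the helper name is illustrative):

import os
from six.moves import shlex_quote

def with_chown(command, chown_paths):
    paths = " ".join(shlex_quote(p) for p in chown_paths)
    command += " ; EXIT_CODE=$?"             # remember the real result
    command += " ; chmod -R a+r %s" % paths  # always fix permissions
    command += " ; chown -R %d %s" % (os.getuid(), paths)
    command += " ; exit $EXIT_CODE"          # propagate the saved status
    return command

# with_chown("make images", ["/mnt/out"]) ->
# "make images ; EXIT_CODE=$? ; chmod -R a+r /mnt/out ; chown -R <uid> /mnt/out ; exit $EXIT_CODE"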
diff --git a/pungi/scripts/comps_filter.py b/pungi/scripts/comps_filter.py
index 21ebcfff..8c07cee4 100644
--- a/pungi/scripts/comps_filter.py
+++ b/pungi/scripts/comps_filter.py
@@ -96,7 +96,7 @@ def main():
f.filter_environments(opts.arch, opts.variant, opts.arch_only_environments)
if not opts.no_cleanup:
- f.cleanup(opts.keep_empty_group, opts.lookaside_group)
+ f.cleanup(opts.arch, opts.keep_empty_group, opts.lookaside_group)
if opts.remove_categories:
f.remove_categories()
diff --git a/pungi/scripts/config_validate.py b/pungi/scripts/config_validate.py
index d4b9b5b5..b4bdb1eb 100644
--- a/pungi/scripts/config_validate.py
+++ b/pungi/scripts/config_validate.py
@@ -127,7 +127,7 @@ def run(config, topdir, has_old, offline, defined_variables, schema_overrides):
pungi.phases.OstreeInstallerPhase(compose, buildinstall_phase),
pungi.phases.OSTreePhase(compose),
pungi.phases.CreateisoPhase(compose, buildinstall_phase),
- pungi.phases.ExtraIsosPhase(compose),
+ pungi.phases.ExtraIsosPhase(compose, buildinstall_phase),
pungi.phases.LiveImagesPhase(compose),
pungi.phases.LiveMediaPhase(compose),
pungi.phases.ImageBuildPhase(compose),
diff --git a/pungi/scripts/create_unified_isos.py b/pungi/scripts/create_unified_isos.py
index 645debaf..81f47aed 100644
--- a/pungi/scripts/create_unified_isos.py
+++ b/pungi/scripts/create_unified_isos.py
@@ -16,7 +16,10 @@ def parse_args():
parser = argparse.ArgumentParser(add_help=True)
parser.add_argument(
- "compose", metavar="", nargs=1, help="path to compose",
+ "compose",
+ metavar="",
+ nargs=1,
+ help="path to compose",
)
parser.add_argument(
"--arch",
diff --git a/pungi/scripts/pungi.py b/pungi/scripts/pungi.py
index 59b96ddc..9c307eaf 100644
--- a/pungi/scripts/pungi.py
+++ b/pungi/scripts/pungi.py
@@ -476,14 +476,14 @@ def main():
else:
mypungi.downloadSRPMs()
- print("RPM size: %s MiB" % (mypungi.size_packages() / 1024 ** 2))
+ print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
if not opts.nodebuginfo:
print(
"DEBUGINFO size: %s MiB"
- % (mypungi.size_debuginfo() / 1024 ** 2)
+ % (mypungi.size_debuginfo() / 1024**2)
)
if not opts.nosource:
- print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024 ** 2))
+ print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
# Furthermore (but without the yumlock...)
if not opts.sourceisos:
diff --git a/pungi/scripts/pungi_gather.py b/pungi/scripts/pungi_gather.py
index 22cebe13..232d54b7 100644
--- a/pungi/scripts/pungi_gather.py
+++ b/pungi/scripts/pungi_gather.py
@@ -18,13 +18,18 @@ from pungi.util import temp_dir
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
- "--profiler", action="store_true",
+ "--profiler",
+ action="store_true",
)
parser.add_argument(
- "--arch", required=True,
+ "--arch",
+ required=True,
)
parser.add_argument(
- "--config", metavar="PATH", required=True, help="path to kickstart config file",
+ "--config",
+ metavar="PATH",
+ required=True,
+ help="path to kickstart config file",
)
parser.add_argument(
"--download-to",
@@ -42,7 +47,9 @@ def get_parser():
group = parser.add_argument_group("Gather options")
group.add_argument(
- "--nodeps", action="store_true", help="disable resolving dependencies",
+ "--nodeps",
+ action="store_true",
+ help="disable resolving dependencies",
)
group.add_argument(
"--selfhosting",
@@ -61,7 +68,9 @@ def get_parser():
choices=["none", "all", "build"],
)
group.add_argument(
- "--multilib", metavar="[METHOD]", action="append",
+ "--multilib",
+ metavar="[METHOD]",
+ action="append",
)
group.add_argument(
"--tempdir",
diff --git a/pungi/scripts/pungi_koji.py b/pungi/scripts/pungi_koji.py
index 8c905565..97db2263 100644
--- a/pungi/scripts/pungi_koji.py
+++ b/pungi/scripts/pungi_koji.py
@@ -5,6 +5,7 @@ from __future__ import print_function
import argparse
import getpass
+import glob
import json
import locale
import logging
@@ -20,6 +21,8 @@ from six.moves import shlex_quote
from pungi.phases import PHASES_NAMES
from pungi import get_full_version, util
+from pungi.errors import UnsignedPackagesError
+from pungi.wrappers import kojiwrapper
# force C locales
@@ -262,14 +265,12 @@ def main():
# check if all requirements are met
import pungi.checks
- if not pungi.checks.check(conf):
- sys.exit(1)
pungi.checks.check_umask(logger)
if not pungi.checks.check_skip_phases(
logger, opts.skip_phase + conf.get("skip_phases", []), opts.just_phase
):
sys.exit(1)
- errors, warnings = pungi.checks.validate(conf)
+ errors, warnings = pungi.checks.validate(conf, offline=True)
if not opts.quiet:
# TODO: workaround for config files containing skip_phase = productimg
@@ -294,6 +295,9 @@ def main():
fail_to_start("Config validation failed", errors=errors)
sys.exit(1)
+ if not pungi.checks.check(conf):
+ sys.exit(1)
+
if opts.target_dir:
compose_dir = Compose.get_compose_dir(
opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
@@ -325,14 +329,34 @@ def main():
logger=logger,
notifier=notifier,
)
+
+ rv = Compose.update_compose_url(compose.compose_id, compose_dir, conf)
+ if rv and not rv.ok:
+ logger.error("CTS compose_url update failed with the error: %s" % rv.text)
+
+ errors, warnings = pungi.checks.validate(conf, offline=False)
+ if errors:
+ for error in errors:
+ logger.error("Config validation failed with the error: %s" % error)
+ fail_to_start("Config validation failed", errors=errors)
+ sys.exit(1)
+
notifier.compose = compose
COMPOSE = compose
- run_compose(
- compose,
- create_latest_link=create_latest_link,
- latest_link_status=latest_link_status,
- latest_link_components=latest_link_components,
- )
+ try:
+ run_compose(
+ compose,
+ create_latest_link=create_latest_link,
+ latest_link_status=latest_link_status,
+ latest_link_components=latest_link_components,
+ )
+ except UnsignedPackagesError:
+ # There was an unsigned package somewhere. It is not safe to reuse any
+ # package set from this compose (since we could leak the unsigned
+ # package). Let's make sure all reuse files are deleted.
+ for fp in glob.glob(compose.paths.work.pkgset_reuse_file("*")):
+ os.unlink(fp)
+ raise
def run_compose(
@@ -354,6 +378,8 @@ def run_compose(
)
compose.log_info("Compose top directory: %s" % compose.topdir)
compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
+ compose.log_info("COMPOSE_ID=%s" % compose.compose_id)
+
compose.read_variants()
# dump the config file
@@ -378,12 +404,13 @@ def run_compose(
)
ostree_phase = pungi.phases.OSTreePhase(compose, pkgset_phase)
createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
- extra_isos_phase = pungi.phases.ExtraIsosPhase(compose)
+ extra_isos_phase = pungi.phases.ExtraIsosPhase(compose, buildinstall_phase)
liveimages_phase = pungi.phases.LiveImagesPhase(compose)
livemedia_phase = pungi.phases.LiveMediaPhase(compose)
- image_build_phase = pungi.phases.ImageBuildPhase(compose)
+ image_build_phase = pungi.phases.ImageBuildPhase(compose, buildinstall_phase)
osbuild_phase = pungi.phases.OSBuildPhase(compose)
- osbs_phase = pungi.phases.OSBSPhase(compose)
+ osbs_phase = pungi.phases.OSBSPhase(compose, pkgset_phase, buildinstall_phase)
+ image_container_phase = pungi.phases.ImageContainerPhase(compose)
image_checksum_phase = pungi.phases.ImageChecksumPhase(compose)
repoclosure_phase = pungi.phases.RepoclosurePhase(compose)
test_phase = pungi.phases.TestPhase(compose)
@@ -407,6 +434,7 @@ def run_compose(
extra_isos_phase,
osbs_phase,
osbuild_phase,
+ image_container_phase,
):
if phase.skip():
continue
@@ -506,9 +534,12 @@ def run_compose(
livemedia_phase,
osbuild_phase,
)
+ post_image_phase = pungi.phases.WeaverPhase(
+ compose, (image_checksum_phase, image_container_phase)
+ )
compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
extra_phase_schema = (
- (compose_images_phase, image_checksum_phase),
+ (compose_images_phase, post_image_phase),
osbs_phase,
repoclosure_phase,
)
@@ -522,13 +553,14 @@ def run_compose(
buildinstall_phase.skip()
and ostree_installer_phase.skip()
and createiso_phase.skip()
+ and extra_isos_phase.skip()
and liveimages_phase.skip()
and livemedia_phase.skip()
and image_build_phase.skip()
and osbuild_phase.skip()
):
compose.im.dump(compose.paths.compose.metadata("images.json"))
- osbs_phase.dump_metadata()
+ compose.dump_containers_metadata()
test_phase.start()
test_phase.stop()
@@ -600,9 +632,25 @@ def try_kill_children(signal):
COMPOSE.log_warning("Failed to kill all subprocesses")
+def try_kill_koji_tasks():
+ try:
+ if COMPOSE:
+ koji_tasks_dir = COMPOSE.paths.log.koji_tasks_dir(create_dir=False)
+ if os.path.exists(koji_tasks_dir):
+ COMPOSE.log_warning("Trying to kill koji tasks")
+ koji = kojiwrapper.KojiWrapper(COMPOSE)
+ koji.login()
+ for task_id in os.listdir(koji_tasks_dir):
+ koji.koji_proxy.cancelTask(int(task_id))
+ except Exception:
+ if COMPOSE:
+ COMPOSE.log_warning("Failed to kill koji tasks")
+
+
def sigterm_handler(signum, frame):
if COMPOSE:
try_kill_children(signum)
+ try_kill_koji_tasks()
COMPOSE.log_error("Compose run failed: signal %s" % signum)
COMPOSE.log_error("Traceback:\n%s" % "\n".join(traceback.format_stack(frame)))
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
@@ -622,15 +670,10 @@ def cli_main():
main()
except (Exception, KeyboardInterrupt) as ex:
if COMPOSE:
- tb_path = COMPOSE.paths.log.log_file("global", "traceback")
COMPOSE.log_error("Compose run failed: %s" % ex)
- COMPOSE.log_error("Extended traceback in: %s" % tb_path)
+ COMPOSE.traceback()
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
COMPOSE.write_status("DOOMED")
- import kobo.tback
-
- with open(tb_path, "wb") as f:
- f.write(kobo.tback.Traceback().get_traceback())
else:
print("Exception: %s" % ex)
raise
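
Two of the additions above are defensive: cancelling leftover Koji tasks on SIGTERM and wiping package-set reuse files when an unsigned package is detected. A sketch of the latter, with the glob pattern standing in for `compose.paths.work.pkgset_reuse_file("*")`:

import glob
import os

def drop_pkgset_reuse_files(pattern):
    # An unsigned package may have leaked into the cached package sets;
    # deleting the reuse files forces the next compose to rebuild them.
    for fp in glob.glob(pattern):
        os.unlink(fp)

drop_pkgset_reuse_files("/compose/work/global/pkgset_*_reuse.pickle")  # illustrative path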
diff --git a/pungi/util.py b/pungi/util.py
index 1965c86a..b744f572 100644
--- a/pungi/util.py
+++ b/pungi/util.py
@@ -34,6 +34,7 @@ import kobo.conf
from kobo.shortcuts import run, force_list
from kobo.threads import WorkerThread, ThreadPool
from productmd.common import get_major_version
+from pungi.module_util import Modulemd
# Patterns that match all names of debuginfo packages
DEBUG_PATTERNS = ["*-debuginfo", "*-debuginfo-*", "*-debugsource"]
@@ -287,8 +288,13 @@ def resolve_git_ref(repourl, ref):
if re.match(r"^[a-f0-9]{40}$", ref):
# This looks like a commit ID already.
return ref
-
- _, output = git_ls_remote(repourl, ref)
+ try:
+ _, output = git_ls_remote(repourl, ref)
+ except RuntimeError as e:
+ raise GitUrlResolveError(
+ "ref does not exist in remote repo %s with the error %s %s"
+ % (repourl, e, e.output)
+ )
lines = []
for line in output.split("\n"):
@@ -941,7 +947,7 @@ def get_repo_dicts(repos, logger=None):
def version_generator(compose, gen):
"""If ``gen`` is a known generator, create a value. Otherwise return
- the argument value unchanged.
+ the argument value unchanged.
"""
if gen == "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN":
return "%s.%s" % (compose.image_version, compose.image_release)
@@ -963,8 +969,8 @@ def version_generator(compose, gen):
def retry(timeout=120, interval=30, wait_on=Exception):
- """ A decorator that allows to retry a section of code until success or
- timeout.
+ """A decorator that allows to retry a section of code until success or
+ timeout.
"""
def wrapper(function):
@@ -1034,6 +1040,46 @@ def load_config(file_path, defaults={}):
return conf
+def _read_single_module_stream(
+ file_or_string, compose=None, arch=None, build=None, is_file=True
+):
+ try:
+ mod_index = Modulemd.ModuleIndex.new()
+ if is_file:
+ mod_index.update_from_file(file_or_string, True)
+ else:
+ mod_index.update_from_string(file_or_string, True)
+ mod_names = mod_index.get_module_names()
+ emit_warning = False
+ if len(mod_names) > 1:
+ emit_warning = True
+ mod_streams = mod_index.get_module(mod_names[0]).get_all_streams()
+ if len(mod_streams) > 1:
+ emit_warning = True
+ if emit_warning and compose:
+ compose.log_warning(
+ "Multiple modules/streams for arch: %s. Build: %s. "
+ "Processing first module/stream only.",
+ arch,
+ build,
+ )
+ return mod_streams[0]
+ except (KeyError, IndexError):
+ # There is no modulemd for this arch. This could mean an arch was
+ # added to the compose after the module was built. We don't want to
+ # process this, let's skip this module.
+ if compose:
+ compose.log_info("Skipping arch: %s. Build: %s", arch, build)
+
+
+def read_single_module_stream_from_file(*args, **kwargs):
+ return _read_single_module_stream(*args, is_file=True, **kwargs)
+
+
+def read_single_module_stream_from_string(*args, **kwargs):
+ return _read_single_module_stream(*args, is_file=False, **kwargs)
+
+
@contextlib.contextmanager
def as_local_file(url):
"""If URL points to a file over HTTP, the file will be downloaded locally
@@ -1046,6 +1092,8 @@ def as_local_file(url):
yield local_filename
finally:
os.remove(local_filename)
+ elif url.startswith("file://"):
+ yield url[7:]
else:
# Not a remote url, return unchanged.
yield url
@@ -1083,3 +1131,9 @@ class PartialFuncThreadPool(ThreadPool):
@property
def results(self):
return self._results
+
+
+def read_json_file(file_path):
+ """A helper function to read a JSON file."""
+ with open(file_path) as f:
+ return json.load(f)
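
Assuming libmodulemd's GObject bindings are installed, the new helpers can be exercised directly; the fixture path below is one of the files added later in this patch:

from pungi.util import read_single_module_stream_from_string

with open("tests/fixtures/mmds/modulemd.x86_64.txt") as f:
    stream = read_single_module_stream_from_string(f.read())

# On the KeyError/IndexError path the helper only logs and returns None,
# so callers must tolerate a missing stream.
if stream is not None:
    print(stream.get_module_name(), stream.get_stream_name())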
diff --git a/pungi/wrappers/comps.py b/pungi/wrappers/comps.py
index 5a8ee09c..08572ac1 100644
--- a/pungi/wrappers/comps.py
+++ b/pungi/wrappers/comps.py
@@ -177,9 +177,9 @@ class CompsFilter(object):
for i in self.tree.xpath("//*[@xml:lang]"):
i.getparent().remove(i)
- def filter_environment_groups(self, lookaside_groups=[]):
+ def filter_environment_groups(self, arch, lookaside_groups=[]):
"""
- Remove undefined groups from environments.
+ Remove undefined groups, or groups not matching the given arch, from environments.
"""
all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
for environment in self.tree.xpath("/comps/environment"):
@@ -187,6 +187,12 @@ class CompsFilter(object):
if group.text not in all_groups:
group.getparent().remove(group)
+ for group in environment.xpath("grouplist/groupid[@arch]"):
+ value = group.attrib.get("arch")
+ values = [v for v in re.split(r"[, ]+", value) if v]
+ if arch not in values:
+ group.getparent().remove(group)
+
def remove_empty_environments(self):
"""
Remove all environments without groups.
@@ -212,7 +218,7 @@ class CompsFilter(object):
)
file_obj.write(b"\n")
- def cleanup(self, keep_groups=[], lookaside_groups=[]):
+ def cleanup(self, arch, keep_groups=[], lookaside_groups=[]):
"""
Remove empty groups, categories and environment from the comps file.
Groups given in ``keep_groups`` will be preserved even if empty.
@@ -223,7 +229,7 @@ class CompsFilter(object):
self.remove_empty_groups(keep_groups)
self.filter_category_groups()
self.remove_empty_categories()
- self.filter_environment_groups(lookaside_groups)
+ self.filter_environment_groups(arch, lookaside_groups)
self.remove_empty_environments()
@@ -357,7 +363,10 @@ class CompsWrapper(object):
if environment.option_ids:
append_grouplist(
- doc, env_node, set(environment.option_ids), "optionlist",
+ doc,
+ env_node,
+ set(environment.option_ids),
+ "optionlist",
)
if self.comps.langpacks:
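
Comps allows a comma- or space-separated list of arches on a groupid element; the filter keeps a group only when the current arch appears in that list. A standalone lxml sketch of the same pruning (the XML literal is illustrative):

import re
from lxml import etree

comps = etree.fromstring(
    "<comps><environment><grouplist>"
    '<groupid arch="x86_64, ppc64le">core</groupid>'
    '<groupid arch="s390x">legacy</groupid>'
    "</grouplist></environment></comps>"
)
arch = "x86_64"
for group in comps.xpath("//grouplist/groupid[@arch]"):
    values = [v for v in re.split(r"[, ]+", group.attrib["arch"]) if v]
    if arch not in values:
        group.getparent().remove(group)

print(etree.tostring(comps).decode())  # only the x86_64-capable group survives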
diff --git a/pungi/wrappers/fus.py b/pungi/wrappers/fus.py
index df195067..5c0c2876 100644
--- a/pungi/wrappers/fus.py
+++ b/pungi/wrappers/fus.py
@@ -26,7 +26,12 @@ Pungi).
def get_cmd(
- conf_file, arch, repos, lookasides, platform=None, filter_packages=None,
+ conf_file,
+ arch,
+ repos,
+ lookasides,
+ platform=None,
+ filter_packages=None,
):
cmd = ["fus", "--verbose", "--arch", arch]
diff --git a/pungi/wrappers/iso.py b/pungi/wrappers/iso.py
index afbdf87b..3f438f74 100644
--- a/pungi/wrappers/iso.py
+++ b/pungi/wrappers/iso.py
@@ -146,6 +146,7 @@ def get_mkisofs_cmd(
input_charset="utf-8",
graft_points=None,
use_xorrisofs=False,
+ iso_level=None,
):
# following options are always enabled
untranslated_filenames = True
@@ -155,6 +156,10 @@ def get_mkisofs_cmd(
rock = True
cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"]
+
+ if iso_level:
+ cmd.extend(["-iso-level", str(iso_level)])
+
if appid:
cmd.extend(["-appid", appid])
@@ -255,11 +260,21 @@ def get_isohybrid_cmd(iso_path, arch):
return cmd
-def get_manifest_cmd(iso_name):
- return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
- shlex_quote(iso_name),
- shlex_quote(iso_name),
- )
+def get_manifest_cmd(iso_name, xorriso=False):
+ if xorriso:
+ return """xorriso -dev %s --find |
+ tail -n+2 |
+ tr -d "'" |
+ cut -c2- |
+ sort >> %s.manifest""" % (
+ shlex_quote(iso_name),
+ shlex_quote(iso_name),
+ )
+ else:
+ return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
+ shlex_quote(iso_name),
+ shlex_quote(iso_name),
+ )
def get_volume_id(path):
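
ISOs produced by xorrisofs cannot always be listed with isoinfo, hence the alternative xorriso pipeline. A usage sketch of the wrapper (the comment lines paraphrase the output rather than quoting it verbatim):

from pungi.wrappers import iso

print(iso.get_manifest_cmd("image.iso"))
# isoinfo -R -f -i image.iso | grep -v '/TRANS.TBL$' | sort >> image.iso.manifest
print(iso.get_manifest_cmd("image.iso", xorriso=True))
# xorriso -dev image.iso --find | tail -n+2 | ... | sort >> image.iso.manifest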
diff --git a/pungi/wrappers/jigdo.py b/pungi/wrappers/jigdo.py
index 5a6c7fee..417762cf 100644
--- a/pungi/wrappers/jigdo.py
+++ b/pungi/wrappers/jigdo.py
@@ -25,7 +25,7 @@ class JigdoWrapper(kobo.log.LoggingBase):
self, image, files, output_dir, cache=None, no_servers=False, report=None
):
"""
- files: [{"path", "label", "uri"}]
+ files: [{"path", "label", "uri"}]
"""
cmd = ["jigdo-file", "make-template"]
diff --git a/pungi/wrappers/kojiwrapper.py b/pungi/wrappers/kojiwrapper.py
index a7341cff..4348f884 100644
--- a/pungi/wrappers/kojiwrapper.py
+++ b/pungi/wrappers/kojiwrapper.py
@@ -37,10 +37,14 @@ KOJI_BUILD_DELETED = koji.BUILD_STATES["DELETED"]
class KojiWrapper(object):
lock = threading.Lock()
- def __init__(self, profile, real_koji=False):
- self.profile = profile
+ def __init__(self, compose):
+ self.compose = compose
+ try:
+ self.profile = self.compose.conf["koji_profile"]
+ except KeyError:
+ raise RuntimeError("Koji profile must be configured")
with self.lock:
- self.koji_module = koji.get_profile_module(profile)
+ self.koji_module = koji.get_profile_module(self.profile)
session_opts = {}
for key in (
"timeout",
@@ -62,6 +66,9 @@ class KojiWrapper(object):
self.koji_module.config.server, session_opts
)
+ # This retry should be removed once https://pagure.io/koji/issue/3170 is
+ # fixed and released.
+ @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
def login(self):
"""Authenticate to the hub."""
auth_type = self.koji_module.config.authtype
@@ -112,8 +119,6 @@ class KojiWrapper(object):
if channel:
cmd.append("--channel-override=%s" % channel)
- else:
- cmd.append("--channel-override=runroot-local")
if weight:
cmd.append("--weight=%s" % int(weight))
@@ -143,10 +148,13 @@ class KojiWrapper(object):
if chown_paths:
paths = " ".join(shlex_quote(pth) for pth in chown_paths)
+ command += " ; EXIT_CODE=$?"
# Make the files world readable
- command += " && chmod -R a+r %s" % paths
+ command += " ; chmod -R a+r %s" % paths
# and owned by the same user that is running the process
- command += " && chown -R %d %s" % (os.getuid(), paths)
+ command += " ; chown -R %d %s" % (os.getuid(), paths)
+ # Exit with code of main command
+ command += " ; exit $EXIT_CODE"
cmd.append(command)
return cmd
@@ -166,8 +174,6 @@ class KojiWrapper(object):
if channel:
cmd.append("--channel-override=%s" % channel)
- else:
- cmd.append("--channel-override=runroot-local")
if weight:
cmd.append("--weight=%s" % int(weight))
@@ -203,14 +209,19 @@ class KojiWrapper(object):
return cmd
def get_pungi_ostree_cmd(
- self, target, arch, args, channel=None, packages=None, mounts=None, weight=None,
+ self,
+ target,
+ arch,
+ args,
+ channel=None,
+ packages=None,
+ mounts=None,
+ weight=None,
):
cmd = self._get_cmd("pungi-ostree", "--nowait", "--task-id")
if channel:
cmd.append("--channel-override=%s" % channel)
- else:
- cmd.append("--channel-override=runroot-local")
if weight:
cmd.append("--weight=%s" % int(weight))
@@ -281,15 +292,22 @@ class KojiWrapper(object):
universal_newlines=True,
)
- first_line = output.splitlines()[0]
- match = re.search(r"^(\d+)$", first_line)
- if not match:
+ # Look for first line that contains only a number. This is the ID of
+ # the new task. Usually this should be the first line, but there may be
+ # warnings before it.
+ task_id = None
+ for line in output.splitlines():
+ match = re.search(r"^(\d+)$", line)
+ if match:
+ task_id = int(match.groups()[0])
+ break
+
+ if not task_id:
raise RuntimeError(
"Could not find task ID in output. Command '%s' returned '%s'."
% (" ".join(command), output)
)
- task_id = int(match.groups()[0])
+ self.save_task_id(task_id)
retcode, output = self._wait_for_task(task_id, logfile=log_file)
@@ -323,9 +341,11 @@ class KojiWrapper(object):
"ksurl",
"distro",
)
- assert set(min_options).issubset(set(config_options["image-build"].keys())), (
- "image-build requires at least %s got '%s'"
- % (", ".join(min_options), config_options)
+ assert set(min_options).issubset(
+ set(config_options["image-build"].keys())
+ ), "image-build requires at least %s got '%s'" % (
+ ", ".join(min_options),
+ config_options,
)
cfg_parser = configparser.ConfigParser()
for section, opts in config_options.items():
@@ -380,6 +400,9 @@ class KojiWrapper(object):
if "can_fail" in options:
cmd.append("--can-fail=%s" % ",".join(options["can_fail"]))
+ if options.get("nomacboot"):
+ cmd.append("--nomacboot")
+
if wait:
cmd.append("--wait")
@@ -517,6 +540,7 @@ class KojiWrapper(object):
retcode, output = run(
command,
can_fail=True,
+ show_cmd=True,
logfile=log_file,
env=env,
buffer_size=-1,
@@ -531,6 +555,8 @@ class KojiWrapper(object):
)
task_id = int(match.groups()[0])
+ self.save_task_id(task_id)
+
if retcode != 0 and (
self._has_connection_error(output) or self._has_offline_error(output)
):
@@ -545,6 +571,19 @@ class KojiWrapper(object):
}
def watch_task(self, task_id, log_file=None, max_retries=None):
+ """Watch and wait for a task to finish.
+
+ :param int task_id: ID of koji task.
+ :param str log_file: Path to log file.
+ :param int max_retries: Max times to retry when an error occurs;
+ no limit by default.
+ """
+ if log_file:
+ task_url = os.path.join(
+ self.koji_module.config.weburl, "taskinfo?taskID=%d" % task_id
+ )
+ with open(log_file, "a") as f:
+ f.write("Task URL: %s\n" % task_url)
retcode, _ = self._wait_for_task(
task_id, logfile=log_file, max_retries=max_retries
)
@@ -816,14 +855,27 @@ class KojiWrapper(object):
"""
return self.multicall_map(*args, **kwargs)
+ def save_task_id(self, task_id):
+ """Save task id by creating a file using task_id as file name
+
+ :param int task_id: ID of koji task
+ """
+ log_dir = self.compose.paths.log.koji_tasks_dir()
+ with open(os.path.join(log_dir, str(task_id)), "w"):
+ pass
+
class KojiMockWrapper(object):
lock = threading.Lock()
- def __init__(self, profile):
- self.profile = profile
+ def __init__(self, compose):
+ self.compose = compose
+ try:
+ self.profile = self.compose.conf["koji_profile"]
+ except KeyError:
+ raise RuntimeError("Koji profile must be configured")
with self.lock:
- self.koji_module = koji.get_profile_module(profile)
+ self.koji_module = koji.get_profile_module(self.profile)
session_opts = {}
for key in (
"timeout",
@@ -843,7 +895,11 @@ class KojiMockWrapper(object):
session_opts[key] = value
self.koji_proxy = KojiMock(
packages_dir=self.koji_module.config.topdir,
- modules_dir=os.path.join(self.koji_module.config.topdir, 'modules'))
+ modules_dir=os.path.join(
+ self.koji_module.config.topdir,
+ 'modules',
+ )
+ )
def get_buildroot_rpms(compose, task_id):
@@ -851,7 +907,7 @@ def get_buildroot_rpms(compose, task_id):
result = []
if task_id:
# runroot
- koji = KojiWrapper(compose.conf["koji_profile"])
+ koji = KojiWrapper(compose)
buildroot_infos = koji.koji_proxy.listBuildroots(taskID=task_id)
if not buildroot_infos:
children_tasks = koji.koji_proxy.getTaskChildren(task_id)
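
The task-ID scan above exists because Koji clients may print warnings (expiring certificates, deprecation notices) ahead of the ID. A standalone sketch of that parsing; note it needs an explicit `None` seed, which the hunk above now includes:

import re

def parse_task_id(output):
    """Return the first stdout line that is purely a number, or None."""
    for line in output.splitlines():
        match = re.search(r"^(\d+)$", line)
        if match:
            return int(match.group(1))
    return None

assert parse_task_id("warning: cert expires in 5 days\n12345\n") == 12345
assert parse_task_id("no id here") is None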
diff --git a/pungi/wrappers/repoclosure.py b/pungi/wrappers/repoclosure.py
index f62b3da4..268df094 100644
--- a/pungi/wrappers/repoclosure.py
+++ b/pungi/wrappers/repoclosure.py
@@ -40,9 +40,13 @@ def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
# There are options that are not exposed here, because we don't need
# them.
- for i in force_list(arch or []):
+ arches = force_list(arch or [])
+ for i in arches:
cmd.append("--arch=%s" % i)
+ if backend == "dnf" and arches:
+ cmd.append("--forcearch=%s" % arches[0])
+
repos = repos or {}
for repo_id, repo_path in repos.items():
cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
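
`--forcearch` only exists on the dnf side and is pinned to the first configured arch, which lets repoclosure for a foreign architecture run on any host. A sketch of the argument assembly (the function name is illustrative):

from kobo.shortcuts import force_list

def arch_args(backend, arch):
    args = []
    arches = force_list(arch or [])
    for a in arches:
        args.append("--arch=%s" % a)
    if backend == "dnf" and arches:
        args.append("--forcearch=%s" % arches[0])
    return args

print(arch_args("dnf", ["ppc64le", "noarch"]))
# ['--arch=ppc64le', '--arch=noarch', '--forcearch=ppc64le']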
diff --git a/pungi/wrappers/scm.py b/pungi/wrappers/scm.py
index 5602aafe..5c4b37fb 100644
--- a/pungi/wrappers/scm.py
+++ b/pungi/wrappers/scm.py
@@ -265,11 +265,7 @@ class RpmScmWrapper(ScmBase):
class KojiScmWrapper(ScmBase):
def __init__(self, *args, **kwargs):
super(KojiScmWrapper, self).__init__(*args, **kwargs)
- try:
- profile = kwargs["compose"].conf["koji_profile"]
- except KeyError:
- raise RuntimeError("Koji profile must be configured")
- wrapper = KojiWrapper(profile)
+ wrapper = KojiWrapper(kwargs["compose"])
self.koji = wrapper.koji_module
self.proxy = wrapper.koji_proxy
diff --git a/pungi_utils/orchestrator.py b/pungi_utils/orchestrator.py
index 5bf12a05..e63838aa 100644
--- a/pungi_utils/orchestrator.py
+++ b/pungi_utils/orchestrator.py
@@ -302,8 +302,7 @@ def block_on(parts, name):
def check_finished_processes(processes):
- """Walk through all active processes and check if something finished.
- """
+ """Walk through all active processes and check if something finished."""
for proc in processes.keys():
proc.poll()
if proc.returncode is not None:
diff --git a/requirements.txt b/requirements.txt
index ee73d0c3..9acc2ad6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,6 @@
# Some packages must be installed via dnf/yum first, see doc/contributing.rst
dict.sorted
dogpile.cache
-fedmsg
funcsigs
jsonschema
kobo
diff --git a/setup.py b/setup.py
index 5dcf30b9..c48b75d9 100755
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@ packages = sorted(packages)
setup(
name="pungi",
- version="4.2.17",
+ version="4.3.6",
description="Distribution compose tool",
url="https://pagure.io/pungi",
author="Dennis Gilmore",
@@ -37,7 +37,6 @@ setup(
"comps_filter = pungi.scripts.comps_filter:main",
"pungi = pungi.scripts.pungi:main",
"pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
- "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
"pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
"pungi-make-ostree = pungi.ostree:main",
"pungi-notification-report-progress = pungi.scripts.report_progress:main",
diff --git a/tests/Dockerfile-test b/tests/Dockerfile-test
new file mode 100644
index 00000000..61c99a96
--- /dev/null
+++ b/tests/Dockerfile-test
@@ -0,0 +1,24 @@
+FROM fedora:33
+LABEL \
+ name="Pungi test" \
+ description="Run tests using tox with Python 3" \
+ vendor="Pungi developers" \
+ license="MIT"
+
+RUN dnf -y update && dnf -y install \
+ findutils \
+ libmodulemd \
+ git \
+ koji \
+ make \
+ python3-createrepo_c \
+ python3-gobject-base \
+ python3-tox \
+ python3-urlgrabber \
+ && dnf clean all
+
+WORKDIR /src
+
+COPY . .
+
+CMD ["tox", "-e", "flake8,black,py3"]
diff --git a/tests/Dockerfile-test-py2 b/tests/Dockerfile-test-py2
new file mode 100644
index 00000000..84ce1f99
--- /dev/null
+++ b/tests/Dockerfile-test-py2
@@ -0,0 +1,27 @@
+FROM centos:7
+LABEL \
+ name="Pungi test" \
+ description="Run tests using tox with Python 2" \
+ vendor="Pungi developers" \
+ license="MIT"
+
+RUN yum -y update && yum -y install epel-release && yum -y install \
+ git \
+ libmodulemd2 \
+ make \
+ python3 \
+ python-createrepo_c \
+ python-gobject-base \
+ python-gssapi \
+ python-libcomps \
+ pykickstart \
+ && yum clean all
+
+# python-tox in the yum repo is too old, so install the latest version
+RUN pip3 install tox
+
+WORKDIR /src
+
+COPY . .
+
+CMD ["tox", "-e", "py27"]
diff --git a/tests/Jenkinsfile b/tests/Jenkinsfile
new file mode 100644
index 00000000..04826e8c
--- /dev/null
+++ b/tests/Jenkinsfile
@@ -0,0 +1,59 @@
+def DUFFY_SESSION_ID
+
+pipeline {
+ agent {
+ label 'cico-workspace'
+ }
+
+ parameters {
+ string(name: 'REPO', defaultValue: '', description: 'Git repo URL the pull request comes from')
+ string(name: 'BRANCH', defaultValue: '', description: 'Git branch the pull request comes from')
+ }
+
+ stages {
+ stage('CI') {
+ steps {
+ script {
+ if (params.REPO == "" || params.BRANCH == "") {
+ error "Please supply both params (REPO and BRANCH)"
+ }
+ try {
+ echo "Requesting duffy node ..."
+ def session_str = sh returnStdout: true, script: "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY request-session pool=virt-ec2-t2-centos-9s-x86_64,quantity=1"
+ def session = readJSON text: session_str
+ DUFFY_SESSION_ID = session.session.id
+ def hostname = session.session.nodes[0].hostname
+ echo "duffy session id: $DUFFY_SESSION_ID hostname: $hostname"
+ def remote_dir = "/tmp/$JENKINS_AGENT_NAME"
+ echo "remote_dir: $remote_dir"
+ writeFile file: 'job.sh', text: """
+set -xe
+dnf install -y git podman
+git config --global user.email "jenkins@localhost"
+git config --global user.name "jenkins"
+cd $remote_dir
+git clone https://pagure.io/pungi.git -b master
+cd pungi
+git remote rm proposed || true
+git remote add proposed "$params.REPO"
+git fetch proposed
+git checkout origin/master
+git merge --no-ff "proposed/$params.BRANCH" -m "Merge PR"
+podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit
+podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test-py2 tox -r -e py27
+ """
+ sh "cat job.sh"
+ sh "ssh -o StrictHostKeyChecking=no root@$hostname mkdir $remote_dir"
+ sh "scp job.sh root@$hostname:$remote_dir"
+ sh "ssh root@$hostname sh $remote_dir/job.sh"
+ } finally {
+ if (DUFFY_SESSION_ID) {
+ echo "Release duffy node ..."
+ sh "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY retire-session $DUFFY_SESSION_ID > /dev/null"
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/tests/data/dummy-comps.xml b/tests/data/dummy-comps.xml
index 72ed7738..3e73366b 100644
--- a/tests/data/dummy-comps.xml
+++ b/tests/data/dummy-comps.xml
@@ -118,7 +118,7 @@
<display_order>10</display_order>
<groupid>core</groupid>
- <groupid>standard</groupid>
+ <groupid arch="x86_64">standard</groupid>
<groupid>basic-desktop</groupid>
diff --git a/tests/data/dummy-pungi.conf b/tests/data/dummy-pungi.conf
index 5225f34d..f7e56388 100644
--- a/tests/data/dummy-pungi.conf
+++ b/tests/data/dummy-pungi.conf
@@ -110,4 +110,8 @@ extra_isos = {
}]
}
-create_jigdo = False
+iso_level = [
+ (".*", {
+ "src": 3,
+ }),
+]
diff --git a/tests/fixtures/invalid-image-metadata/compose/metadata/images.json b/tests/fixtures/invalid-image-metadata/compose/metadata/images.json
new file mode 100644
index 00000000..025d329e
--- /dev/null
+++ b/tests/fixtures/invalid-image-metadata/compose/metadata/images.json
@@ -0,0 +1,58 @@
+{
+ "header": {
+ "type": "productmd.images",
+ "version": "1.2"
+ },
+ "payload": {
+ "compose": {
+ "date": "20181001",
+ "id": "Mixed-1.0-20181001.n.0",
+ "respin": 0,
+ "type": "nightly"
+ },
+ "images": {
+ "Server": {
+ "x86_64": [
+ {
+ "arch": "x86_64",
+ "bootable": false,
+ "checksums": {
+ "md5": "c7977d67f6522bce7fb04c0818a3c744",
+ "sha1": "c7d65673b2eb477016f9e09f321935bace545515",
+ "sha256": "6d9cfc9be59cba96763dcca5d1b5759127d2f7920055b663dbcf29474bc368de"
+ },
+ "disc_count": 1,
+ "disc_number": 1,
+ "format": "iso",
+ "implant_md5": "340b7dc15b9c74b8576b81c3b33fc3f2",
+ "mtime": 1636012560,
+ "path": "Server-Gluster/x86_64/iso/Gluster-2.3-DP-1-20211104.t.4-Server-x86_64-dvd1.iso",
+ "size": 419840,
+ "subvariant": "Server-Gluster",
+ "type": "dvd",
+ "volume_id": "Gluster-2.3 DP-1 Server.x86_64"
+ },
+ {
+ "arch": "x86_64",
+ "bootable": false,
+ "checksums": {
+ "md5": "a7977d67f6522bce7fb04c0818a3c744",
+ "sha1": "a7d65673b2eb477016f9e09f321935bace545515",
+ "sha256": "ad9cfc9be59cba96763dcca5d1b5759127d2f7920055b663dbcf29474bc368de"
+ },
+ "disc_count": 1,
+ "disc_number": 1,
+ "format": "iso",
+ "implant_md5": "340b7dc15b9c74b8576b81c3b33fc3f2",
+ "mtime": 1636012560,
+ "path": "Server-Gluster/x86_64/iso/Gluster-2.3-DP-1-20211104.t.4-Server-x86_64-dvd1.iso",
+ "size": 419840,
+ "subvariant": "Server-Gluster",
+ "type": "dvd",
+ "volume_id": "Gluster-2.3 DP-1 Server.x86_64"
+ }
+ ]
+ }
+ }
+ }
+}
diff --git a/tests/fixtures/mmds/m1.x86_64.txt b/tests/fixtures/mmds/m1.x86_64.txt
new file mode 100644
index 00000000..e1989733
--- /dev/null
+++ b/tests/fixtures/mmds/m1.x86_64.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: m1
+ stream: latest
+ version: 20190101
+ context: cafe
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modulemd.armv7hl.txt b/tests/fixtures/mmds/modulemd.armv7hl.txt
new file mode 100644
index 00000000..e03147d2
--- /dev/null
+++ b/tests/fixtures/mmds/modulemd.armv7hl.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: module
+ stream: master
+ version: 20190318
+ context: abcdef
+ arch: armhfp
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modulemd.x86_64.txt b/tests/fixtures/mmds/modulemd.x86_64.txt
new file mode 100644
index 00000000..b7e3761c
--- /dev/null
+++ b/tests/fixtures/mmds/modulemd.x86_64.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: module
+ stream: master
+ version: 20190318
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modules/x86_64/module:master-20190318-abcdef b/tests/fixtures/mmds/modules/x86_64/module:master-20190318-abcdef
new file mode 100644
index 00000000..b7e3761c
--- /dev/null
+++ b/tests/fixtures/mmds/modules/x86_64/module:master-20190318-abcdef
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: module
+ stream: master
+ version: 20190318
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modules/x86_64/modulemd.x86_64.txt b/tests/fixtures/mmds/modules/x86_64/modulemd.x86_64.txt
new file mode 100644
index 00000000..b7e3761c
--- /dev/null
+++ b/tests/fixtures/mmds/modules/x86_64/modulemd.x86_64.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: module
+ stream: master
+ version: 20190318
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modules/x86_64/scratch-module.x86_64.txt b/tests/fixtures/mmds/modules/x86_64/scratch-module.x86_64.txt
new file mode 100644
index 00000000..8a13926b
--- /dev/null
+++ b/tests/fixtures/mmds/modules/x86_64/scratch-module.x86_64.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: scratch-module
+ stream: master
+ version: 20200710
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/modules/x86_64/scratch-module:master-20190318-abcdef b/tests/fixtures/mmds/modules/x86_64/scratch-module:master-20190318-abcdef
new file mode 100644
index 00000000..8a13926b
--- /dev/null
+++ b/tests/fixtures/mmds/modules/x86_64/scratch-module:master-20190318-abcdef
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: scratch-module
+ stream: master
+ version: 20200710
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/fixtures/mmds/scratch-module.x86_64.txt b/tests/fixtures/mmds/scratch-module.x86_64.txt
new file mode 100644
index 00000000..8a13926b
--- /dev/null
+++ b/tests/fixtures/mmds/scratch-module.x86_64.txt
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+ name: scratch-module
+ stream: master
+ version: 20200710
+ context: abcdef
+ arch: x86_64
+ summary: Dummy module
+ description: Dummy module
+ license:
+ module:
+ - Beerware
+ content:
+ - Beerware
+ artifacts:
+ rpms:
+ - foobar-0:1.0-1.noarch
+...
diff --git a/tests/helpers.py b/tests/helpers.py
index 2f996a3d..67fd9db2 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -79,6 +79,7 @@ class MockVariant(mock.Mock):
self.variants = {}
self.pkgsets = set()
self.modules = None
+ self.modular_koji_tags = None
self.name = name
self.nsvc_to_pkgset = defaultdict(lambda: mock.Mock(rpms_by_arch={}))
@@ -215,7 +216,10 @@ class DummyCompose(object):
self.log_warning = mock.Mock()
self.get_image_name = mock.Mock(return_value="image-name")
self.image = mock.Mock(
- path="Client/i386/iso/image.iso", can_fail=False, size=123, _max_size=None,
+ path="Client/i386/iso/image.iso",
+ can_fail=False,
+ size=123,
+ _max_size=None,
)
self.im = mock.Mock(images={"Client": {"amd64": [self.image]}})
self.old_composes = []
@@ -226,6 +230,8 @@ class DummyCompose(object):
self.require_deliverable = mock.Mock()
self.should_create_yum_database = True
self.cache_region = None
+ self.containers_metadata = {}
+ self.load_old_compose_config = mock.Mock(return_value=None)
def setup_optional(self):
self.all_variants["Server-optional"] = MockVariant(
@@ -301,7 +307,10 @@ def mk_boom(cls=Exception, msg="BOOM"):
return b
-PKGSET_REPOS = dict(pkgset_source="repos", pkgset_repos={},)
+PKGSET_REPOS = dict(
+ pkgset_source="repos",
+ pkgset_repos={},
+)
BASE_CONFIG = dict(
release_short="test",
diff --git a/tests/test_buildinstall.py b/tests/test_buildinstall.py
index cbad3c14..7188b854 100644
--- a/tests/test_buildinstall.py
+++ b/tests/test_buildinstall.py
@@ -1920,7 +1920,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
"pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
)
def test_reuse_old_buildinstall_result_no_old_compose(
- self, load_old_buildinstall_metadata,
+ self,
+ load_old_buildinstall_metadata,
):
compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
load_old_buildinstall_metadata.return_value = None
@@ -1935,7 +1936,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
"pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
)
def test_reuse_old_buildinstall_result_different_cmd(
- self, load_old_buildinstall_metadata,
+ self,
+ load_old_buildinstall_metadata,
):
compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
@@ -1958,7 +1960,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
"pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
)
def test_reuse_old_buildinstall_result_different_installed_pkgs(
- self, load_old_buildinstall_metadata,
+ self,
+ load_old_buildinstall_metadata,
):
compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
load_old_buildinstall_metadata.return_value = {
@@ -1978,7 +1981,9 @@ class BuildinstallThreadTestCase(PungiTestCase):
)
@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
def test_reuse_old_buildinstall_result_different_buildroot_rpms(
- self, KojiWrapperMock, load_old_buildinstall_metadata,
+ self,
+ KojiWrapperMock,
+ load_old_buildinstall_metadata,
):
compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
load_old_buildinstall_metadata.return_value = {
@@ -2168,7 +2173,7 @@ class TestTweakConfigs(PungiTestCase):
)
for cfg in configs:
self.assertFileContent(
- cfg, ":LABEL=new\\x20volid ks=hd:LABEL=new\\x20volid:/ks.cfg\n"
+ cfg, ":LABEL=new\\x20volid inst.ks=hd:LABEL=new\\x20volid:/ks.cfg\n"
)
def test_tweak_configs_yaboot(self):
@@ -2180,5 +2185,5 @@ class TestTweakConfigs(PungiTestCase):
tweak_configs(self.topdir, "new volid", os.path.join(self.topdir, "ks.cfg"))
for cfg in configs:
self.assertFileContent(
- cfg, ":LABEL=new\\\\x20volid ks=hd:LABEL=new\\\\x20volid:/ks.cfg\n"
+ cfg, ":LABEL=new\\\\x20volid inst.ks=hd:LABEL=new\\\\x20volid:/ks.cfg\n"
)
diff --git a/tests/test_checks.py b/tests/test_checks.py
index 788f727a..9d53b119 100644
--- a/tests/test_checks.py
+++ b/tests/test_checks.py
@@ -147,7 +147,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
with mock.patch("sys.stdout", new_callable=StringIO) as out:
with mock.patch("os.path.exists") as exists:
exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
- result = checks.check({})
+ result = checks.check({"createrepo_c": True})
self.assertIn("createrepo_c", out.getvalue())
self.assertFalse(result)
diff --git a/tests/test_compose.py b/tests/test_compose.py
index 85590273..44748520 100644
--- a/tests/test_compose.py
+++ b/tests/test_compose.py
@@ -13,7 +13,9 @@ import tempfile
import shutil
import json
-from pungi.compose import Compose
+from requests.exceptions import HTTPError
+
+from pungi.compose import Compose, retry_request
class ConfigWrapper(dict):
@@ -608,8 +610,9 @@ class ComposeTestCase(unittest.TestCase):
ci_json = json.loads(ci.dumps())
self.assertEqual(ci_json, self.ci_json)
+ @mock.patch("pungi.compose.requests")
@mock.patch("time.strftime", new=lambda fmt, time: "20200526")
- def test_get_compose_info_cts(self):
+ def test_get_compose_info_cts(self, mocked_requests):
conf = ConfigWrapper(
release_name="Test",
release_version="1.0",
@@ -626,7 +629,6 @@ class ComposeTestCase(unittest.TestCase):
ci_copy["header"]["version"] = "1.2"
mocked_response = mock.MagicMock()
mocked_response.text = json.dumps(self.ci_json)
- mocked_requests = mock.MagicMock()
mocked_requests.post.return_value = mocked_response
mocked_requests_kerberos = mock.MagicMock()
@@ -637,7 +639,6 @@ class ComposeTestCase(unittest.TestCase):
# `import`.
with mock.patch.dict(
"sys.modules",
- requests=mocked_requests,
requests_kerberos=mocked_requests_kerberos,
):
ci = Compose.get_compose_info(conf, respin_of="Fedora-Rawhide-20200517.n.1")
@@ -753,3 +754,76 @@ class StatusTest(unittest.TestCase):
self.compose.conf["gather_backend"] = "yum"
self.compose.conf["createrepo_database"] = False
self.assertFalse(self.compose.should_create_yum_database)
+
+
+class DumpContainerMetadataTest(unittest.TestCase):
+ def setUp(self):
+ self.tmp_dir = tempfile.mkdtemp()
+ with mock.patch("pungi.compose.ComposeInfo"):
+ self.compose = Compose({}, self.tmp_dir)
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp_dir)
+
+ def test_dump_metadata(self):
+ metadata = {"Server": {"x86_64": "Metadata"}}
+ self.compose.containers_metadata = metadata
+ self.compose.dump_containers_metadata()
+
+ with open(self.tmp_dir + "/compose/metadata/osbs.json") as f:
+ data = json.load(f)
+ self.assertEqual(data, metadata)
+
+ @mock.patch("pungi.phases.osbs.ThreadPool")
+ def test_dump_empty_metadata(self, ThreadPool):
+ self.compose.dump_containers_metadata()
+ self.assertFalse(os.path.isfile(self.tmp_dir + "/compose/metadata/osbs.json"))
+
+
+class TracebackTest(unittest.TestCase):
+ def setUp(self):
+ self.tmp_dir = tempfile.mkdtemp()
+ with mock.patch("pungi.compose.ComposeInfo"):
+ self.compose = Compose({}, self.tmp_dir)
+ self.patcher = mock.patch("kobo.tback.Traceback")
+ self.Traceback = self.patcher.start()
+ self.Traceback.return_value.get_traceback.return_value = b"traceback"
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp_dir)
+ self.patcher.stop()
+
+ def assertTraceback(self, filename):
+ self.assertTrue(
+ os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
+ )
+ self.assertEqual(
+ self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
+ )
+
+ def test_traceback_default(self):
+ self.compose.traceback()
+ self.assertTraceback("traceback")
+
+ def test_with_detail(self):
+ self.compose.traceback("extra-info")
+ self.assertTraceback("traceback-extra-info")
+
+
+class RetryRequestTest(unittest.TestCase):
+ @mock.patch("pungi.compose.requests")
+ def test_retry_timeout(self, mocked_requests):
+ mocked_requests.post.side_effect = [
+ HTTPError("Gateway Timeout", response=mock.Mock(status_code=504)),
+ mock.Mock(status_code=200),
+ ]
+ url = "http://localhost/api/1/composes/"
+ rv = retry_request("post", url)
+ self.assertEqual(
+ mocked_requests.mock_calls,
+ [
+ mock.call.post(url, json=None, auth=None),
+ mock.call.post(url, json=None, auth=None),
+ ],
+ )
+ self.assertEqual(rv.status_code, 200)
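
The test pins down the retry contract: one failed POST followed by a successful one, both with identical arguments. A hedged sketch of a helper satisfying it (an illustration, not Pungi's actual `retry_request` implementation):

import time

import requests

def retry_request(method, url, json=None, auth=None, tries=3, delay=1):
    # Retry transient errors (e.g. a 504 from a gateway in front of CTS).
    for attempt in range(tries):
        try:
            return getattr(requests, method)(url, json=json, auth=auth)
        except requests.exceptions.HTTPError:
            if attempt + 1 == tries:
                raise
            time.sleep(delay)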
diff --git a/tests/test_comps_wrapper.py b/tests/test_comps_wrapper.py
index 47d323bf..cee03dcb 100644
--- a/tests/test_comps_wrapper.py
+++ b/tests/test_comps_wrapper.py
@@ -196,22 +196,22 @@ class CompsFilterTest(unittest.TestCase):
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-environments.xml"))
def test_cleanup(self):
- self.filter.cleanup()
+ self.filter.cleanup("ppc64le")
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup.xml"))
def test_cleanup_after_filter(self):
self.filter.filter_packages("ppc64le", None)
- self.filter.cleanup()
+ self.filter.cleanup("ppc64le")
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-filter.xml"))
def test_cleanup_after_filter_keep_group(self):
self.filter.filter_packages("ppc64le", None)
- self.filter.cleanup(["standard"])
+ self.filter.cleanup("ppc64le", ["standard"])
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-keep.xml"))
def test_cleanup_all(self):
self.filter.filter_packages("ppc64le", None)
self.filter.filter_groups("ppc64le", None)
self.filter.filter_environments("ppc64le", None)
- self.filter.cleanup()
+ self.filter.cleanup("ppc64le")
self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-all.xml"))
diff --git a/tests/test_config.py b/tests/test_config.py
index a3d8f92c..3bc03328 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -22,7 +22,9 @@ class ConfigTestCase(unittest.TestCase):
class PkgsetConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self):
- cfg = load_config(pkgset_source="koji",)
+ cfg = load_config(
+ pkgset_source="koji",
+ )
self.assertValidation(cfg)
@@ -36,7 +38,9 @@ class PkgsetConfigTestCase(ConfigTestCase):
def test_pkgset_mismatch_repos(self):
cfg = load_config(
- pkgset_source="repos", pkgset_koji_tag="f25", pkgset_koji_inherit=False,
+ pkgset_source="repos",
+ pkgset_koji_tag="f25",
+ pkgset_koji_inherit=False,
)
self.assertValidation(
@@ -51,7 +55,10 @@ class PkgsetConfigTestCase(ConfigTestCase):
)
def test_pkgset_mismatch_koji(self):
- cfg = load_config(pkgset_source="koji", pkgset_repos={"whatever": "/foo"},)
+ cfg = load_config(
+ pkgset_source="koji",
+ pkgset_repos={"whatever": "/foo"},
+ )
self.assertValidation(
cfg, [checks.CONFLICTS.format("pkgset_source", "koji", "pkgset_repos")]
@@ -78,7 +85,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
)
def test_only_config_base_product_name(self):
- cfg = load_config(PKGSET_REPOS, base_product_name="Prod",)
+ cfg = load_config(
+ PKGSET_REPOS,
+ base_product_name="Prod",
+ )
self.assertValidation(
cfg,
@@ -99,7 +109,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
)
def test_only_config_base_product_short(self):
- cfg = load_config(PKGSET_REPOS, base_product_short="bp",)
+ cfg = load_config(
+ PKGSET_REPOS,
+ base_product_short="bp",
+ )
self.assertValidation(
cfg,
@@ -118,7 +131,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
)
def test_only_config_base_product_version(self):
- cfg = load_config(PKGSET_REPOS, base_product_version="1.0",)
+ cfg = load_config(
+ PKGSET_REPOS,
+ base_product_version="1.0",
+ )
self.assertValidation(
cfg,
@@ -141,19 +157,28 @@ class ReleaseConfigTestCase(ConfigTestCase):
class ImageNameConfigTestCase(ConfigTestCase):
def test_image_name_simple_string(self):
- cfg = load_config(PKGSET_REPOS, image_name_format="foobar",)
+ cfg = load_config(
+ PKGSET_REPOS,
+ image_name_format="foobar",
+ )
self.assertValidation(cfg, [])
def test_image_name_variant_mapping(self):
- cfg = load_config(PKGSET_REPOS, image_name_format={"^Server$": "foobar"},)
+ cfg = load_config(
+ PKGSET_REPOS,
+ image_name_format={"^Server$": "foobar"},
+ )
self.assertValidation(cfg, [])
class RunrootConfigTestCase(ConfigTestCase):
def test_set_runroot_true(self):
- cfg = load_config(PKGSET_REPOS, runroot=True,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ runroot=True,
+ )
self.assertValidation(
cfg,
@@ -163,7 +188,10 @@ class RunrootConfigTestCase(ConfigTestCase):
)
def test_set_runroot_false(self):
- cfg = load_config(PKGSET_REPOS, runroot=False,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ runroot=False,
+ )
self.assertValidation(
cfg,
@@ -175,7 +203,10 @@ class RunrootConfigTestCase(ConfigTestCase):
class BuildinstallConfigTestCase(ConfigTestCase):
def test_bootable_deprecated(self):
- cfg = load_config(PKGSET_REPOS, bootable=True,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ bootable=True,
+ )
self.assertValidation(
cfg,
@@ -185,7 +216,10 @@ class BuildinstallConfigTestCase(ConfigTestCase):
)
def test_buildinstall_method_without_bootable(self):
- cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax",)
+ cfg = load_config(
+ PKGSET_REPOS,
+ buildinstall_method="lorax",
+ )
self.assertValidation(cfg, [])
@@ -231,7 +265,9 @@ class BuildinstallConfigTestCase(ConfigTestCase):
class CreaterepoConfigTestCase(ConfigTestCase):
def test_validate_minimal_pkgset_koji(self):
cfg = load_config(
- pkgset_source="koji", pkgset_koji_tag="f25", product_id_allow_missing=True,
+ pkgset_source="koji",
+ pkgset_koji_tag="f25",
+ product_id_allow_missing=True,
)
self.assertValidation(
@@ -242,14 +278,20 @@ class CreaterepoConfigTestCase(ConfigTestCase):
class GatherConfigTestCase(ConfigTestCase):
def test_dnf_backend_is_default_on_py3(self):
- cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+ cfg = load_config(
+ pkgset_source="koji",
+ pkgset_koji_tag="f27",
+ )
with mock.patch("six.PY2", new=False):
self.assertValidation(cfg, [])
self.assertEqual(cfg["gather_backend"], "dnf")
def test_yum_backend_is_default_on_py2(self):
- cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+ cfg = load_config(
+ pkgset_source="koji",
+ pkgset_koji_tag="f27",
+ )
with mock.patch("six.PY2", new=True):
self.assertValidation(cfg, [])
@@ -257,7 +299,9 @@ class GatherConfigTestCase(ConfigTestCase):
def test_yum_backend_is_rejected_on_py3(self):
cfg = load_config(
- pkgset_source="koji", pkgset_koji_tag="f27", gather_backend="yum",
+ pkgset_source="koji",
+ pkgset_koji_tag="f27",
+ gather_backend="yum",
)
with mock.patch("six.PY2", new=False):
@@ -402,7 +446,10 @@ class LiveMediaConfigTestCase(ConfigTestCase):
self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
def test_global_config_null_release(self):
- cfg = load_config(PKGSET_REPOS, live_media_release=None,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ live_media_release=None,
+ )
self.assertValidation(cfg)
@@ -429,7 +476,8 @@ class TestRegexValidation(ConfigTestCase):
class RepoclosureTestCase(ConfigTestCase):
def test_invalid_backend(self):
cfg = load_config(
- PKGSET_REPOS, repoclosure_backend="fnd", # Intentionally with a typo
+ PKGSET_REPOS,
+ repoclosure_backend="fnd", # Intentionally with a typo
)
options = ["yum", "dnf"] if six.PY2 else ["dnf"]
@@ -445,7 +493,10 @@ class RepoclosureTestCase(ConfigTestCase):
class VariantAsLookasideTestCase(ConfigTestCase):
def test_empty(self):
variant_as_lookaside = []
- cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ variant_as_lookaside=variant_as_lookaside,
+ )
self.assertValidation(cfg)
def test_basic(self):
@@ -454,14 +505,20 @@ class VariantAsLookasideTestCase(ConfigTestCase):
("Server", "Client"),
("Everything", "Spin"),
]
- cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ variant_as_lookaside=variant_as_lookaside,
+ )
self.assertValidation(cfg)
class SkipPhasesTestCase(ConfigTestCase):
def test_empty(self):
skip_phases = []
- cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ skip_phases=skip_phases,
+ )
self.assertValidation(cfg)
def test_basic(self):
@@ -469,7 +526,10 @@ class SkipPhasesTestCase(ConfigTestCase):
"buildinstall",
"gather",
]
- cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ skip_phases=skip_phases,
+ )
self.assertValidation(cfg)
def test_bad_phase_name(self):
@@ -477,5 +537,8 @@ class SkipPhasesTestCase(ConfigTestCase):
"gather",
"non-existing-phase_name",
]
- cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+ cfg = load_config(
+ PKGSET_REPOS,
+ skip_phases=skip_phases,
+ )
self.assertNotEqual(checks.validate(cfg), ([], []))
diff --git a/tests/test_createiso_phase.py b/tests/test_createiso_phase.py
index 48bf37b7..f71a45c0 100644
--- a/tests/test_createiso_phase.py
+++ b/tests/test_createiso_phase.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
+import logging
import mock
import six
@@ -119,10 +120,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
graft_points="dummy-graft-points",
arch="x86_64",
supported=True,
- jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
- os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+ jigdo_dir=None,
+ os_tree=None,
hfs_compat=True,
use_xorrisofs=False,
+ script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
)
],
)
@@ -239,16 +241,20 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
[
CreateIsoOpts(
output_dir="%s/compose/Server/x86_64/iso" % self.topdir,
+ boot_iso=(
+ "%s/compose/Server/x86_64/os/images/boot.iso" % self.topdir
+ ),
iso_name="image-name",
volid="test-1.0 Server.x86_64",
graft_points="dummy-graft-points",
arch="x86_64",
buildinstall_method="lorax",
supported=True,
- jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
- os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+ jigdo_dir=None,
+ os_tree=None,
hfs_compat=True,
use_xorrisofs=False,
+ script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
),
CreateIsoOpts(
output_dir="%s/compose/Server/source/iso" % self.topdir,
@@ -257,10 +263,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
graft_points="dummy-graft-points",
arch="src",
supported=True,
- jigdo_dir="%s/compose/Server/source/jigdo" % self.topdir,
- os_tree="%s/compose/Server/source/tree" % self.topdir,
+ jigdo_dir=None,
+ os_tree=None,
hfs_compat=True,
use_xorrisofs=False,
+ script_dir="%s/work/src/tmp-Server" % self.topdir,
),
],
)
@@ -389,10 +396,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
graft_points="dummy-graft-points",
arch="src",
supported=True,
- jigdo_dir="%s/compose/Server/source/jigdo" % self.topdir,
- os_tree="%s/compose/Server/source/tree" % self.topdir,
+ jigdo_dir=None,
+ os_tree=None,
hfs_compat=True,
use_xorrisofs=False,
+ script_dir="%s/work/src/tmp-Server" % self.topdir,
)
],
)
@@ -496,10 +504,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
graft_points="dummy-graft-points",
arch="x86_64",
supported=True,
- jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
- os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+ jigdo_dir=None,
+ os_tree=None,
hfs_compat=False,
use_xorrisofs=False,
+ script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
)
],
)
@@ -579,7 +588,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
cmd["cmd"],
channel=None,
mounts=[self.topdir],
- packages=["coreutils", "genisoimage", "isomd5sum", "jigdo"],
+ packages=["coreutils", "genisoimage", "isomd5sum"],
use_shell=True,
weight=None,
)
@@ -749,7 +758,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
"coreutils",
"genisoimage",
"isomd5sum",
- "jigdo",
"lorax",
"which",
],
@@ -1105,8 +1113,8 @@ class SplitIsoTest(helpers.PungiTestCase):
os.path.join(self.topdir, "compose/Server/x86_64/os/n/media.repo")
)
- M = 1024 ** 2
- G = 1024 ** 3
+ M = 1024**2
+ G = 1024**3
with mock.patch(
"os.path.getsize",
@@ -1157,8 +1165,8 @@ class SplitIsoTest(helpers.PungiTestCase):
os.path.join(self.topdir, "compose/Server/x86_64/os/n/media.repo")
)
- M = 1024 ** 2
- G = 1024 ** 3
+ M = 1024**2
+ G = 1024**3
with mock.patch(
"os.path.getsize",
@@ -1209,7 +1217,7 @@ class SplitIsoTest(helpers.PungiTestCase):
os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
)
- M = 1024 ** 2
+ M = 1024**2
# treeinfo has size 0, spacer leaves 11M of free space, so with 10M
# reserve the padding package should be on second disk
@@ -1233,7 +1241,7 @@ class SplitIsoTest(helpers.PungiTestCase):
)
def test_can_customize_reserve(self):
- compose = helpers.DummyCompose(self.topdir, {"split_iso_reserve": 1024 ** 2})
+ compose = helpers.DummyCompose(self.topdir, {"split_iso_reserve": 1024**2})
helpers.touch(
os.path.join(self.topdir, "compose/Server/x86_64/os/.treeinfo"), TREEINFO
)
@@ -1244,7 +1252,7 @@ class SplitIsoTest(helpers.PungiTestCase):
os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
)
- M = 1024 ** 2
+ M = 1024**2
with mock.patch(
"os.path.getsize", DummySize({"spacer": 4688465664, "pad": 5 * M})
@@ -1265,7 +1273,7 @@ class SplitIsoTest(helpers.PungiTestCase):
os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
)
- M = 1024 ** 2
+ M = 1024**2
with mock.patch(
"os.path.getsize", DummySize({"spacer": 4688465664, "pad": 5 * M})
@@ -1322,3 +1330,262 @@ class TweakTreeinfo(helpers.PungiTestCase):
ti.dump(output)
self.assertFilesEqual(output, expected)
+
+
+class CreateisoTryReusePhaseTest(helpers.PungiTestCase):
+ def setUp(self):
+ super(CreateisoTryReusePhaseTest, self).setUp()
+ self.logger = logging.getLogger()
+ self.logger.setLevel(logging.DEBUG)
+ self.logger.addHandler(logging.FileHandler(os.devnull))  # discard log output
+
+ def test_disabled(self):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": False})
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+
+ self.assertFalse(phase.try_reuse(mock.Mock(), "Server", "x86_64", mock.Mock()))
+
+ def test_buildinstall_changed(self):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ phase.bi = mock.Mock()
+ phase.bi.reused.return_value = False
+ cmd = {"disc_num": 1, "disc_count": 1}
+ opts = CreateIsoOpts(buildinstall_method="lorax")
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ def test_no_old_config(self):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ cmd = {"disc_num": 1, "disc_count": 1}
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ def test_old_config_changed(self):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ old_config = compose.conf.copy()
+ old_config["release_version"] = "2"
+ compose.load_old_compose_config.return_value = old_config
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ cmd = {"disc_num": 1, "disc_count": 1}
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ def test_no_old_metadata(self):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ cmd = {"disc_num": 1, "disc_count": 1}
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ @mock.patch("pungi.phases.createiso.read_json_file")
+ def test_volume_id_differs(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ cmd = {"disc_num": 1, "disc_count": 1}
+
+ opts = CreateIsoOpts(volid="new-volid")
+
+ read_json_file.return_value = {"opts": {"volid": "old-volid"}}
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ @mock.patch("pungi.phases.createiso.read_json_file")
+ def test_packages_differ(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ cmd = {"disc_num": 1, "disc_count": 1}
+
+ new_graft_points = os.path.join(self.topdir, "new_graft_points")
+ helpers.touch(new_graft_points, "Packages/f/foo-1-1.x86_64.rpm\n")
+ opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+ old_graft_points = os.path.join(self.topdir, "old_graft_points")
+ helpers.touch(old_graft_points, "Packages/f/foo-1-2.x86_64.rpm\n")
+ read_json_file.return_value = {
+ "opts": {"graft_points": old_graft_points, "volid": "volid"}
+ }
+
+ self.assertFalse(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+
+ @mock.patch("pungi.phases.createiso.read_json_file")
+ def test_runs_perform_reuse(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ phase.logger = self.logger
+ phase.perform_reuse = mock.Mock()
+ cmd = {"disc_num": 1, "disc_count": 1}
+
+ new_graft_points = os.path.join(self.topdir, "new_graft_points")
+ helpers.touch(new_graft_points)
+ opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+ old_graft_points = os.path.join(self.topdir, "old_graft_points")
+ helpers.touch(old_graft_points)
+ dummy_iso_path = "dummy-iso-path"
+ read_json_file.return_value = {
+ "opts": {
+ "graft_points": old_graft_points,
+ "volid": "volid",
+ },
+ "cmd": {"iso_path": dummy_iso_path},
+ }
+
+ self.assertTrue(
+ phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+ )
+ self.assertEqual(
+ phase.perform_reuse.call_args_list,
+ [
+ mock.call(
+ cmd,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ dummy_iso_path,
+ )
+ ],
+ )
+
+
+@mock.patch("pungi.phases.createiso.OldFileLinker")
+@mock.patch("pungi.phases.createiso.add_iso_to_metadata")
+class CreateisoPerformReusePhaseTest(helpers.PungiTestCase):
+ def test_success(self, add_iso_to_metadata, OldFileLinker):
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ cmd = {
+ "iso_path": "target/image.iso",
+ "bootable": False,
+ "disc_num": 1,
+ "disc_count": 2,
+ }
+ opts = CreateIsoOpts()
+
+ phase.perform_reuse(
+ cmd,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ "old/image.iso",
+ )
+
+ self.assertEqual(
+ add_iso_to_metadata.call_args_list,
+ [
+ mock.call(
+ compose,
+ compose.variants["Server"],
+ "x86_64",
+ cmd["iso_path"],
+ bootable=False,
+ disc_count=2,
+ disc_num=1,
+ ),
+ ],
+ )
+ self.assertEqual(
+ OldFileLinker.return_value.mock_calls,
+ [
+ mock.call.link("old/image.iso", "target/image.iso"),
+ mock.call.link("old/image.iso.manifest", "target/image.iso.manifest"),
+ # The old log file doesn't exist in the test scenario.
+ mock.call.link(
+ None,
+ os.path.join(
+ self.topdir, "logs/x86_64/createiso-image.iso.x86_64.log"
+ ),
+ ),
+ ],
+ )
+
+ def test_failure(self, add_iso_to_metadata, OldFileLinker):
+ OldFileLinker.return_value.link.side_effect = helpers.mk_boom()
+ compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+ phase = createiso.CreateisoPhase(compose, mock.Mock())
+ cmd = {
+ "iso_path": "target/image.iso",
+ "bootable": False,
+ "disc_num": 1,
+ "disc_count": 2,
+ }
+ opts = CreateIsoOpts()
+
+ with self.assertRaises(Exception):
+ phase.perform_reuse(
+ cmd,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ "old/image.iso",
+ )
+
+ self.assertEqual(add_iso_to_metadata.call_args_list, [])
+ self.assertEqual(
+ OldFileLinker.return_value.mock_calls,
+ [
+ mock.call.link("old/image.iso", "target/image.iso"),
+ mock.call.abort(),
+ ],
+ )
+
+
+class ComposeConfGetIsoLevelTest(helpers.PungiTestCase):
+ def test_global_config(self):
+ compose = helpers.DummyCompose(self.topdir, {"iso_level": 3})
+
+ self.assertEqual(
+ createiso.get_iso_level_config(
+ compose, compose.variants["Server"], "x86_64"
+ ),
+ 3,
+ )
+
+ def test_src_only_config(self):
+ compose = helpers.DummyCompose(
+ self.topdir,
+ {"iso_level": [(".*", {"src": 4})]},
+ )
+
+ self.assertEqual(
+ createiso.get_iso_level_config(compose, compose.variants["Server"], "src"),
+ 4,
+ )
+
+ def test_no_match(self):
+ compose = helpers.DummyCompose(
+ self.topdir,
+ {"iso_level": [("^Server$", {"*": 4})]},
+ )
+
+ self.assertIsNone(
+ createiso.get_iso_level_config(
+ compose, compose.variants["Client"], "x86_64"
+ ),
+ )
diff --git a/tests/test_createrepophase.py b/tests/test_createrepophase.py
index c0617e7f..aecff998 100644
--- a/tests/test_createrepophase.py
+++ b/tests/test_createrepophase.py
@@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
-
try:
import unittest2 as unittest
except ImportError:
import unittest
-import mock
import glob
import os
+
+import mock
import six
+from pungi.module_util import Modulemd
from pungi.phases.createrepo import (
CreaterepoPhase,
+ ModulesMetadata,
create_variant_repo,
get_productids_from_scm,
- ModulesMetadata,
)
from tests.helpers import DummyCompose, PungiTestCase, copy_fixture, touch
-from pungi.module_util import Modulemd
class TestCreaterepoPhase(PungiTestCase):
@@ -141,7 +141,13 @@ class TestCreaterepoPhase(PungiTestCase):
self.assertEqual(
get_dir_from_scm.call_args_list,
- [mock.call(scm, os.path.join(compose.topdir, "work/global/tmp-Server"))],
+ [
+ mock.call(
+ scm,
+ os.path.join(compose.topdir, "work/global/tmp-Server"),
+ compose=compose,
+ )
+ ],
)
@@ -158,7 +164,9 @@ def make_mocked_modifyrepo_cmd(tc, module_artifacts):
for ms in module_streams:
tc.assertIn(ms.get_stream_name(), module_artifacts)
six.assertCountEqual(
- tc, ms.get_rpm_artifacts(), module_artifacts[ms.get_stream_name()],
+ tc,
+ ms.get_rpm_artifacts(),
+ module_artifacts[ms.get_stream_name()],
)
return mocked_modifyrepo_cmd
@@ -174,19 +182,24 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.run")
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_rpms(self, CreaterepoWrapperCls, run):
- compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+ compose = DummyCompose(
+ self.topdir,
+ {"createrepo_checksum": "sha256"},
+ )
compose.has_comps = False
repo = CreaterepoWrapperCls.return_value
copy_fixture("server-rpms.json", compose.paths.compose.metadata("rpms.json"))
- create_variant_repo(
- compose, "x86_64", compose.variants["Server"], "rpm", self.pkgset
- )
+ with mock.patch("pungi.phases.createrepo.CACHE_TOPDIR", self.topdir):
+ create_variant_repo(
+ compose, "x86_64", compose.variants["Server"], "rpm", self.pkgset
+ )
list_file = (
self.topdir + "/work/x86_64/repo_package_list/Server.x86_64.rpm.conf"
)
+
self.assertEqual(
CreaterepoWrapperCls.mock_calls[0], mock.call(createrepo_c=True)
)
@@ -208,6 +221,10 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=os.path.join(
+ self.topdir,
+ "%s-%s" % (compose.conf["release_short"], os.getuid()),
+ ),
)
],
)
@@ -217,7 +234,10 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.run")
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_rpms_without_database(self, CreaterepoWrapperCls, run):
- compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+ compose = DummyCompose(
+ self.topdir,
+ {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+ )
compose.should_create_yum_database = False
compose.has_comps = False
@@ -252,6 +272,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -261,7 +282,10 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.run")
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_source(self, CreaterepoWrapperCls, run):
- compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+ compose = DummyCompose(
+ self.topdir,
+ {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+ )
compose.has_comps = False
repo = CreaterepoWrapperCls.return_value
@@ -293,6 +317,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -302,7 +327,10 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.run")
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_debug(self, CreaterepoWrapperCls, run):
- compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+ compose = DummyCompose(
+ self.topdir,
+ {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+ )
compose.has_comps = False
repo = CreaterepoWrapperCls.return_value
@@ -337,6 +365,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -349,7 +378,12 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_no_createrepo_c(self, CreaterepoWrapperCls, run):
compose = DummyCompose(
- self.topdir, {"createrepo_c": False, "createrepo_checksum": "sha256"}
+ self.topdir,
+ {
+ "createrepo_c": False,
+ "createrepo_enable_cache": False,
+ "createrepo_checksum": "sha256",
+ },
)
compose.has_comps = False
@@ -384,6 +418,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -395,7 +430,11 @@ class TestCreateVariantRepo(PungiTestCase):
def test_variant_repo_is_idempotent(self, CreaterepoWrapperCls, run):
compose = DummyCompose(
self.topdir,
- {"createrepo_checksum": "sha256", "createrepo_num_workers": 10},
+ {
+ "createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
+ "createrepo_num_workers": 10,
+ },
)
compose.has_comps = False
@@ -434,6 +473,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -444,7 +484,12 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_rpms_with_xz(self, CreaterepoWrapperCls, run):
compose = DummyCompose(
- self.topdir, {"createrepo_checksum": "sha256", "createrepo_use_xz": True}
+ self.topdir,
+ {
+ "createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
+ "createrepo_use_xz": True,
+ },
)
compose.has_comps = False
@@ -479,6 +524,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=True,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -489,7 +535,12 @@ class TestCreateVariantRepo(PungiTestCase):
@mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
def test_variant_repo_rpms_with_deltas(self, CreaterepoWrapperCls, run):
compose = DummyCompose(
- self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+ self.topdir,
+ {
+ "createrepo_checksum": "sha256",
+ "createrepo_deltas": True,
+ "createrepo_enable_cache": False,
+ },
)
compose.has_comps = False
compose.old_composes = [self.topdir + "/old"]
@@ -534,6 +585,7 @@ class TestCreateVariantRepo(PungiTestCase):
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages",
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -550,6 +602,7 @@ class TestCreateVariantRepo(PungiTestCase):
{
"createrepo_checksum": "sha256",
"createrepo_deltas": [("^Server$", {"*": True})],
+ "createrepo_enable_cache": False,
},
)
compose.has_comps = False
@@ -594,6 +647,7 @@ class TestCreateVariantRepo(PungiTestCase):
+ "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages",
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -610,6 +664,7 @@ class TestCreateVariantRepo(PungiTestCase):
{
"createrepo_checksum": "sha256",
"createrepo_deltas": [("^Everything$", {"x86_64": True})],
+ "createrepo_enable_cache": False,
},
)
compose.has_comps = False
@@ -650,6 +705,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -666,6 +722,7 @@ class TestCreateVariantRepo(PungiTestCase):
{
"createrepo_checksum": "sha256",
"createrepo_deltas": [("^Server$", {"s390x": True})],
+ "createrepo_enable_cache": False,
},
)
compose.has_comps = False
@@ -706,6 +763,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -720,6 +778,7 @@ class TestCreateVariantRepo(PungiTestCase):
{
"createrepo_checksum": "sha256",
"createrepo_deltas": True,
+ "createrepo_enable_cache": False,
"hashed_directories": True,
},
)
@@ -774,6 +833,7 @@ class TestCreateVariantRepo(PungiTestCase):
],
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -790,6 +850,7 @@ class TestCreateVariantRepo(PungiTestCase):
{
"createrepo_checksum": "sha256",
"createrepo_deltas": True,
+ "createrepo_enable_cache": False,
"hashed_directories": True,
},
)
@@ -832,6 +893,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -843,7 +905,12 @@ class TestCreateVariantRepo(PungiTestCase):
def test_variant_repo_source_with_deltas(self, CreaterepoWrapperCls, run):
# This should not actually create deltas; only binary repos do.
compose = DummyCompose(
- self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+ self.topdir,
+ {
+ "createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
+ "createrepo_deltas": True,
+ },
)
compose.has_comps = False
compose.old_composes = [self.topdir + "/old"]
@@ -881,6 +948,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -892,7 +960,12 @@ class TestCreateVariantRepo(PungiTestCase):
def test_variant_repo_debug_with_deltas(self, CreaterepoWrapperCls, run):
# This should not actually create deltas; only binary repos do.
compose = DummyCompose(
- self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+ self.topdir,
+ {
+ "createrepo_checksum": "sha256",
+ "createrepo_deltas": True,
+ "createrepo_enable_cache": False,
+ },
)
compose.has_comps = False
compose.old_composes = [self.topdir + "/old"]
@@ -932,6 +1005,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -947,6 +1021,7 @@ class TestCreateVariantRepo(PungiTestCase):
self.topdir,
{
"createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
"product_id": "yes", # Truthy value is enough for this test
},
)
@@ -991,6 +1066,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -1007,6 +1083,7 @@ class TestCreateVariantRepo(PungiTestCase):
self.topdir,
{
"createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
"product_id": "yes", # Truthy value is enough for this test
},
)
@@ -1044,6 +1121,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -1059,6 +1137,7 @@ class TestCreateVariantRepo(PungiTestCase):
self.topdir,
{
"createrepo_checksum": "sha256",
+ "createrepo_enable_cache": False,
"product_id": "yes", # Truthy value is enough for this test
},
)
@@ -1094,6 +1173,7 @@ class TestCreateVariantRepo(PungiTestCase):
oldpackagedirs=None,
use_xz=False,
extra_args=[],
+ cachedir=None,
)
],
)
@@ -1259,7 +1339,7 @@ class TestCreateVariantRepo(PungiTestCase):
class TestGetProductIds(PungiTestCase):
def mock_get(self, filenames):
- def _mock_get(scm, dest):
+ def _mock_get(scm, dest, compose=None):
for filename in filenames:
touch(os.path.join(dest, filename))
@@ -1305,7 +1385,10 @@ class TestGetProductIds(PungiTestCase):
get_productids_from_scm(self.compose)
- self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+ self.assertEqual(
+ get_dir_from_scm.call_args_list,
+ [mock.call(cfg, mock.ANY, compose=self.compose)],
+ )
self.assertProductIds(
{
"Client": ["amd64"],
@@ -1326,7 +1409,10 @@ class TestGetProductIds(PungiTestCase):
get_productids_from_scm(self.compose)
- self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+ self.assertEqual(
+ get_dir_from_scm.call_args_list,
+ [mock.call(cfg, mock.ANY, compose=self.compose)],
+ )
self.assertProductIds({"Server": ["amd64", "x86_64"]})
@mock.patch("pungi.phases.createrepo.get_dir_from_scm")
@@ -1340,7 +1426,10 @@ class TestGetProductIds(PungiTestCase):
with self.assertRaises(RuntimeError) as ctx:
get_productids_from_scm(self.compose)
- self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+ self.assertEqual(
+ get_dir_from_scm.call_args_list,
+ [mock.call(cfg, mock.ANY, compose=self.compose)],
+ )
self.assertRegex(
str(ctx.exception),
r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)", # noqa: E501
@@ -1364,5 +1453,8 @@ class TestGetProductIds(PungiTestCase):
with self.assertRaises(RuntimeError) as ctx:
get_productids_from_scm(self.compose)
- self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+ self.assertEqual(
+ get_dir_from_scm.call_args_list,
+ [mock.call(cfg, mock.ANY, compose=self.compose)],
+ )
self.assertRegex(str(ctx.exception), "Multiple product certificates found.+")
diff --git a/tests/test_extra_isos_phase.py b/tests/test_extra_isos_phase.py
index ae0e9ba6..fed53c26 100644
--- a/tests/test_extra_isos_phase.py
+++ b/tests/test_extra_isos_phase.py
@@ -2,10 +2,12 @@
from typing import AnyStr, List
from unittest import mock
import six
+import logging
import os
from tests import helpers
+from pungi.createiso import CreateIsoOpts
from pungi.phases import extra_isos
@@ -18,7 +20,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})
- phase = extra_isos.ExtraIsosPhase(compose)
+ phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
phase.validate()
self.assertEqual(len(compose.log_warning.call_args_list), 1)
@@ -29,7 +31,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})
- phase = extra_isos.ExtraIsosPhase(compose)
+ phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
phase.run()
self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 3)
@@ -50,7 +52,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})
- phase = extra_isos.ExtraIsosPhase(compose)
+ phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
phase.run()
self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 2)
@@ -70,7 +72,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})
- phase = extra_isos.ExtraIsosPhase(compose)
+ phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
phase.run()
self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 2)
@@ -105,7 +107,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gvi.return_value = "my volume id"
gic.return_value = "/tmp/iso-graft-points"
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -146,7 +148,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
log_file=os.path.join(
self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
),
- with_jigdo=True,
)
],
)
@@ -181,7 +182,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gvi.return_value = "my volume id"
gic.return_value = "/tmp/iso-graft-points"
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -222,7 +223,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
log_file=os.path.join(
self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
),
- with_jigdo=False,
)
],
)
@@ -255,7 +255,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gvi.return_value = "my volume id"
gic.return_value = "/tmp/iso-graft-points"
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -296,7 +296,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
log_file=os.path.join(
self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
),
- with_jigdo=True,
)
],
)
@@ -329,7 +328,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gvi.return_value = "my volume id"
gic.return_value = "/tmp/iso-graft-points"
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -372,7 +371,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
log_file=os.path.join(
self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
),
- with_jigdo=True,
)
],
)
@@ -404,7 +402,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gvi.return_value = "my volume id"
gic.return_value = "/tmp/iso-graft-points"
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "src"), 1)
@@ -443,7 +441,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
log_file=os.path.join(
self.topdir, "logs/src/extraiso-my.iso.src.log"
),
- with_jigdo=True,
)
],
)
@@ -475,7 +472,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gic.return_value = "/tmp/iso-graft-points"
rcc.side_effect = helpers.mk_boom()
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -493,7 +490,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
gic.return_value = "/tmp/iso-graft-points"
rcc.side_effect = helpers.mk_boom(RuntimeError)
- t = extra_isos.ExtraIsosThread(mock.Mock())
+ t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
with self.assertRaises(RuntimeError):
with mock.patch("time.sleep"):
t.process((compose, cfg, server, "x86_64"), 1)
@@ -595,7 +592,9 @@ class GetExtraFilesTest(helpers.PungiTestCase):
get_file.call_args_list,
[
mock.call(
- cfg1, os.path.join(self.dir, "legalese"), compose=self.compose,
+ cfg1,
+ os.path.join(self.dir, "legalese"),
+ compose=self.compose,
),
mock.call(cfg2, self.dir, compose=self.compose),
],
@@ -848,7 +847,8 @@ class GetIsoContentsTest(helpers.PungiTestCase):
["Client"],
os.path.join(self.topdir, "compose/Server/source/tree/.treeinfo"),
os.path.join(
- self.topdir, "work/src/Server/extra-iso-extra-files/.treeinfo",
+ self.topdir,
+ "work/src/Server/extra-iso-extra-files/.treeinfo",
),
),
],
@@ -1127,3 +1127,215 @@ class PrepareMetadataTest(helpers.PungiTestCase):
),
],
)
+
+
+class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
+ def setUp(self):
+ super(ExtraisoTryReusePhaseTest, self).setUp()
+ self.logger = logging.getLogger()
+ self.logger.setLevel(logging.DEBUG)
+ self.logger.addHandler(logging.FileHandler(os.devnull))  # discard log output
+
+ def test_disabled(self):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": False})
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ def test_buildinstall_changed(self):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+ thread.bi = mock.Mock()
+ thread.bi.reused.return_value = False
+ opts = CreateIsoOpts(buildinstall_method="lorax")
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ def test_no_old_config(self):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ def test_old_config_changed(self):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ old_config = compose.conf.copy()
+ old_config["release_version"] = "2"
+ compose.load_old_compose_config.return_value = old_config
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ def test_no_old_metadata(self):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+ opts = CreateIsoOpts()
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ @mock.patch("pungi.phases.extra_isos.read_json_file")
+ def test_volume_id_differs(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+
+ opts = CreateIsoOpts(volid="new-volid")
+
+ read_json_file.return_value = {"opts": {"volid": "old-volid"}}
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ @mock.patch("pungi.phases.extra_isos.read_json_file")
+ def test_packages_differ(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+
+ new_graft_points = os.path.join(self.topdir, "new_graft_points")
+ helpers.touch(new_graft_points, "Packages/f/foo-1-1.x86_64.rpm\n")
+ opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+ old_graft_points = os.path.join(self.topdir, "old_graft_points")
+ helpers.touch(old_graft_points, "Packages/f/foo-1-2.x86_64.rpm\n")
+ read_json_file.return_value = {
+ "opts": {"graft_points": old_graft_points, "volid": "volid"}
+ }
+
+ self.assertFalse(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+
+ @mock.patch("pungi.phases.extra_isos.read_json_file")
+ def test_runs_perform_reuse(self, read_json_file):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ compose.load_old_compose_config.return_value = compose.conf.copy()
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ thread.logger = self.logger
+ thread.perform_reuse = mock.Mock()
+
+ new_graft_points = os.path.join(self.topdir, "new_graft_points")
+ helpers.touch(new_graft_points)
+ opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+ old_graft_points = os.path.join(self.topdir, "old_graft_points")
+ helpers.touch(old_graft_points)
+ dummy_iso_path = "dummy-iso-path/dummy.iso"
+ read_json_file.return_value = {
+ "opts": {
+ "graft_points": old_graft_points,
+ "volid": "volid",
+ "output_dir": os.path.dirname(dummy_iso_path),
+ "iso_name": os.path.basename(dummy_iso_path),
+ },
+ }
+
+ self.assertTrue(
+ thread.try_reuse(
+ compose, compose.variants["Server"], "x86_64", "abcdef", opts
+ )
+ )
+ self.assertEqual(
+ thread.perform_reuse.call_args_list,
+ [
+ mock.call(
+ compose,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ "dummy-iso-path",
+ "dummy.iso",
+ )
+ ],
+ )
+
+
+@mock.patch("pungi.phases.extra_isos.OldFileLinker")
+class ExtraIsoPerformReusePhaseTest(helpers.PungiTestCase):
+ def test_success(self, OldFileLinker):
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ opts = CreateIsoOpts(output_dir="new/path", iso_name="new.iso")
+
+ thread.perform_reuse(
+ compose,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ "old",
+ "image.iso",
+ )
+
+ self.assertEqual(
+ OldFileLinker.return_value.mock_calls,
+ [
+ mock.call.link("old/image.iso", "new/path/new.iso"),
+ mock.call.link("old/image.iso.manifest", "new/path/new.iso.manifest"),
+ # The old log file doesn't exist in the test scenario.
+ mock.call.link(
+ None,
+ os.path.join(
+ self.topdir, "logs/x86_64/extraiso-new.iso.x86_64.log"
+ ),
+ ),
+ ],
+ )
+
+ def test_failure(self, OldFileLinker):
+ OldFileLinker.return_value.link.side_effect = helpers.mk_boom()
+ compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+ thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+ opts = CreateIsoOpts(output_dir="new/path", iso_name="new.iso")
+
+ with self.assertRaises(Exception):
+ thread.perform_reuse(
+ compose,
+ compose.variants["Server"],
+ "x86_64",
+ opts,
+ "old",
+ "image.iso",
+ )
+
+ self.assertEqual(
+ OldFileLinker.return_value.mock_calls,
+ [
+ mock.call.link("old/image.iso", "new/path/new.iso"),
+ mock.call.abort(),
+ ],
+ )
diff --git a/tests/test_fus_wrapper.py b/tests/test_fus_wrapper.py
index 5f17a1ad..bfc2696b 100644
--- a/tests/test_fus_wrapper.py
+++ b/tests/test_fus_wrapper.py
@@ -147,7 +147,8 @@ class TestParseOutput(unittest.TestCase):
touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n")
packages, modules = fus.parse_output(self.file)
self.assertEqual(
- packages, set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
+ packages,
+ set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
)
self.assertEqual(modules, set())
diff --git a/tests/test_gather.py b/tests/test_gather.py
index 38bc2300..3062c470 100644
--- a/tests/test_gather.py
+++ b/tests/test_gather.py
@@ -2620,5 +2620,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
six.assertCountEqual(self, pkg_map["rpm"], [])
six.assertCountEqual(self, pkg_map["srpm"], [])
six.assertCountEqual(
- self, pkg_map["debuginfo"], ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
+ self,
+ pkg_map["debuginfo"],
+ ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
)
diff --git a/tests/test_gather_method_hybrid.py b/tests/test_gather_method_hybrid.py
index 80db2913..b053f229 100644
--- a/tests/test_gather_method_hybrid.py
+++ b/tests/test_gather_method_hybrid.py
@@ -350,7 +350,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
],
)
self.assertEqual(
- wc.call_args_list, [mock.call(self.config1, ["mod:master"], [])],
+ wc.call_args_list,
+ [mock.call(self.config1, ["mod:master"], [])],
)
self.assertEqual(
gc.call_args_list,
@@ -390,7 +391,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
),
}
po.return_value = ([("p-1-1", "x86_64", frozenset())], ["m1"])
- self.phase.packages = {"p-1-1.x86_64": mock.Mock()}
+ self.phase.packages = {"p-1-1.x86_64": mock.Mock(rpm_sourcerpm="p-1-1.src.rpm")}
res = self.phase.run_solver(
self.compose.variants["Server"],
@@ -430,7 +431,9 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
)
def test_with_comps(self, run, gc, po, wc):
- self.phase.packages = {"pkg-1.0-1.x86_64": mock.Mock()}
+ self.phase.packages = {
+ "pkg-1.0-1.x86_64": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm")
+ }
self.phase.debuginfo = {"x86_64": {}}
po.return_value = ([("pkg-1.0-1", "x86_64", frozenset())], [])
res = self.phase.run_solver(
@@ -454,7 +457,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
],
)
self.assertEqual(
- wc.call_args_list, [mock.call(self.config1, [], ["pkg"])],
+ wc.call_args_list,
+ [mock.call(self.config1, [], ["pkg"])],
)
self.assertEqual(
gc.call_args_list,
@@ -471,11 +475,23 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
)
def test_with_comps_with_debuginfo(self, run, gc, po, wc):
- dbg1 = NamedMock(name="pkg-debuginfo", arch="x86_64", sourcerpm="pkg.src.rpm")
- dbg2 = NamedMock(name="pkg-debuginfo", arch="x86_64", sourcerpm="x.src.rpm")
+ # dbg1 and dbg2 carry both attributes, so each can mock a package from
+ # Kobo (sourcerpm) as well as one from createrepo_c (rpm_sourcerpm).
+ dbg1 = NamedMock(
+ name="pkg-debuginfo",
+ arch="x86_64",
+ sourcerpm="pkg-1.0-1.src.rpm",
+ rpm_sourcerpm="pkg-1.0-1.src.rpm",
+ )
+ dbg2 = NamedMock(
+ name="pkg-debuginfo",
+ arch="x86_64",
+ sourcerpm="pkg-1.0-2.src.rpm",
+ rpm_sourcerpm="pkg-1.0-2.src.rpm",
+ )
self.phase.packages = {
"pkg-1.0-1.x86_64": NamedMock(
- name="pkg", arch="x86_64", rpm_sourcerpm="pkg.src.rpm"
+ name="pkg", arch="x86_64", rpm_sourcerpm="pkg-1.0-1.src.rpm"
),
"pkg-debuginfo-1.0-1.x86_64": dbg1,
"pkg-debuginfo-1.0-2.x86_64": dbg2,
@@ -556,8 +572,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
]
po.side_effect = [([("pkg-1.0-1", "x86_64", frozenset())], []), (final, [])]
self.phase.packages = {
- "pkg-1.0-1.x86_64": mock.Mock(),
- "pkg-en-1.0-1.noarch": mock.Mock(),
+ "pkg-1.0-1.x86_64": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm"),
+ "pkg-en-1.0-1.noarch": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm"),
}
res = self.phase.run_solver(
@@ -626,9 +642,15 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
cr.Metadata.return_value.keys.return_value = []
self.phase.package_maps = {
"x86_64": {
- "pkg-devel-1.0-1.x86_64": NamedMock(name="pkg-devel"),
- "pkg-devel-1.0-1.i686": NamedMock(name="pkg-devel"),
- "foo-1.0-1.x86_64": NamedMock(name="foo"),
+ "pkg-devel-1.0-1.x86_64": NamedMock(
+ name="pkg-devel", rpm_sourcerpm="pkg-1.0-1.src.rpm"
+ ),
+ "pkg-devel-1.0-1.i686": NamedMock(
+ name="pkg-devel", rpm_sourcerpm="pkg-1.0-1.src.rpm"
+ ),
+ "foo-1.0-1.x86_64": NamedMock(
+ name="foo", rpm_sourcerpm="foo-1.0-1.src.rpm"
+ ),
}
}
self.phase.packages = self.phase.package_maps["x86_64"]
@@ -716,6 +738,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
release="1",
arch="x86_64",
provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+ rpm_sourcerpm="foo-1.0-1.src.rpm",
),
"def": NamedMock(
name="foo",
@@ -724,6 +747,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
release="1",
arch="i686",
provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+ rpm_sourcerpm="foo-1.0-1.src.rpm",
),
"ghi": NamedMock(
name="pkg-devel",
@@ -732,6 +756,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
release="1",
arch="x86_64",
provides=[],
+ rpm_sourcerpm="pkg-devel-1.0-1.src.rpm",
),
}
cr.Metadata.return_value.keys.return_value = packages.keys()
@@ -932,20 +957,11 @@ class TestExpandPackages(helpers.PungiTestCase):
},
)
- @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
- def test_skip_lookaside_source(self, cr):
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.get_repo_packages")
+ def test_skip_lookaside_source(self, get_repo_packages):
nevra_to_pkg = self._mk_packages(src=True)
lookasides = [mock.Mock()]
- repo = {
- "abc": NamedMock(
- name="pkg",
- arch="src",
- location_base="file:///tmp/",
- location_href="pkg.src.rpm",
- ),
- }
- cr.Metadata.return_value.keys.return_value = repo.keys()
- cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+ get_repo_packages.return_value = ["pkg.src.rpm"]
res = hybrid.expand_packages(
nevra_to_pkg, lookasides, [("pkg-3:1-2", "x86_64", [])], []
@@ -960,20 +976,11 @@ class TestExpandPackages(helpers.PungiTestCase):
},
)
- @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
- def test_skip_lookaside_packages(self, cr):
+ @mock.patch("pungi.phases.gather.methods.method_hybrid.get_repo_packages")
+ def test_skip_lookaside_packages(self, get_repo_packages):
nevra_to_pkg = self._mk_packages(debug_arch="x86_64")
lookasides = [mock.Mock()]
- repo = {
- "abc": NamedMock(
- name="pkg",
- arch="x86_64",
- location_base="file:///tmp/",
- location_href="pkg.rpm",
- )
- }
- cr.Metadata.return_value.keys.return_value = repo.keys()
- cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+ get_repo_packages.return_value = ["pkg.rpm"]
res = hybrid.expand_packages(
nevra_to_pkg, lookasides, [("pkg-3:1-2", "x86_64", [])], []
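# Aside: the two lookaside tests above changed because the hybrid gather
# method now calls a get_repo_packages helper instead of driving
# createrepo_c's Metadata API inline. Conceptually the helper just lists the
# package file names recorded in a repo's metadata; a sketch of that idea,
# assuming the createrepo_c Python bindings:

import os

import createrepo_c as cr


def get_repo_packages_sketch(path):
    """Return the file names of all packages in the repodata at `path`."""
    md = cr.Metadata()
    md.locate_and_load_xml(path)
    return [os.path.basename(md.get(key).location_href) for key in md.keys()]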
diff --git a/tests/test_gather_modules.py b/tests/test_gather_modules.py
index 427285d1..67cbaa7a 100644
--- a/tests/test_gather_modules.py
+++ b/tests/test_gather_modules.py
@@ -110,13 +110,13 @@ class TestModulesYamlParser(TestCase):
os.listdir(os.path.join(PATH_TO_KOJI, 'module_defaults')))
# check that modules were exported
- self.assertEqual(MARIADB_MODULE, yaml.load(
+ self.assertEqual(MARIADB_MODULE, yaml.safe_load(
open(os.path.join(PATH_TO_KOJI, 'modules/x86_64', 'mariadb-devel-10.3_1-8010020200108182321.cdc1202b'))))
- self.assertEqual(JAVAPACKAGES_TOOLS_MODULE, yaml.load(
+ self.assertEqual(JAVAPACKAGES_TOOLS_MODULE, yaml.safe_load(
open(os.path.join(PATH_TO_KOJI, 'modules/x86_64', 'javapackages-tools-201801-8000020190628172923.b07bea58'))))
# check that defaults were copied
- self.assertEqual(ANT_DEFAULTS, yaml.load(
+ self.assertEqual(ANT_DEFAULTS, yaml.safe_load(
open(os.path.join(PATH_TO_KOJI, 'module_defaults', 'ant.yaml'))))
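# Aside: swapping yaml.load for yaml.safe_load above is the usual hardening
# fix. Plain load() can construct arbitrary Python objects from tagged YAML
# input, and recent PyYAML versions warn when it is called without an
# explicit Loader; safe_load() restricts parsing to plain data types, which
# is all these fixtures need. For example:

import yaml

assert yaml.safe_load("data: {name: ant, stream: '1.10'}") == {
    "data": {"name": "ant", "stream": "1.10"}
}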
diff --git a/tests/test_gather_phase.py b/tests/test_gather_phase.py
index e120fea2..a5d59730 100644
--- a/tests/test_gather_phase.py
+++ b/tests/test_gather_phase.py
@@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
import copy
-import mock
+import json
import os
+import mock
+
try:
import unittest2 as unittest
except ImportError:
@@ -12,8 +14,8 @@ except ImportError:
import six
from pungi.phases import gather
-from pungi.phases.pkgset.common import MaterializedPackageSet
from pungi.phases.gather import _mk_pkg_map
+from pungi.phases.pkgset.common import MaterializedPackageSet
from tests import helpers
from tests.helpers import MockPackageSet, MockPkg
@@ -1080,21 +1082,24 @@ class TestGatherPackages(helpers.PungiTestCase):
class TestReuseOldGatherPackages(helpers.PungiTestCase):
+ def _save_config_dump(self, compose):
+ config_dump_full = compose.paths.log.log_file("global", "config-dump")
+ with open(config_dump_full, "w") as f:
+ json.dump(compose.conf, f, sort_keys=True, indent=4)
+
@mock.patch("pungi.phases.gather.load_old_gather_result")
def test_reuse_no_old_gather_result(self, load_old_gather_result):
load_old_gather_result.return_value = None
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+ self._save_config_dump(compose)
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], []
+ compose, "x86_64", compose.variants["Server"], [], "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_no_old_compose_config(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_no_old_compose_config(self, load_old_gather_result):
load_old_gather_result.return_value = {
"rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
"srpm": [],
@@ -1102,18 +1107,15 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = None
+ self._save_config_dump(compose)
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], []
+ compose, "x86_64", compose.variants["Server"], [], "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_compose_config_different(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_compose_config_different(self, load_old_gather_result):
load_old_gather_result.return_value = {
"rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
"srpm": [],
@@ -1121,20 +1123,18 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+ self._save_config_dump(compose)
compose_conf_copy = dict(compose.conf)
compose_conf_copy["gather_method"] = "nodeps"
- load_old_compose_config.return_value = compose_conf_copy
+ compose.load_old_compose_config.return_value = compose_conf_copy
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], []
+ compose, "x86_64", compose.variants["Server"], [], "nodeps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_compose_config_different_whitelist(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_compose_config_different_whitelist(self, load_old_gather_result):
for whitelist_opt in ["product_id", "pkgset_koji_builds"]:
load_old_gather_result.return_value = {
"rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
@@ -1143,12 +1143,13 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
}
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+ self._save_config_dump(compose)
compose_conf_copy = dict(compose.conf)
compose_conf_copy[whitelist_opt] = "different"
- load_old_compose_config.return_value = compose_conf_copy
+ compose.load_old_compose_config.return_value = compose_conf_copy
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], []
+ compose, "x86_64", compose.variants["Server"], [], "deps"
)
self.assertEqual(result, {"rpm": [], "srpm": [], "debuginfo": []})
@@ -1173,16 +1174,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
return package_sets
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse(self, load_old_compose_config, load_old_gather_result):
+ def test_reuse(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(
result,
@@ -1194,19 +1195,17 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_update_gather_lookaside_repos(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_update_gather_lookaside_repos(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
gather._update_config(compose, "Server", "x86_64", compose.topdir)
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(
result,
@@ -1218,49 +1217,46 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
def test_reuse_update_gather_lookaside_repos_different_initial_repos(
- self, load_old_compose_config, load_old_gather_result
+ self, load_old_gather_result
):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+ self._save_config_dump(compose)
lookasides = compose.conf["gather_lookaside_repos"]
lookasides.append(("^Server$", {"x86_64": "http://localhost/real.repo"}))
- load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+ compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
gather._update_config(compose, "Server", "x86_64", compose.topdir)
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
def test_reuse_update_gather_lookaside_repos_different_initial_repos_list(
- self, load_old_compose_config, load_old_gather_result
+ self, load_old_gather_result
):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+ self._save_config_dump(compose)
lookasides = compose.conf["gather_lookaside_repos"]
repos = ["http://localhost/real1.repo", "http://localhost/real2.repo"]
lookasides.append(("^Server$", {"x86_64": repos}))
- load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+ compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
gather._update_config(compose, "Server", "x86_64", compose.topdir)
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_no_old_file_cache(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_no_old_file_cache(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
@@ -1268,18 +1264,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
"/build/foo-1-1.x86_64.rpm": MockPkg("foo-1-1.x86_64.rpm", sourcerpm="foo")
}
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_two_rpms_from_same_source(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_two_rpms_from_same_source(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
@@ -1290,18 +1284,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
pkg_set.old_file_cache["/build/bash-1-2.x86_64.rpm"] = bash_pkg
pkg_set.file_cache["/build/bash-1-2.x86_64.rpm"] = bash_pkg
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_rpm_added_removed(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_rpm_added_removed(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=[]
)
@@ -1315,59 +1307,54 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
pkg_set.old_file_cache["/build/file-1-1.x86_64.rpm"] = file_pkg
pkg_set.file_cache["/build/foo-1-1.x86_64.rpm"] = foo_pkg
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_different_packages(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_different_packages(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=["foo"]
)
package_sets[0]["global"].old_file_cache = None
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_requires_changed(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_requires_changed(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=["foo"], provides=[]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@mock.patch("pungi.phases.gather.load_old_gather_result")
- @mock.patch("pungi.phases.gather.load_old_compose_config")
- def test_reuse_provides_changed(
- self, load_old_compose_config, load_old_gather_result
- ):
+ def test_reuse_provides_changed(self, load_old_gather_result):
package_sets = self._prepare_package_sets(
load_old_gather_result, requires=[], provides=["foo"]
)
compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
- load_old_compose_config.return_value = compose.conf
+ self._save_config_dump(compose)
+ compose.load_old_compose_config.return_value = compose.conf
result = gather.reuse_old_gather_packages(
- compose, "x86_64", compose.variants["Server"], package_sets
+ compose, "x86_64", compose.variants["Server"], package_sets, "deps"
)
self.assertEqual(result, None)
@@ -1561,6 +1548,24 @@ class TestGatherPhase(helpers.PungiTestCase):
phase = gather.GatherPhase(compose, pkgset_phase)
phase.validate()
+ def test_validates_variants_requiring_is_not_subset_of_required(self):
+ pkgset_phase = mock.Mock()
+ compose = helpers.DummyCompose(
+ self.topdir, {"variant_as_lookaside": [("Everything", "Client")]}
+ )
+ phase = gather.GatherPhase(compose, pkgset_phase)
+ with self.assertRaises(ValueError) as ctx:
+ phase.validate()
+ self.assertIn("architectures of variant 'Client'", str(ctx.exception))
+
+ def test_validates_variants_requiring_is_subset_of_required(self):
+ pkgset_phase = mock.Mock()
+ compose = helpers.DummyCompose(
+ self.topdir, {"variant_as_lookaside": [("Client", "Everything")]}
+ )
+ phase = gather.GatherPhase(compose, pkgset_phase)
+ phase.validate()
+
class TestGetPackagesToGather(helpers.PungiTestCase):
def setUp(self):
diff --git a/tests/test_image_container_phase.py b/tests/test_image_container_phase.py
new file mode 100644
index 00000000..2bb99c7b
--- /dev/null
+++ b/tests/test_image_container_phase.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+
+import mock
+
+import os
+
+from tests import helpers
+from pungi import checks
+from pungi.phases import image_container
+
+
+class ImageContainerPhaseTest(helpers.PungiTestCase):
+ @mock.patch("pungi.phases.image_container.ThreadPool")
+ def test_run(self, ThreadPool):
+ cfg = helpers.IterableMock()
+ compose = helpers.DummyCompose(
+ self.topdir, {"image_container": {"^Everything$": cfg}}
+ )
+
+ pool = ThreadPool.return_value
+
+ phase = image_container.ImageContainerPhase(compose)
+ phase.run()
+
+ self.assertEqual(len(pool.add.call_args_list), 1)
+ self.assertEqual(
+ pool.queue_put.call_args_list,
+ [mock.call((compose, compose.variants["Everything"], cfg))],
+ )
+
+ @mock.patch("pungi.phases.image_container.ThreadPool")
+ def test_skip_without_config(self, ThreadPool):
+ compose = helpers.DummyCompose(self.topdir, {})
+ compose.just_phases = None
+ compose.skip_phases = []
+ phase = image_container.ImageContainerPhase(compose)
+ self.assertTrue(phase.skip())
+
+
+class ImageContainerConfigTest(helpers.PungiTestCase):
+ def assertConfigMissing(self, cfg, key):
+ conf = helpers.load_config(
+ helpers.PKGSET_REPOS, **{"image_container": {"^Server$": cfg}}
+ )
+ errors, warnings = checks.validate(conf, offline=True)
+ self.assertIn(
+ "Failed validation in image_container.^Server$: %r is not valid under any of the given schemas" # noqa: E501
+ % cfg,
+ errors,
+ )
+ self.assertIn(" Possible reason: %r is a required property" % key, errors)
+ self.assertEqual([], warnings)
+
+ def test_correct(self):
+ conf = helpers.load_config(
+ helpers.PKGSET_REPOS,
+ **{
+ "image_container": {
+ "^Server$": [
+ {
+ "url": "http://example.com/repo.git#HEAD",
+ "target": "container-candidate",
+ "git_branch": "main",
+ "image_spec": {"type": "qcow2"},
+ }
+ ]
+ }
+ }
+ )
+ errors, warnings = checks.validate(conf, offline=True)
+ self.assertEqual([], errors)
+ self.assertEqual([], warnings)
+
+ def test_missing_url(self):
+ self.assertConfigMissing(
+ {
+ "target": "container-candidate",
+ "git_branch": "main",
+ "image_spec": {"type": "qcow2"},
+ },
+ "url",
+ )
+
+ def test_missing_target(self):
+ self.assertConfigMissing(
+ {
+ "url": "http://example.com/repo.git#HEAD",
+ "git_branch": "main",
+ "image_spec": {"type": "qcow2"},
+ },
+ "target",
+ )
+
+ def test_missing_git_branch(self):
+ self.assertConfigMissing(
+ {
+ "url": "http://example.com/repo.git#HEAD",
+ "target": "container-candidate",
+ "image_spec": {"type": "qcow2"},
+ },
+ "git_branch",
+ )
+
+ def test_missing_image_spec(self):
+ self.assertConfigMissing(
+ {
+ "url": "http://example.com/repo.git#HEAD",
+ "target": "container-candidate",
+ "git_branch": "main",
+ },
+ "image_spec",
+ )
+
+
+class ImageContainerThreadTest(helpers.PungiTestCase):
+ def setUp(self):
+ super(ImageContainerThreadTest, self).setUp()
+ self.pool = mock.Mock()
+ self.repofile_path = "work/global/tmp-Server/image-container-Server-1.repo"
+ self.t = image_container.ImageContainerThread(self.pool)
+ self.compose = helpers.DummyCompose(
+ self.topdir,
+ {
+ "koji_profile": "koji",
+ "translate_paths": [(self.topdir, "http://root")],
+ },
+ )
+ self.cfg = {
+ "url": "git://example.com/repo?#BEEFCAFE",
+ "target": "f24-docker-candidate",
+ "git_branch": "f24-docker",
+ "image_spec": {"type": "qcow2"},
+ }
+ self.compose.im.images["Server"] = {
+ "x86_64": [
+ mock.Mock(path="Server/x86_64/iso/image.iso", type="iso"),
+ mock.Mock(path="Server/x86_64/images/image.qcow2", type="qcow2"),
+ ]
+ }
+
+ def _setupMock(self, KojiWrapper):
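+ # Pretend Koji created container task 12345 and that watching it succeeded.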
+ self.wrapper = KojiWrapper.return_value
+ self.wrapper.koji_proxy.buildContainer.return_value = 12345
+ self.wrapper.watch_task.return_value = 0
+
+ def assertRepoFile(self):
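+ # The thread should write a disabled repo file whose baseurl points at the qcow2 image.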
+ repofile = os.path.join(self.topdir, self.repofile_path)
+ with open(repofile) as f:
+ repo_content = list(f)
+ self.assertIn("[image-to-include]\n", repo_content)
+ self.assertIn(
+ "baseurl=http://root/compose/Server/$basearch/images/image.qcow2\n",
+ repo_content,
+ )
+ self.assertIn("enabled=0\n", repo_content)
+
+ def assertKojiCalls(self, cfg, scratch=False):
+ opts = {
+ "git_branch": cfg["git_branch"],
+ "yum_repourls": ["http://root/" + self.repofile_path],
+ }
+ if scratch:
+ opts["scratch"] = True
+ self.assertEqual(
+ self.wrapper.mock_calls,
+ [
+ mock.call.login(),
+ mock.call.koji_proxy.buildContainer(
+ cfg["url"],
+ cfg["target"],
+ opts,
+ priority=None,
+ ),
+ mock.call.save_task_id(12345),
+ mock.call.watch_task(
+ 12345,
+ os.path.join(
+ self.topdir,
+ "logs/global/image_container/Server-1-watch-task.log",
+ ),
+ ),
+ ],
+ )
+
+ @mock.patch("pungi.phases.image_container.add_metadata")
+ @mock.patch("pungi.phases.image_container.kojiwrapper.KojiWrapper")
+ def test_success(self, KojiWrapper, add_metadata):
+ self._setupMock(KojiWrapper)
+
+ self.t.process(
+ (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
+ )
+
+ self.assertRepoFile()
+ self.assertKojiCalls(self.cfg)
+ self.assertEqual(
+ add_metadata.call_args_list,
+ [mock.call(self.compose.variants["Server"], 12345, self.compose, False)],
+ )
+
+ @mock.patch("pungi.phases.image_container.add_metadata")
+ @mock.patch("pungi.phases.image_container.kojiwrapper.KojiWrapper")
+ def test_scratch_build(self, KojiWrapper, add_metadata):
+ self.cfg["scratch"] = True
+ self._setupMock(KojiWrapper)
+
+ self.t.process(
+ (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
+ )
+
+ self.assertRepoFile()
+ self.assertKojiCalls(self.cfg, scratch=True)
+ self.assertEqual(
+ add_metadata.call_args_list,
+ [mock.call(self.compose.variants["Server"], 12345, self.compose, True)],
+ )
+
+ @mock.patch("pungi.phases.image_container.add_metadata")
+ @mock.patch("pungi.phases.image_container.kojiwrapper.KojiWrapper")
+ def test_task_fail(self, KojiWrapper, add_metadata):
+ self._setupMock(KojiWrapper)
+ self.wrapper.watch_task.return_value = 1
+
+ with self.assertRaises(RuntimeError) as ctx:
+ self.t.process(
+ (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
+ )
+
+ self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")
+ self.assertRepoFile()
+ self.assertKojiCalls(self.cfg)
+ self.assertEqual(add_metadata.call_args_list, [])
+
+ @mock.patch("pungi.phases.image_container.add_metadata")
+ @mock.patch("pungi.phases.image_container.kojiwrapper.KojiWrapper")
+ def test_task_fail_failable(self, KojiWrapper, add_metadata):
+ self.cfg["failable"] = "*"
+ self._setupMock(KojiWrapper)
+ self.wrapper.watch_task.return_value = 1
+
+ self.t.process(
+ (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
+ )
+
+ self.assertRepoFile()
+ self.assertKojiCalls(self.cfg)
+ self.assertEqual(add_metadata.call_args_list, [])
+
+ @mock.patch("pungi.phases.image_container.add_metadata")
+ @mock.patch("pungi.phases.image_container.kojiwrapper.KojiWrapper")
+ def test_non_unique_spec(self, KojiWrapper, add_metadata):
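+ # A path pattern that matches both images from setUp must be rejected.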
+ self.cfg["image_spec"] = {"path": ".*/image\\..*"}
+ self._setupMock(KojiWrapper)
+
+ with self.assertRaises(RuntimeError) as ctx:
+ self.t.process(
+ (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
+ )
+
+ self.assertRegex(
+ str(ctx.exception), "2 images matched specification. Only one was expected."
+ )
+ self.assertEqual(self.wrapper.mock_calls, [])
+ self.assertEqual(add_metadata.call_args_list, [])
diff --git a/tests/test_imagebuildphase.py b/tests/test_imagebuildphase.py
index 711e0f7d..c0f5ac7d 100644
--- a/tests/test_imagebuildphase.py
+++ b/tests/test_imagebuildphase.py
@@ -17,26 +17,23 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": [("docker", "tar.xz")],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "failable": ["x86_64"],
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Client|Server$": [
- {
- "image-build": {
- "format": [("docker", "tar.xz")],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "failable": ["x86_64"],
- }
- }
- ]
- },
+ "image_build": {"^Client|Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -50,6 +47,7 @@ class TestImageBuildPhase(PungiTestCase):
# assert at least one thread was started
self.assertTrue(phase.pool.add.called)
client_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Client/$arch/os",
@@ -75,6 +73,7 @@ class TestImageBuildPhase(PungiTestCase):
"scratch": False,
}
server_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -102,11 +101,23 @@ class TestImageBuildPhase(PungiTestCase):
six.assertCountEqual(
self,
phase.pool.queue_put.mock_calls,
- [mock.call((compose, client_args)), mock.call((compose, server_args))],
+ [
+ mock.call((compose, client_args, phase.buildinstall_phase)),
+ mock.call((compose, server_args, phase.buildinstall_phase)),
+ ],
)
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_phase_global_options(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ }
+ }
compose = DummyCompose(
self.topdir,
{
@@ -114,19 +125,7 @@ class TestImageBuildPhase(PungiTestCase):
"image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
"image_build_target": "f24",
"image_build_version": "Rawhide",
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -140,6 +139,7 @@ class TestImageBuildPhase(PungiTestCase):
# assert at least one thread was started
self.assertTrue(phase.pool.add.called)
server_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -165,30 +165,28 @@ class TestImageBuildPhase(PungiTestCase):
"scratch": False,
}
self.assertEqual(
- phase.pool.queue_put.mock_calls, [mock.call((compose, server_args))]
+ phase.pool.queue_put.mock_calls,
+ [mock.call((compose, server_args, phase.buildinstall_phase))],
)
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_phase_missing_version(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": "docker",
+ "name": "Fedora-Docker-Base",
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ }
+ }
compose = DummyCompose(
self.topdir,
{
"image_build_ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
"image_build_release": "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN",
"image_build_target": "f24",
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": "docker",
- "name": "Fedora-Docker-Base",
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -200,6 +198,7 @@ class TestImageBuildPhase(PungiTestCase):
# assert at least one thread was started
self.assertTrue(phase.pool.add.called)
server_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -225,7 +224,8 @@ class TestImageBuildPhase(PungiTestCase):
"scratch": False,
}
self.assertEqual(
- phase.pool.queue_put.mock_calls, [mock.call((compose, server_args))]
+ phase.pool.queue_put.mock_calls,
+ [mock.call((compose, server_args, phase.buildinstall_phase))],
)
@mock.patch("pungi.phases.image_build.ThreadPool")
@@ -266,27 +266,25 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_set_install_tree(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "arches": ["x86_64"],
+ "install_tree_from": "Server-optional",
+ }
+ }
+
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "arches": ["x86_64"],
- "install_tree_from": "Server-optional",
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -307,6 +305,7 @@ class TestImageBuildPhase(PungiTestCase):
self.assertDictEqual(
args[0][1],
{
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir
@@ -335,27 +334,24 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_set_install_tree_from_path(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "arches": ["x86_64"],
+ "install_tree_from": "/my/tree",
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "arches": ["x86_64"],
- "install_tree_from": "/my/tree",
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
"translate_paths": [("/my", "http://example.com")],
},
@@ -376,6 +372,7 @@ class TestImageBuildPhase(PungiTestCase):
self.assertDictEqual(
args[0][1],
{
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": "http://example.com/tree",
@@ -403,27 +400,24 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_set_extra_repos(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "arches": ["x86_64"],
+ "repo_from": ["Everything", "Server-optional"],
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "arches": ["x86_64"],
- "repo_from": ["Everything", "Server-optional"],
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -444,6 +438,7 @@ class TestImageBuildPhase(PungiTestCase):
self.assertDictEqual(
args[0][1],
{
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -477,27 +472,24 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_set_external_install_tree(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "arches": ["x86_64"],
+ "install_tree_from": "http://example.com/install-tree/",
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "arches": ["x86_64"],
- "install_tree_from": "http://example.com/install-tree/",
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -517,6 +509,7 @@ class TestImageBuildPhase(PungiTestCase):
self.assertDictEqual(
args[0][1],
{
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": "http://example.com/install-tree/",
@@ -670,26 +663,23 @@ class TestImageBuildPhase(PungiTestCase):
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_image_build_optional(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "failable": ["x86_64"],
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server-optional$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "failable": ["x86_64"],
- }
- }
- ]
- },
+ "image_build": {"^Server-optional$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -704,6 +694,7 @@ class TestImageBuildPhase(PungiTestCase):
# assert at least one thread was started
self.assertTrue(phase.pool.add.called)
server_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -729,31 +720,29 @@ class TestImageBuildPhase(PungiTestCase):
"scratch": False,
}
self.assertEqual(
- phase.pool.queue_put.mock_calls, [mock.call((compose, server_args))]
+ phase.pool.queue_put.mock_calls,
+ [mock.call((compose, server_args, phase.buildinstall_phase))],
)
@mock.patch("pungi.phases.image_build.ThreadPool")
def test_failable_star(self, ThreadPool):
+ original_image_conf = {
+ "image-build": {
+ "format": ["docker"],
+ "name": "Fedora-Docker-Base",
+ "target": "f24",
+ "version": "Rawhide",
+ "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
+ "kickstart": "fedora-docker-base.ks",
+ "distro": "Fedora-20",
+ "disk_size": 3,
+ "failable": ["*"],
+ }
+ }
compose = DummyCompose(
self.topdir,
{
- "image_build": {
- "^Server$": [
- {
- "image-build": {
- "format": ["docker"],
- "name": "Fedora-Docker-Base",
- "target": "f24",
- "version": "Rawhide",
- "ksurl": "git://git.fedorahosted.org/git/spin-kickstarts.git", # noqa: E501
- "kickstart": "fedora-docker-base.ks",
- "distro": "Fedora-20",
- "disk_size": 3,
- "failable": ["*"],
- }
- }
- ]
- },
+ "image_build": {"^Server$": [original_image_conf]},
"koji_profile": "koji",
},
)
@@ -768,6 +757,7 @@ class TestImageBuildPhase(PungiTestCase):
# assert at least one thread was started
self.assertTrue(phase.pool.add.called)
server_args = {
+ "original_image_conf": original_image_conf,
"image_conf": {
"image-build": {
"install_tree": self.topdir + "/compose/Server/$arch/os",
@@ -793,7 +783,8 @@ class TestImageBuildPhase(PungiTestCase):
"scratch": False,
}
self.assertEqual(
- phase.pool.queue_put.mock_calls, [mock.call((compose, server_args))]
+ phase.pool.queue_put.mock_calls,
+ [mock.call((compose, server_args, phase.buildinstall_phase))],
)
@@ -854,7 +845,7 @@ class TestCreateImageBuildThread(PungiTestCase):
t = CreateImageBuildThread(pool)
with mock.patch("time.sleep"):
- t.process((compose, cmd), 1)
+ t.process((compose, cmd, None), 1)
self.assertEqual(
koji_wrapper.get_image_build_cmd.call_args_list,
@@ -987,7 +978,7 @@ class TestCreateImageBuildThread(PungiTestCase):
t = CreateImageBuildThread(pool)
with mock.patch("time.sleep"):
- t.process((compose, cmd), 1)
+ t.process((compose, cmd, None), 1)
pool._logger.error.assert_has_calls(
[
@@ -1041,7 +1032,7 @@ class TestCreateImageBuildThread(PungiTestCase):
t = CreateImageBuildThread(pool)
with mock.patch("time.sleep"):
- t.process((compose, cmd), 1)
+ t.process((compose, cmd, None), 1)
pool._logger.error.assert_has_calls(
[
@@ -1092,4 +1083,4 @@ class TestCreateImageBuildThread(PungiTestCase):
t = CreateImageBuildThread(pool)
with self.assertRaises(RuntimeError):
with mock.patch("time.sleep"):
- t.process((compose, cmd), 1)
+ t.process((compose, cmd, None), 1)
diff --git a/tests/test_initphase.py b/tests/test_initphase.py
index afd2601e..2ddb82ca 100644
--- a/tests/test_initphase.py
+++ b/tests/test_initphase.py
@@ -25,6 +25,7 @@ from tests.helpers import (
@mock.patch("pungi.phases.init.run_in_threads", new=fake_run_in_threads)
@mock.patch("pungi.phases.init.validate_comps")
@mock.patch("pungi.phases.init.validate_module_defaults")
+@mock.patch("pungi.phases.init.write_module_obsoletes")
@mock.patch("pungi.phases.init.write_module_defaults")
@mock.patch("pungi.phases.init.write_global_comps")
@mock.patch("pungi.phases.init.write_arch_comps")
@@ -40,12 +41,14 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
+ write_obsoletes,
validate_defaults,
validate_comps,
):
compose = DummyCompose(self.topdir, {})
compose.has_comps = True
compose.has_module_defaults = False
+ compose.has_module_obsoletes = False
compose.setup_optional()
phase = init.InitPhase(compose)
phase.run()
@@ -84,6 +87,7 @@ class TestInitPhase(PungiTestCase):
],
)
self.assertEqual(write_defaults.call_args_list, [])
+ self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_run_with_preserve(
@@ -94,12 +98,14 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
+ write_obsoletes,
validate_defaults,
validate_comps,
):
compose = DummyCompose(self.topdir, {})
compose.has_comps = True
compose.has_module_defaults = False
+ compose.has_module_obsoletes = False
compose.variants["Everything"].groups = []
compose.variants["Everything"].modules = []
phase = init.InitPhase(compose)
@@ -140,6 +146,7 @@ class TestInitPhase(PungiTestCase):
],
)
self.assertEqual(write_defaults.call_args_list, [])
+ self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_run_without_comps(
@@ -150,12 +157,14 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
+ write_obsoletes,
validate_defaults,
validate_comps,
):
compose = DummyCompose(self.topdir, {})
compose.has_comps = False
compose.has_module_defaults = False
+ compose.has_module_obsoletes = False
phase = init.InitPhase(compose)
phase.run()
@@ -166,6 +175,7 @@ class TestInitPhase(PungiTestCase):
self.assertEqual(create_comps.mock_calls, [])
self.assertEqual(write_variant.mock_calls, [])
self.assertEqual(write_defaults.call_args_list, [])
+ self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(validate_defaults.call_args_list, [])
def test_with_module_defaults(
@@ -176,12 +186,14 @@ class TestInitPhase(PungiTestCase):
write_arch,
write_global,
write_defaults,
+ write_obsoletes,
validate_defaults,
validate_comps,
):
compose = DummyCompose(self.topdir, {})
compose.has_comps = False
compose.has_module_defaults = True
+ compose.has_module_obsoletes = False
phase = init.InitPhase(compose)
phase.run()
@@ -192,11 +204,41 @@ class TestInitPhase(PungiTestCase):
self.assertEqual(create_comps.mock_calls, [])
self.assertEqual(write_variant.mock_calls, [])
self.assertEqual(write_defaults.call_args_list, [mock.call(compose)])
+ self.assertEqual(write_obsoletes.call_args_list, [])
self.assertEqual(
validate_defaults.call_args_list,
[mock.call(compose.paths.work.module_defaults_dir())],
)
+ def test_with_module_obsoletes(
+ self,
+ write_prepopulate,
+ write_variant,
+ create_comps,
+ write_arch,
+ write_global,
+ write_defaults,
+ write_obsoletes,
+ validate_defaults,
+ validate_comps,
+ ):
+ compose = DummyCompose(self.topdir, {})
+ compose.has_comps = False
+ compose.has_module_defaults = False
+ compose.has_module_obsoletes = True
+ phase = init.InitPhase(compose)
+ phase.run()
+
+ self.assertEqual(write_global.mock_calls, [])
+ self.assertEqual(validate_comps.call_args_list, [])
+ self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])
+ self.assertEqual(write_arch.mock_calls, [])
+ self.assertEqual(create_comps.mock_calls, [])
+ self.assertEqual(write_variant.mock_calls, [])
+ self.assertEqual(write_defaults.call_args_list, [])
+ self.assertEqual(write_obsoletes.call_args_list, [mock.call(compose)])
+ self.assertEqual(validate_defaults.call_args_list, [])
+
class TestWriteArchComps(PungiTestCase):
@mock.patch("pungi.phases.init.run")
@@ -455,6 +497,45 @@ class TestWriteVariantComps(PungiTestCase):
)
self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
+ @mock.patch("pungi.phases.init.run")
+ @mock.patch("pungi.phases.init.CompsWrapper")
+ def test_run_filter_for_modular_koji_tags(self, CompsWrapper, run):
+ compose = DummyCompose(self.topdir, {})
+ variant = compose.variants["Server"]
+ variant.groups = []
+ variant.modular_koji_tags = ["f38-modular"]
+ comps = CompsWrapper.return_value
+ comps.filter_groups.return_value = []
+
+ init.write_variant_comps(compose, "x86_64", variant)
+
+ self.assertEqual(
+ run.mock_calls,
+ [
+ mock.call(
+ [
+ "comps_filter",
+ "--arch=x86_64",
+ "--keep-empty-group=conflicts",
+ "--keep-empty-group=conflicts-server",
+ "--variant=Server",
+ "--output=%s/work/x86_64/comps/comps-Server.x86_64.xml"
+ % self.topdir,
+ self.topdir + "/work/global/comps/comps-global.xml",
+ ]
+ )
+ ],
+ )
+ self.assertEqual(
+ CompsWrapper.call_args_list,
+ [mock.call(self.topdir + "/work/x86_64/comps/comps-Server.x86_64.xml")],
+ )
+ self.assertEqual(comps.filter_groups.call_args_list, [mock.call([])])
+ self.assertEqual(
+ comps.filter_environments.mock_calls, [mock.call(variant.environments)]
+ )
+ self.assertEqual(comps.write_comps.mock_calls, [mock.call()])
+
@mock.patch("pungi.phases.init.run")
@mock.patch("pungi.phases.init.CompsWrapper")
def test_run_report_unmatched(self, CompsWrapper, run):
@@ -661,7 +742,10 @@ class TestValidateModuleDefaults(PungiTestCase):
),
)
- init.validate_module_defaults(self.topdir)
+ with self.assertRaises(RuntimeError) as ctx:
+ init.validate_module_defaults(self.topdir)
+
+ self.assertIn("Defaults contains not valid default file", str(ctx.exception))
@mock.patch("pungi.phases.init.CompsWrapper")
diff --git a/tests/test_koji_wrapper.py b/tests/test_koji_wrapper.py
index d14fe02a..4b943596 100644
--- a/tests/test_koji_wrapper.py
+++ b/tests/test_koji_wrapper.py
@@ -10,6 +10,7 @@ except ImportError:
import tempfile
import os
+import shutil
import six
@@ -33,13 +34,17 @@ def mock_imagebuild_path(id):
class KojiWrapperBaseTestCase(unittest.TestCase):
def setUp(self):
_, self.tmpfile = tempfile.mkstemp()
- self.koji_profile = mock.Mock()
+ compose = mock.Mock(conf={"koji_profile": "custom-koji"})
+ self.tmpdir = tempfile.mkdtemp()
+ compose.paths.log.koji_tasks_dir.return_value = self.tmpdir
with mock.patch("pungi.wrappers.kojiwrapper.koji") as koji:
koji.gssapi_login = mock.Mock()
koji.get_profile_module = mock.Mock(
return_value=mock.Mock(
config=DumbMock(
- server="koji.example.com", authtype="kerberos", cert="",
+ server="koji.example.com",
+ authtype="kerberos",
+ cert="",
),
pathinfo=mock.Mock(
work=mock.Mock(return_value="/koji"),
@@ -49,10 +54,11 @@ class KojiWrapperBaseTestCase(unittest.TestCase):
)
)
self.koji_profile = koji.get_profile_module.return_value
- self.koji = KojiWrapper("custom-koji")
+ self.koji = KojiWrapper(compose)
def tearDown(self):
os.remove(self.tmpfile)
+ shutil.rmtree(self.tmpdir)
class KojiWrapperTest(KojiWrapperBaseTestCase):
@@ -527,7 +533,7 @@ class LiveImageKojiWrapperTest(KojiWrapperBaseTestCase):
class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
def test_get_cmd_minimal(self):
cmd = self.koji.get_runroot_cmd("tgt", "s390x", "date", use_shell=False)
- self.assertEqual(len(cmd), 9)
+ self.assertEqual(len(cmd), 8)
self.assertEqual(
cmd[:5],
["koji", "--profile=custom-koji", "runroot", "--nowait", "--task-id"],
@@ -537,7 +543,7 @@ class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
self.assertEqual(
cmd[-1], "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; date"
)
- six.assertCountEqual(self, cmd[5:-3], ["--channel-override=runroot-local"])
+ six.assertCountEqual(self, cmd[5:-3], [])
def test_get_cmd_full(self):
cmd = self.koji.get_runroot_cmd(
@@ -592,7 +598,7 @@ class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
self.assertEqual(cmd[-2], "s390x")
self.assertEqual(
cmd[-1],
- "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' && chmod -R a+r '/output dir' /foo && chown -R 1010 '/output dir' /foo", # noqa: E501
+ "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; /bin/echo '&' ; EXIT_CODE=$? ; chmod -R a+r '/output dir' /foo ; chown -R 1010 '/output dir' /foo ; exit $EXIT_CODE", # noqa: E501
)
six.assertCountEqual(
self,
@@ -662,6 +668,30 @@ class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
],
)
+ @mock.patch("pungi.wrappers.kojiwrapper.run")
+ def test_run_runroot_cmd_with_warnings_before_task_id(self, run):
+ cmd = ["koji", "runroot", "--task-id"]
+ run.return_value = (0, "DeprecationWarning: whatever\n1234\n")
+ output = "Output ..."
+ self.koji._wait_for_task = mock.Mock(return_value=(0, output))
+
+ result = self.koji.run_runroot_cmd(cmd)
+ self.assertDictEqual(result, {"retcode": 0, "output": output, "task_id": 1234})
+ self.assertEqual(
+ run.call_args_list,
+ [
+ mock.call(
+ cmd,
+ can_fail=True,
+ env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
+ buffer_size=-1,
+ logfile=None,
+ show_cmd=True,
+ universal_newlines=True,
+ )
+ ],
+ )
+
@mock.patch("shutil.rmtree")
@mock.patch("tempfile.mkdtemp")
@mock.patch("pungi.wrappers.kojiwrapper.run")
@@ -714,6 +744,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
"cmd",
can_fail=True,
logfile=None,
+ show_cmd=True,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
universal_newlines=True,
@@ -739,6 +770,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={
"KRB5CCNAME": "DIR:/tmp/foo",
@@ -766,6 +798,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile="logfile",
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -789,6 +822,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -812,6 +846,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -836,6 +871,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -865,6 +901,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -895,6 +932,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -938,6 +976,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -974,6 +1013,7 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
mock.call(
"cmd",
can_fail=True,
+ show_cmd=True,
logfile=None,
env={"FOO": "BAR", "PYTHONUNBUFFERED": "1"},
buffer_size=-1,
@@ -1007,7 +1047,6 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
"pungi-buildinstall",
"--nowait",
"--task-id",
- "--channel-override=runroot-local",
"--weight=123",
"--package=lorax",
"--mount=/tmp",
@@ -1036,7 +1075,6 @@ class RunBlockingCmdTest(KojiWrapperBaseTestCase):
"pungi-ostree",
"--nowait",
"--task-id",
- "--channel-override=runroot-local",
"--weight=123",
"--package=lorax",
"--mount=/tmp",
@@ -1161,7 +1199,7 @@ class TestGetBuildrootRPMs(unittest.TestCase):
rpms = get_buildroot_rpms(compose, 1234)
- self.assertEqual(KojiWrapper.call_args_list, [mock.call("koji")])
+ self.assertEqual(KojiWrapper.call_args_list, [mock.call(compose)])
self.assertEqual(
KojiWrapper.return_value.mock_calls,
[
diff --git a/tests/test_livemediaphase.py b/tests/test_livemediaphase.py
index 7a2b878f..7a406716 100644
--- a/tests/test_livemediaphase.py
+++ b/tests/test_livemediaphase.py
@@ -60,6 +60,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
)
@@ -116,6 +117,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": ["amd64", "x86_64"],
+ "nomacboot": False,
},
)
)
@@ -178,6 +180,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -201,6 +204,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -224,6 +228,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "25",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -286,6 +291,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -309,6 +315,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "Rawhide",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -332,6 +339,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "25",
"subvariant": "Server",
"failable_arches": [],
+ "nomacboot": False,
},
)
),
@@ -423,6 +431,7 @@ class TestLiveMediaPhase(PungiTestCase):
"install_tree_from": "Server-optional",
"subvariant": "Something",
"failable": ["*"],
+ "nomacboot": True,
}
]
}
@@ -436,6 +445,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase.run()
self.assertTrue(phase.pool.add.called)
+
self.assertEqual(
phase.pool.queue_put.call_args_list,
[
@@ -464,6 +474,7 @@ class TestLiveMediaPhase(PungiTestCase):
"version": "25",
"subvariant": "Something",
"failable_arches": ["x86_64"],
+ "nomacboot": True,
},
)
)
diff --git a/tests/test_module_util.py b/tests/test_module_util.py
new file mode 100644
index 00000000..0d083af0
--- /dev/null
+++ b/tests/test_module_util.py
@@ -0,0 +1,192 @@
+import os
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+from parameterized import parameterized
+from pungi import module_util
+from pungi.module_util import Modulemd
+
+from tests import helpers
+
+
+@unittest.skipUnless(Modulemd, "Skipped test, no module support.")
+class TestModuleUtil(helpers.PungiTestCase):
+ def _get_stream(self, mod_name, stream_name):
+ stream = Modulemd.ModuleStream.new(
+ Modulemd.ModuleStreamVersionEnum.TWO, mod_name, stream_name
+ )
+ stream.props.version = 42
+ stream.props.context = "deadbeef"
+ stream.props.arch = "x86_64"
+
+ return stream
+
+ def _write_obsoletes(self, defs):
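+ # Each entry is (module name, stream, (obsoleted-by name, obsoleted-by stream)).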
+ for mod_name, stream, obsoleted_by in defs:
+ mod_index = Modulemd.ModuleIndex.new()
+ mmdobs = Modulemd.Obsoletes.new(1, 10993435, mod_name, stream, "testmsg")
+ mmdobs.set_obsoleted_by(obsoleted_by[0], obsoleted_by[1])
+ mod_index.add_obsoletes(mmdobs)
+ filename = "%s:%s.yaml" % (mod_name, stream)
+ with open(os.path.join(self.topdir, filename), "w") as f:
+ f.write(mod_index.dump_to_string())
+
+ def _write_defaults(self, defs):
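+ # Write one YAML file per module/stream pair, marking that stream as the default.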
+ for mod_name, streams in defs.items():
+ for stream in streams:
+ mod_index = Modulemd.ModuleIndex.new()
+ mmddef = Modulemd.DefaultsV1.new(mod_name)
+ mmddef.set_default_stream(stream)
+ mod_index.add_defaults(mmddef)
+ filename = "%s-%s.yaml" % (mod_name, stream)
+ with open(os.path.join(self.topdir, filename), "w") as f:
+ f.write(mod_index.dump_to_string())
+
+ @parameterized.expand(
+ [
+ (
+ "MULTIPLE",
+ [
+ ("httpd", "1.22.1", ("httpd-new", "3.0")),
+ ("httpd", "10.4", ("httpd", "11.1.22")),
+ ],
+ ),
+ (
+ "NORMAL",
+ [
+ ("gdb", "2.8", ("gdb", "3.0")),
+ ("nginx", "12.7", ("nginx-nightly", "13.3")),
+ ],
+ ),
+ ]
+ )
+ def test_merged_module_obsoletes_idx(self, test_name, data):
+ self._write_obsoletes(data)
+
+ mod_index = module_util.get_module_obsoletes_idx(self.topdir, [])
+
+ if test_name == "MULTIPLE":
+ # Multiple obsoletes are allowed
+ mod = mod_index.get_module("httpd")
+ self.assertEqual(len(mod.get_obsoletes()), 2)
+ else:
+ mod = mod_index.get_module("gdb")
+ self.assertEqual(len(mod.get_obsoletes()), 1)
+ mod_obsolete = mod.get_obsoletes()
+ self.assertIsNotNone(mod_obsolete)
+ self.assertEqual(mod_obsolete[0].get_obsoleted_by_module_stream(), "3.0")
+
+ def test_collect_module_defaults_with_index(self):
+ stream = self._get_stream("httpd", "1")
+ mod_index = Modulemd.ModuleIndex()
+ mod_index.add_module_stream(stream)
+
+ defaults_data = {"httpd": ["1.44.2"], "python": ["3.6", "3.5"]}
+ self._write_defaults(defaults_data)
+
+ mod_index = module_util.collect_module_defaults(
+ self.topdir, defaults_data.keys(), mod_index
+ )
+
+ for module_name in defaults_data.keys():
+ mod = mod_index.get_module(module_name)
+ self.assertIsNotNone(mod)
+
+ mod_defaults = mod.get_defaults()
+ self.assertIsNotNone(mod_defaults)
+
+ if module_name == "httpd":
+ self.assertEqual(mod_defaults.get_default_stream(), "1.44.2")
+ else:
+ # Can't have multiple defaults for one stream
+ self.assertEqual(mod_defaults.get_default_stream(), None)
+
+ def test_handles_non_defaults_file_without_validation(self):
+ self._write_defaults({"httpd": ["1"], "python": ["3.6"]})
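+ # A plain modulemd document is not a defaults file and must be ignored.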
+ helpers.touch(
+ os.path.join(self.topdir, "boom.yaml"),
+ "\n".join(
+ [
+ "document: modulemd",
+ "version: 2",
+ "data:",
+ " summary: dummy module",
+ " description: dummy module",
+ " license:",
+ " module: [GPL]",
+ " content: [GPL]",
+ ]
+ ),
+ )
+
+ idx = module_util.collect_module_defaults(self.topdir)
+
+ self.assertEqual(len(idx.get_module_names()), 0)
+
+ @parameterized.expand([(False, ["httpd"]), (False, ["python"])])
+ def test_collect_module_obsoletes(self, no_index, mod_list):
+ if not no_index:
+ stream = self._get_stream(mod_list[0], "1.22.1")
+ mod_index = Modulemd.ModuleIndex()
+ mod_index.add_module_stream(stream)
+ else:
+ mod_index = None
+
+ data = [
+ ("httpd", "1.22.1", ("httpd-new", "3.0")),
+ ("httpd", "10.4", ("httpd", "11.1.22")),
+ ]
+ self._write_obsoletes(data)
+
+ mod_index = module_util.collect_module_obsoletes(
+ self.topdir, mod_list, mod_index
+ )
+
+ # Obsoletes must not be merged without a corresponding module
+ # when a module list is given
+ if "python" in mod_list:
+ mod = mod_index.get_module("httpd")
+ self.assertIsNone(mod)
+ else:
+ mod = mod_index.get_module("httpd")
+
+ # Without the module in the list, no obsoletes are attached
+ if "httpd" not in mod_list:
+ self.assertIsNone(mod.get_obsoletes())
+ else:
+ self.assertIsNotNone(mod)
+ obsoletes_from_orig = mod.get_newest_active_obsoletes("1.22.1", None)
+
+ self.assertEqual(
+ obsoletes_from_orig.get_obsoleted_by_module_name(), "httpd-new"
+ )
+
+ def test_collect_module_obsoletes_without_modlist(self):
+ stream = self._get_stream("nginx", "1.22.1")
+ mod_index = Modulemd.ModuleIndex()
+ mod_index.add_module_stream(stream)
+
+ data = [
+ ("httpd", "1.22.1", ("httpd-new", "3.0")),
+ ("nginx", "10.4", ("nginx", "11.1.22")),
+ ("nginx", "11.1.22", ("nginx", "66")),
+ ]
+ self._write_obsoletes(data)
+
+ mod_index = module_util.collect_module_obsoletes(self.topdir, [], mod_index)
+
+ # All obsoletes are merged into main Index when filter is empty
+ self.assertEqual(len(mod_index.get_module_names()), 2)
+
+ mod = mod_index.get_module("httpd")
+ self.assertIsNotNone(mod)
+
+ self.assertEqual(len(mod.get_obsoletes()), 1)
+
+ mod = mod_index.get_module("nginx")
+ self.assertIsNotNone(mod)
+
+ self.assertEqual(len(mod.get_obsoletes()), 2)
diff --git a/tests/test_notifier.py b/tests/test_notifier.py
index 445b6ff3..914d6314 100644
--- a/tests/test_notifier.py
+++ b/tests/test_notifier.py
@@ -73,7 +73,7 @@ class TestNotifier(unittest.TestCase):
stdin_data=json.dumps(data),
can_fail=True,
return_stdout=False,
- workdir=self.compose.paths.compose.topdir.return_value,
+ workdir=None,
universal_newlines=True,
show_cmd=True,
logfile=self.logfile,
diff --git a/tests/test_osbs_phase.py b/tests/test_osbs_phase.py
index 059ce828..9a45dfea 100644
--- a/tests/test_osbs_phase.py
+++ b/tests/test_osbs_phase.py
@@ -19,7 +19,7 @@ class OSBSPhaseTest(helpers.PungiTestCase):
pool = ThreadPool.return_value
- phase = osbs.OSBSPhase(compose)
+ phase = osbs.OSBSPhase(compose, None, None)
phase.run()
self.assertEqual(len(pool.add.call_args_list), 1)
@@ -33,44 +33,16 @@ class OSBSPhaseTest(helpers.PungiTestCase):
compose = helpers.DummyCompose(self.topdir, {})
compose.just_phases = None
compose.skip_phases = []
- phase = osbs.OSBSPhase(compose)
+ phase = osbs.OSBSPhase(compose, None, None)
self.assertTrue(phase.skip())
- @mock.patch("pungi.phases.osbs.ThreadPool")
- def test_dump_metadata(self, ThreadPool):
- compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
- compose.just_phases = None
- compose.skip_phases = []
- compose.notifier = mock.Mock()
- phase = osbs.OSBSPhase(compose)
- phase.start()
- phase.stop()
- phase.pool.metadata = METADATA
- phase.dump_metadata()
-
- with open(self.topdir + "/compose/metadata/osbs.json") as f:
- data = json.load(f)
- self.assertEqual(data, METADATA)
-
- @mock.patch("pungi.phases.osbs.ThreadPool")
- def test_dump_metadata_after_skip(self, ThreadPool):
- compose = helpers.DummyCompose(self.topdir, {})
- compose.just_phases = None
- compose.skip_phases = []
- phase = osbs.OSBSPhase(compose)
- phase.start()
- phase.stop()
- phase.dump_metadata()
-
- self.assertFalse(os.path.isfile(self.topdir + "/compose/metadata/osbs.json"))
-
@mock.patch("pungi.phases.osbs.ThreadPool")
def test_request_push(self, ThreadPool):
compose = helpers.DummyCompose(self.topdir, {"osbs": {"^Everything$": {}}})
compose.just_phases = None
compose.skip_phases = []
compose.notifier = mock.Mock()
- phase = osbs.OSBSPhase(compose)
+ phase = osbs.OSBSPhase(compose, None, None)
phase.start()
phase.stop()
phase.pool.registries = {"foo": "bar"}
@@ -81,7 +53,8 @@ class OSBSPhaseTest(helpers.PungiTestCase):
self.assertEqual(data, phase.pool.registries)
self.assertEqual(
- compose.notifier.call_args_list, [],
+ compose.notifier.call_args_list,
+ [],
)
@@ -166,6 +139,8 @@ METADATA = {
}
}
+RPMS = []
+
SCRATCH_TASK_RESULT = {
"koji_builds": [],
"repositories": [
@@ -190,7 +165,7 @@ SCRATCH_METADATA = {
class OSBSThreadTest(helpers.PungiTestCase):
def setUp(self):
super(OSBSThreadTest, self).setUp()
- self.pool = mock.Mock(metadata={}, registries={})
+ self.pool = mock.Mock(registries={})
self.t = osbs.OSBSThread(self.pool)
self.compose = helpers.DummyCompose(
self.topdir,
@@ -209,6 +184,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.wrapper.koji_proxy.getTaskResult.return_value = TASK_RESULT
self.wrapper.koji_proxy.getBuild.return_value = BUILD_INFO
self.wrapper.koji_proxy.listArchives.return_value = ARCHIVES
+ self.wrapper.koji_proxy.listRPMs.return_value = RPMS
self.wrapper.koji_proxy.getLatestBuilds.return_value = [
mock.Mock(),
mock.Mock(),
@@ -226,7 +202,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
metadata = copy.deepcopy(METADATA)
metadata["Server"]["x86_64"][0]["compose_id"] = self.compose.compose_id
metadata["Server"]["x86_64"][0]["koji_task"] = 12345
- self.assertEqual(self.pool.metadata, metadata)
+ self.assertEqual(self.compose.containers_metadata, metadata)
def _assertCorrectCalls(self, opts, setupCalls=None, scratch=False):
setupCalls = setupCalls or []
@@ -247,6 +223,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
options,
priority=None,
),
+ mock.call.save_task_id(12345),
mock.call.watch_task(
12345, self.topdir + "/logs/global/osbs/Server-1-watch-task.log"
),
@@ -258,7 +235,8 @@ class OSBSThreadTest(helpers.PungiTestCase):
expect_calls.extend(
[
mock.call.koji_proxy.getBuild(54321),
- mock.call.koji_proxy.listArchives(54321),
+ mock.call.koji_proxy.listArchives(54321, type="image"),
+ mock.call.koji_proxy.listRPMs(imageID=1436049),
]
)
self.assertEqual(self.wrapper.mock_calls, expect_calls)
@@ -295,8 +273,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.assertIn(" Possible reason: %r is a required property" % key, errors)
self.assertEqual([], warnings)
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_minimal_run(self, KojiWrapper):
+ def test_minimal_run(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -311,8 +290,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertCorrectMetadata()
self._assertRepoFile()
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_failable(self, KojiWrapper):
+ def test_run_failable(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -328,8 +308,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertCorrectMetadata()
self._assertRepoFile()
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_more_args(self, KojiWrapper):
+ def test_run_with_more_args(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -348,15 +329,21 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertCorrectMetadata()
self._assertRepoFile()
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_extra_repos(self, KojiWrapper):
+ def test_run_with_extra_repos(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
"git_branch": "f24-docker",
"name": "my-name",
"version": "1.0",
- "repo": ["Everything", "http://pkgs.example.com/my.repo", "/extra/repo"],
+ "repo": [
+ "Everything",
+ "http://pkgs.example.com/my.repo",
+ "/extra/repo",
+ "http://cts.localhost/$COMPOSE_ID/repo",
+ ],
}
self.compose.conf["translate_paths"].append(("/extra", "http://example.com"))
self._setupMock(KojiWrapper)
@@ -373,6 +360,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
"http://root/work/global/tmp-Everything/compose-rpms-Everything-1.repo",
"http://pkgs.example.com/my.repo",
"http://root/work/global/tmp/compose-rpms-local-1.repo",
+ "http://cts.localhost/%s/repo" % self.compose.compose_id,
],
}
self._assertCorrectCalls(options)
@@ -384,8 +372,41 @@ class OSBSThreadTest(helpers.PungiTestCase):
) as f:
self.assertIn("baseurl=http://example.com/repo\n", f)
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_deprecated_registry(self, KojiWrapper):
+ def test_run_with_extra_repos_with_cts(self, KojiWrapper, get_file_from_scm):
+ cfg = {
+ "url": "git://example.com/repo?#BEEFCAFE",
+ "target": "f24-docker-candidate",
+ "git_branch": "f24-docker",
+ "name": "my-name",
+ "version": "1.0",
+ "repo": [
+ "Everything",
+ ],
+ }
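+        # With cts_url set, the variant repo and the extra "Everything" repo should resolve to CTS URLs.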
+ self.compose.conf["cts_url"] = "http://cts.localhost"
+ self._setupMock(KojiWrapper)
+ self._assertConfigCorrect(cfg)
+
+ self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
+
+ cts_url = "http://cts.localhost/api/1/composes/%s" % self.compose.compose_id
+ options = {
+ "name": "my-name",
+ "version": "1.0",
+ "git_branch": "f24-docker",
+ "yum_repourls": [
+ "%s/repo/?variant=Server" % cts_url,
+ "%s/repo/?variant=Everything" % cts_url,
+ ],
+ }
+ self._assertCorrectCalls(options)
+ self._assertCorrectMetadata()
+
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
+ @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
+ def test_run_with_deprecated_registry(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -415,8 +436,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertRepoFile(["Server", "Everything"])
self.assertEqual(self.t.pool.registries, {"my-name-1.0-1": {"foo": "bar"}})
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_registry(self, KojiWrapper):
+ def test_run_with_registry(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -446,8 +468,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertRepoFile(["Server", "Everything"])
self.assertEqual(self.t.pool.registries, {"my-name-1.0-1": [{"foo": "bar"}]})
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_extra_repos_in_list(self, KojiWrapper):
+ def test_run_with_extra_repos_in_list(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
@@ -476,8 +499,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self._assertCorrectMetadata()
self._assertRepoFile(["Server", "Everything", "Client"])
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_run_with_gpgkey_enabled(self, KojiWrapper):
+ def test_run_with_gpgkey_enabled(self, KojiWrapper, get_file_from_scm):
gpgkey = "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release"
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
@@ -536,8 +560,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
}
self._assertConfigMissing(cfg, "git_branch")
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_failing_task(self, KojiWrapper):
+ def test_failing_task(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "fedora-24-docker-candidate",
@@ -552,8 +577,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_failing_task_with_failable(self, KojiWrapper):
+ def test_failing_task_with_failable(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "fedora-24-docker-candidate",
@@ -566,8 +592,9 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)
+ @mock.patch("pungi.phases.osbs.get_file_from_scm")
@mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
- def test_scratch_metadata(self, KojiWrapper):
+ def test_scratch_metadata(self, KojiWrapper, get_file_from_scm):
cfg = {
"url": "git://example.com/repo?#BEEFCAFE",
"target": "f24-docker-candidate",
diff --git a/tests/test_osbuild_phase.py b/tests/test_osbuild_phase.py
index f3b70e61..c53f7529 100644
--- a/tests/test_osbuild_phase.py
+++ b/tests/test_osbuild_phase.py
@@ -8,6 +8,7 @@ import koji as orig_koji
from tests import helpers
from pungi.phases import osbuild
+from pungi.checks import validate
class OSBuildPhaseTest(helpers.PungiTestCase):
@@ -105,6 +106,24 @@ class OSBuildPhaseTest(helpers.PungiTestCase):
phase = osbuild.OSBuildPhase(compose)
self.assertTrue(phase.skip())
+ def test_fail_multiple_image_types(self):
+ cfg = {
+ "name": "test-image",
+ "distro": "rhel-8",
+ # more than one image type is not allowed
+ "image_types": ["qcow2", "rhel-ec2"],
+ }
+ compose = helpers.DummyCompose(
+ self.topdir,
+ {
+ "osbuild": {"^Everything$": [cfg]},
+ "osbuild_target": "image-target",
+ "osbuild_version": "1",
+ "osbuild_release": "2",
+ },
+ )
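+        # validate() returns (errors, warnings); an invalid config must not produce an empty pair.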
+ self.assertNotEqual(validate(compose.conf), ([], []))
+
class RunOSBuildThreadTest(helpers.PungiTestCase):
def setUp(self):
@@ -178,7 +197,6 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
# Verify two Koji instances were created.
self.assertEqual(len(KojiWrapper.call_args), 2)
- print(koji.mock_calls)
# Verify correct calls to Koji
self.assertEqual(
koji.mock_calls,
@@ -196,6 +214,7 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
"repo": [self.topdir + "/compose/Everything/$arch/os"],
},
),
+ mock.call.save_task_id(1234),
mock.call.watch_task(1234, mock.ANY),
mock.call.koji_proxy.getTaskResult(1234),
mock.call.koji_proxy.getBuild(build_id),
@@ -247,6 +266,258 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
],
)
+ @mock.patch("pungi.util.get_file_size", new=lambda fp: 65536)
+ @mock.patch("pungi.util.get_mtime", new=lambda fp: 1024)
+ @mock.patch("pungi.phases.osbuild.Linker")
+ @mock.patch("pungi.phases.osbuild.kojiwrapper.KojiWrapper")
+ def test_process_ostree(self, KojiWrapper, Linker):
+ cfg = {
+ "name": "test-image",
+ "distro": "rhel-8",
+ "image_types": ["edge-raw-disk"],
+ "ostree_url": "http://edge.example.com/repo",
+ "ostree_ref": "test/iot",
+ "ostree_parent": "test/iot-parent",
+ }
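+        # The ostree_* keys above should be forwarded to Koji as the "ostree" opts block.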
+ build_id = 5678
+ koji = KojiWrapper.return_value
+ koji.watch_task.side_effect = self.make_fake_watch(0)
+ koji.koji_proxy.osbuildImage.return_value = 1234
+ koji.koji_proxy.getTaskResult.return_value = {
+ "composer": {"server": "https://composer.osbuild.org", "id": ""},
+ "koji": {"build": build_id},
+ }
+ koji.koji_proxy.getBuild.return_value = {
+ "build_id": build_id,
+ "name": "test-image",
+ "version": "1",
+ "release": "1",
+ }
+ koji.koji_proxy.listArchives.return_value = [
+ {
+ "extra": {"image": {"arch": "aarch64"}},
+ "filename": "image.aarch64.raw.xz",
+ "type_name": "raw-xz",
+ },
+ {
+ "extra": {"image": {"arch": "x86_64"}},
+ "filename": "image.x86_64.raw.xz",
+ "type_name": "raw-xz",
+ },
+ ]
+ koji.koji_module.pathinfo = orig_koji.pathinfo
+
+ self.t.process(
+ (
+ self.compose,
+ self.compose.variants["Everything"],
+ cfg,
+ ["aarch64", "x86_64"],
+ "1", # version
+ "15", # release
+ "image-target",
+ [self.topdir + "/compose/Everything/$arch/os"],
+ ["x86_64"],
+ ),
+ 1,
+ )
+
+ # Verify two Koji instances were created.
+        self.assertEqual(KojiWrapper.call_count, 2)
+ # Verify correct calls to Koji
+ self.assertEqual(
+ koji.mock_calls,
+ [
+ mock.call.login(),
+ mock.call.koji_proxy.osbuildImage(
+ "test-image",
+ "1",
+ "rhel-8",
+ ["edge-raw-disk"],
+ "image-target",
+ ["aarch64", "x86_64"],
+ opts={
+ "release": "15",
+ "repo": [self.topdir + "/compose/Everything/$arch/os"],
+ "ostree": {
+ "url": "http://edge.example.com/repo",
+ "ref": "test/iot",
+ "parent": "test/iot-parent",
+ },
+ },
+ ),
+ mock.call.save_task_id(1234),
+ mock.call.watch_task(1234, mock.ANY),
+ mock.call.koji_proxy.getTaskResult(1234),
+ mock.call.koji_proxy.getBuild(build_id),
+ mock.call.koji_proxy.listArchives(buildID=build_id),
+ ],
+ )
+
+        # Assert two images are added to the manifest and the arguments are sane
+ self.assertEqual(
+ self.compose.im.add.call_args_list,
+ [
+ mock.call(arch="aarch64", variant="Everything", image=mock.ANY),
+ mock.call(arch="x86_64", variant="Everything", image=mock.ANY),
+ ],
+ )
+ for call in self.compose.im.add.call_args_list:
+ _, kwargs = call
+ image = kwargs["image"]
+ self.assertEqual(kwargs["variant"], "Everything")
+ self.assertIn(kwargs["arch"], ("aarch64", "x86_64"))
+ self.assertEqual(kwargs["arch"], image.arch)
+ self.assertEqual(
+ "Everything/%(arch)s/images/image.%(arch)s.raw.xz"
+ % {"arch": image.arch},
+ image.path,
+ )
+ self.assertEqual("raw.xz", image.format)
+ self.assertEqual("raw-xz", image.type)
+ self.assertEqual("Everything", image.subvariant)
+
+ self.assertTrue(
+ os.path.isdir(self.topdir + "/compose/Everything/aarch64/images")
+ )
+ self.assertTrue(
+ os.path.isdir(self.topdir + "/compose/Everything/x86_64/images")
+ )
+
+ self.assertEqual(
+ Linker.return_value.mock_calls,
+ [
+ mock.call.link(
+ "/mnt/koji/packages/test-image/1/1/images/image.%(arch)s.raw.xz"
+ % {"arch": arch},
+ self.topdir
+ + "/compose/Everything/%(arch)s/images/image.%(arch)s.raw.xz"
+ % {"arch": arch},
+ link_type="hardlink-or-copy",
+ )
+ for arch in ["aarch64", "x86_64"]
+ ],
+ )
+
+ @mock.patch("pungi.util.get_file_size", new=lambda fp: 65536)
+ @mock.patch("pungi.util.get_mtime", new=lambda fp: 1024)
+ @mock.patch("pungi.phases.osbuild.Linker")
+ @mock.patch("pungi.phases.osbuild.kojiwrapper.KojiWrapper")
+ def test_process_upload_options(self, KojiWrapper, Linker):
+ cfg = {
+ "name": "test-image",
+ "distro": "rhel-8",
+ "image_types": ["rhel-ec2"],
+ "upload_options": {
+ "region": "us-east-1",
+ "share_with_accounts": ["123456789012"],
+ },
+ }
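+        # upload_options is expected to be passed through to the osbuildImage call unchanged.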
+ build_id = 5678
+ koji = KojiWrapper.return_value
+ koji.watch_task.side_effect = self.make_fake_watch(0)
+ koji.koji_proxy.osbuildImage.return_value = 1234
+ koji.koji_proxy.getTaskResult.return_value = {
+ "composer": {"server": "https://composer.osbuild.org", "id": ""},
+ "koji": {"build": build_id},
+ }
+ koji.koji_proxy.getBuild.return_value = {
+ "build_id": build_id,
+ "name": "test-image",
+ "version": "1",
+ "release": "1",
+ }
+ koji.koji_proxy.listArchives.return_value = [
+ {
+ "extra": {"image": {"arch": "x86_64"}},
+ "filename": "image.raw.xz",
+ "type_name": "raw-xz",
+ }
+ ]
+ koji.koji_module.pathinfo = orig_koji.pathinfo
+
+ self.t.process(
+ (
+ self.compose,
+ self.compose.variants["Everything"],
+ cfg,
+ ["x86_64"],
+ "1", # version
+ "15", # release
+ "image-target",
+ [self.topdir + "/compose/Everything/$arch/os"],
+ ["x86_64"],
+ ),
+ 1,
+ )
+
+ # Verify two Koji instances were created.
+        self.assertEqual(KojiWrapper.call_count, 2)
+ # Verify correct calls to Koji
+ self.assertEqual(
+ koji.mock_calls,
+ [
+ mock.call.login(),
+ mock.call.koji_proxy.osbuildImage(
+ "test-image",
+ "1",
+ "rhel-8",
+ ["rhel-ec2"],
+ "image-target",
+ ["x86_64"],
+ opts={
+ "release": "15",
+ "repo": [self.topdir + "/compose/Everything/$arch/os"],
+ "upload_options": {
+ "region": "us-east-1",
+ "share_with_accounts": ["123456789012"],
+ },
+ },
+ ),
+ mock.call.save_task_id(1234),
+ mock.call.watch_task(1234, mock.ANY),
+ mock.call.koji_proxy.getTaskResult(1234),
+ mock.call.koji_proxy.getBuild(build_id),
+ mock.call.koji_proxy.listArchives(buildID=build_id),
+ ],
+ )
+
+        # Assert one image is added to the manifest and the arguments are sane
+ self.assertEqual(
+ self.compose.im.add.call_args_list,
+ [
+ mock.call(arch="x86_64", variant="Everything", image=mock.ANY),
+ ],
+ )
+ for call in self.compose.im.add.call_args_list:
+ _, kwargs = call
+ image = kwargs["image"]
+ self.assertEqual(kwargs["variant"], "Everything")
+            self.assertIn(kwargs["arch"], ("x86_64",))
+ self.assertEqual(kwargs["arch"], image.arch)
+ self.assertEqual(
+ "Everything/x86_64/images/image.raw.xz",
+ image.path,
+ )
+ self.assertEqual("raw.xz", image.format)
+ self.assertEqual("raw-xz", image.type)
+ self.assertEqual("Everything", image.subvariant)
+
+ self.assertTrue(
+ os.path.isdir(self.topdir + "/compose/Everything/x86_64/images")
+ )
+
+ self.assertEqual(
+ Linker.return_value.mock_calls,
+ [
+ mock.call.link(
+ "/mnt/koji/packages/test-image/1/1/images/image.raw.xz",
+ self.topdir + "/compose/Everything/x86_64/images/image.raw.xz",
+ link_type="hardlink-or-copy",
+ )
+ ],
+ )
+
@mock.patch("pungi.util.get_file_size", new=lambda fp: 65536)
@mock.patch("pungi.util.get_mtime", new=lambda fp: 1024)
@mock.patch("pungi.phases.osbuild.Linker")
@@ -312,6 +583,7 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
["aarch64", "x86_64"],
opts={"repo": [self.topdir + "/compose/Everything/$arch/os"]},
),
+ mock.call.save_task_id(1234),
mock.call.watch_task(1234, mock.ANY),
mock.call.koji_proxy.getTaskResult(1234),
mock.call.koji_proxy.getBuild(build_id),
diff --git a/tests/test_ostree_phase.py b/tests/test_ostree_phase.py
index b214a127..40c99076 100644
--- a/tests/test_ostree_phase.py
+++ b/tests/test_ostree_phase.py
@@ -325,6 +325,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
"ostree-ref": None,
"force-new-commit": False,
"version": None,
+ "unified-core": False,
},
channel=None,
mounts=[self.topdir, self.repo],
diff --git a/tests/test_ostree_script.py b/tests/test_ostree_script.py
index c32ce466..b6710e57 100644
--- a/tests/test_ostree_script.py
+++ b/tests/test_ostree_script.py
@@ -238,6 +238,22 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):
self.assertCorrectCall(run, extra_args=["--force-nocache"])
+ @mock.patch("kobo.shortcuts.run")
+ def test_unified_core(self, run):
+ helpers.touch(os.path.join(self.repo, "initialized"))
+
+ ostree.main(
+ [
+ "tree",
+ "--repo=%s" % self.repo,
+ "--log-dir=%s" % os.path.join(self.topdir, "logs", "Atomic"),
+ "--treefile=%s/fedora-atomic-docker-host.json" % self.topdir,
+ "--unified-core",
+ ]
+ )
+
+ self.assertCorrectCall(run, extra_args=["--unified-core"])
+
@mock.patch("kobo.shortcuts.run")
def test_extra_config_with_extra_repos(self, run):
configdir = os.path.join(self.topdir, "config")
diff --git a/tests/test_patch_iso.py b/tests/test_patch_iso.py
index 55abf12b..9fe8d7b4 100644
--- a/tests/test_patch_iso.py
+++ b/tests/test_patch_iso.py
@@ -61,7 +61,7 @@ class EqualsAny(object):
return True
def __repr__(self):
- return u"ANYTHING"
+ return "ANYTHING"
ANYTHING = EqualsAny()
diff --git a/tests/test_pkgset_common.py b/tests/test_pkgset_common.py
index 455c7101..9209e408 100755
--- a/tests/test_pkgset_common.py
+++ b/tests/test_pkgset_common.py
@@ -96,24 +96,51 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
@helpers.unittest.skipUnless(Modulemd, "Skipping tests, no module support")
@mock.patch("pungi.phases.pkgset.common.collect_module_defaults")
+ @mock.patch("pungi.phases.pkgset.common.collect_module_obsoletes")
@mock.patch("pungi.phases.pkgset.common.add_modular_metadata")
- def test_run_with_modulemd(self, amm, cmd, mock_run):
- mmd = {"x86_64": [mock.Mock()]}
+ def test_run_with_modulemd(self, amm, cmo, cmd, mock_run):
+        # Build a Modulemd index with an obsoletes entry for collect_module_obsoletes (cmo) to return
+ mod_index = Modulemd.ModuleIndex.new()
+ mmdobs = Modulemd.Obsoletes.new(
+ 1, 10993435, "mod_name", "mod_stream", "testmsg"
+ )
+ mmdobs.set_obsoleted_by("mod_name", "mod_name_2")
+ mod_index.add_obsoletes(mmdobs)
+ cmo.return_value = mod_index
+
+ mmd = {
+ "x86_64": [
+ Modulemd.ModuleStream.new(
+ Modulemd.ModuleStreamVersionEnum.TWO, "mod_name", "stream_name"
+ )
+ ]
+ }
common.MaterializedPackageSet.create(
self.compose, self.pkgset, self.prefix, mmd=mmd
)
cmd.assert_called_once_with(
os.path.join(self.topdir, "work/global/module_defaults"),
- set(x.get_module_name.return_value for x in mmd["x86_64"]),
+ {"mod_name"},
overrides_dir=None,
)
- amm.assert_called_once_with(
- mock.ANY,
- os.path.join(self.topdir, "work/x86_64/repo/foo"),
- cmd.return_value,
+
+ cmo.assert_called_once()
+ cmd.assert_called_once()
+ amm.assert_called_once()
+
+ self.assertEqual(
+ amm.mock_calls[0][1][1], os.path.join(self.topdir, "work/x86_64/repo/foo")
+ )
+ self.assertIsInstance(amm.mock_calls[0][1][2], Modulemd.ModuleIndex)
+ self.assertIsNotNone(amm.mock_calls[0][1][2].get_module("mod_name"))
+ # Check if proper Index is used by add_modular_metadata
+ self.assertIsNotNone(
+ amm.mock_calls[0][1][2].get_module("mod_name").get_obsoletes()
+ )
+ self.assertEqual(
+ amm.mock_calls[0][1][3],
os.path.join(self.topdir, "logs/x86_64/arch_repo_modulemd.foo.x86_64.log"),
)
- cmd.return_value.add_module_stream.assert_called_once_with(mmd["x86_64"][0])
class TestCreateArchRepos(helpers.PungiTestCase):
diff --git a/tests/test_pkgset_pkgsets.py b/tests/test_pkgset_pkgsets.py
index cc59f5bf..75d3f474 100644
--- a/tests/test_pkgset_pkgsets.py
+++ b/tests/test_pkgset_pkgsets.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-import ddt as ddt
+import ddt
import mock
import os
import six
@@ -137,6 +137,21 @@ class PkgsetCompareMixin(object):
@mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
@mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
+
+ @classmethod
+ def setUpClass(cls) -> None:
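+        # Treat every RPM as signed for all tests in this class.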
+ cls.patcher = mock.patch.object(
+ pkgsets.KojiMockPackageSet,
+            "_is_rpm_signed",
+ return_value=True,
+ )
+ cls.patcher.start()
+
+ @classmethod
+ def tearDownClass(cls) -> None:
+ cls.patcher.stop()
+
def setUp(self):
super(TestKojiPkgset, self).setUp()
with open(os.path.join(helpers.FIXTURE_DIR, "tagged-rpms.json")) as f:
@@ -312,6 +327,58 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
)
self.assertRegex(str(ctx.exception), figure)
+ @mock.patch("os.path.isfile")
+ @mock.patch("time.sleep")
+ def test_find_signed_after_wait(self, sleep, isfile):
+ checked_files = set()
+
+ def check_file(path):
+            """The first check for any path fails; subsequent checks succeed."""
+ if path in checked_files:
+ return True
+ checked_files.add(path)
+ return False
+
+ isfile.side_effect = check_file
+
+ fst_key, snd_key = ["cafebabe", "deadbeef"]
+ pkgset = pkgsets.KojiPackageSet(
+ "pkgset",
+ self.koji_wrapper,
+ [fst_key, snd_key],
+ arches=["x86_64"],
+ signed_packages_retries=2,
+ signed_packages_wait=5,
+ )
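+        # With two retries and a 5s wait, each package should be found on the second attempt.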
+
+ result = pkgset.populate("f25")
+
+ self.assertEqual(
+ self.koji_wrapper.koji_proxy.mock_calls,
+ [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
+ )
+
+ fst_pkg = "signed/%s/bash-debuginfo@4.3.42@4.fc24@x86_64"
+ snd_pkg = "signed/%s/bash@4.3.42@4.fc24@x86_64"
+
+ self.assertPkgsetEqual(
+ result, {"x86_64": [fst_pkg % "cafebabe", snd_pkg % "cafebabe"]}
+ )
+ # Wait once for each of the two packages
+ self.assertEqual(sleep.call_args_list, [mock.call(5)] * 2)
+ # Each file will be checked three times
+ self.assertEqual(
+ isfile.call_args_list,
+ [
+ mock.call(os.path.join(self.topdir, fst_pkg % fst_key)),
+ mock.call(os.path.join(self.topdir, fst_pkg % snd_key)),
+ mock.call(os.path.join(self.topdir, fst_pkg % fst_key)),
+ mock.call(os.path.join(self.topdir, snd_pkg % fst_key)),
+ mock.call(os.path.join(self.topdir, snd_pkg % snd_key)),
+ mock.call(os.path.join(self.topdir, snd_pkg % fst_key)),
+ ],
+ )
+
def test_can_not_find_signed_package_allow_invalid_sigkeys(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset",
@@ -355,6 +422,32 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
r"^RPM\(s\) not found for sigs: .+Check log for details.+",
)
+ @mock.patch("time.sleep")
+ def test_can_not_find_signed_package_with_retries(self, time):
+ pkgset = pkgsets.KojiPackageSet(
+ "pkgset",
+ self.koji_wrapper,
+ ["cafebabe"],
+ arches=["x86_64"],
+ signed_packages_retries=2,
+ signed_packages_wait=5,
+ )
+
+ with self.assertRaises(RuntimeError) as ctx:
+ pkgset.populate("f25")
+
+ self.assertEqual(
+ self.koji_wrapper.koji_proxy.mock_calls,
+ [mock.call.listTaggedRPMS("f25", event=None, inherit=True, latest=True)],
+ )
+
+ self.assertRegex(
+ str(ctx.exception),
+ r"^RPM\(s\) not found for sigs: .+Check log for details.+",
+ )
+        # Two packages, three attempts each: two waits per package, four in total.
+ self.assertEqual(time.call_args_list, [mock.call(5)] * 4)
+
@ddt.data(
pkgsets.KojiPackageSet,
pkgsets.KojiMockPackageSet,
@@ -681,8 +774,8 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
self.koji_wrapper.koji_proxy.queryHistory.side_effect = [
- {"tag_listing": []},
- {"tag_listing": [{}]},
+ {"tag_listing": [], "tag_inheritance": []},
+ {"tag_listing": [{}], "tag_inheritance": []},
]
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = [
{"name": self.inherited_tag}
@@ -709,7 +802,9 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
def test_reuse_failed_load_reuse_file(self, mock_old_topdir, mock_exists):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [], "tag_inheritance": []
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
side_effect=Exception("unknown error")
@@ -741,7 +836,9 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
def test_reuse_criteria_not_match(self, mock_old_topdir, mock_exists):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [], "tag_inheritance": []
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
return_value={"allow_invalid_sigkeys": True}
@@ -780,7 +877,9 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
def test_reuse_pkgset(self, mock_old_topdir, mock_exists, mock_copy_all):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [], "tag_inheritance": []
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
return_value={
@@ -858,7 +957,10 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
def test_reuse_build_under_tag_changed(self, mock_old_topdir):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": [{}]}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [{}],
+ "tag_inheritance": [],
+ }
self.pkgset.try_to_reuse(self.compose, self.tag)
@@ -878,8 +980,8 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
self.koji_wrapper.koji_proxy.queryHistory.side_effect = [
- {"tag_listing": []},
- {"tag_listing": [{}]},
+ {"tag_listing": [], "tag_inheritance": []},
+ {"tag_listing": [{}], "tag_inheritance": []},
]
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = [
{"name": self.inherited_tag}
@@ -906,7 +1008,10 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
def test_reuse_failed_load_reuse_file(self, mock_old_topdir, mock_exists):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [],
+ "tag_inheritance": [],
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
side_effect=Exception("unknown error")
@@ -938,7 +1043,10 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
def test_reuse_criteria_not_match(self, mock_old_topdir, mock_exists):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [],
+ "tag_inheritance": [],
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
return_value={"allow_invalid_sigkeys": True}
@@ -977,7 +1085,10 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
def test_reuse_pkgset(self, mock_old_topdir, mock_exists, mock_copy_all):
mock_old_topdir.return_value = self.old_compose_dir
self.pkgset._get_koji_event_from_file = mock.Mock(side_effect=[3, 1])
- self.koji_wrapper.koji_proxy.queryHistory.return_value = {"tag_listing": []}
+ self.koji_wrapper.koji_proxy.queryHistory.return_value = {
+ "tag_listing": [],
+ "tag_inheritance": [],
+ }
self.koji_wrapper.koji_proxy.getFullInheritance.return_value = []
self.pkgset.load_old_file_cache = mock.Mock(
return_value={
diff --git a/tests/test_pkgset_source_koji.py b/tests/test_pkgset_source_koji.py
index 500af52d..da47fa86 100644
--- a/tests/test_pkgset_source_koji.py
+++ b/tests/test_pkgset_source_koji.py
@@ -8,6 +8,8 @@ import six
from ddt import ddt, data, unpack
from typing import AnyStr, List, Set, Dict, Tuple
+from tests.test_gather_method_hybrid import MockModule
+
try:
import unittest2 as unittest
from unittest2 import mock
@@ -18,7 +20,9 @@ except ImportError:
from pungi.phases.pkgset.sources import source_koji, source_kojimock
from tests import helpers
from pungi.module_util import Modulemd
+from pungi.util import read_single_module_stream_from_file
+MMDS_DIR = os.path.join(helpers.FIXTURE_DIR, "mmds")
EVENT_INFO = {"id": 15681980, "ts": 1460956382.81936}
TAG_INFO = {
"maven_support": False,
@@ -450,7 +454,7 @@ class TestSourceKoji(helpers.PungiTestCase):
self.assertEqual(pkgsets, gpfk.return_value)
self.assertEqual(path_prefix, "/prefix/")
- self.assertEqual(KojiWrapper.mock_calls, [mock.call("koji")])
+ self.assertEqual(KojiWrapper.mock_calls, [mock.call(compose)])
class TestCorrectNVR(helpers.PungiTestCase):
@@ -676,24 +680,12 @@ class TestFilterByWhitelist(unittest.TestCase):
self.assertEqual(expected, set())
-class MockModule(object):
- def __init__(self, path, strict=True):
- self.path = path
-
- def __repr__(self):
- return "MockModule(%r)" % self.path
-
- def __eq__(self, other):
- return self.path == other.path
-
-
-@mock.patch("pungi.module_util.Modulemd.ModuleStream.read_file", new=MockModule)
@unittest.skipIf(Modulemd is None, "Skipping tests, no module support")
class TestAddModuleToVariant(helpers.PungiTestCase):
def setUp(self):
super(TestAddModuleToVariant, self).setUp()
self.koji = mock.Mock()
- self.koji.koji_module.pathinfo.typedir.return_value = "/koji"
+ self.koji.koji_module.pathinfo.typedir.return_value = MMDS_DIR
files = ["modulemd.x86_64.txt", "modulemd.armv7hl.txt", "modulemd.txt"]
self.koji.koji_proxy.listArchives.return_value = [
{"btype": "module", "filename": fname} for fname in files
@@ -717,50 +709,35 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
- self.assertEqual(
- variant.arch_mmds,
- {
- "armhfp": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.armv7hl.txt"
- ),
- },
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.x86_64.txt"
- ),
- },
- },
- )
+ mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
+ mod2 = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod2.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+ self.assertEqual(len(variant.arch_mmds), 2)
self.assertEqual(variant.modules, [])
def test_adding_module_to_existing(self):
variant = mock.Mock(
arches=["armhfp", "x86_64"],
arch_mmds={
- "x86_64": {"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt")}
+ "x86_64": {
+ "m1:latest:20190101:cafe": read_single_module_stream_from_file(
+ os.path.join(MMDS_DIR, "m1.x86_64.txt")
+ )
+ }
},
modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
)
source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
- self.assertEqual(
- variant.arch_mmds,
- {
- "armhfp": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.armv7hl.txt"
- ),
- },
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.x86_64.txt"
- ),
- "m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
- },
- },
- )
+ mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
+ mod2 = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod2.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+ mod3 = variant.arch_mmds["x86_64"]["m1:latest:20190101:cafe"]
+ self.assertEqual(mod3.get_NSVCA(), "m1:latest:20190101:cafe:x86_64")
+
self.assertEqual(
variant.modules, [{"name": "m1:latest-20190101:cafe", "glob": False}]
)
@@ -772,21 +749,11 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
self.koji, variant, self.buildinfo, add_to_variant_modules=True
)
- self.assertEqual(
- variant.arch_mmds,
- {
- "armhfp": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.armv7hl.txt"
- ),
- },
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.x86_64.txt"
- ),
- },
- },
- )
+ mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
+ mod2 = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod2.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+
self.assertEqual(
variant.modules, [{"name": "module:master:20190318:abcdef", "glob": False}]
)
@@ -795,7 +762,11 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
variant = mock.Mock(
arches=["armhfp", "x86_64"],
arch_mmds={
- "x86_64": {"m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt")}
+ "x86_64": {
+ "m1:latest:20190101:cafe": read_single_module_stream_from_file(
+ os.path.join(MMDS_DIR, "m1.x86_64.txt")
+ )
+ }
},
modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
)
@@ -804,22 +775,13 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
self.koji, variant, self.buildinfo, add_to_variant_modules=True
)
- self.assertEqual(
- variant.arch_mmds,
- {
- "armhfp": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.armv7hl.txt"
- ),
- },
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/koji/modulemd.x86_64.txt"
- ),
- "m1:latest:20190101:cafe": MockModule("/koji/m1.x86_64.txt"),
- },
- },
- )
+ mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
+ mod2 = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod2.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+ mod3 = variant.arch_mmds["x86_64"]["m1:latest:20190101:cafe"]
+ self.assertEqual(mod3.get_NSVCA(), "m1:latest:20190101:cafe:x86_64")
+
self.assertEqual(
variant.modules,
[
@@ -856,14 +818,17 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
super(TestAddModuleToVariantForKojiMock, self).setUp()
self.koji = mock.Mock()
self.koji.koji_module.pathinfo.typedir.return_value = "/koji"
- self.koji.koji_module.pathinfo.topdir = "/mnt/koji"
- files = ["modulemd.x86_64.txt", "modulemd.armv7hl.txt", "modulemd.txt"]
+ self.koji.koji_module.pathinfo.topdir = MMDS_DIR
+ files = [
+ "modulemd.x86_64.txt",
+ "scratch-module.x86_64.txt",
+ ]
self.koji.koji_proxy.listArchives.return_value = [
{"btype": "module", "filename": fname} for fname in files
- ] + [{"btype": "foo"}]
+ ]
self.buildinfo = {
"id": 1234,
- "arch": "fake_arch",
+ "arch": "x86_64",
"extra": {
"typeinfo": {
"module": {
@@ -878,22 +843,23 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
}
def test_adding_module(self):
- variant = mock.Mock(arches=[
- "x86_64"
- ], arch_mmds={}, modules=[])
-
- source_kojimock._add_module_to_variant(self.koji, variant, self.buildinfo)
-
- self.assertEqual(
- variant.arch_mmds,
- {
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/mnt/koji/modules/fake_arch/module:master-20190318-abcdef"
- ),
- },
- },
+ variant = mock.Mock(
+ arches=[
+ "x86_64"
+ ],
+ arch_mmds={},
+ modules=[],
)
+
+ source_kojimock._add_module_to_variant(
+ self.koji,
+ variant,
+ self.buildinfo,
+ )
+
+ mod = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+ self.assertEqual(len(variant.arch_mmds), 1)
self.assertEqual(variant.modules, [])
def test_adding_module_to_existing(self):
@@ -902,26 +868,22 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
"x86_64"
],
arch_mmds={
- "x86_64": {"m1:latest:20190101:cafe": MockModule("/mnt/koji/modules/fake_arch/m1:latest:20190101:cafe")}
+ "x86_64": {
+ "m1:latest:20190101:cafe": read_single_module_stream_from_file(
+ os.path.join(MMDS_DIR, "m1.x86_64.txt")
+                )
+            }
},
modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
)
- source_kojimock._add_module_to_variant(self.koji, variant, self.buildinfo)
+ source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
+
+ mod = variant.arch_mmds["x86_64"]["m1:latest:20190101:cafe"]
+ self.assertEqual(mod.get_NSVCA(), "m1:latest:20190101:cafe:x86_64")
self.assertEqual(
- variant.arch_mmds,
- {
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/mnt/koji/modules/fake_arch/module:master-20190318-abcdef"
- ),
- "m1:latest:20190101:cafe": MockModule("/mnt/koji/modules/fake_arch/m1:latest:20190101:cafe"),
- },
- },
- )
- self.assertEqual(
- variant.modules, [{"name": "m1:latest-20190101:cafe", "glob": False}]
+ variant.modules,
+ [{"name": "m1:latest-20190101:cafe", "glob": False}]
)
def test_adding_module_with_add_module(self):
@@ -933,16 +895,9 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
self.koji, variant, self.buildinfo, add_to_variant_modules=True
)
- self.assertEqual(
- variant.arch_mmds,
- {
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/mnt/koji/modules/fake_arch/module:master-20190318-abcdef"
- )
- },
- },
- )
+ mod = variant.arch_mmds["x86_64"]["module:master:20190318:abcdef"]
+ self.assertEqual(mod.get_NSVCA(), "module:master:20190318:abcdef:x86_64")
+
self.assertEqual(
variant.modules, [{"name": "module:master:20190318:abcdef", "glob": False}]
)
@@ -953,7 +908,10 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
"x86_64"
],
arch_mmds={
- "x86_64": {"m1:latest:20190101:cafe": MockModule("/mnt/koji/modules/fake_arch/m1:latest:20190101:cafe")}
+            "x86_64": {
+                "m1:latest:20190101:cafe": read_single_module_stream_from_file(
+                    os.path.join(MMDS_DIR, "m1.x86_64.txt")
+                )
+            }
},
modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
)
@@ -962,17 +920,9 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
self.koji, variant, self.buildinfo, add_to_variant_modules=True
)
- self.assertEqual(
- variant.arch_mmds,
- {
- "x86_64": {
- "module:master:20190318:abcdef": MockModule(
- "/mnt/koji/modules/fake_arch/module:master-20190318-abcdef"
- ),
- "m1:latest:20190101:cafe": MockModule("/mnt/koji/modules/fake_arch/m1:latest:20190101:cafe"),
- },
- },
- )
+ mod = variant.arch_mmds["x86_64"]["m1:latest:20190101:cafe"]
+ self.assertEqual(mod.get_NSVCA(), "m1:latest:20190101:cafe:x86_64")
+
self.assertEqual(
variant.modules,
[
@@ -1050,12 +1000,8 @@ class MockMBS(object):
return {"id": 1, "koji_tag": "scratch-module-tag", "name": "scratch-module"}
def final_modulemd(self, module_build_id):
- return {"x86_64": ""}
-
-
-class MockMmd(object):
- def __init__(self, mmd, strict=True):
- pass
+ with open(os.path.join(MMDS_DIR, "scratch-module.x86_64.txt")) as f:
+ return {"x86_64": f.read()}
@mock.patch("pungi.phases.pkgset.sources.source_koji.MBSWrapper", new=MockMBS)
@@ -1068,10 +1014,7 @@ class TestAddScratchModuleToVariant(helpers.PungiTestCase):
)
self.nsvc = "scratch-module:master:20200710:abcdef"
- @mock.patch(
- "pungi.phases.pkgset.sources.source_koji.Modulemd.ModuleStream.read_string"
- )
- def test_adding_scratch_module(self, mock_mmd):
+ def test_adding_scratch_module(self):
variant = mock.Mock(
arches=[
# "armhfp",
@@ -1089,11 +1032,16 @@ class TestAddScratchModuleToVariant(helpers.PungiTestCase):
self.compose, variant, scratch_modules, variant_tags, tag_to_mmd
)
self.assertEqual(variant_tags, {variant: ["scratch-module-tag"]})
+
self.assertEqual(
- variant.arch_mmds, {"x86_64": {self.nsvc: mock_mmd.return_value}}
+ variant.arch_mmds["x86_64"][self.nsvc].get_NSVCA(),
+ "scratch-module:master:20200710:abcdef:x86_64",
)
+
+        self.assertIsInstance(tag_to_mmd["scratch-module-tag"]["x86_64"], set)
self.assertEqual(
- tag_to_mmd, {"scratch-module-tag": {"x86_64": set([mock_mmd.return_value])}}
+ list(tag_to_mmd["scratch-module-tag"]["x86_64"])[0].get_NSVCA(),
+ "scratch-module:master:20200710:abcdef:x86_64",
)
self.assertEqual(variant.modules, [])
diff --git a/tests/test_repoclosure_wrapper.py b/tests/test_repoclosure_wrapper.py
index af2dc3e0..361d5846 100755
--- a/tests/test_repoclosure_wrapper.py
+++ b/tests/test_repoclosure_wrapper.py
@@ -25,8 +25,14 @@ class RepoclosureWrapperTestCase(helpers.BaseTestCase):
def test_multiple_arches(self):
self.assertEqual(
- rc.get_repoclosure_cmd(arch=["x86_64", "ppc64"]),
- ["/usr/bin/repoclosure", "--tempcache", "--arch=x86_64", "--arch=ppc64"],
+ rc.get_repoclosure_cmd(arch=["x86_64", "i686", "noarch"]),
+ [
+ "/usr/bin/repoclosure",
+ "--tempcache",
+ "--arch=x86_64",
+ "--arch=i686",
+ "--arch=noarch",
+ ],
)
def test_full_command(self):
@@ -61,6 +67,34 @@ class RepoclosureWrapperTestCase(helpers.BaseTestCase):
cmd[2:],
[
"--arch=x86_64",
+ "--forcearch=x86_64",
+ "--repofrompath=my-repo,file:///mnt/koji/repo",
+ "--repofrompath=fedora,http://kojipkgs.fp.o/repo",
+ "--repo=my-repo",
+ "--check=my-repo",
+ "--repo=fedora",
+ ],
+ )
+
+ def test_dnf_command_with_multiple_arches(self):
+ repos = {"my-repo": "/mnt/koji/repo"}
+ lookaside = {"fedora": "http://kojipkgs.fp.o/repo"}
+
+ cmd = rc.get_repoclosure_cmd(
+ backend="dnf",
+ arch=["x86_64", "i686", "noarch"],
+ repos=repos,
+ lookaside=lookaside,
+ )
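+        # Every arch should appear as --arch, but only the first one as --forcearch.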
+ self.assertEqual(cmd[:2], ["dnf", "repoclosure"])
+ six.assertCountEqual(
+ self,
+ cmd[2:],
+ [
+ "--arch=x86_64",
+ "--arch=i686",
+ "--arch=noarch",
+ "--forcearch=x86_64",
"--repofrompath=my-repo,file:///mnt/koji/repo",
"--repofrompath=fedora,http://kojipkgs.fp.o/repo",
"--repo=my-repo",
diff --git a/tests/test_runroot.py b/tests/test_runroot.py
index 95f4ca17..4b3f0488 100644
--- a/tests/test_runroot.py
+++ b/tests/test_runroot.py
@@ -189,8 +189,10 @@ class TestRunrootOpenSSH(helpers.PungiTestCase):
run.assert_has_calls(
[
self._ssh_call(
- "run df -h && chmod -R a+r /mnt/foo/compose /mnt/foo/x && "
- "chown -R %d /mnt/foo/compose /mnt/foo/x" % os.getuid()
+ "run df -h ; EXIT_CODE=$? ; "
+ "chmod -R a+r /mnt/foo/compose /mnt/foo/x ; "
+ "chown -R %d /mnt/foo/compose /mnt/foo/x ; exit $EXIT_CODE"
+ % os.getuid()
),
self._ssh_call(
"run rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
@@ -204,7 +206,8 @@ class TestRunrootKoji(helpers.PungiTestCase):
def setUp(self):
super(TestRunrootKoji, self).setUp()
self.compose = helpers.DummyCompose(
- self.topdir, {"runroot": True, "runroot_tag": "f28-build"},
+ self.topdir,
+ {"runroot": True, "runroot_tag": "f28-build"},
)
self.runroot = Runroot(self.compose)
diff --git a/tests/test_scm.py b/tests/test_scm.py
index ae18985f..f6307967 100644
--- a/tests/test_scm.py
+++ b/tests/test_scm.py
@@ -588,9 +588,8 @@ class CvsSCMTestCase(SCMBaseTest):
@mock.patch("pungi.wrappers.scm.urlretrieve")
-@mock.patch("pungi.wrappers.scm.KojiWrapper")
class KojiSCMTestCase(SCMBaseTest):
- def test_without_koji_profile(self, KW, dl):
+ def test_without_koji_profile(self, dl):
compose = mock.Mock(conf={})
with self.assertRaises(RuntimeError) as ctx:
@@ -600,9 +599,9 @@ class KojiSCMTestCase(SCMBaseTest):
compose=compose,
)
self.assertIn("Koji profile must be configured", str(ctx.exception))
- self.assertEqual(KW.mock_calls, [])
self.assertEqual(dl.mock_calls, [])
+ @mock.patch("pungi.wrappers.scm.KojiWrapper")
def test_doesnt_get_dirs(self, KW, dl):
compose = mock.Mock(conf={"koji_profile": "koji"})
@@ -613,7 +612,7 @@ class KojiSCMTestCase(SCMBaseTest):
compose=compose,
)
self.assertIn("Only files can be exported", str(ctx.exception))
- self.assertEqual(KW.mock_calls, [mock.call("koji")])
+ self.assertEqual(KW.mock_calls, [mock.call(compose)])
self.assertEqual(dl.mock_calls, [])
def _setup_koji_wrapper(self, KW, build_id, files):
@@ -627,6 +626,7 @@ class KojiSCMTestCase(SCMBaseTest):
]
KW.return_value.koji_proxy.listTagged.return_value = [buildinfo]
+ @mock.patch("pungi.wrappers.scm.KojiWrapper")
def test_get_from_build(self, KW, dl):
compose = mock.Mock(conf={"koji_profile": "koji"})
@@ -646,7 +646,7 @@ class KojiSCMTestCase(SCMBaseTest):
self.assertEqual(
KW.mock_calls,
[
- mock.call("koji"),
+ mock.call(compose),
mock.call().koji_proxy.getBuild("my-build-1.0-2"),
mock.call().koji_proxy.listArchives(123),
mock.call().koji_module.pathinfo.typedir({"build_id": 123}, "image"),
@@ -657,6 +657,7 @@ class KojiSCMTestCase(SCMBaseTest):
[mock.call("http://koji.local/koji/images/abc.tar", mock.ANY)],
)
+ @mock.patch("pungi.wrappers.scm.KojiWrapper")
def test_get_from_latest_build(self, KW, dl):
compose = mock.Mock(conf={"koji_profile": "koji"})
@@ -676,7 +677,7 @@ class KojiSCMTestCase(SCMBaseTest):
self.assertEqual(
KW.mock_calls,
[
- mock.call("koji"),
+ mock.call(compose),
mock.call().koji_proxy.listTagged(
"images", package="my-build", inherit=True, latest=True
),
diff --git a/tests/test_test_phase.py b/tests/test_test_phase.py
index 486d8198..1b6f1ad1 100644
--- a/tests/test_test_phase.py
+++ b/tests/test_test_phase.py
@@ -4,7 +4,7 @@ import mock
import os
import pungi.phases.test as test_phase
-from tests.helpers import DummyCompose, PungiTestCase, touch
+from tests.helpers import DummyCompose, PungiTestCase, touch, FIXTURE_DIR
try:
import dnf # noqa: F401
@@ -267,7 +267,8 @@ class TestCheckImageSanity(PungiTestCase):
@mock.patch("pungi.phases.test.check_sanity", new=mock.Mock())
def test_too_big_unified_strict(self):
compose = DummyCompose(
- self.topdir, {"createiso_max_size_is_strict": [(".*", {"*": True})]},
+ self.topdir,
+ {"createiso_max_size_is_strict": [(".*", {"*": True})]},
)
compose.image.format = "iso"
compose.image.bootable = False
@@ -304,3 +305,31 @@ class TestCheckImageSanity(PungiTestCase):
test_phase.check_image_sanity(compose)
self.assertEqual(compose.log_warning.call_args_list, [])
+
+
+class TestImageMetadataValidation(PungiTestCase):
+ def test_valid_metadata(self):
+ compose = mock.Mock()
+ compose.im.images = {"Server": mock.Mock()}
+ compose.paths.compose.topdir = lambda: os.path.join(
+ FIXTURE_DIR, "basic-metadata"
+ )
+
+ test_phase.check_image_metadata(compose)
+
+ def test_missing_metadata(self):
+ compose = mock.Mock()
+ compose.im.images = {}
+ compose.paths.compose.topdir = lambda: self.topdir
+
+ test_phase.check_image_metadata(compose)
+
+ def test_invalid_metadata(self):
+ compose = mock.Mock()
+ compose.im.images = {"Server": mock.Mock()}
+ compose.paths.compose.topdir = lambda: os.path.join(
+ FIXTURE_DIR, "invalid-image-metadata"
+ )
+
+ with self.assertRaises(RuntimeError):
+ test_phase.check_image_metadata(compose)
diff --git a/tests/test_util.py b/tests/test_util.py
index a5fc894d..181c91d8 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -183,7 +183,8 @@ class TestGitRefResolver(unittest.TestCase):
def test_resolver_offline_branch(self, mock_resolve_url, mock_resolve_ref):
resolver = util.GitUrlResolver(offline=True)
self.assertEqual(
- resolver("http://example.com/repo.git", "master"), "master",
+ resolver("http://example.com/repo.git", "master"),
+ "master",
)
self.assertEqual(mock_resolve_url.call_args_list, [])
self.assertEqual(mock_resolve_ref.call_args_list, [])
@@ -935,7 +936,8 @@ class TestVersionGenerator(unittest.TestCase):
def test_version_from_version(self):
self.assertEqual(
- util.version_generator(self.compose, "!VERSION_FROM_VERSION"), "8",
+ util.version_generator(self.compose, "!VERSION_FROM_VERSION"),
+ "8",
)
@@ -1058,3 +1060,8 @@ class TestAsLocalFile(PungiTestCase):
self.assertEqual(fn, self.filename)
self.assertTrue(os.path.exists(self.filename))
self.assertFalse(os.path.exists(self.filename))
+
+ def test_file_url(self, urlretrieve):
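+        # A file:// URL is used directly; urlretrieve must not be called.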
+ with util.as_local_file("file:///tmp/foo") as fn:
+ self.assertEqual(fn, "/tmp/foo")
+ self.assertEqual(urlretrieve.call_args_list, [])
diff --git a/tox.ini b/tox.ini
index 96763345..ae1cf711 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = flake8, black, py27, py3
+envlist = bandit, flake8, black, py27, py3
[testenv:flake8]
deps =
@@ -8,6 +8,14 @@ whitelist_externals = sh
commands =
sh -c "flake8 pungi pungi_utils setup.py tests/*py"
+[testenv:bandit]
+basepython = python3
+skip_install = true
+deps = bandit
+commands =
+ bandit -r -ll pungi pungi_utils
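+# Advisory for now: ignore_outcome keeps a failing bandit env from failing the whole tox run.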
+ignore_outcome = True
+
[testenv:black]
basepython = python3
whitelist_externals = sh
@@ -40,11 +48,10 @@ deps =
-rtest-requirements.txt
whitelist_externals =
sh
- make
commands =
sh -c 'find . -name "__pycache__" -exec rm -rf \{\} +'
pip install --force-reinstall pytest mock
- make test
+ pytest {posargs}
[flake8]
exclude = doc/*,*.pyc,*.py~,*.in,*.spec,*.sh,*.rst