Merge remote-tracking branch 'centos-origin/master'

# Conflicts:
#   pungi/phases/init.py
#   pungi/wrappers/comps.py

Commit fce5493f09
.gitignore (4 lines changed)
@@ -11,5 +11,9 @@ tests/data/repo-krb5-lookaside
 tests/_composes
 htmlcov/
 .coverage
+.eggs
 .idea/
 .tox
+.venv
+.kdev4/
+pungi.kdev4
@@ -34,4 +34,6 @@ also moves the artifacts to correct locations.
 - Documentation: https://docs.pagure.org/pungi/
 - Upstream GIT: https://pagure.io/pungi/
 - Issue tracker: https://pagure.io/pungi/issues
-- Questions can be asked on *#fedora-releng* IRC channel on FreeNode
+- Questions can be asked in the *#fedora-releng* IRC channel on irc.libera.chat
+  or in the matrix room
+  [`#releng:fedoraproject.org`](https://matrix.to/#/#releng:fedoraproject.org)
doc/_static/phases.svg (37 lines changed)
@@ -12,7 +12,7 @@
    viewBox="0 0 610.46457 301.1662"
    id="svg2"
    version="1.1"
-   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
    sodipodi:docname="phases.svg"
    inkscape:export-filename="/home/lsedlar/repos/pungi/doc/_static/phases.png"
    inkscape:export-xdpi="90"
@@ -24,9 +24,9 @@
      borderopacity="1.0"
      inkscape:pageopacity="1"
      inkscape:pageshadow="2"
-     inkscape:zoom="2.1213203"
-     inkscape:cx="276.65806"
-     inkscape:cy="189.24198"
+     inkscape:zoom="1.5"
+     inkscape:cx="9.4746397"
+     inkscape:cy="58.833855"
      inkscape:document-units="px"
      inkscape:current-layer="layer1"
      showgrid="false"
@@ -70,7 +70,7 @@
         <dc:format>image/svg+xml</dc:format>
         <dc:type
            rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
-        <dc:title></dc:title>
+        <dc:title />
       </cc:Work>
     </rdf:RDF>
   </metadata>
@@ -303,15 +303,15 @@
     </g>
     <rect
        transform="matrix(0,1,1,0,0,0)"
-       style="fill:#e9b96e;fill-rule:evenodd;stroke:none;stroke-width:2.65937px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       style="fill:#e9b96e;fill-rule:evenodd;stroke:none;stroke-width:1.85901px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
        id="rect3338-1"
-       width="185.96895"
+       width="90.874992"
        height="115.80065"
        x="872.67383"
        y="486.55563" />
     <text
        id="text3384-0"
-       y="969.2854"
+       y="921.73846"
        x="489.56451"
        style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
        xml:space="preserve"><tspan
@@ -319,7 +319,7 @@
        id="tspan3391"
        sodipodi:role="line"
        x="489.56451"
-       y="969.2854">ImageChecksum</tspan></text>
+       y="921.73846">ImageChecksum</tspan></text>
     <g
        transform="translate(-42.209584,-80.817124)"
        id="g3458">
@@ -518,5 +518,24 @@
        id="tspan301-5"
        style="font-size:12px;line-height:0">OSBuild</tspan></text>
     </g>
+    <rect
+       transform="matrix(0,1,1,0,0,0)"
+       style="fill:#729fcf;fill-rule:evenodd;stroke:none;stroke-width:1.83502px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       id="rect3338-1-3"
+       width="88.544876"
+       height="115.80065"
+       x="970.31763"
+       y="486.55563" />
+    <text
+       id="text3384-0-6"
+       y="1018.2172"
+       x="489.56451"
+       style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       xml:space="preserve"><tspan
+         style="font-size:13.1475px;line-height:1.25"
+         id="tspan3391-7"
+         sodipodi:role="line"
+         x="489.56451"
+         y="1018.2172">ImageContainer</tspan></text>
   </g>
 </svg>
(rendered image size: 21 KiB before, 22 KiB after)
@@ -51,9 +51,9 @@ copyright = u'2016, Red Hat, Inc.'
 # built documents.
 #
 # The short X.Y version.
-version = '4.2'
+version = '4.3'
 # The full version, including alpha/beta/rc tags.
-release = '4.2.7'
+release = '4.3.6'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -182,6 +182,8 @@ Options
     Please note that when ``dnf`` is used, the build dependencies check is
     skipped. On Python 3, only ``dnf`` backend is available.
 
+    See also: the ``gather_backend`` setting for Pungi's gather phase.
+
 **cts_url**
     (*str*) -- URL to Compose Tracking Service. If defined, Pungi will add
     the compose to Compose Tracking Service and get the compose ID from it.
@@ -457,6 +459,12 @@ Options
     cloned files should be split into subdirectories for each architecture of
     the variant.
 
+**createrepo_enable_cache** = True
+    (*bool*) -- whether to use the ``--cachedir`` option of ``createrepo``. It will
+    cache and reuse checksum values to speed up the createrepo phase.
+    The cache dir is located at ``/var/cache/pungi/createrepo_c/$release_short-$uid``,
+    e.g. /var/cache/pungi/createrepo_c/Fedora-1000.
+
 **product_id** = None
     (:ref:`scm_dict <scm_support>`) -- If specified, it should point to a
     directory with certificates ``*<variant_uid>-<arch>-*.pem``. Pungi will
@@ -581,6 +589,18 @@ Options
     (for example) between composes, then Pungi may not respect those changes
     in your new compose.
 
+**signed_packages_retries** = 0
+    (*int*) -- In automated workflows, you might start a compose before Koji
+    has completely written all signed packages to disk. In this case you may
+    want Pungi to wait for the package to appear in Koji's storage. This
+    option controls how many times Pungi will retry looking for the signed
+    copy.
+
+**signed_packages_wait** = 30
+    (*int*) -- Interval in seconds for how long to wait between attempts to
+    find signed packages. This option only makes sense when
+    ``signed_packages_retries`` is set higher than 0.
+
 Example
 -------
@@ -652,6 +672,11 @@ Options
 **buildinstall_allow_reuse** = False
     (*bool*) -- When set to ``True``, *Pungi* will try to reuse buildinstall
     results from old compose specified by ``--old-composes``.
+
+**buildinstall_packages**
+    (*list*) -- Additional packages to be installed in the runroot environment
+    where lorax will run to create the installer. Format: ``[(variant_uid_regex,
+    {arch|*: [package_globs]})]``.
 
 Example
 -------
@@ -686,6 +711,13 @@ Example
         })
     ]
 
+    # Additional packages to be installed in the Koji runroot environment where
+    # lorax will run.
+    buildinstall_packages = [
+        ('^Simple$', {
+            '*': ['dummy-package'],
+        })
+    ]
+
 .. note::
 
@@ -728,7 +760,7 @@ Options
     (*bool*) -- When set to ``True``, *Pungi* will try to reuse gather results
     from old compose specified by ``--old-composes``.
 
-**greedy_method**
+**greedy_method** = none
     (*str*) -- This option controls how package requirements are satisfied in
     case a particular ``Requires`` has multiple candidates.
 
@@ -749,7 +781,7 @@ Options
       pulled in.
     * With ``greedy_method = "all"`` all three packages will be
       pulled in.
-    * With ``greedy_method = "build" ``pkg-b-provider-1`` and
+    * With ``greedy_method = "build"`` ``pkg-b-provider-1`` and
       ``pkg-b-provider-2`` will be pulled in.
 
 **gather_backend**
@@ -763,6 +795,9 @@ Options
     ``python-multilib`` library. Please refer to ``multilib`` option to see the
     differences.
 
+    See also: the ``repoclosure_backend`` setting for Pungi's repoclosure
+    phase.
+
 **multilib**
     (*list*) -- mapping of variant regexes and arches to list of multilib
     methods
@@ -787,8 +822,14 @@ Options
     (*list*) -- additional packages to be included in a variant and
     architecture; format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
 
+    In contrast to the ``comps_file`` setting, the ``additional_packages``
+    setting merely adds the list of packages to the compose. When a package
+    is in a comps group, it is visible to users via ``dnf groupinstall`` and
+    Anaconda's Groups selection, but ``additional_packages`` does not affect
+    DNF groups.
+
     The packages specified here are matched against RPM names, not any other
-    provides in the package not the name of source package. Shell globbing is
+    provides in the package nor the name of source package. Shell globbing is
     used, so wildcards are possible. The package can be specified as name only
     or ``name.arch``.
 
@@ -797,6 +838,21 @@ Options
     it. If you add a debuginfo package that does not have anything else from
     the same build included in the compose, the sources will not be pulled in.
 
+    If you list a package in ``additional_packages`` but Pungi cannot find
+    it (for example, it's not available in the Koji tag), Pungi will log a
+    warning in the "work" or "logs" directories and continue without aborting.
+
+    *Example*: This configuration will add all packages in a Koji tag to an
+    "Everything" variant::
+
+        additional_packages = [
+            ('^Everything$', {
+                '*': [
+                    '*',
+                ],
+            })
+        ]
+
 **filter_packages**
     (*list*) -- packages to be excluded from a variant and architecture;
     format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``
@@ -867,7 +923,8 @@ Options
 **gather_source_mapping**
     (*str*) -- JSON mapping with initial packages for the compose. The value
     should be a path to JSON file with following mapping: ``{variant: {arch:
-    {rpm_name: [rpm_arch|None]}}}``.
+    {rpm_name: [rpm_arch|None]}}}``. Relative paths are interpreted relative to
+    the location of main config file.
 
 **gather_profiler** = False
     (*bool*) -- When set to ``True`` the gather tool will produce additional
@@ -1201,7 +1258,7 @@ Options
 
     Format: ``[(variant_uid_regex, {arch|*: bool})]``
 
-**create_jigdo** = True
+**create_jigdo** = False
     (*bool*) -- controls the creation of jigdo from ISO
 
 **create_optional_isos** = False
@@ -1228,6 +1285,11 @@ Options
     meaning size in bytes, or it can be a string with ``k``, ``M``, ``G``
     suffix (using multiples of 1024).
 
+**iso_level**
+    (*int|list*) [optional] -- Set the ISO9660 conformance level. This is
+    either a global single value (a number from 1 to 4), or a variant/arch
+    mapping.
+
 **split_iso_reserve** = 10MiB
     (*int|str*) -- how much free space should be left on each disk. The format
     is the same as for ``iso_size`` option.
@@ -1391,6 +1453,7 @@ Live Media Settings
     * ``repo`` (*str|[str]*) -- repos specified by URL or variant UID
     * ``title`` (*str*)
     * ``install_tree_from`` (*str*) -- variant to take install tree from
+    * ``nomacboot`` (*bool*)
 
 
 Image Build Settings
@@ -1531,7 +1594,9 @@ OSBuild Composer for building images
 
     * ``name`` -- name of the Koji package
     * ``distro`` -- image for which distribution should be built TODO examples
-    * ``image_type`` -- a list of image types to build (e.g. ``qcow2``)
+    * ``image_types`` -- a list with a single image type string or just a
+      string representing the image type to build (e.g. ``qcow2``). In any
+      case, only a single image type can be provided as an argument.
 
     Optional keys:
 
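A minimal sketch of an ``osbuild`` entry using the required keys above; the variant regex, package name and distro value are illustrative, and the variant-regex mapping is assumed to follow the same shape as the other image phases::

    osbuild = {
        '^Server$': [{
            'name': 'server-image',
            'distro': 'fedora-36',
            # A single type; a one-element list is also accepted.
            'image_types': 'qcow2',
        }]
    }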
@@ -1547,6 +1612,50 @@ OSBuild Composer for building images
     * ``arches`` -- list of architectures for which to build the image. By
       default, the variant arches are used. This option can only restrict it,
       not add a new one.
+    * ``ostree_url`` -- URL of the repository that's used to fetch the parent
+      commit from.
+    * ``ostree_ref`` -- name of the ostree branch
+    * ``ostree_parent`` -- commit hash or a branch-like reference to the
+      parent commit.
+    * ``upload_options`` -- a dictionary with upload options specific to the
+      target cloud environment. If provided, the image will be uploaded to the
+      cloud environment, in addition to the Koji server. One can't combine
+      arbitrary image types with arbitrary upload options.
+      The dictionary keys differ based on the target cloud environment. The
+      following keys are supported:
+
+      * **AWS EC2 upload options** -- upload to Amazon Web Services.
+
+        * ``region`` -- AWS region to upload the image to
+        * ``share_with_accounts`` -- list of AWS account IDs to share the image
+          with
+        * ``snapshot_name`` -- Snapshot name of the uploaded EC2 image
+          (optional)
+
+      * **AWS S3 upload options** -- upload to Amazon Web Services S3.
+
+        * ``region`` -- AWS region to upload the image to
+
+      * **Azure upload options** -- upload to Microsoft Azure.
+
+        * ``tenant_id`` -- Azure tenant ID to upload the image to
+        * ``subscription_id`` -- Azure subscription ID to upload the image to
+        * ``resource_group`` -- Azure resource group to upload the image to
+        * ``location`` -- Azure location to upload the image to
+        * ``image_name`` -- Image name of the uploaded Azure image (optional)
+
+      * **GCP upload options** -- upload to Google Cloud Platform.
+
+        * ``region`` -- GCP region to upload the image to
+        * ``bucket`` -- GCP bucket to upload the image to
+        * ``share_with_accounts`` -- list of GCP accounts to share the image
+          with
+        * ``image_name`` -- Image name of the uploaded GCP image (optional)
+
+      * **Container upload options** -- upload to a container registry.
+
+        * ``name`` -- name of the container image (optional)
+        * ``tag`` -- container tag to upload the image to (optional)
+
 .. note::
     There is initial support for having this task as failable without aborting
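As a sketch, the ``upload_options`` for an AWS EC2 upload might look like this (the region and account ID are illustrative)::

    'upload_options': {
        'region': 'us-east-1',
        'share_with_accounts': ['123456789012'],
    }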
@@ -1555,6 +1664,56 @@ OSBuild Composer for building images
     arch.
 
 
+Image container
+===============
+
+This phase supports building containers in OSBS that embed an image created in
+the same compose. This can be useful for delivering the image to users running
+in containerized environments.
+
+Pungi will start a ``buildContainer`` task in Koji with configured source
+repository. The ``Dockerfile`` can expect that a repo file will be injected
+into the container that defines a repo named ``image-to-include``, and its
+``baseurl`` will point to the image to include. It is possible to extract the
+URL with a command like ``dnf config-manager --dump image-to-include | awk
+'/baseurl =/{print $3}'``.
+
+**image_container**
+    (*dict*) -- configuration for building containers embedding an image.
+
+    Format: ``{variant_uid_regex: [{...}]}``.
+
+    The inner object will define a single container. These keys are required:
+
+    * ``url``, ``target``, ``git_branch``. See OSBS section for definition of
+      these.
+    * ``image_spec`` -- (*object*) A string mapping of filters used to select
+      the image to embed. All images listed in metadata for the variant will be
+      processed. The keys of this filter are used to select metadata fields for
+      the image, and values are regular expressions that need to match the
+      metadata value.
+
+    The filter should match exactly one image.
+
+
+Example config
+--------------
+::
+
+    image_container = {
+        "^Server$": [{
+            "url": "git://example.com/dockerfiles.git?#HEAD",
+            "target": "f24-container-candidate",
+            "git_branch": "f24",
+            "image_spec": {
+                "format": "qcow2",
+                "arch": "x86_64",
+                "path": ".*/guest-image-.*$",
+            }
+        }]
+    }
+
+
 OSTree Settings
 ===============
 
@@ -1594,6 +1753,8 @@ repository with a new commit.
 * ``force_new_commit`` -- (*bool*) Do not use rpm-ostree's built-in change
   detection.
   Defaults to ``False``.
+* ``unified_core`` -- (*bool*) Use rpm-ostree in unified core mode for composes.
+  Defaults to ``False``.
 * ``version`` -- (*str*) Version string to be added as versioning metadata.
   If this option is set to ``!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN``,
   a value will be generated automatically as ``$VERSION.$RELEASE``.
@@ -1675,6 +1836,8 @@ an OSTree repository. This always runs in Koji as a ``runroot`` task.
 with the optional key:
 
 * ``extra_runroot_pkgs`` -- (*[str]*)
+* ``skip_branding`` -- (*bool*) Stops lorax from installing packages with branding.
+  Defaults to ``False``.
 
 **ostree_installer_overwrite** = False
     (*bool*) -- by default if a variant including OSTree installer also creates
@@ -1754,24 +1917,34 @@ they are not scratch builds).
     to create the image will not abort the whole compose.
 
     The configuration will pass other attributes directly to the Koji task.
-    This includes ``scratch`` and ``priority``.
+    This includes ``scratch`` and ``priority``. See ``koji list-api
+    buildContainer`` for more details about these options.
 
     A value for ``yum_repourls`` will be created automatically and point at a
     repository in the current compose. You can add extra repositories with
     ``repo`` key having a list of urls pointing to ``.repo`` files or just
-    variant uid, Pungi will create the .repo file for that variant. ``gpgkey``
-    can be specified to enable gpgcheck in repo files for variants.
+    variant uid, Pungi will create the .repo file for that variant. If a
+    specific URL is used in the ``repo``, the ``$COMPOSE_ID`` variable in
+    the ``repo`` string will be replaced with the real compose ID.
+    ``gpgkey`` can be specified to enable gpgcheck in repo files for variants.
 
 **osbs_registries**
-    (*dict*) -- It is possible to configure extra information about where to
-    push the image (unless it is a scratch build). For each finished build,
-    Pungi will try to match NVR against a key in this mapping (using shell-style
-    globbing) and take the corresponding value and collect them across all built
-    images. The data will be saved into ``logs/global/osbs-registries.json`` as
-    a mapping from Koji NVR to the registry data. The same data is also sent to
-    the message bus on ``osbs-request-push`` topic once the compose finishes
-    successfully. Handling the message and performing the actual push is outside
-    of scope for Pungi.
+    (*dict*) -- Use this optional setting to emit ``osbs-request-push``
+    messages for each non-scratch container build. These messages can guide
+    other tools how to push the images to other registries. For example, an
+    external tool might trigger on these messages and copy the images from
+    OSBS's registry to a staging or production registry.
+
+    For each completed container build, Pungi will try to match the NVR against
+    a key in the ``osbs_registries`` mapping (using shell-style globbing) and take
+    the corresponding value and collect them across all built images. Pungi
+    will save this data into ``logs/global/osbs-registries.json``, mapping each
+    Koji NVR to the registry data. Pungi will also send this data to the
+    message bus on the ``osbs-request-push`` topic once the compose finishes
+    successfully.
+
+    Pungi simply logs the mapped data and emits the messages. It does not
+    handle the messages or push images. A separate tool must do that.
 
 Example config
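A sketch of the ``repo`` key described above, mixing a variant UID with an extra URL that relies on the ``$COMPOSE_ID`` substitution (the host is illustrative)::

    'repo': [
        'Everything',
        'https://example.com/repos/$COMPOSE_ID/extra.repo',
    ]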
@@ -30,9 +30,17 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
     module_defaults_dir = {
         'scm': 'git',
         'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
-        'branch': 'master',
+        'branch': 'main',
         'dir': '.'
     }
+    # Optional module obsoletes configuration which is merged
+    # into the module index and gets resolved
+    module_obsoletes_dir = {
+        'scm': 'git',
+        'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
+        'branch': 'main',
+        'dir': 'obsoletes'
+    }
 
     variants_file='variants-fedora.xml'
     sigkeys = ['12C944D0']
@@ -83,7 +91,6 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
 
     # CREATEISO
     iso_hfs_ppc64le_compatible = False
-    create_jigdo = False
 
     # BUILDINSTALL
     buildinstall_method = 'lorax'
@@ -325,6 +332,8 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
         "tag_ref": False,
         # Don't use change detection in ostree.
         "force_new_commit": True,
+        # Use unified core mode for rpm-ostree composes
+        "unified_core": True,
         # This is the location for the repo where new commit will be
         # created. Note that this is outside of the compose dir.
         "ostree_repo": "/mnt/koji/compose/ostree/repo/",
@@ -12,8 +12,9 @@ happened. A JSON-encoded object will be passed to standard input to provide
 more information about the event. At the very least, the object will contain a
 ``compose_id`` key.
 
-The script is invoked in compose directory and can read other information
-there.
+The notification script inherits the working directory from the parent process,
+and it can be called from the same directory ``pungi-koji`` is called from. The
+working directory is listed at the start of the main log.
 
 Currently these messages are sent:
 
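A minimal sketch of a notification script consuming the JSON payload on standard input; per the contract above only ``compose_id`` is guaranteed to be present, and treating the first CLI argument as the message name is an assumption here::

    #!/usr/bin/env python3
    import json
    import sys

    data = json.load(sys.stdin)  # event payload provided by pungi-koji
    # Assumption: the message name arrives as the first argument.
    msg = sys.argv[1] if len(sys.argv) > 1 else "unknown"
    print("event %s for compose %s" % (msg, data["compose_id"]))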
@@ -115,16 +115,30 @@ ImageBuild
 This phase wraps up ``koji image-build``. It also updates the metadata
 ultimately responsible for ``images.json`` manifest.
 
+OSBuild
+-------
+
+Similarly to image build, this phase creates a koji `osbuild` task. In the
+background it uses OSBuild Composer to create images.
+
 OSBS
 ----
 
-This phase builds docker base images in `OSBS
+This phase builds container base images in `OSBS
 <http://osbs.readthedocs.io/en/latest/index.html>`_.
 
 The finished images are available in registry provided by OSBS, but not
 downloaded directly into the compose. There is metadata about the created image
 in ``compose/metadata/osbs.json``.
 
+ImageContainer
+--------------
+
+This phase builds a container image in OSBS, and stores the metadata in the
+same file as OSBS phase. The container produced here wraps a different image,
+created in the ImageBuild or OSBuild phase. It can be useful to deliver a VM
+image to containerized environments.
+
 OSTreeInstaller
 ---------------
 
pungi.spec (151 lines changed)
@@ -1,5 +1,5 @@
 Name: pungi
-Version: 4.2.7
+Version: 4.3.6
 Release: 1%{?dist}
 Summary: Distribution compose tool
 
@@ -91,7 +91,7 @@ rm -rf %{buildroot}
 %{_bindir}/comps_filter
 %{_bindir}/%{name}-make-ostree
 %{_datadir}/%{name}
-/var/cache/%{name}
+%dir %attr(1777, root, root) /var/cache/%{name}
 
 %files utils
 %{python_sitelib}/%{name}_utils
@@ -111,6 +111,153 @@ pytest
 cd tests && ./test_compose.sh
 
 %changelog
+* Fri Aug 26 2022 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.6-1
+- pkgset: Report better error when module is missing an arch (lsedlar)
+- osbuild: add support for building ostree artifacts (ondrej)
+- ostree: Add unified core mode for compose in rpm-ostree (tim)
+- createiso: Make ISO level more granular (lsedlar)
+- Create DVDs with xorriso (lsedlar)
+- Fix compatibility with jsonschema >= 4.0.0 (lsedlar)
+- Fix black complaint (lsedlar)
+- doc: fix osbuild's image_types field name (ondrej)
+- Convert _ssh_run output to str for python3 (hlin)
+- Print more logs for git_ls_remote (hlin)
+- Log time taken of each phase (hlin)
+- Avoid crash when loading pickle file failed (hlin)
+- extra_isos: Fix detection of changed packages (lsedlar)
+
+* Wed Jun 15 2022 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.5-1
+- Fix module defaults and obsoletes validation (mkulik)
+- Update the cts_keytab field in order to get the hostname of the server
+  (ounsal)
+- Add skip_branding to ostree_installer. (lzhuang)
+- kojiwrapper: Ignore warnings before task id (lsedlar)
+- Restrict jsonschema version (lsedlar)
+- Revert "Do not clone the same repository multiple times, re-use already
+  cloned repository" (hlin)
+- Involve bandit (hlin)
+
+* Fri Apr 01 2022 Ondřej Nosek <onosek@redhat.com> - 4.3.4-1
+- kojiwrapper: Add retries to login call (lsedlar)
+- Variants file in config can contain path (onosek)
+- nomacboot option for livemedia koji tasks (cobrien)
+- doc: improve osbs_registries explanation (kdreyer)
+- osbs: only handle archives of type "image" (kdreyer)
+- Update the default greedy_method value in doc (ounsal)
+- Fix the wrong working directory for the progress_notification script (ounsal)
+- Filter out environment groups unmatch given arch (hlin)
+- profiler: Respect provided output stream (lsedlar)
+- modules: Correct a typo in loading obsoletes (ppisar)
+- Do not clone the same repository multiple times, re-use already cloned
+  repository (ounsal)
+
+* Sat Jan 08 2022 Haibo Lin <hlin@redhat.com> - 4.3.3-1
+- hybrid: Explicitly pull in debugsource packages (lsedlar)
+- Add module obsoletes feature (fvalder)
+- buildinstall: Add ability to install extra packages in runroot (ounsal)
+- Ignore osbs/osbuild config when reusing iso images (hlin)
+- compose: Make sure temporary dirs are world readable (lsedlar)
+- Pass compose parameter for debugging git issue (hlin)
+- Generate images.json for extra_isos phase (hlin)
+- Fix tests for python 2.6 (hlin)
+
+* Thu Nov 11 2021 Haibo Lin <hlin@redhat.com> - 4.3.2-1
+- gather: Load JSON mapping relative to config dir (lsedlar)
+- gather: Stop requiring all variants/arches in JSON (lsedlar)
+- doc: make dnf "backend" settings easier to discover (kdreyer)
+- Remove with_jigdo argument (lsedlar)
+- Check dependencies after config validation (lsedlar)
+- default "with_jigdo" to False (kdreyer)
+- Stop trying to validate non-existent metadata (lsedlar)
+- test images for metadata deserialization error (fdipretre)
+- repoclosure: Use --forcearch for dnf repoclosure (lsedlar)
+- extra_isos: Allow reusing old images (lsedlar)
+- createiso: Allow reusing old images (lsedlar)
+- Remove default runroot channel (lsedlar)
+
+* Mon Oct 25 2021 Ozan Unsal <ounsal@redhat.com> - 4.3.1-1
+- Correct irc network name & add matrix room (dan.cermak)
+- Add missing mock to osbs tests (lsedlar)
+- osbs: Reuse images from old compose (hlin)
+- image_build: Allow reusing old image_build results (hlin)
+- Allow ISO-Level configuration within the config file (ounsal)
+- Work around ODCS creating COMPOSE_ID later (lsedlar)
+- When `cts_url` is configured, use CTS `/repo` API for buildContainer
+  yum_repourls. (jkaluza)
+- Add COMPOSE_ID into the pungi log file (ounsal)
+- buildinstall: Add easy way to check if previous result was reused (lsedlar)
+
+* Fri Sep 10 2021 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.0-1
+- Only build CTS url when configured (lsedlar)
+- Require requests_kerberos only when needed (lsedlar)
+- Allow specifying $COMPOSE_ID in the `repo` value for osbs phase. (jkaluza)
+- Make getting old compose config reusable (lsedlar)
+- paths: Allow customizing log file extension (lsedlar)
+- Add authentication for updating the compose URL in CTS. (ounsal)
+- Fix type detection for osbuild images (lsedlar)
+- Enable pungi to send compose_url patches to CTS (ounsal)
+- Use xorriso instead of isoinfo when createiso_use_xorrisofs is enabled
+  (ounsal)
+- Fix tests for createrepo (drumian)
+- Formatted files according to flake8 and black feedback (drumian)
+- Handle the pungi failures to ensure creation of log files (ounsal)
+- Add createrepo_enable_cache to configuration doc (hlin)
+- Fix formatting (hlin)
+- Install missing deps in ci image (hlin)
+- Use pytest directly incl. support for posargs, e.g.: tox -- -s -vvv
+  tests/path/to/a/single/test_something.py (fvalder)
+- Supersede ModuleStream loading with ModuleIndex (fvalder)
+- Better error message than 'KeyError' in pungi (drumian)
+- Adding multithreading support for pungi/phases/image_checksum.py (jkunstle)
+- doc: more additional_packages documentation (kdreyer)
+- doc: fix typo in additional_packages description (kdreyer)
+- doc: improve signed packages retry docs (kdreyer)
+- Better error message than 'KeyError' in pungi (drumian)
+- doc: explain buildContainer API (kdreyer)
+
+* Wed Aug 04 2021 Haibo Lin <hlin@redhat.com> - 4.2.10-1
+- Show and log command when using the run_blocking_cmd() method (fdipretre)
+- Use cachedir when createrepo (hlin)
+- gather: Add all srpms to variant lookaside repo (lsedlar)
+- Add task URL to watch task log (hlin)
+- Log warning when module defined in variants.xml not found (hlin)
+- pkgset: Compare future events correctly (lsedlar)
+- util: Strip file:// from local urls (lsedlar)
+- Clean up temporary yumroot dir (hlin)
+
+* Thu Apr 29 2021 Ondrej Nosek <onosek@redhat.com> - 4.2.9-1
+- Fix can't link XDEV using repos as pkgset_sources (romain.forlot)
+- Updated the deprecated ks argument name (to the current inst.ks) (lveyde)
+- gather: Adjust reusing with lookaside (hlin)
+- hybrid: Optimize getting lookaside packages (lsedlar)
+- gather: Copy old logs when reusing gather result (hlin)
+- Cancel koji tasks when pungi terminated (hlin)
+- Add Dockerfile for building testing image (hlin)
+- image_container: Fix incorrect arch processing (lsedlar)
+- runroot: Adjust permissions always (hlin)
+- Format code (hlin)
+- pkgset: Fix meaning of retries (lsedlar)
+- pkgset: Store module tag only if module is used (lsedlar)
+- Store extended traceback for gather errors (lsedlar)
+
+* Fri Feb 12 2021 Ondrej Nosek <onosek@redhat.com> - 4.2.8-1
+- pkgset: Add ability to wait for signed packages (lsedlar)
+- Add image-container phase (lsedlar)
+- osbs: Move metadata processing to standalone function (lsedlar)
+- Move container metadata into compose object (lsedlar)
+- Move UnsignedPackagesError to a separate file (lsedlar)
+- pkgset: Include just one version of module (hlin)
+- pkgset: Check tag inheritance change before reuse (hlin)
+- pkgset: Remove reuse file when packages are not signed (lsedlar)
+- pkgset: Drop kobo.plugin usage from PkgsetSource (lsedlar)
+- gather: Drop kobo.plugins usage from GatherMethod (lsedlar)
+- pkgset: Drop kobo.plugins usage from GatherSources (lsedlar)
+- doc: remove default createrepo_checksum value from example (kdreyer)
+- comps: Preserve default arg on groupid (lsedlar)
+- Stop copying .git directory with module defaults (hlin)
+- React to SIGINT signal (hlin)
+- scm: Only copy debugging data if we have a compose (lsedlar)
+
 * Thu Dec 03 2020 Lubomír Sedlář <lsedlar@redhat.com> 4.2.7-1
 - osbuild: Fix not failing on failable tasks (lsedlar)
 - kojiwrapper: Use gssapi_login (lsedlar)
@@ -131,8 +131,8 @@ def getArchList(thisarch=None):  # pragma: no cover
 
 
 def _try_read_cpuinfo():  # pragma: no cover
-    """ Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
-    mounted). """
+    """Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
+    mounted)."""
     try:
         with open("/proc/cpuinfo", "r") as f:
             return f.readlines()
@@ -141,8 +141,8 @@ def _try_read_cpuinfo():  # pragma: no cover
 
 
 def _parse_auxv():  # pragma: no cover
-    """ Read /proc/self/auxv and parse it into global dict for easier access
-    later on, very similar to what rpm does. """
+    """Read /proc/self/auxv and parse it into global dict for easier access
+    later on, very similar to what rpm does."""
     # In case we can't open and read /proc/self/auxv, just return
     try:
         with open("/proc/self/auxv", "rb") as f:
@@ -326,8 +326,8 @@ def getMultiArchInfo(arch=canonArch):  # pragma: no cover
 
 def getBaseArch(myarch=None):  # pragma: no cover
     """returns 'base' arch for myarch, if specified, or canonArch if not.
     base arch is the arch before noarch in the arches dict if myarch is not
     a key in the multilibArches."""
 
     if not myarch:
         myarch = canonArch
pungi/checks.py (163 lines changed)
@@ -53,7 +53,7 @@ from . import util
 
 
 def is_jigdo_needed(conf):
-    return conf.get("create_jigdo", True)
+    return conf.get("create_jigdo")
 
 
 def is_isohybrid_needed(conf):
@@ -75,8 +75,7 @@ def is_isohybrid_needed(conf):
 
 
 def is_genisoimage_needed(conf):
-    """This is only needed locally for createiso without runroot.
-    """
+    """This is only needed locally for createiso without runroot."""
     runroot_tag = conf.get("runroot_tag", "")
     if runroot_tag or conf.get("createiso_use_xorrisofs"):
         return False
@@ -94,7 +93,7 @@ def is_xorrisofs_needed(conf):
 
 
 def is_createrepo_c_needed(conf):
-    return conf.get("createrepo_c", True)
+    return conf.get("createrepo_c")
 
 
 # The first element in the tuple is package name expected to have the
@@ -230,7 +229,6 @@ def validate(config, offline=False, schema=None):
     )
     validator = DefaultValidator(
         schema,
-        {"array": (tuple, list), "regex": six.string_types, "url": six.string_types},
     )
     errors = []
     warnings = []
@@ -446,6 +444,16 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             context=all_errors,
         )
 
+    def is_array(checker, instance):
+        return isinstance(instance, (tuple, list))
+
+    def is_string_type(checker, instance):
+        return isinstance(instance, six.string_types)
+
+    type_checker = validator_class.TYPE_CHECKER.redefine_many(
+        {"array": is_array, "regex": is_string_type, "url": is_string_type}
+    )
+
     return jsonschema.validators.extend(
         validator_class,
         {
@@ -456,6 +464,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             "additionalProperties": _validate_additional_properties,
             "anyOf": _validate_any_of,
         },
+        type_checker=type_checker,
     )
 
 
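For context on the two hunks above: jsonschema 4 removed the ``types=`` argument to validators, so custom types now have to be registered through a ``TYPE_CHECKER``. A standalone sketch of the same pattern, independent of Pungi::

    import jsonschema

    def is_array(checker, instance):
        # Accept tuples as well as lists for the "array" type.
        return isinstance(instance, (tuple, list))

    type_checker = jsonschema.Draft7Validator.TYPE_CHECKER.redefine(
        "array", is_array
    )
    Validator = jsonschema.validators.extend(
        jsonschema.Draft7Validator, type_checker=type_checker
    )
    # A tuple now validates against {"type": "array"}.
    Validator({"type": "array"}).validate(("a", "b"))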
@@ -610,7 +619,7 @@ def make_schema():
             "runroot_ssh_init_template": {"type": "string"},
             "runroot_ssh_install_packages_template": {"type": "string"},
             "runroot_ssh_run_template": {"type": "string"},
-            "create_jigdo": {"type": "boolean", "default": True},
+            "create_jigdo": {"type": "boolean", "default": False},
             "check_deps": {"type": "boolean", "default": True},
             "require_all_comps_packages": {"type": "boolean", "default": False},
             "bootable": {
@@ -654,6 +663,8 @@ def make_schema():
             "gather_profiler": {"type": "boolean", "default": False},
             "gather_allow_reuse": {"type": "boolean", "default": False},
             "pkgset_allow_reuse": {"type": "boolean", "default": True},
+            "createiso_allow_reuse": {"type": "boolean", "default": True},
+            "extraiso_allow_reuse": {"type": "boolean", "default": True},
             "pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
             "createrepo_c": {"type": "boolean", "default": True},
             "createrepo_checksum": {
@@ -661,6 +672,7 @@ def make_schema():
                 "default": "sha256",
                 "enum": ["sha1", "sha256", "sha512"],
             },
+            "createrepo_enable_cache": {"type": "boolean", "default": True},
             "createrepo_use_xz": {"type": "boolean", "default": False},
             "createrepo_num_threads": {"type": "number", "default": get_num_cpus()},
             "createrepo_num_workers": {"type": "number", "default": 3},
@@ -722,6 +734,8 @@ def make_schema():
                 "minItems": 1,
                 "default": [None],
             },
+            "signed_packages_retries": {"type": "number", "default": 0},
+            "signed_packages_wait": {"type": "number", "default": 30},
             "variants_file": {"$ref": "#/definitions/str_or_scm_dict"},
             "comps_file": {"$ref": "#/definitions/str_or_scm_dict"},
             "comps_filter_environments": {"type": "boolean", "default": True},
@@ -732,6 +746,7 @@ def make_schema():
                 "patternProperties": {".+": {"$ref": "#/definitions/strings"}},
                 "additionalProperties": False,
             },
+            "module_obsoletes_dir": {"$ref": "#/definitions/str_or_scm_dict"},
             "create_optional_isos": {"type": "boolean", "default": False},
             "symlink_isos_to": {"type": "string"},
             "dogpile_cache_backend": {"type": "string"},
@@ -744,6 +759,12 @@ def make_schema():
             ),
             "createiso_break_hardlinks": {"type": "boolean", "default": False},
             "createiso_use_xorrisofs": {"type": "boolean", "default": False},
+            "iso_level": {
+                "anyOf": [
+                    {"type": "number", "enum": [1, 2, 3, 4]},
+                    _variant_arch_mapping({"type": "number", "enum": [1, 2, 3, 4]}),
+                ],
+            },
             "iso_hfs_ppc64le_compatible": {"type": "boolean", "default": True},
             "multilib": _variant_arch_mapping(
                 {"$ref": "#/definitions/list_of_strings"}
@@ -777,6 +798,10 @@ def make_schema():
             "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
             "buildinstall_use_guestmount": {"type": "boolean", "default": True},
             "buildinstall_skip": _variant_arch_mapping({"type": "boolean"}),
+            "buildinstall_packages": {
+                "$ref": "#/definitions/package_mapping",
+                "default": [],
+            },
             "global_ksurl": {"type": "url"},
             "global_version": {"type": "string"},
             "global_target": {"type": "string"},
@@ -968,6 +993,7 @@ def make_schema():
                 "arches": {"$ref": "#/definitions/list_of_strings"},
                 "failable": {"$ref": "#/definitions/list_of_strings"},
                 "release": {"$ref": "#/definitions/optional_string"},
+                "nomacboot": {"type": "boolean"},
             },
             "required": ["name", "kickstart"],
             "additionalProperties": False,
@@ -1001,6 +1027,7 @@ def make_schema():
                 },
                 "update_summary": {"type": "boolean"},
                 "force_new_commit": {"type": "boolean"},
+                "unified_core": {"type": "boolean"},
                 "version": {"type": "string"},
                 "config_branch": {"type": "string"},
                 "tag_ref": {"type": "boolean"},
@@ -1035,6 +1062,7 @@ def make_schema():
                 "failable": {"$ref": "#/definitions/list_of_strings"},
                 "update_summary": {"type": "boolean"},
                 "force_new_commit": {"type": "boolean"},
+                "unified_core": {"type": "boolean"},
                 "version": {"type": "string"},
                 "config_branch": {"type": "string"},
                 "tag_ref": {"type": "boolean"},
@@ -1064,6 +1092,7 @@ def make_schema():
                 "template_repo": {"type": "string"},
                 "template_branch": {"type": "string"},
                 "extra_runroot_pkgs": {"$ref": "#/definitions/list_of_strings"},
+                "skip_branding": {"type": "boolean"},
             },
             "additionalProperties": False,
         }
@@ -1074,6 +1103,7 @@ def make_schema():
             "live_images": _variant_arch_mapping(
                 _one_or_list({"$ref": "#/definitions/live_image_config"})
             ),
+            "image_build_allow_reuse": {"type": "boolean", "default": False},
             "image_build": {
                 "type": "object",
                 "patternProperties": {
@@ -1141,12 +1171,109 @@ def make_schema():
                     "version": {"type": "string"},
                     "distro": {"type": "string"},
                     "target": {"type": "string"},
-                    "image_types": {"$ref": "#/definitions/strings"},
+                    # Only a single image_type can be specified
+                    # https://github.com/osbuild/koji-osbuild/commit/c7252650814f82281ee57b598cb2ad970b580451
+                    # https://github.com/osbuild/koji-osbuild/commit/f21a2de39b145eb94f3d49cb4d8775a33ba56752
+                    "image_types": {
+                        "oneOf": [
+                            {
+                                "type": "array",
+                                "items": {"type": "string"},
+                                "description": "Deprecated variant",
+                                "minItems": 1,
+                                "maxItems": 1,
+                            },
+                            {"type": "string"},
+                        ]
+                    },
                     "arches": {"$ref": "#/definitions/list_of_strings"},
                     "release": {"type": "string"},
                     "repo": {"$ref": "#/definitions/list_of_strings"},
                     "failable": {"$ref": "#/definitions/list_of_strings"},
                     "subvariant": {"type": "string"},
+                    "ostree_url": {"type": "string"},
+                    "ostree_ref": {"type": "string"},
+                    "ostree_parent": {"type": "string"},
+                    "upload_options": {
+                        "oneOf": [
+                            # AWSEC2UploadOptions
+                            {
+                                "type": "object",
+                                "additionalProperties": False,
+                                "required": [
+                                    "region",
+                                    "share_with_accounts",
+                                ],
+                                "properties": {
+                                    "region": {
+                                        "type": "string",
+                                    },
+                                    "snapshot_name": {
+                                        "type": "string",
+                                    },
+                                    "share_with_accounts": {
+                                        "type": "array",
+                                        "items": {"type": "string"},
+                                    },
+                                },
+                            },
+                            # AWSS3UploadOptions
+                            {
+                                "type": "object",
+                                "additionalProperties": False,
+                                "required": ["region"],
+                                "properties": {
+                                    "region": {"type": "string"}
+                                },
+                            },
+                            # AzureUploadOptions
+                            {
+                                "type": "object",
+                                "additionalProperties": False,
+                                "required": [
+                                    "tenant_id",
+                                    "subscription_id",
+                                    "resource_group",
+                                    "location",
+                                ],
+                                "properties": {
+                                    "tenant_id": {"type": "string"},
+                                    "subscription_id": {"type": "string"},
+                                    "resource_group": {"type": "string"},
+                                    "location": {"type": "string"},
+                                    "image_name": {
+                                        "type": "string",
+                                    },
+                                },
+                            },
+                            # GCPUploadOptions
+                            {
+                                "type": "object",
+                                "additionalProperties": False,
+                                "required": ["region", "bucket"],
+                                "properties": {
+                                    "region": {"type": "string"},
+                                    "bucket": {"type": "string"},
+                                    "image_name": {
+                                        "type": "string",
+                                    },
+                                    "share_with_accounts": {
+                                        "type": "array",
+                                        "items": {"type": "string"},
+                                    },
+                                },
+                            },
+                            # ContainerUploadOptions
+                            {
+                                "type": "object",
+                                "additionalProperties": False,
+                                "properties": {
+                                    "name": {"type": "string"},
+                                    "tag": {"type": "string"},
+                                },
+                            },
+                        ]
+                    },
                 },
                 "required": ["name", "distro", "image_types"],
                 "additionalProperties": False,
@ -1195,6 +1322,7 @@ def make_schema():
|
|||||||
"anyOf": [{"type": "string"}, {"type": "number"}],
|
"anyOf": [{"type": "string"}, {"type": "number"}],
|
||||||
"default": 10 * 1024 * 1024,
|
"default": 10 * 1024 * 1024,
|
||||||
},
|
},
|
||||||
|
"osbs_allow_reuse": {"type": "boolean", "default": False},
|
||||||
"osbs": {
|
"osbs": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"patternProperties": {
|
"patternProperties": {
|
||||||
@ -1213,6 +1341,26 @@ def make_schema():
|
|||||||
},
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
},
|
},
|
||||||
|
"image_container": {
|
||||||
|
"type": "object",
|
||||||
|
"patternProperties": {
|
||||||
|
".+": _one_or_list(
|
||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"url": {"type": "url"},
|
||||||
|
"target": {"type": "string"},
|
||||||
|
"priority": {"type": "number"},
|
||||||
|
"failable": {"type": "boolean"},
|
||||||
|
"git_branch": {"type": "string"},
|
||||||
|
"image_spec": {"type": "object"},
|
||||||
|
},
|
||||||
|
"required": ["url", "target", "git_branch", "image_spec"],
|
||||||
|
}
|
||||||
|
),
|
||||||
|
},
|
||||||
|
"additionalProperties": False,
|
||||||
|
},
|
||||||
"extra_files": _variant_arch_mapping(
|
"extra_files": _variant_arch_mapping(
|
||||||
{
|
{
|
||||||
"type": "array",
|
"type": "array",
|
||||||
@ -1317,6 +1465,7 @@ CONFIG_DEPS = {
|
|||||||
"requires": ((lambda x: x, ["base_product_name", "base_product_short"]),),
|
"requires": ((lambda x: x, ["base_product_name", "base_product_short"]),),
|
||||||
"conflicts": ((lambda x: not x, ["base_product_name", "base_product_short"]),),
|
"conflicts": ((lambda x: not x, ["base_product_name", "base_product_short"]),),
|
||||||
},
|
},
|
||||||
|
"cts_url": {"requires": ((lambda x: x, ["translate_paths"]),)},
|
||||||
"product_id": {"conflicts": [(lambda x: not x, ["product_id_allow_missing"])]},
|
"product_id": {"conflicts": [(lambda x: not x, ["product_id_allow_missing"])]},
|
||||||
"pkgset_scratch_modules": {"requires": ((lambda x: x, ["mbs_api_url"]),)},
|
"pkgset_scratch_modules": {"requires": ((lambda x: x, ["mbs_api_url"]),)},
|
||||||
"pkgset_source": {
|
"pkgset_source": {
|
||||||
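
Note: the replacement `image_types` schema accepts either a plain string or a single-item list kept only for backward compatibility. A minimal sketch of how such a `oneOf` constraint behaves, using the jsonschema library with the fragment simplified from the patch (not the full osbuild config):

    import jsonschema

    image_types = {
        "oneOf": [
            # Deprecated spelling: a list with exactly one entry
            {"type": "array", "items": {"type": "string"}, "minItems": 1, "maxItems": 1},
            # Preferred spelling: a plain string
            {"type": "string"},
        ]
    }

    jsonschema.validate("qcow2", image_types)    # passes
    jsonschema.validate(["qcow2"], image_types)  # passes (deprecated)
    # jsonschema.validate(["qcow2", "ami"], image_types) raises ValidationError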
pungi/compose.py (110 lines changed)

@@ -24,8 +24,12 @@ import time
 import tempfile
 import shutil
 import json
+import socket
+
 import kobo.log
+import kobo.tback
+import requests
+from requests.exceptions import RequestException
 from productmd.composeinfo import ComposeInfo
 from productmd.images import Images
 from dogpile.cache import make_region
@@ -40,6 +44,8 @@ from pungi.util import (
     get_arch_variant_data,
     get_format_substs,
     get_variant_data,
+    retry,
+    translate_path_raw,
 )
 from pungi.metadata import compose_to_composeinfo
 
@@ -51,6 +57,14 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
 
 
+@retry(wait_on=RequestException)
+def retry_request(method, url, data=None, auth=None):
+    request_method = getattr(requests, method)
+    rv = request_method(url, json=data, auth=auth)
+    rv.raise_for_status()
+    return rv
+
+
 def get_compose_info(
     conf,
     compose_type="production",
@@ -83,20 +97,19 @@ def get_compose_info(
 
     cts_url = conf.get("cts_url", None)
     if cts_url:
-        # Import requests and requests-kerberos here so it is not needed
-        # if running without Compose Tracking Service.
-        import requests
-        from requests_kerberos import HTTPKerberosAuth
-
         # Requests-kerberos cannot accept custom keytab, we need to use
         # environment variable for this. But we need to change environment
         # only temporarily just for this single requests.post.
         # So at first backup the current environment and revert to it
         # after the requests.post call.
         cts_keytab = conf.get("cts_keytab", None)
+        authentication = get_authentication(conf)
         if cts_keytab:
            environ_copy = dict(os.environ)
+            if "$HOSTNAME" in cts_keytab:
+                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
 
        try:
            # Create compose in CTS and get the reserved compose ID.
@@ -107,10 +120,10 @@ def get_compose_info(
                "parent_compose_ids": parent_compose_ids,
                "respin_of": respin_of,
            }
-            rv = requests.post(url, json=data, auth=HTTPKerberosAuth())
-            rv.raise_for_status()
+            rv = retry_request("post", url, data=data, auth=authentication)
        finally:
            if cts_keytab:
+                shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
                os.environ.clear()
                os.environ.update(environ_copy)
 
@@ -119,12 +132,23 @@ def get_compose_info(
        cts_ci.loads(rv.text)
        ci.compose.respin = cts_ci.compose.respin
        ci.compose.id = cts_ci.compose.id
+
    else:
        ci.compose.id = ci.create_compose_id()
 
    return ci
 
 
+def get_authentication(conf):
+    authentication = None
+    cts_keytab = conf.get("cts_keytab", None)
+    if cts_keytab:
+        from requests_kerberos import HTTPKerberosAuth
+
+        authentication = HTTPKerberosAuth()
+    return authentication
+
+
 def write_compose_info(compose_dir, ci):
    """
    Write ComposeInfo `ci` to `compose_dir` subdirectories.
@@ -137,6 +161,20 @@ def write_compose_info(compose_dir, ci):
    ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
 
 
+def update_compose_url(compose_id, compose_dir, conf):
+    authentication = get_authentication(conf)
+    cts_url = conf.get("cts_url", None)
+    if cts_url:
+        url = os.path.join(cts_url, "api/1/composes", compose_id)
+        tp = conf.get("translate_paths", None)
+        compose_url = translate_path_raw(tp, compose_dir)
+        data = {
+            "action": "set_url",
+            "compose_url": compose_url,
+        }
+        return retry_request("patch", url, data=data, auth=authentication)
+
+
 def get_compose_dir(
    topdir,
    conf,
@@ -222,6 +260,8 @@ class Compose(kobo.log.LoggingBase):
        self.koji_event = koji_event or conf.get("koji_event")
        self.notifier = notifier
 
+        self._old_config = None
+
        # path definitions
        self.paths = Paths(self)
 
@@ -284,6 +324,8 @@ class Compose(kobo.log.LoggingBase):
        self.im.compose.respin = self.compose_respin
        self.im.metadata_path = self.paths.compose.metadata()
 
+        self.containers_metadata = {}
+
        # Stores list of deliverables that failed, but did not abort the
        # compose.
        # {deliverable: [(Variant.uid, arch, subvariant)]}
@@ -303,6 +345,7 @@ class Compose(kobo.log.LoggingBase):
    get_compose_info = staticmethod(get_compose_info)
    write_compose_info = staticmethod(write_compose_info)
    get_compose_dir = staticmethod(get_compose_dir)
+    update_compose_url = staticmethod(update_compose_url)
 
    def __getitem__(self, name):
        return self.variants[name]
@@ -343,6 +386,10 @@ class Compose(kobo.log.LoggingBase):
    def has_module_defaults(self):
        return bool(self.conf.get("module_defaults_dir", False))
 
+    @property
+    def has_module_obsoletes(self):
+        return bool(self.conf.get("module_obsoletes_dir", False))
+
    @property
    def config_dir(self):
        return os.path.dirname(self.conf._open_file or "")
@@ -370,7 +417,7 @@ class Compose(kobo.log.LoggingBase):
            )
        else:
            file_name = os.path.basename(scm_dict)
-            scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
+            scm_dict = os.path.join(self.config_dir, scm_dict)
 
        self.log_debug("Writing variants file: %s", variants_file)
        tmp_dir = self.mkdtemp(prefix="variants_file_")
@@ -573,7 +620,52 @@ class Compose(kobo.log.LoggingBase):
        <compose_topdir>/work/{global,<arch>}/tmp[-<variant>]/
        """
        path = os.path.join(self.paths.work.tmp_dir(arch=arch, variant=variant))
-        return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
+        tmpdir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
+        os.chmod(tmpdir, 0o755)
+        return tmpdir
+
+    def dump_containers_metadata(self):
+        """Create a file with container metadata if there are any containers."""
+        if not self.containers_metadata:
+            return
+        with open(self.paths.compose.metadata("osbs.json"), "w") as f:
+            json.dump(
+                self.containers_metadata,
+                f,
+                indent=4,
+                sort_keys=True,
+                separators=(",", ": "),
+            )
+
+    def traceback(self, detail=None):
+        """Store an extended traceback. This method should only be called when
+        handling an exception.
+
+        :param str detail: Extra information appended to the filename
+        """
+        basename = "traceback"
+        if detail:
+            basename += "-" + detail
+        tb_path = self.paths.log.log_file("global", basename)
+        self.log_error("Extended traceback in: %s", tb_path)
+        with open(tb_path, "wb") as f:
+            f.write(kobo.tback.Traceback().get_traceback())
+
+    def load_old_compose_config(self):
+        """
+        Helper method to load Pungi config dump from old compose.
+        """
+        if not self._old_config:
+            config_dump_full = self.paths.log.log_file("global", "config-dump")
+            config_dump_full = self.paths.old_compose_path(config_dump_full)
+            if not config_dump_full:
+                return None
+
+            self.log_info("Loading old config file: %s", config_dump_full)
+            with open(config_dump_full, "r") as f:
+                self._old_config = json.load(f)
+
+        return self._old_config
 
 
 def get_ordered_variant_uids(compose):
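
The new `retry_request` helper wraps plain requests calls with the `retry` decorator from pungi.util, so transient CTS failures do not abort the compose. A rough sketch of how such a decorator can be written; only `wait_on=` appears in this diff, so the other parameter names and defaults here are assumptions, not pungi.util's actual API:

    import functools
    import time

    def retry(timeout=120, interval=30, wait_on=Exception):
        """Retry the wrapped call until it succeeds or the timeout expires."""
        def wrapper(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                start = time.time()
                while True:
                    try:
                        return func(*args, **kwargs)
                    except wait_on:
                        # Give up once the overall timeout is exceeded.
                        if time.time() - start >= timeout:
                            raise
                        time.sleep(interval)
            return inner
        return wrapper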
@@ -15,6 +15,7 @@ CreateIsoOpts = namedtuple(
     "CreateIsoOpts",
     [
         "buildinstall_method",
+        "boot_iso",
         "arch",
         "output_dir",
         "jigdo_dir",
@@ -25,6 +26,8 @@ CreateIsoOpts = namedtuple(
         "os_tree",
         "hfs_compat",
         "use_xorrisofs",
+        "iso_level",
+        "script_dir",
     ],
 )
 CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)
@@ -76,6 +79,8 @@ def make_image(f, opts):
         volid=opts.volid,
         exclude=["./lost+found"],
         graft_points=opts.graft_points,
+        use_xorrisofs=opts.use_xorrisofs,
+        iso_level=opts.iso_level,
         **mkisofs_kwargs
     )
     emit(f, cmd)
@@ -97,7 +102,7 @@ def run_isohybrid(f, opts):
 
 
 def make_manifest(f, opts):
-    emit(f, iso.get_manifest_cmd(opts.iso_name))
+    emit(f, iso.get_manifest_cmd(opts.iso_name, opts.use_xorrisofs))
 
 
 def make_jigdo(f, opts):
@@ -113,6 +118,27 @@ def make_jigdo(f, opts):
     emit(f, cmd)
 
 
+def write_xorriso_commands(opts):
+    script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
+    with open(script, "w") as f:
+        emit(f, "-indev %s" % opts.boot_iso)
+        emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
+        emit(f, "-boot_image any replay")
+        emit(f, "-volid %s" % opts.volid)
+
+        with open(opts.graft_points) as gp:
+            for line in gp:
+                iso_path, fs_path = line.strip().split("=", 1)
+                emit(f, "-map %s %s" % (fs_path, iso_path))
+
+        if opts.arch == "ppc64le":
+            # This is needed for the image to be bootable.
+            emit(f, "-as mkisofs -U --")
+
+        emit(f, "-end")
+    return script
+
+
 def write_script(opts, f):
     if bool(opts.jigdo_dir) != bool(opts.os_tree):
         raise RuntimeError("jigdo_dir must be used together with os_tree")
@@ -120,8 +146,14 @@ def write_script(opts, f):
     emit(f, "#!/bin/bash")
     emit(f, "set -ex")
     emit(f, "cd %s" % opts.output_dir)
-    make_image(f, opts)
-    run_isohybrid(f, opts)
+
+    if opts.use_xorrisofs and opts.buildinstall_method:
+        script = write_xorriso_commands(opts)
+        emit(f, "xorriso -dialog on <%s" % script)
+    else:
+        make_image(f, opts)
+        run_isohybrid(f, opts)
+
     implant_md5(f, opts)
     make_manifest(f, opts)
     if opts.jigdo_dir:
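
For illustration, `write_xorriso_commands` produces a dialog script roughly like the following; the paths and volume id are invented, and each `-map` line comes from one graft-point entry:

    -indev /compose/work/x86_64/Server/boot.iso
    -outdev /compose/compose/Server/x86_64/iso/Server-DVD-x86_64.iso
    -boot_image any replay
    -volid Server-DVD-x86_64
    -map /compose/Server/x86_64/os/Packages Packages
    -map /compose/Server/x86_64/os/repodata repodata
    -end

The generated script is then fed to `xorriso -dialog on <script`, which modifies a copy of the existing boot.iso in place instead of re-running genisoimage, keeping the image bootable.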
pungi/errors.py (new file, 20 lines)

@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+
+class UnsignedPackagesError(RuntimeError):
+    """Raised when package set fails to find a properly signed copy of an
+    RPM."""
+
+    pass
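
A usage sketch for the new exception; the call sites that raise it live outside this diff, and the message text is invented:

    from pungi.errors import UnsignedPackagesError

    try:
        raise UnsignedPackagesError("no signed copy of foo-1.0-1.x86_64.rpm found")
    except UnsignedPackagesError as exc:
        print("aborting compose: %s" % exc)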
@@ -35,7 +35,7 @@ from pungi.wrappers.createrepo import CreaterepoWrapper
 
 
 class ReentrantYumLock(object):
-    """ A lock that can be acquired multiple times by the same process. """
+    """A lock that can be acquired multiple times by the same process."""
 
     def __init__(self, lock, log):
         self.lock = lock
@@ -60,7 +60,7 @@ class ReentrantYumLock(object):
 
 
 def yumlocked(method):
-    """ A locking decorator. """
+    """A locking decorator."""
 
     def wrapper(self, *args, **kwargs):
         with self.yumlock:
@@ -519,7 +519,7 @@ class Pungi(PungiBase):
 
     def verifyCachePkg(self, po, path):  # Stolen from yum
         """check the package checksum vs the cache
-           return True if pkg is good, False if not"""
+        return True if pkg is good, False if not"""
 
         (csum_type, csum) = po.returnIdSum()
 
@@ -682,7 +682,7 @@ class Pungi(PungiBase):
 
     def get_package_deps(self, po):
         """Add the dependencies for a given package to the
-           transaction info"""
+        transaction info"""
         added = set()
         if po.repoid in self.lookaside_repos:
             # Don't resolve deps for stuff in lookaside.
@@ -911,7 +911,7 @@ class Pungi(PungiBase):
     def getPackagesFromGroup(self, group):
         """Get a list of package names from a ksparser group object
 
-           Returns a list of package names"""
+        Returns a list of package names"""
 
         packages = []
 
@@ -951,7 +951,7 @@ class Pungi(PungiBase):
 
     def _addDefaultGroups(self, excludeGroups=None):
         """Cycle through the groups and return at list of the ones that ara
-           default."""
+        default."""
         excludeGroups = excludeGroups or []
 
         # This is mostly stolen from anaconda.
@@ -1217,8 +1217,8 @@ class Pungi(PungiBase):
 
     def createSourceHashes(self):
         """Create two dicts - one that maps binary POs to source POs, and
-           one that maps a single source PO to all binary POs it produces.
-           Requires yum still configured."""
+        one that maps a single source PO to all binary POs it produces.
+        Requires yum still configured."""
         self.src_by_bin = {}
         self.bin_by_src = {}
         self.logger.info("Generating source <-> binary package mappings")
@@ -1232,8 +1232,8 @@ class Pungi(PungiBase):
 
     def add_srpms(self, po_list=None):
         """Cycle through the list of package objects and
-           find the sourcerpm for them.  Requires yum still
-           configured and a list of package objects"""
+        find the sourcerpm for them.  Requires yum still
+        configured and a list of package objects"""
 
         srpms = set()
         po_list = po_list or self.po_list
@@ -1275,9 +1275,9 @@ class Pungi(PungiBase):
 
     def add_fulltree(self, srpm_po_list=None):
         """Cycle through all package objects, and add any
-           that correspond to a source rpm that we are including.
-           Requires yum still configured and a list of package
-           objects."""
+        that correspond to a source rpm that we are including.
+        Requires yum still configured and a list of package
+        objects."""
 
         self.logger.info("Completing package set")
 
@@ -1357,8 +1357,8 @@ class Pungi(PungiBase):
 
     def getDebuginfoList(self):
         """Cycle through the list of package objects and find
-           debuginfo rpms for them.  Requires yum still
-           configured and a list of package objects"""
+        debuginfo rpms for them.  Requires yum still
+        configured and a list of package objects"""
 
         added = set()
         for po in self.all_pkgs:
@@ -1398,7 +1398,7 @@ class Pungi(PungiBase):
 
     def _downloadPackageList(self, polist, relpkgdir):
         """Cycle through the list of package objects and
-           download them from their respective repos."""
+        download them from their respective repos."""
 
         for pkg in sorted(polist):
             repo = self.ayum.repos.getRepo(pkg.repoid)
@@ -1533,7 +1533,7 @@ class Pungi(PungiBase):
     @yumlocked
     def downloadSRPMs(self):
         """Cycle through the list of srpms and
-           find the package objects for them, Then download them."""
+        find the package objects for them, Then download them."""
 
         # do the downloads
         self._downloadPackageList(self.srpm_po_list, os.path.join("source", "SRPMS"))
@@ -1541,7 +1541,7 @@ class Pungi(PungiBase):
     @yumlocked
     def downloadDebuginfo(self):
         """Cycle through the list of debuginfo rpms and
-           download them."""
+        download them."""
 
         # do the downloads
         self._downloadPackageList(
@@ -1980,7 +1980,7 @@ class Pungi(PungiBase):
 
     def doGetRelnotes(self):
         """Get extra files from packages in the tree to put in the topdir of
-           the tree."""
+        the tree."""
         docsdir = os.path.join(self.workdir, "docs")
         relnoterpms = self.config.get("pungi", "relnotepkgs").split()
 
@@ -1029,7 +1029,7 @@ class Gather(GatherBase):
 
         # Link downloaded package in (or link package from file repo)
         try:
-            linker.hardlink(pkg.localPkg(), target)
+            linker.link(pkg.localPkg(), target)
         except Exception:
             self.logger.error("Unable to link %s from the yum cache." % pkg.name)
             raise
@@ -54,8 +54,7 @@ class SimpleAcyclicOrientedGraph(object):
         return False if node in self._graph else True
 
     def remove_final_endpoint(self, node):
-        """
-        """
+        """"""
         remove_start_points = []
         for start, ends in self._graph.items():
             if node in ends:
@@ -20,8 +20,8 @@ import os
 SIZE_UNITS = {
     "b": 1,
     "k": 1024,
-    "M": 1024 ** 2,
-    "G": 1024 ** 3,
+    "M": 1024**2,
+    "G": 1024**3,
 }
 
 
@@ -44,6 +44,30 @@ def iter_module_defaults(path):
         yield module_name, index.get_module(module_name).get_defaults()
 
 
+def get_module_obsoletes_idx(path, mod_list):
+    """Given a path to a directory with yaml files, return Index with
+    merged all obsoletes.
+    """
+
+    merger = Modulemd.ModuleIndexMerger.new()
+    md_idxs = []
+
+    # associate_index does NOT copy it's argument (nor increases a
+    # reference counter on the object). It only stores a pointer.
+    for file in glob.glob(os.path.join(path, "*.yaml")):
+        index = Modulemd.ModuleIndex()
+        index.update_from_file(file, strict=False)
+        mod_name = index.get_module_names()[0]
+
+        if mod_name and (mod_name in mod_list or not mod_list):
+            md_idxs.append(index)
+            merger.associate_index(md_idxs[-1], 0)
+
+    merged_idx = merger.resolve()
+
+    return merged_idx
+
+
 def collect_module_defaults(
     defaults_dir, modules_to_load=None, mod_index=None, overrides_dir=None
 ):
@@ -69,3 +93,26 @@ def collect_module_defaults(
             mod_index.add_defaults(defaults)
 
     return mod_index
+
+
+def collect_module_obsoletes(obsoletes_dir, modules_to_load, mod_index=None):
+    """Load module obsoletes into index.
+
+    This works in a similar fashion as collect_module_defaults except it
+    merges indexes together instead of adding them during iteration.
+
+    Additionally if modules_to_load is not empty returned Index will include
+    only obsoletes for those modules.
+    """
+
+    obsoletes_index = get_module_obsoletes_idx(obsoletes_dir, modules_to_load)
+
+    # Merge Obsoletes with Modules Index.
+    if mod_index:
+        merger = Modulemd.ModuleIndexMerger.new()
+        merger.associate_index(mod_index, 0)
+        merger.associate_index(obsoletes_index, 0)
+        merged_idx = merger.resolve()
+        obsoletes_index = merged_idx
+
+    return obsoletes_index
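
A usage sketch for the new obsoletes helpers; the directory and module names here are invented for illustration:

    import pungi.module_util as module_util

    # Merge all *.yaml obsoletes documents for the listed modules into one
    # Modulemd.ModuleIndex; pass mod_index= to also merge an existing index.
    idx = module_util.collect_module_obsoletes(
        "/mnt/compose/work/global/module_obsoletes",
        ["perl", "nodejs"],
    )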
@@ -81,9 +81,6 @@ class PungiNotifier(object):
 
         self._update_args(kwargs)
 
-        if self.compose:
-            workdir = self.compose.paths.compose.topdir()
-
         with self.lock:
             for cmd in self.cmds:
                 self._run_script(cmd, msg, workdir, kwargs)
@@ -65,6 +65,11 @@ def main(args=None):
         action="store_true",
         help="do not use rpm-ostree's built-in change detection",
     )
+    treep.add_argument(
+        "--unified-core",
+        action="store_true",
+        help="use unified core mode in rpm-ostree",
+    )
 
     installerp = subparser.add_parser(
         "installer", help="Create an OSTree installer image"
@@ -43,6 +43,9 @@ class Tree(OSTree):
             # because something went wrong.
             "--touch-if-changed=%s.stamp" % self.commitid_file,
         ]
+        if self.unified_core:
+            # See https://github.com/coreos/rpm-ostree/issues/729
+            cmd.append("--unified-core")
         if self.version:
             # Add versioning metadata
             cmd.append("--add-metadata-string=version=%s" % self.version)
@@ -121,6 +124,7 @@ class Tree(OSTree):
         self.extra_config = self.args.extra_config
         self.ostree_ref = self.args.ostree_ref
         self.force_new_commit = self.args.force_new_commit
+        self.unified_core = self.args.unified_core
 
         if self.extra_config or self.ostree_ref:
             if self.extra_config:
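
With the new option, an invocation would look roughly like this; the `pungi-make-ostree` entry point and the `tree` subcommand match the parser above, but the other flags shown are illustrative:

    pungi-make-ostree tree \
        --repo=/srv/ostree/repo \
        --treefile=fedora-silverblue.json \
        --unified-core

The flag simply forwards `--unified-core` to rpm-ostree (see the linked rpm-ostree issue 729).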
@@ -103,12 +103,23 @@ class LogPaths(object):
             makedirs(path)
         return path
 
-    def log_file(self, arch, log_name, create_dir=True):
+    def koji_tasks_dir(self, create_dir=True):
+        """
+        Examples:
+            logs/global/koji-tasks
+        """
+        path = os.path.join(self.topdir(create_dir=create_dir), "koji-tasks")
+        if create_dir:
+            makedirs(path)
+        return path
+
+    def log_file(self, arch, log_name, create_dir=True, ext=None):
+        ext = ext or "log"
         arch = arch or "global"
         if log_name.endswith(".log"):
             log_name = log_name[:-4]
         return os.path.join(
-            self.topdir(arch, create_dir=create_dir), "%s.%s.log" % (log_name, arch)
+            self.topdir(arch, create_dir=create_dir), "%s.%s.%s" % (log_name, arch, ext)
         )
 
 
@@ -498,10 +509,23 @@ class WorkPaths(object):
             makedirs(path)
         return path
 
+    def module_obsoletes_dir(self, create_dir=True):
+        """
+        Example:
+            work/global/module_obsoletes
+        """
+        path = os.path.join(self.topdir(create_dir=create_dir), "module_obsoletes")
+        if create_dir:
+            makedirs(path)
+        return path
+
     def pkgset_file_cache(self, pkgset_name):
         """
         Returns the path to file in which the cached version of
         PackageSetBase.file_cache should be stored.
+
+        Example:
+            work/global/pkgset_f33-compose_file_cache.pickle
         """
         filename = "pkgset_%s_file_cache.pickle" % pkgset_name
         return os.path.join(self.topdir(arch="global"), filename)
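
With the new `ext` parameter, log paths come out like this (sketch; a `compose` object is assumed):

    compose.paths.log.log_file("x86_64", "createiso-Server-1-1", ext="json")
    # -> <topdir>/logs/x86_64/createiso-Server-1-1.x86_64.json

    compose.paths.log.log_file("x86_64", "buildinstall")
    # -> <topdir>/logs/x86_64/buildinstall.x86_64.log  (ext defaults to "log")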
@@ -27,6 +27,7 @@ from .createiso import CreateisoPhase  # noqa
 from .extra_isos import ExtraIsosPhase  # noqa
 from .live_images import LiveImagesPhase  # noqa
 from .image_build import ImageBuildPhase  # noqa
+from .image_container import ImageContainerPhase  # noqa
 from .osbuild import OSBuildPhase  # noqa
 from .repoclosure import RepoclosurePhase  # noqa
 from .test import TestPhase  # noqa
@@ -14,6 +14,8 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.
 
 import logging
+import math
+import time
 
 from pungi import util
 
@@ -58,6 +60,7 @@ class PhaseBase(object):
             self.compose.log_warning("[SKIP ] %s" % self.msg)
             self.finished = True
             return
+        self._start_time = time.time()
         self.compose.log_info("[BEGIN] %s" % self.msg)
         self.compose.notifier.send("phase-start", phase_name=self.name)
         self.run()
@@ -108,6 +111,13 @@ class PhaseBase(object):
             self.pool.stop()
         self.finished = True
         self.compose.log_info("[DONE ] %s" % self.msg)
+
+        if hasattr(self, "_start_time"):
+            self.compose.log_info(
+                "PHASE %s took %d seconds"
+                % (self.name.upper(), math.ceil(time.time() - self._start_time))
+            )
+
         if self.used_patterns is not None:
             # We only want to report this if the config was actually queried.
             self.report_unused_patterns()
@@ -50,6 +50,9 @@ class BuildinstallPhase(PhaseBase):
         # A set of (variant_uid, arch) pairs that completed successfully. This
         # is needed to skip copying files for failed tasks.
         self.pool.finished_tasks = set()
+        # A set of (variant_uid, arch) pairs that were reused from previous
+        # compose.
+        self.pool.reused_tasks = set()
         self.buildinstall_method = self.compose.conf.get("buildinstall_method")
         self.lorax_use_koji_plugin = self.compose.conf.get("lorax_use_koji_plugin")
         self.used_lorax = self.buildinstall_method == "lorax"
@@ -312,6 +315,18 @@ class BuildinstallPhase(PhaseBase):
             in self.pool.finished_tasks
         )
 
+    def reused(self, variant, arch):
+        """
+        Check if buildinstall phase reused previous results for given variant
+        and arch. If the phase is skipped, the results will be considered
+        reused as well.
+        """
+        return (
+            super(BuildinstallPhase, self).skip()
+            or (variant.uid if self.used_lorax else None, arch)
+            in self.pool.reused_tasks
+        )
+
 
 def get_kickstart_file(compose):
     scm_dict = compose.conf.get("buildinstall_kickstart")
@@ -368,7 +383,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         # double-escape volid in yaboot.conf
         new_volid = volid_escaped_2 if "yaboot" in config else volid_escaped
 
-        ks = (" ks=hd:LABEL=%s:/ks.cfg" % new_volid) if ks_file else ""
+        ks = (" inst.ks=hd:LABEL=%s:/ks.cfg" % new_volid) if ks_file else ""
 
         # pre-f18
         data = re.sub(r":CDLABEL=[^ \n]*", r":CDLABEL=%s%s" % (new_volid, ks), data)
@@ -654,9 +669,16 @@ class BuildinstallThread(WorkerThread):
             return None
 
         compose.log_info("Loading old BUILDINSTALL phase metadata: %s", old_metadata)
-        with open(old_metadata, "rb") as f:
-            old_result = pickle.load(f)
-            return old_result
+        try:
+            with open(old_metadata, "rb") as f:
+                old_result = pickle.load(f)
+                return old_result
+        except Exception as e:
+            compose.log_debug(
+                "Failed to load old BUILDINSTALL phase metadata %s : %s"
+                % (old_metadata, str(e))
+            )
+            return None
 
     def _reuse_old_buildinstall_result(self, compose, arch, variant, cmd, pkgset_phase):
         """
@@ -722,7 +744,7 @@ class BuildinstallThread(WorkerThread):
         # Ask Koji for all the RPMs in the `runroot_tag` and check that
         # those installed in the old buildinstall buildroot are still in the
         # very same versions/releases.
-        koji_wrapper = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+        koji_wrapper = kojiwrapper.KojiWrapper(compose)
         rpms = koji_wrapper.koji_proxy.listTaggedRPMS(
             compose.conf.get("runroot_tag"), inherit=True, latest=True
         )[0]
@@ -794,12 +816,15 @@ class BuildinstallThread(WorkerThread):
             chown_paths.append(_get_log_dir(compose, variant, arch))
         elif buildinstall_method == "buildinstall":
             packages += ["anaconda"]
+        packages += get_arch_variant_data(
+            compose.conf, "buildinstall_packages", arch, variant
+        )
         if self._reuse_old_buildinstall_result(
             compose, arch, variant, cmd, pkgset_phase
         ):
             self.copy_files(compose, variant, arch)
             self.pool.finished_tasks.add((variant.uid if variant else None, arch))
+            self.pool.reused_tasks.add((variant.uid if variant else None, arch))
             self.pool.log_info("[DONE ] %s" % msg)
             return
 
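
The `ks=` to `inst.ks=` change above tracks Anaconda's boot option rename. An illustrative boot loader stanza before and after (the volume id is hypothetical):

    # before
    append initrd=initrd.img inst.stage2=hd:LABEL=Server-x86_64 ks=hd:LABEL=Server-x86_64:/ks.cfg
    # after
    append initrd=initrd.img inst.stage2=hd:LABEL=Server-x86_64 inst.ks=hd:LABEL=Server-x86_64:/ks.cfg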
@ -18,6 +18,7 @@ import os
|
|||||||
import random
|
import random
|
||||||
import shutil
|
import shutil
|
||||||
import stat
|
import stat
|
||||||
|
import json
|
||||||
|
|
||||||
import productmd.treeinfo
|
import productmd.treeinfo
|
||||||
from productmd.images import Image
|
from productmd.images import Image
|
||||||
@ -36,6 +37,7 @@ from pungi.util import (
|
|||||||
failable,
|
failable,
|
||||||
get_file_size,
|
get_file_size,
|
||||||
get_mtime,
|
get_mtime,
|
||||||
|
read_json_file,
|
||||||
)
|
)
|
||||||
from pungi.media_split import MediaSplitter, convert_media_size
|
from pungi.media_split import MediaSplitter, convert_media_size
|
||||||
from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
|
from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
|
||||||
@ -73,6 +75,170 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
|
|||||||
return False
|
return False
|
||||||
return bool(self.compose.conf.get("buildinstall_method", ""))
|
return bool(self.compose.conf.get("buildinstall_method", ""))
|
||||||
|
|
||||||
|
def _metadata_path(self, variant, arch, disc_num, disc_count):
|
||||||
|
return self.compose.paths.log.log_file(
|
||||||
|
arch,
|
||||||
|
"createiso-%s-%d-%d" % (variant.uid, disc_num, disc_count),
|
||||||
|
ext="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
def save_reuse_metadata(self, cmd, variant, arch, opts):
|
||||||
|
"""Save metadata for future composes to verify if the compose can be reused."""
|
||||||
|
metadata = {
|
||||||
|
"cmd": cmd,
|
||||||
|
"opts": opts._asdict(),
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata_path = self._metadata_path(
|
||||||
|
variant, arch, cmd["disc_num"], cmd["disc_count"]
|
||||||
|
)
|
||||||
|
with open(metadata_path, "w") as f:
|
||||||
|
json.dump(metadata, f, indent=2)
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
def _load_old_metadata(self, cmd, variant, arch):
|
||||||
|
metadata_path = self._metadata_path(
|
||||||
|
variant, arch, cmd["disc_num"], cmd["disc_count"]
|
||||||
|
)
|
||||||
|
old_path = self.compose.paths.old_compose_path(metadata_path)
|
||||||
|
self.logger.info(
|
||||||
|
"Loading old metadata for %s.%s from: %s", variant, arch, old_path
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
return read_json_file(old_path)
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def perform_reuse(self, cmd, variant, arch, opts, iso_path):
|
||||||
|
"""
|
||||||
|
Copy all related files from old compose to the new one. As a last step
|
||||||
|
add the new image to metadata.
|
||||||
|
"""
|
||||||
|
linker = OldFileLinker(self.logger)
|
||||||
|
old_file_name = os.path.basename(iso_path)
|
||||||
|
current_file_name = os.path.basename(cmd["iso_path"])
|
||||||
|
try:
|
||||||
|
# Hardlink ISO and manifest
|
||||||
|
for suffix in ("", ".manifest"):
|
||||||
|
linker.link(iso_path + suffix, cmd["iso_path"] + suffix)
|
||||||
|
# Copy log files
|
||||||
|
# The log file name includes filename of the image, so we need to
|
||||||
|
# find old file with the old name, and rename it to the new name.
|
||||||
|
log_file = self.compose.paths.log.log_file(
|
||||||
|
arch, "createiso-%s" % current_file_name
|
||||||
|
)
|
||||||
|
old_log_file = self.compose.paths.old_compose_path(
|
||||||
|
self.compose.paths.log.log_file(arch, "createiso-%s" % old_file_name)
|
||||||
|
)
|
||||||
|
linker.link(old_log_file, log_file)
|
||||||
|
# Copy jigdo files
|
||||||
|
if opts.jigdo_dir:
|
||||||
|
old_jigdo_dir = self.compose.paths.old_compose_path(opts.jigdo_dir)
|
||||||
|
for suffix in (".template", ".jigdo"):
|
||||||
|
linker.link(
|
||||||
|
os.path.join(old_jigdo_dir, old_file_name) + suffix,
|
||||||
|
os.path.join(opts.jigdo_dir, current_file_name) + suffix,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
# A problem happened while linking some file, let's clean up
|
||||||
|
# everything.
|
||||||
|
linker.abort()
|
||||||
|
raise
|
||||||
|
# Add image to manifest
|
||||||
|
add_iso_to_metadata(
|
||||||
|
self.compose,
|
||||||
|
variant,
|
||||||
|
arch,
|
||||||
|
cmd["iso_path"],
|
||||||
|
bootable=cmd["bootable"],
|
||||||
|
disc_num=cmd["disc_num"],
|
||||||
|
disc_count=cmd["disc_count"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def try_reuse(self, cmd, variant, arch, opts):
|
||||||
|
"""Try to reuse image from previous compose.
|
||||||
|
|
||||||
|
:returns bool: True if reuse was successful, False otherwise
|
||||||
|
"""
|
||||||
|
if not self.compose.conf["createiso_allow_reuse"]:
|
||||||
|
return
|
||||||
|
|
||||||
|
log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)
|
||||||
|
current_metadata = self.save_reuse_metadata(cmd, variant, arch, opts)
|
||||||
|
|
||||||
|
if opts.buildinstall_method and not self.bi.reused(variant, arch):
|
||||||
|
# If buildinstall phase was not reused for some reason, we can not
|
||||||
|
# reuse any bootable image. If a package change caused rebuild of
|
||||||
|
# boot.iso, we would catch it here too, but there could be a
|
||||||
|
# configuration change in lorax template which would remain
|
||||||
|
# undetected.
|
||||||
|
self.logger.info("%s - boot configuration changed", log_msg)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check old compose configuration: extra_files and product_ids can be
|
||||||
|
# reflected on ISO.
|
||||||
|
old_config = self.compose.load_old_compose_config()
|
||||||
|
if not old_config:
|
||||||
|
self.logger.info("%s - no config for old compose", log_msg)
|
||||||
|
return False
|
||||||
|
# Convert current configuration to JSON and back to encode it similarly
|
||||||
|
# to the old one
|
||||||
|
config = json.loads(json.dumps(self.compose.conf))
|
||||||
|
for opt in self.compose.conf:
|
||||||
|
# Skip a selection of options: these affect what packages can be
|
||||||
|
# included, which we explicitly check later on.
|
||||||
|
config_whitelist = set(
|
||||||
|
[
|
||||||
|
"gather_lookaside_repos",
|
||||||
|
"pkgset_koji_builds",
|
||||||
|
"pkgset_koji_scratch_tasks",
|
||||||
|
"pkgset_koji_module_builds",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
# Skip irrelevant options
|
||||||
|
config_whitelist.update(["osbs", "osbuild"])
|
||||||
|
if opt in config_whitelist:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if old_config.get(opt) != config.get(opt):
|
||||||
|
self.logger.info("%s - option %s differs", log_msg, opt)
|
||||||
|
return False
|
||||||
|
|
||||||
|
old_metadata = self._load_old_metadata(cmd, variant, arch)
|
||||||
|
if not old_metadata:
|
||||||
|
self.logger.info("%s - no old metadata found", log_msg)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test if volume ID matches - volid can be generated dynamically based on
|
||||||
|
# other values, and could change even if nothing else is different.
|
||||||
|
if current_metadata["opts"]["volid"] != old_metadata["opts"]["volid"]:
|
||||||
|
self.logger.info("%s - volume ID differs", log_msg)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Compare packages on the ISO.
|
||||||
|
if compare_packages(
|
||||||
|
old_metadata["opts"]["graft_points"],
|
||||||
|
current_metadata["opts"]["graft_points"],
|
||||||
|
):
|
||||||
|
self.logger.info("%s - packages differ", log_msg)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.perform_reuse(
|
||||||
|
cmd,
|
||||||
|
variant,
|
||||||
|
arch,
|
||||||
|
opts,
|
||||||
|
old_metadata["cmd"]["iso_path"],
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
except Exception as exc:
|
||||||
|
self.compose.log_error(
|
||||||
|
"Error while reusing ISO for %s.%s: %s", variant, arch, exc
|
||||||
|
)
|
||||||
|
self.compose.traceback("createiso-reuse-%s-%s" % (variant, arch))
|
||||||
|
return False
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
symlink_isos_to = self.compose.conf.get("symlink_isos_to")
|
symlink_isos_to = self.compose.conf.get("symlink_isos_to")
|
||||||
disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
|
disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
|
||||||
@ -172,21 +338,29 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
|
|||||||
supported=self.compose.supported,
|
supported=self.compose.supported,
|
||||||
hfs_compat=self.compose.conf["iso_hfs_ppc64le_compatible"],
|
hfs_compat=self.compose.conf["iso_hfs_ppc64le_compatible"],
|
||||||
use_xorrisofs=self.compose.conf.get("createiso_use_xorrisofs"),
|
use_xorrisofs=self.compose.conf.get("createiso_use_xorrisofs"),
|
||||||
|
iso_level=get_iso_level_config(self.compose, variant, arch),
|
||||||
)
|
)
|
||||||
|
|
||||||
if bootable:
|
if bootable:
|
||||||
opts = opts._replace(
|
opts = opts._replace(
|
||||||
buildinstall_method=self.compose.conf["buildinstall_method"]
|
buildinstall_method=self.compose.conf[
|
||||||
|
"buildinstall_method"
|
||||||
|
],
|
||||||
|
boot_iso=os.path.join(os_tree, "images", "boot.iso"),
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.compose.conf["create_jigdo"]:
|
if self.compose.conf["create_jigdo"]:
|
||||||
jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
|
jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
|
||||||
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
|
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
|
||||||
|
|
||||||
script_file = os.path.join(
|
# Try to reuse
|
||||||
self.compose.paths.work.tmp_dir(arch, variant),
|
if self.try_reuse(cmd, variant, arch, opts):
|
||||||
"createiso-%s.sh" % filename,
|
# Reuse was successful, go to next ISO
|
||||||
)
|
continue
|
||||||
|
|
||||||
|
script_dir = self.compose.paths.work.tmp_dir(arch, variant)
|
||||||
|
opts = opts._replace(script_dir=script_dir)
|
||||||
|
script_file = os.path.join(script_dir, "createiso-%s.sh" % filename)
|
||||||
with open(script_file, "w") as f:
|
with open(script_file, "w") as f:
|
||||||
createiso.write_script(opts, f)
|
createiso.write_script(opts, f)
|
||||||
cmd["cmd"] = ["bash", script_file]
|
cmd["cmd"] = ["bash", script_file]
|
||||||
@ -202,6 +376,36 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
|
|||||||
self.pool.start()
|
self.pool.start()
|
||||||
|
|
||||||
|
|
||||||
|
def read_packages(graft_points):
|
||||||
|
"""Read packages that were listed in given graft points file.
|
||||||
|
|
||||||
|
Only files under Packages directory are considered. Particularly this
|
||||||
|
excludes .discinfo, .treeinfo and media.repo as well as repodata and
|
||||||
|
any extra files.
|
||||||
|
|
||||||
|
Extra files are easier to check by configuration (same name doesn't
|
||||||
|
imply same content). Repodata depend entirely on included packages (and
|
||||||
|
possibly product id certificate), but are affected by current time
|
||||||
|
which can change checksum despite data being the same.
|
||||||
|
"""
|
||||||
|
with open(graft_points) as f:
|
||||||
|
return set(
|
||||||
|
line.split("=", 1)[0]
|
||||||
|
for line in f
|
||||||
|
if line.startswith("Packages/") or "/Packages/" in line
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def compare_packages(old_graft_points, new_graft_points):
|
||||||
|
"""Read packages from the two files and compare them.
|
||||||
|
|
||||||
|
:returns bool: True if there are differences, False otherwise
|
||||||
|
"""
|
||||||
|
old_files = read_packages(old_graft_points)
|
||||||
|
new_files = read_packages(new_graft_points)
|
||||||
|
return old_files != new_files
|
||||||
|
|
||||||
|
|
||||||
class CreateIsoThread(WorkerThread):
|
class CreateIsoThread(WorkerThread):
|
||||||
def fail(self, compose, cmd, variant, arch):
|
def fail(self, compose, cmd, variant, arch):
|
||||||
self.pool.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])
|
self.pool.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])
|
||||||
@@ -324,15 +528,13 @@ def add_iso_to_metadata(
     return img


-def run_createiso_command(
-    num, compose, bootable, arch, cmd, mounts, log_file, with_jigdo=True
-):
+def run_createiso_command(num, compose, bootable, arch, cmd, mounts, log_file):
     packages = [
         "coreutils",
         "xorriso" if compose.conf.get("createiso_use_xorrisofs") else "genisoimage",
         "isomd5sum",
     ]
-    if with_jigdo and compose.conf["create_jigdo"]:
+    if compose.conf["create_jigdo"]:
         packages.append("jigdo")
     if bootable:
         extra_packages = {
@@ -346,7 +548,7 @@ def run_createiso_command(
     build_arch = arch
     if runroot.runroot_method == "koji" and not bootable:
         runroot_tag = compose.conf["runroot_tag"]
-        koji_wrapper = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+        koji_wrapper = kojiwrapper.KojiWrapper(compose)
         koji_proxy = koji_wrapper.koji_proxy
         tag_info = koji_proxy.getTag(runroot_tag)
         if not tag_info:
@@ -598,3 +800,36 @@ def create_hardlinks(staging_dir, log_file):
     """
     cmd = ["/usr/sbin/hardlink", "-c", "-vv", staging_dir]
     run(cmd, logfile=log_file, show_cmd=True)
+
+
+class OldFileLinker(object):
+    """
+    A wrapper around os.link that remembers which files were linked and can
+    clean them up.
+    """
+
+    def __init__(self, logger):
+        self.logger = logger
+        self.linked_files = []
+
+    def link(self, src, dst):
+        self.logger.debug("Hardlinking %s to %s", src, dst)
+        os.link(src, dst)
+        self.linked_files.append(dst)
+
+    def abort(self):
+        """Clean up all files created by this instance."""
+        for f in self.linked_files:
+            os.unlink(f)
+
+
+def get_iso_level_config(compose, variant, arch):
+    """
+    Get configured ISO level for this variant and architecture.
+    """
+    level = compose.conf.get("iso_level")
+    if isinstance(level, list):
+        level = None
+        for c in get_arch_variant_data(compose.conf, "iso_level", arch, variant):
+            level = c
+    return level
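Note: a small usage sketch for the `OldFileLinker` helper added above, linking a set of files all-or-nothing; the directories and file names here are created purely for the demonstration:

```python
# Sketch: all-or-nothing hardlinking with the OldFileLinker added above.
import logging
import os
import tempfile

logging.basicConfig(level=logging.DEBUG)
src_dir = tempfile.mkdtemp()
dst_dir = tempfile.mkdtemp()
for name in ("image.iso", "image.iso.manifest"):
    open(os.path.join(src_dir, name), "w").close()  # stand-in payloads

linker = OldFileLinker(logging.getLogger("reuse"))
try:
    for name in ("image.iso", "image.iso.manifest"):
        linker.link(os.path.join(src_dir, name), os.path.join(dst_dir, name))
except Exception:
    linker.abort()  # roll back any links already created
    raise
```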
@@ -16,7 +16,6 @@

 __all__ = ("create_variant_repo",)

-
 import copy
 import errno
 import glob
@@ -25,19 +24,22 @@ import shutil
 import threading
 import xml.dom.minidom

-from kobo.threads import ThreadPool, WorkerThread
-from kobo.shortcuts import run, relative_path
-
-from ..wrappers.scm import get_dir_from_scm
-from ..wrappers.createrepo import CreaterepoWrapper
-from .base import PhaseBase
-from ..util import get_arch_variant_data, temp_dir
-from ..module_util import Modulemd, collect_module_defaults
-
-import productmd.rpms
 import productmd.modules
+import productmd.rpms
+
+from kobo.shortcuts import relative_path, run
+from kobo.threads import ThreadPool, WorkerThread
+
+from ..module_util import Modulemd, collect_module_defaults, collect_module_obsoletes
+from ..util import (
+    get_arch_variant_data,
+    read_single_module_stream_from_file,
+    temp_dir,
+)
+from ..wrappers.createrepo import CreaterepoWrapper
+from ..wrappers.scm import get_dir_from_scm
+from .base import PhaseBase
+
+CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"

 createrepo_lock = threading.Lock()
 createrepo_dirs = set()

@@ -79,6 +81,7 @@ class CreaterepoPhase(PhaseBase):
             get_dir_from_scm(
                 self.compose.conf["createrepo_extra_modulemd"][variant.uid],
                 self.compose.paths.work.tmp_dir(variant=variant, create_dir=False),
+                compose=self.compose,
             )

         self.pool.queue_put((self.compose, None, variant, "srpm"))
@@ -188,6 +191,23 @@ def create_variant_repo(
     comps_path = None
     if compose.has_comps and pkg_type == "rpm":
         comps_path = compose.paths.work.comps(arch=arch, variant=variant)
+
+    if compose.conf["createrepo_enable_cache"]:
+        cachedir = os.path.join(
+            CACHE_TOPDIR,
+            "%s-%s" % (compose.conf["release_short"], os.getuid()),
+        )
+        if not os.path.exists(cachedir):
+            try:
+                os.makedirs(cachedir)
+            except Exception as e:
+                compose.log_warning(
+                    "Cache disabled because cannot create cache dir %s %s"
+                    % (cachedir, str(e))
+                )
+                cachedir = None
+    else:
+        cachedir = None
     cmd = repo.get_createrepo_cmd(
         repo_dir,
         update=True,
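Note: the cache directory introduced above is keyed by release short name and the UID running the compose, so different users on one host never share (or fight over) a cache. A sketch of the naming, assuming `release_short` is `Fedora`:

```python
import os

CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"
release_short = "Fedora"  # illustrative value of compose.conf["release_short"]

cachedir = os.path.join(CACHE_TOPDIR, "%s-%s" % (release_short, os.getuid()))
print(cachedir)  # e.g. /var/cache/pungi/createrepo_c/Fedora-1000
```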
@@ -203,6 +223,7 @@ def create_variant_repo(
         oldpackagedirs=old_package_dirs,
         use_xz=compose.conf["createrepo_use_xz"],
         extra_args=compose.conf["createrepo_extra_args"],
+        cachedir=cachedir,
     )
     log_file = compose.paths.log.log_file(
         arch, "createrepo-%s.%s" % (variant, pkg_type)
@@ -245,12 +266,15 @@ def create_variant_repo(
         defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
     )

+    obsoletes_dir = compose.paths.work.module_obsoletes_dir()
+    mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)
+
     # Add extra modulemd files
     if variant.uid in compose.conf.get("createrepo_extra_modulemd", {}):
         compose.log_debug("Adding extra modulemd for %s.%s", variant.uid, arch)
         dirname = compose.paths.work.tmp_dir(variant=variant, create_dir=False)
         for filepath in glob.glob(os.path.join(dirname, arch) + "/*.yaml"):
-            module_stream = Modulemd.ModuleStream.read_file(filepath, strict=True)
+            module_stream = read_single_module_stream_from_file(filepath)
             if not mod_index.add_module_stream(module_stream):
                 raise RuntimeError(
                     "Failed parsing modulemd data from %s" % filepath
@@ -343,7 +367,7 @@ def get_productids_from_scm(compose):

     tmp_dir = compose.mkdtemp(prefix="pungi_")
     try:
-        get_dir_from_scm(product_id, tmp_dir)
+        get_dir_from_scm(product_id, tmp_dir, compose=compose)
     except OSError as e:
         if e.errno == errno.ENOENT and product_id_allow_missing:
             compose.log_warning("No product IDs in %s" % product_id)
@@ -14,6 +14,8 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.

 import os
+import hashlib
+import json

 from kobo.shortcuts import force_list
 from kobo.threads import ThreadPool, WorkerThread
@@ -28,8 +30,17 @@ from pungi.phases.createiso import (
     copy_boot_images,
     run_createiso_command,
     load_and_tweak_treeinfo,
+    compare_packages,
+    OldFileLinker,
+    get_iso_level_config,
+)
+from pungi.util import (
+    failable,
+    get_format_substs,
+    get_variant_data,
+    get_volid,
+    read_json_file,
 )
-from pungi.util import failable, get_format_substs, get_variant_data, get_volid
 from pungi.wrappers import iso
 from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm

@@ -37,9 +48,10 @@ from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
 class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
     name = "extra_isos"

-    def __init__(self, compose):
+    def __init__(self, compose, buildinstall_phase):
         super(ExtraIsosPhase, self).__init__(compose)
         self.pool = ThreadPool(logger=self.logger)
+        self.bi = buildinstall_phase

     def validate(self):
         for variant in self.compose.get_variants(types=["variant"]):
@@ -65,13 +77,17 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
             commands.append((config, variant, arch))

         for (config, variant, arch) in commands:
-            self.pool.add(ExtraIsosThread(self.pool))
+            self.pool.add(ExtraIsosThread(self.pool, self.bi))
             self.pool.queue_put((self.compose, config, variant, arch))

         self.pool.start()


 class ExtraIsosThread(WorkerThread):
+    def __init__(self, pool, buildinstall_phase):
+        super(ExtraIsosThread, self).__init__(pool)
+        self.bi = buildinstall_phase
+
     def process(self, item, num):
         self.num = num
         compose, config, variant, arch = item
@@ -115,35 +131,42 @@ class ExtraIsosThread(WorkerThread):
             supported=compose.supported,
             hfs_compat=compose.conf["iso_hfs_ppc64le_compatible"],
             use_xorrisofs=compose.conf.get("createiso_use_xorrisofs"),
+            iso_level=get_iso_level_config(compose, variant, arch),
         )

+        os_tree = compose.paths.compose.os_tree(arch, variant)
         if compose.conf["create_jigdo"]:
             jigdo_dir = compose.paths.compose.jigdo_dir(arch, variant)
-            os_tree = compose.paths.compose.os_tree(arch, variant)
             opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)

         if bootable:
             opts = opts._replace(
-                buildinstall_method=compose.conf["buildinstall_method"]
+                buildinstall_method=compose.conf["buildinstall_method"],
+                boot_iso=os.path.join(os_tree, "images", "boot.iso"),
             )

-        script_file = os.path.join(
-            compose.paths.work.tmp_dir(arch, variant), "extraiso-%s.sh" % filename
-        )
-        with open(script_file, "w") as f:
-            createiso.write_script(opts, f)
-
-        run_createiso_command(
-            self.num,
-            compose,
-            bootable,
-            arch,
-            ["bash", script_file],
-            [compose.topdir],
-            log_file=compose.paths.log.log_file(
-                arch, "extraiso-%s" % os.path.basename(iso_path)
-            ),
-            with_jigdo=compose.conf["create_jigdo"],
-        )
+        # Check if it can be reused.
+        hash = hashlib.sha256()
+        hash.update(json.dumps(config, sort_keys=True).encode("utf-8"))
+        config_hash = hash.hexdigest()
+
+        if not self.try_reuse(compose, variant, arch, config_hash, opts):
+            script_dir = compose.paths.work.tmp_dir(arch, variant)
+            opts = opts._replace(script_dir=script_dir)
+            script_file = os.path.join(script_dir, "extraiso-%s.sh" % filename)
+            with open(script_file, "w") as f:
+                createiso.write_script(opts, f)
+
+            run_createiso_command(
+                self.num,
+                compose,
+                bootable,
+                arch,
+                ["bash", script_file],
+                [compose.topdir],
+                log_file=compose.paths.log.log_file(
+                    arch, "extraiso-%s" % os.path.basename(iso_path)
+                ),
+            )

         img = add_iso_to_metadata(
             compose,
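Note: the reuse key computed above is a SHA-256 digest of the JSON-serialized config snippet for the ISO; `sort_keys=True` keeps the digest stable regardless of dict ordering. A standalone sketch with a made-up snippet:

```python
import hashlib
import json

# Invented config snippet; in the patch this is the extra_isos config block.
config = {"include_variants": ["Client"], "filename": "extra.iso"}

config_hash = hashlib.sha256(
    json.dumps(config, sort_keys=True).encode("utf-8")
).hexdigest()
print(config_hash[:16])  # any edit to the snippet changes the digest
```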
@@ -155,8 +178,155 @@ class ExtraIsosThread(WorkerThread):
         )
         img._max_size = config.get("max_size")

+        save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path)
+
         self.pool.log_info("[DONE ] %s" % msg)

+    def try_reuse(self, compose, variant, arch, config_hash, opts):
+        # Check explicit config
+        if not compose.conf["extraiso_allow_reuse"]:
+            return
+
+        log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)
+
+        if opts.buildinstall_method and not self.bi.reused(variant, arch):
+            # If buildinstall phase was not reused for some reason, we can not
+            # reuse any bootable image. If a package change caused rebuild of
+            # boot.iso, we would catch it here too, but there could be a
+            # configuration change in lorax template which would remain
+            # undetected.
+            self.pool.log_info("%s - boot configuration changed", log_msg)
+            return False
+
+        # Check old compose configuration: extra_files and product_ids can be
+        # reflected on ISO.
+        old_config = compose.load_old_compose_config()
+        if not old_config:
+            self.pool.log_info("%s - no config for old compose", log_msg)
+            return False
+        # Convert current configuration to JSON and back to encode it similarly
+        # to the old one
+        config = json.loads(json.dumps(compose.conf))
+        for opt in compose.conf:
+            # Skip a selection of options: these affect what packages can be
+            # included, which we explicitly check later on.
+            config_whitelist = set(
+                [
+                    "gather_lookaside_repos",
+                    "pkgset_koji_builds",
+                    "pkgset_koji_scratch_tasks",
+                    "pkgset_koji_module_builds",
+                ]
+            )
+            # Skip irrelevant options
+            config_whitelist.update(["osbs", "osbuild"])
+            if opt in config_whitelist:
+                continue
+
+            if old_config.get(opt) != config.get(opt):
+                self.pool.log_info("%s - option %s differs", log_msg, opt)
+                return False
+
+        old_metadata = load_old_metadata(compose, variant, arch, config_hash)
+        if not old_metadata:
+            self.pool.log_info("%s - no old metadata found", log_msg)
+            return False
+
+        # Test if volume ID matches - volid can be generated dynamically based on
+        # other values, and could change even if nothing else is different.
+        if opts.volid != old_metadata["opts"]["volid"]:
+            self.pool.log_info("%s - volume ID differs", log_msg)
+            return False
+
+        # Compare packages on the ISO.
+        if compare_packages(
+            old_metadata["opts"]["graft_points"],
+            opts.graft_points,
+        ):
+            self.pool.log_info("%s - packages differ", log_msg)
+            return False
+
+        try:
+            self.perform_reuse(
+                compose,
+                variant,
+                arch,
+                opts,
+                old_metadata["opts"]["output_dir"],
+                old_metadata["opts"]["iso_name"],
+            )
+            return True
+        except Exception as exc:
+            self.pool.log_error(
+                "Error while reusing ISO for %s.%s: %s", variant, arch, exc
+            )
+            compose.traceback("extraiso-reuse-%s-%s-%s" % (variant, arch, config_hash))
+            return False
+
+    def perform_reuse(self, compose, variant, arch, opts, old_iso_dir, old_file_name):
+        """
+        Copy all related files from old compose to the new one. As a last step
+        add the new image to metadata.
+        """
+        linker = OldFileLinker(self.pool._logger)
+        old_iso_path = os.path.join(old_iso_dir, old_file_name)
+        iso_path = os.path.join(opts.output_dir, opts.iso_name)
+        try:
+            # Hardlink ISO and manifest
+            for suffix in ("", ".manifest"):
+                linker.link(old_iso_path + suffix, iso_path + suffix)
+            # Copy log files
+            # The log file name includes filename of the image, so we need to
+            # find old file with the old name, and rename it to the new name.
+            log_file = compose.paths.log.log_file(arch, "extraiso-%s" % opts.iso_name)
+            old_log_file = compose.paths.old_compose_path(
+                compose.paths.log.log_file(arch, "extraiso-%s" % old_file_name)
+            )
+            linker.link(old_log_file, log_file)
+            # Copy jigdo files
+            if opts.jigdo_dir:
+                old_jigdo_dir = compose.paths.old_compose_path(opts.jigdo_dir)
+                for suffix in (".template", ".jigdo"):
+                    linker.link(
+                        os.path.join(old_jigdo_dir, old_file_name) + suffix,
+                        os.path.join(opts.jigdo_dir, opts.iso_name) + suffix,
+                    )
+        except Exception:
+            # A problem happened while linking some file, let's clean up
+            # everything.
+            linker.abort()
+            raise
+
+
+def save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path):
+    """
+    Save metadata for possible reuse of this image. The file name is determined
+    from the hash of a configuration snippet for this image. Any change in that
+    configuration in next compose will change the hash and thus reuse will be
+    blocked.
+    """
+    metadata = {"opts": opts._asdict()}
+    metadata_path = compose.paths.log.log_file(
+        arch,
+        "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
+        ext="json",
+    )
+    with open(metadata_path, "w") as f:
+        json.dump(metadata, f, indent=2)
+
+
+def load_old_metadata(compose, variant, arch, config_hash):
+    metadata_path = compose.paths.log.log_file(
+        arch,
+        "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
+        ext="json",
+    )
+    old_path = compose.paths.old_compose_path(metadata_path)
+    try:
+        return read_json_file(old_path)
+    except Exception:
+        return None


 def get_extra_files(compose, variant, arch, extra_files):
     """Clone the configured files into a directory from where they can be
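Note: the reuse metadata saved and loaded above is just the createiso options named tuple dumped to JSON under a config-hash-derived log name. A round-trip sketch with a reduced, hypothetical options tuple (the real `CreateIsoOpts` has more fields):

```python
import collections
import json

# Reduced stand-in for createiso.CreateIsoOpts; only fields used by the
# reuse checks above are shown.
Opts = collections.namedtuple("Opts", ["volid", "graft_points", "output_dir", "iso_name"])
opts = Opts("F-35-x86_64", "/work/graft-points", "/compose/iso", "extra.iso")

blob = json.dumps({"opts": opts._asdict()}, indent=2)  # what save_reuse_metadata writes
old_metadata = json.loads(blob)                        # what load_old_metadata returns
assert old_metadata["opts"]["volid"] == opts.volid     # the volid gate in try_reuse
```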
@@ -14,51 +14,49 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


+import glob
 import json
 import os
 import shutil
 import threading
-import six
-from six.moves import cPickle as pickle

 from kobo.rpmlib import parse_nvra
 from kobo.shortcuts import run
 from productmd.rpms import Rpms
+from six.moves import cPickle as pickle

 try:
     from queue import Queue
 except ImportError:
     from Queue import Queue

-from pungi.wrappers.scm import get_file_from_scm
-from .link import link_files
-from ...wrappers.createrepo import CreaterepoWrapper
 import pungi.wrappers.kojiwrapper

-from pungi.compose import get_ordered_variant_uids
 from pungi.arch import get_compatible_arches, split_name_arch
+from pungi.compose import get_ordered_variant_uids
+from pungi.module_util import (
+    Modulemd,
+    collect_module_defaults,
+    collect_module_obsoletes,
+)
 from pungi.phases.base import PhaseBase
-from pungi.util import get_arch_data, get_arch_variant_data, get_variant_data, makedirs
-from pungi.module_util import Modulemd, collect_module_defaults
 from pungi.phases.createrepo import add_modular_metadata
+from pungi.util import get_arch_data, get_arch_variant_data, get_variant_data, makedirs
+from pungi.wrappers.scm import get_file_from_scm
+
+from ...wrappers.createrepo import CreaterepoWrapper
+from .link import link_files


 def get_gather_source(name):
     import pungi.phases.gather.sources
-    from .source import GatherSourceContainer

-    GatherSourceContainer.register_module(pungi.phases.gather.sources)
-    container = GatherSourceContainer()
-    return container["GatherSource%s" % name]
+    return pungi.phases.gather.sources.ALL_SOURCES[name.lower()]


 def get_gather_method(name):
     import pungi.phases.gather.methods
-    from .method import GatherMethodContainer

-    GatherMethodContainer.register_module(pungi.phases.gather.methods)
-    container = GatherMethodContainer()
-    return container["GatherMethod%s" % name]
+    return pungi.phases.gather.methods.ALL_METHODS[name.lower()]


 class GatherPhase(PhaseBase):
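Note: the kobo.plugins containers are replaced by plain registry dictionaries keyed by lowercase name, so lookup becomes a dict access. A self-contained sketch of the pattern with dummy classes:

```python
# Sketch of the registry pattern that replaces the kobo.plugins containers.
class GatherSourceComps(object):
    pass


class GatherSourceNone(object):
    pass


ALL_SOURCES = {"comps": GatherSourceComps, "none": GatherSourceNone}


def get_gather_source(name):
    return ALL_SOURCES[name.lower()]


assert get_gather_source("Comps") is GatherSourceComps
```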
@@ -87,10 +85,11 @@ class GatherPhase(PhaseBase):
             if variant.modules:
                 errors.append("Modular compose requires libmodulemd package.")

-        # check whether variants from configuration value
-        # 'variant_as_lookaside' are correct
         variant_as_lookaside = self.compose.conf.get("variant_as_lookaside", [])
         all_variants = self.compose.all_variants
+
+        # check whether variants from configuration value
+        # 'variant_as_lookaside' are correct
         for (requiring, required) in variant_as_lookaside:
             if requiring in all_variants and required not in all_variants:
                 errors.append(
@@ -98,6 +97,22 @@ class GatherPhase(PhaseBase):
                     "required by %r" % (required, requiring)
                 )

+        # check whether variants from configuration value
+        # 'variant_as_lookaside' have same architectures
+        for (requiring, required) in variant_as_lookaside:
+            if (
+                requiring in all_variants
+                and required in all_variants
+                and not set(all_variants[requiring].arches).issubset(
+                    set(all_variants[required].arches)
+                )
+            ):
+                errors.append(
+                    "variant_as_lookaside: architectures of variant '%s' "
+                    "aren't subset of architectures of variant '%s'"
+                    % (requiring, required)
+                )
+
         if errors:
             raise ValueError("\n".join(errors))

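Note: the added check requires that a variant using another variant as lookaside is built for a subset of the lookaside variant's architectures; otherwise some arches would have no lookaside to draw from. The test in isolation, with illustrative arch sets:

```python
# Illustrative architecture sets for a requiring/required variant pair.
requiring_arches = {"x86_64", "ppc64le"}
required_arches = {"x86_64", "ppc64le", "s390x"}

# Valid: every arch of the requiring variant exists in the lookaside variant.
print(set(requiring_arches).issubset(set(required_arches)))  # True
```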
@@ -178,27 +193,19 @@ def load_old_gather_result(compose, arch, variant):
         return None

     compose.log_info("Loading old GATHER phase results: %s", old_gather_result)
-    with open(old_gather_result, "rb") as f:
-        old_result = pickle.load(f)
-        return old_result
-
-
-def load_old_compose_config(compose):
-    """
-    Helper method to load Pungi config dump from old compose.
-    """
-    config_dump_full = compose.paths.log.log_file("global", "config-dump")
-    config_dump_full = compose.paths.old_compose_path(config_dump_full)
-    if not config_dump_full:
+    try:
+        with open(old_gather_result, "rb") as f:
+            old_result = pickle.load(f)
+            return old_result
+    except Exception as e:
+        compose.log_debug(
+            "Failed to load old GATHER phase results %s : %s"
+            % (old_gather_result, str(e))
+        )
         return None

-    compose.log_info("Loading old config file: %s", config_dump_full)
-    with open(config_dump_full, "r") as f:
-        old_config = json.load(f)
-    return old_config


-def reuse_old_gather_packages(compose, arch, variant, package_sets):
+def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
     """
     Tries to reuse `gather_packages` result from older compose.

@@ -206,6 +213,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
    :param str arch: Architecture to reuse old gather data for.
    :param str variant: Variant to reuse old gather data for.
    :param list package_sets: List of package sets to gather packages from.
+   :param str methods: Gather method.
    :return: Old `gather_packages` result or None if old result cannot be used.
    """
    log_msg = "Cannot reuse old GATHER phase results - %s"
@@ -218,38 +226,38 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
        compose.log_info(log_msg % "no old gather results.")
        return

-    old_config = load_old_compose_config(compose)
+    old_config = compose.load_old_compose_config()
     if old_config is None:
         compose.log_info(log_msg % "no old compose config dump.")
         return

+    # Do not reuse when required variant is not reused.
+    if not hasattr(compose, "_gather_reused_variant_arch"):
+        setattr(compose, "_gather_reused_variant_arch", [])
+    variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
+    for (requiring, required) in variant_as_lookaside:
+        if (
+            requiring == variant.uid
+            and (required, arch) not in compose._gather_reused_variant_arch
+        ):
+            compose.log_info(
+                log_msg % "variant %s as lookaside is not reused." % required
+            )
+            return
+
+    # Do not reuse if there's external lookaside repo.
+    with open(compose.paths.log.log_file("global", "config-dump"), "r") as f:
+        config_dump = json.load(f)
+    if config_dump.get("gather_lookaside_repos") or old_config.get(
+        "gather_lookaside_repos"
+    ):
+        compose.log_info(log_msg % "there's external lookaside repo.")
+        return
+
     # The dumps/loads is needed to convert all unicode strings to non-unicode ones.
     config = json.loads(json.dumps(compose.conf))
     for opt, value in old_config.items():
-        # Gather lookaside repos are updated during the gather phase. Check that
-        # the gather_lookaside_repos except the ones added are the same.
-        if opt == "gather_lookaside_repos" and opt in config:
-            value_to_compare = []
-            # Filter out repourls which starts with `compose.topdir` and also remove
-            # their parent list in case it would be empty.
-            for variant, per_arch_repos in config[opt]:
-                per_arch_repos_to_compare = {}
-                for arch, repourl in per_arch_repos.items():
-                    # The gather_lookaside_repos config allows setting multiple repourls
-                    # using list, but `_update_config` always uses strings. Therefore we
-                    # only try to filter out string_types.
-                    if not isinstance(repourl, six.string_types):
-                        continue
-                    if not repourl.startswith(compose.topdir):
-                        per_arch_repos_to_compare[arch] = repourl
-                if per_arch_repos_to_compare:
-                    value_to_compare.append([variant, per_arch_repos_to_compare])
-            if value != value_to_compare:
-                compose.log_info(
-                    log_msg
-                    % ("compose configuration option gather_lookaside_repos changed.")
-                )
-                return
+        if opt == "gather_lookaside_repos":
             continue

         # Skip checking for frequently changing configuration options which do *not*
@@ -378,6 +386,30 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets):
         compose.log_info(log_msg % "some RPMs have been removed.")
         return

+    compose._gather_reused_variant_arch.append((variant.uid, arch))
+
+    # Copy old gather log for debugging
+    try:
+        if methods == "hybrid":
+            log_dir = compose.paths.log.topdir(arch, create_dir=False)
+            old_log_dir = compose.paths.old_compose_path(log_dir)
+            for log_file in glob.glob(
+                os.path.join(old_log_dir, "hybrid-depsolver-%s-iter-*" % variant)
+            ):
+                compose.log_info(
+                    "Copying old gather log %s to %s" % (log_file, log_dir)
+                )
+                shutil.copy2(log_file, log_dir)
+        else:
+            log_dir = os.path.dirname(
+                compose.paths.work.pungi_log(arch, variant, create_dir=False)
+            )
+            old_log_dir = compose.paths.old_compose_path(log_dir)
+            compose.log_info("Copying old gather log %s to %s" % (old_log_dir, log_dir))
+            shutil.copytree(old_log_dir, log_dir)
+    except Exception as e:
+        compose.log_warning("Copying old gather log failed: %s" % str(e))
+
     return result


@@ -404,7 +436,9 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
     prepopulate = get_prepopulate_packages(compose, arch, variant)
     fulltree_excludes = fulltree_excludes or set()

-    reused_result = reuse_old_gather_packages(compose, arch, variant, package_sets)
+    reused_result = reuse_old_gather_packages(
+        compose, arch, variant, package_sets, methods
+    )
     if reused_result:
         result = reused_result
     elif methods == "hybrid":
@@ -507,7 +541,8 @@ def write_packages(compose, arch, variant, pkg_map, path_prefix):


 def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs=None):
-    """Remove parent variant's packages from pkg_map <-- it gets modified in this function
+    """Remove parent variant's packages from pkg_map <-- it gets modified in
+    this function

     There are three cases where changes may happen:

@@ -613,19 +648,33 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
         )
         + "/",
         "koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
-            compose.conf["koji_profile"]
+            compose
         ).koji_module.config.topdir.rstrip("/")
         + "/",
     }
     path_prefix = prefixes[compose.conf["pkgset_source"]]()
+    package_list = set()
+    for pkg_arch in pkg_map.keys():
+        try:
+            for pkg_type, packages in pkg_map[pkg_arch][variant.uid].items():
+                # We want all packages for current arch, and SRPMs for any
+                # arch. Ultimately there will only be one source repository, so
+                # we need a union of all SRPMs.
+                if pkg_type == "srpm" or pkg_arch == arch:
+                    for pkg in packages:
+                        pkg = pkg["path"]
+                        if path_prefix and pkg.startswith(path_prefix):
+                            pkg = pkg[len(path_prefix) :]
+                        package_list.add(pkg)
+        except KeyError:
+            raise RuntimeError(
+                "Variant '%s' does not have architecture " "'%s'!" % (variant, pkg_arch)
+            )
+
     pkglist = compose.paths.work.lookaside_package_list(arch=arch, variant=variant)
     with open(pkglist, "w") as f:
-        for packages in pkg_map[arch][variant.uid].values():
-            for pkg in packages:
-                pkg = pkg["path"]
-                if path_prefix and pkg.startswith(path_prefix):
-                    pkg = pkg[len(path_prefix) :]
-                f.write("%s\n" % pkg)
+        for pkg in sorted(package_list):
+            f.write("%s\n" % pkg)

     cr = CreaterepoWrapper(compose.conf["createrepo_c"])
     update_metadata = None
@@ -661,6 +710,8 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
     collect_module_defaults(
         defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
     )
+    obsoletes_dir = compose.paths.work.module_obsoletes_dir()
+    mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)

     log_file = compose.paths.log.log_file(
         arch, "lookaside_repo_modules_%s" % (variant.uid)
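Note: the lookaside package list built above takes binary packages for the current arch plus SRPMs from every arch, because there is only one source repository per variant. A sketch over a tiny hypothetical `pkg_map` (its shape mirrors what the loop above indexes):

```python
# Hypothetical pkg_map; shape mirrors pkg_map[arch][variant_uid][pkg_type]
# as indexed by the loop above, with entries carrying a "path" key.
pkg_map = {
    "x86_64": {"Server": {"rpm": [{"path": "bash-5.1-1.x86_64.rpm"}],
                          "srpm": [{"path": "bash-5.1-1.src.rpm"}]}},
    "s390x": {"Server": {"rpm": [{"path": "bash-5.1-1.s390x.rpm"}],
                         "srpm": [{"path": "bash-5.1-1.src.rpm"}]}},
}

arch, variant_uid = "x86_64", "Server"
package_list = set()
for pkg_arch in pkg_map:
    for pkg_type, packages in pkg_map[pkg_arch][variant_uid].items():
        if pkg_type == "srpm" or pkg_arch == arch:
            package_list.update(p["path"] for p in packages)

print(sorted(package_list))  # x86_64 rpm plus the deduplicated SRPM union
```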
@@ -736,6 +787,10 @@ def _gather_variants(
             try:
                 que.put((arch, gather_packages(*args, **kwargs)))
             except Exception as exc:
+                compose.log_error(
+                    "Error in gathering for %s.%s: %s", variant, arch, exc
+                )
+                compose.traceback("gather-%s-%s" % (variant, arch))
                 errors.put(exc)

     # Run gather_packages() in parallel with multi threads and store
@@ -14,15 +14,6 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-import kobo.plugins
-
-
-class GatherMethodBase(kobo.plugins.Plugin):
+class GatherMethodBase(object):
     def __init__(self, compose):
         self.compose = compose
-
-
-class GatherMethodContainer(kobo.plugins.PluginContainer):
-    @classmethod
-    def normalize_name(cls, name):
-        return name.lower()
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+from .method_deps import GatherMethodDeps
+from .method_nodeps import GatherMethodNodeps
+from .method_hybrid import GatherMethodHybrid
+
+ALL_METHODS = {
+    "deps": GatherMethodDeps,
+    "nodeps": GatherMethodNodeps,
+    "hybrid": GatherMethodHybrid,
+}
@@ -15,6 +15,7 @@


 import os
+import shutil

 from kobo.shortcuts import run
 from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
@@ -31,8 +32,6 @@ import pungi.phases.gather.method


 class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
-    enabled = True
-
     def __call__(
         self,
         arch,
@@ -243,8 +242,19 @@ def resolve_deps(compose, arch, variant, source_name=None):
     )
     # Use temp working directory directory as workaround for
     # https://bugzilla.redhat.com/show_bug.cgi?id=795137
-    with temp_dir(prefix="pungi_") as tmp_dir:
-        run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir, env=os.environ)
+    with temp_dir(prefix="pungi_") as work_dir:
+        run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)
+
+    # Clean up tmp dir
+    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
+    yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
+    if os.path.isdir(yumroot_dir):
+        try:
+            shutil.rmtree(yumroot_dir)
+        except Exception as e:
+            compose.log_warning(
+                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
+            )

     with open(pungi_log, "r") as f:
         packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)
@@ -60,8 +60,6 @@ class FakePackage(object):


 class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
-    enabled = True
-
     def __init__(self, *args, **kwargs):
         super(GatherMethodHybrid, self).__init__(*args, **kwargs)
         self.package_maps = {}
@@ -351,8 +349,11 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):

                 # There are two ways how the debuginfo package can be named. We
                 # want to get them all.
-                for pattern in ["%s-debuginfo", "%s-debugsource"]:
-                    debuginfo_name = pattern % pkg.name
+                source_name = kobo.rpmlib.parse_nvra(pkg.rpm_sourcerpm)["name"]
+                for debuginfo_name in [
+                    "%s-debuginfo" % pkg.name,
+                    "%s-debugsource" % source_name,
+                ]:
                     debuginfo = self._get_debuginfo(debuginfo_name, pkg_arch)
                     for dbg in debuginfo:
                         # For each debuginfo package that matches on name and
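Note: debugsource packages are named after the source package rather than the binary subpackage, hence the switch to deriving the name from `rpm_sourcerpm`. An example with a hypothetical subpackage:

```python
import kobo.rpmlib

# Hypothetical binary subpackage built from the gcc source RPM.
pkg_name = "libgcc"
sourcerpm = "gcc-11.2.1-1.fc35.src.rpm"

source_name = kobo.rpmlib.parse_nvra(sourcerpm)["name"]
candidates = ["%s-debuginfo" % pkg_name, "%s-debugsource" % source_name]
print(candidates)  # ['libgcc-debuginfo', 'gcc-debugsource']
```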
@@ -501,6 +502,27 @@ def _make_result(paths):
     return [{"path": path, "flags": []} for path in sorted(paths)]


+def get_repo_packages(path):
+    """Extract file names of all packages in the given repository."""
+
+    packages = set()
+
+    def callback(pkg):
+        packages.add(os.path.basename(pkg.location_href))
+
+    repomd = os.path.join(path, "repodata/repomd.xml")
+    with as_local_file(repomd) as url_:
+        repomd = cr.Repomd(url_)
+    for rec in repomd.records:
+        if rec.type != "primary":
+            continue
+        record_url = os.path.join(path, rec.location_href)
+        with as_local_file(record_url) as url_:
+            cr.xml_parse_primary(url_, pkgcb=callback, do_files=False)
+
+    return packages
+
+
 def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
     """For each package add source RPM."""
     # This will serve as the final result. We collect sets of paths to the
@@ -511,25 +533,16 @@ def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):

     filters = set(filter_packages)

-    # Collect list of all packages in lookaside. These will not be added to the
-    # result. Fus handles this in part: if a package is explicitly mentioned as
-    # input (which can happen with comps group expansion), it will be in the
-    # output even if it's in lookaside.
     lookaside_packages = set()
     for repo in lookasides:
-        md = cr.Metadata()
-        md.locate_and_load_xml(repo)
-        for key in md.keys():
-            pkg = md.get(key)
-            url = os.path.join(pkg.location_base or repo, pkg.location_href)
-            # Strip file:// prefix
-            lookaside_packages.add(url[7:])
+        lookaside_packages.update(get_repo_packages(repo))

     for nvr, pkg_arch, flags in nvrs:
         pkg = nevra_to_pkg["%s.%s" % (nvr, pkg_arch)]
-        if pkg.file_path in lookaside_packages:
-            # Package is in lookaside, don't add it and ignore sources and
-            # debuginfo too.
+        if os.path.basename(pkg.file_path) in lookaside_packages:
+            # Fus can return lookaside package in output if the package is
+            # explicitly listed as input. This can happen during comps
+            # expansion.
             continue
         if pkg_is_debug(pkg):
             debuginfo.add(pkg.file_path)
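Note: with `get_repo_packages` returning basenames, lookaside membership is now checked on file names rather than full paths, so the same RPM is recognized no matter where the depsolver found it. A sketch with an illustrative set:

```python
import os

# Hypothetical result of get_repo_packages() on a lookaside repo.
lookaside_packages = {"bash-5.1-1.fc35.x86_64.rpm"}

# Matching on basenames recognizes the package wherever it is stored.
file_path = "/mnt/koji/packages/bash/5.1/1.fc35/x86_64/bash-5.1-1.fc35.x86_64.rpm"
print(os.path.basename(file_path) in lookaside_packages)  # True
```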
@@ -542,7 +555,7 @@ def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
                 if (srpm.name, "src") in filters:
                     # Filtered package, skipping
                     continue
-                if srpm.file_path not in lookaside_packages:
+                if os.path.basename(srpm.file_path) not in lookaside_packages:
                     srpms.add(srpm.file_path)
             except KeyError:
                 # Didn't find source RPM.. this should be logged
@@ -28,8 +28,6 @@ from kobo.pkgset import SimpleRpmWrapper, RpmWrapper


 class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
-    enabled = True
-
     def __call__(self, arch, variant, *args, **kwargs):
         fname = "gather-nodeps-%s" % variant.uid
         if self.source_name:
@@ -14,15 +14,6 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-import kobo.plugins
-
-
-class GatherSourceBase(kobo.plugins.Plugin):
+class GatherSourceBase(object):
     def __init__(self, compose):
         self.compose = compose
-
-
-class GatherSourceContainer(kobo.plugins.PluginContainer):
-    @classmethod
-    def normalize_name(cls, name):
-        return name.lower()
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+from .source_comps import GatherSourceComps
+from .source_json import GatherSourceJson
+from .source_module import GatherSourceModule
+from .source_none import GatherSourceNone
+
+ALL_SOURCES = {
+    "comps": GatherSourceComps,
+    "json": GatherSourceJson,
+    "module": GatherSourceModule,
+    "none": GatherSourceNone,
+}
@@ -30,8 +30,6 @@ import pungi.phases.gather.source


 class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
-    enabled = True
-
     def __call__(self, arch, variant):
         groups = set()
         if not self.compose.conf.get("comps_file"):
@@ -32,30 +32,31 @@ set([(rpm_name, rpm_arch or None)])


 import json
+import os

 import pungi.phases.gather.source


 class GatherSourceJson(pungi.phases.gather.source.GatherSourceBase):
-    enabled = True
-
     def __call__(self, arch, variant):
         json_path = self.compose.conf.get("gather_source_mapping")
         if not json_path:
             return set(), set()
-        with open(json_path, "r") as f:
+        with open(os.path.join(self.compose.config_dir, json_path), "r") as f:
             mapping = json.load(f)

         packages = set()
         if variant is None:
             # get all packages for all variants
             for variant_uid in mapping:
-                for pkg_name, pkg_arches in mapping[variant_uid][arch].items():
+                for pkg_name, pkg_arches in mapping[variant_uid].get(arch, {}).items():
                     for pkg_arch in pkg_arches:
                         packages.add((pkg_name, pkg_arch))
         else:
             # get packages for a particular variant
-            for pkg_name, pkg_arches in mapping[variant.uid][arch].items():
+            for pkg_name, pkg_arches in (
+                mapping.get(variant.uid, {}).get(arch, {}).items()
+            ):
                 for pkg_arch in pkg_arches:
                     packages.add((pkg_name, pkg_arch))
         return packages, set()
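Note: the chained `.get(..., {})` lookups make a missing variant or arch yield an empty result instead of a KeyError. A sketch with a hypothetical mapping:

```python
# Hypothetical gather_source_mapping content.
mapping = {"Server": {"x86_64": {"bash": ["x86_64"]}}}


def packages_for(variant_uid, arch):
    return {
        (name, pkg_arch)
        for name, arches in mapping.get(variant_uid, {}).get(arch, {}).items()
        for pkg_arch in arches
    }


print(packages_for("Server", "x86_64"))  # {('bash', 'x86_64')}
print(packages_for("Server", "s390x"))   # set() instead of a KeyError
```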
@@ -26,8 +26,6 @@ import pungi.phases.gather.source


 class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
-    enabled = True
-
     def __call__(self, arch, variant):
         groups = set()
         packages = set()
@@ -29,7 +29,5 @@ import pungi.phases.gather.source


 class GatherSourceNone(pungi.phases.gather.source.GatherSourceBase):
-    enabled = True
-
     def __call__(self, arch, variant):
         return set(), set()
@@ -1,18 +1,22 @@
 # -*- coding: utf-8 -*-

 import copy
+import hashlib
+import json
 import os
+import shutil
 import time
 from kobo import shortcuts

 from pungi.util import makedirs, get_mtime, get_file_size, failable, log_failed_task
-from pungi.util import translate_path, get_repo_urls, version_generator
+from pungi.util import as_local_file, translate_path, get_repo_urls, version_generator
 from pungi.phases import base
 from pungi.linker import Linker
 from pungi.wrappers.kojiwrapper import KojiWrapper
 from kobo.threads import ThreadPool, WorkerThread
 from kobo.shortcuts import force_list
 from productmd.images import Image
+from productmd.rpms import Rpms


 # This is a mapping from formats to file extensions. The format is what koji
@@ -46,9 +50,10 @@ class ImageBuildPhase(

     name = "image_build"

-    def __init__(self, compose):
+    def __init__(self, compose, buildinstall_phase=None):
         super(ImageBuildPhase, self).__init__(compose)
         self.pool = ThreadPool(logger=self.logger)
+        self.buildinstall_phase = buildinstall_phase

     def _get_install_tree(self, image_conf, variant):
         """
@@ -117,6 +122,7 @@ class ImageBuildPhase(
             # prevent problems in next iteration where the original
             # value is needed.
             image_conf = copy.deepcopy(image_conf)
+            original_image_conf = copy.deepcopy(image_conf)

             # image_conf is passed to get_image_build_cmd as dict

@@ -167,6 +173,7 @@ class ImageBuildPhase(
             image_conf["image-build"]["can_fail"] = sorted(can_fail)

             cmd = {
+                "original_image_conf": original_image_conf,
                 "image_conf": image_conf,
                 "conf_file": self.compose.paths.work.image_build_conf(
                     image_conf["image-build"]["variant"],
@@ -182,7 +189,7 @@ class ImageBuildPhase(
                 "scratch": image_conf["image-build"].pop("scratch", False),
             }
             self.pool.add(CreateImageBuildThread(self.pool))
-            self.pool.queue_put((self.compose, cmd))
+            self.pool.queue_put((self.compose, cmd, self.buildinstall_phase))

         self.pool.start()

@@ -192,7 +199,7 @@ class CreateImageBuildThread(WorkerThread):
         self.pool.log_error("CreateImageBuild failed.")

     def process(self, item, num):
-        compose, cmd = item
+        compose, cmd, buildinstall_phase = item
         variant = cmd["image_conf"]["image-build"]["variant"]
         subvariant = cmd["image_conf"]["image-build"].get("subvariant", variant.uid)
         self.failable_arches = cmd["image_conf"]["image-build"].get("can_fail", "")
@ -208,22 +215,54 @@ class CreateImageBuildThread(WorkerThread):
|
|||||||
subvariant,
|
subvariant,
|
||||||
logger=self.pool._logger,
|
logger=self.pool._logger,
|
||||||
):
|
):
|
||||||
self.worker(num, compose, variant, subvariant, cmd)
|
self.worker(num, compose, variant, subvariant, cmd, buildinstall_phase)
|
||||||
|
|
||||||
def worker(self, num, compose, variant, subvariant, cmd):
|
def worker(self, num, compose, variant, subvariant, cmd, buildinstall_phase):
|
||||||
arches = cmd["image_conf"]["image-build"]["arches"]
|
arches = cmd["image_conf"]["image-build"]["arches"]
|
||||||
formats = "-".join(cmd["image_conf"]["image-build"]["format"])
|
formats = "-".join(cmd["image_conf"]["image-build"]["format"])
|
||||||
dash_arches = "-".join(arches)
|
dash_arches = "-".join(arches)
|
||||||
log_file = compose.paths.log.log_file(
|
log_file = compose.paths.log.log_file(
|
||||||
dash_arches, "imagebuild-%s-%s-%s" % (variant.uid, subvariant, formats)
|
dash_arches, "imagebuild-%s-%s-%s" % (variant.uid, subvariant, formats)
|
||||||
)
|
)
|
||||||
|
metadata_file = log_file[:-4] + ".reuse.json"
|
||||||
|
|
||||||
|
external_repo_checksum = {}
|
||||||
|
try:
|
||||||
|
for repo in cmd["original_image_conf"]["image-build"]["repo"]:
|
||||||
|
if repo in compose.all_variants:
|
||||||
|
continue
|
||||||
|
with as_local_file(
|
||||||
|
os.path.join(repo, "repodata/repomd.xml")
|
||||||
|
) as filename:
|
||||||
|
with open(filename, "rb") as f:
|
||||||
|
external_repo_checksum[repo] = hashlib.sha256(
|
||||||
|
f.read()
|
||||||
|
).hexdigest()
|
||||||
|
except Exception as e:
|
||||||
|
external_repo_checksum = None
|
||||||
|
self.pool.log_info(
|
||||||
|
"Can't calculate checksum of repomd.xml of external repo - %s" % str(e)
|
||||||
|
)
|
||||||
|
|
||||||
|
if self._try_to_reuse(
|
||||||
|
compose,
|
||||||
|
variant,
|
||||||
|
subvariant,
|
||||||
|
metadata_file,
|
||||||
|
log_file,
|
||||||
|
cmd,
|
||||||
|
external_repo_checksum,
|
||||||
|
buildinstall_phase,
|
||||||
|
):
|
||||||
|
return
|
||||||
|
|
||||||
msg = (
|
msg = (
|
||||||
"Creating image (formats: %s, arches: %s, variant: %s, subvariant: %s)"
|
"Creating image (formats: %s, arches: %s, variant: %s, subvariant: %s)"
|
||||||
% (formats, dash_arches, variant, subvariant)
|
% (formats, dash_arches, variant, subvariant)
|
||||||
)
|
)
|
||||||
self.pool.log_info("[BEGIN] %s" % msg)
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
|
|
||||||
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
|
koji_wrapper = KojiWrapper(compose)
|
||||||
|
|
||||||
# writes conf file for koji image-build
|
# writes conf file for koji image-build
|
||||||
self.pool.log_info(
|
self.pool.log_info(
|
||||||
@ -275,6 +314,22 @@ class CreateImageBuildThread(WorkerThread):
|
|||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
|
self._link_images(compose, variant, subvariant, cmd, image_infos)
|
||||||
|
self._write_reuse_metadata(
|
||||||
|
compose, metadata_file, cmd, image_infos, external_repo_checksum
|
||||||
|
)
|
||||||
|
|
||||||
|
self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
|
||||||
|
|
||||||
|
def _link_images(self, compose, variant, subvariant, cmd, image_infos):
|
||||||
|
"""Link images to compose and update image manifest.
|
||||||
|
|
||||||
|
:param Compose compose: Current compose.
|
||||||
|
:param Variant variant: Current variant.
|
||||||
|
:param str subvariant:
|
||||||
|
:param dict cmd: Dict of params for image-build.
|
||||||
|
:param dict image_infos: Dict contains image info.
|
||||||
|
"""
|
||||||
# The usecase here is that you can run koji image-build with multiple --format
|
# The usecase here is that you can run koji image-build with multiple --format
|
||||||
# It's ok to do it serialized since we're talking about max 2 images per single
|
# It's ok to do it serialized since we're talking about max 2 images per single
|
||||||
# image_build record
|
# image_build record
|
||||||
@ -308,4 +363,160 @@ class CreateImageBuildThread(WorkerThread):
|
|||||||
setattr(img, "deliverable", "image-build")
|
setattr(img, "deliverable", "image-build")
|
||||||
compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)
|
compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)
|
||||||
|
|
||||||
self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
|
def _try_to_reuse(
|
||||||
|
self,
|
||||||
|
compose,
|
||||||
|
variant,
|
||||||
|
subvariant,
|
||||||
|
metadata_file,
|
||||||
|
log_file,
|
||||||
|
cmd,
|
||||||
|
external_repo_checksum,
|
||||||
|
buildinstall_phase,
|
||||||
|
):
|
||||||
|
"""Try to reuse images from old compose.
|
||||||
|
|
||||||
|
:param Compose compose: Current compose.
|
||||||
|
:param Variant variant: Current variant.
|
||||||
|
:param str subvariant:
|
||||||
|
:param str metadata_file: Path to reuse metadata file.
|
||||||
|
:param str log_file: Path to log file.
|
||||||
|
:param dict cmd: Dict of params for image-build.
|
||||||
|
:param dict external_repo_checksum: Dict contains checksum of repomd.xml
|
||||||
|
or None if can't get checksum.
|
||||||
|
:param BuildinstallPhase buildinstall_phase: buildinstall phase of
|
||||||
|
current compose.
|
||||||
|
"""
|
||||||
|
log_msg = "Cannot reuse old image_build phase results - %s"
|
||||||
|
if not compose.conf["image_build_allow_reuse"]:
|
||||||
|
self.pool.log_info(
|
||||||
|
log_msg % "reuse of old image_build results is disabled."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if external_repo_checksum is None:
|
||||||
|
self.pool.log_info(
|
||||||
|
log_msg % "Can't ensure that external repo is not changed."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
old_metadata_file = compose.paths.old_compose_path(metadata_file)
|
||||||
|
if not old_metadata_file:
|
||||||
|
self.pool.log_info(log_msg % "Can't find old reuse metadata file")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
old_metadata = self._load_reuse_metadata(old_metadata_file)
|
||||||
|
except Exception as e:
|
||||||
|
self.pool.log_info(
|
||||||
|
log_msg % "Can't load old reuse metadata file: %s" % str(e)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if old_metadata["cmd"]["original_image_conf"] != cmd["original_image_conf"]:
|
||||||
|
self.pool.log_info(log_msg % "image_build config changed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Make sure external repo does not change
|
||||||
|
if (
|
||||||
|
old_metadata["external_repo_checksum"] is None
|
||||||
|
or old_metadata["external_repo_checksum"] != external_repo_checksum
|
||||||
|
):
|
||||||
|
self.pool.log_info(log_msg % "External repo may be changed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Make sure buildinstall phase is reused
|
||||||
|
for arch in cmd["image_conf"]["image-build"]["arches"]:
|
||||||
|
if buildinstall_phase and not buildinstall_phase.reused(variant, arch):
|
||||||
|
self.pool.log_info(log_msg % "buildinstall phase changed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Make sure packages in variant not change
|
||||||
|
rpm_manifest_file = compose.paths.compose.metadata("rpms.json")
|
||||||
|
rpm_manifest = Rpms()
|
||||||
|
rpm_manifest.load(rpm_manifest_file)
|
||||||
|
|
||||||
|
old_rpm_manifest_file = compose.paths.old_compose_path(rpm_manifest_file)
|
||||||
|
old_rpm_manifest = Rpms()
|
||||||
|
old_rpm_manifest.load(old_rpm_manifest_file)
|
||||||
|
|
||||||
|
for repo in cmd["original_image_conf"]["image-build"]["repo"]:
|
||||||
|
if repo not in compose.all_variants:
|
||||||
|
# External repos are checked using other logic.
|
||||||
|
continue
|
||||||
|
for arch in cmd["image_conf"]["image-build"]["arches"]:
|
||||||
|
if (
|
||||||
|
rpm_manifest.rpms[variant.uid][arch]
|
||||||
|
!= old_rpm_manifest.rpms[variant.uid][arch]
|
||||||
|
):
|
||||||
|
self.pool.log_info(
|
||||||
|
log_msg % "Packages in %s.%s changed." % (variant.uid, arch)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.pool.log_info(
|
||||||
|
"Reusing images from old compose for variant %s" % variant.uid
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
self._link_images(
|
||||||
|
compose, variant, subvariant, cmd, old_metadata["image_infos"]
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
self.pool.log_info(log_msg % "Can't link images %s" % str(e))
|
||||||
|
return False
|
||||||
|
|
||||||
|
old_log_file = compose.paths.old_compose_path(log_file)
|
||||||
|
try:
|
||||||
|
shutil.copy2(old_log_file, log_file)
|
||||||
|
except Exception as e:
|
||||||
|
self.pool.log_info(
|
||||||
|
log_msg % "Can't copy old log_file: %s %s" % (old_log_file, str(e))
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._write_reuse_metadata(
|
||||||
|
compose,
|
||||||
|
metadata_file,
|
||||||
|
cmd,
|
||||||
|
old_metadata["image_infos"],
|
||||||
|
external_repo_checksum,
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _write_reuse_metadata(
|
||||||
|
self, compose, metadata_file, cmd, image_infos, external_repo_checksum
|
||||||
|
):
|
||||||
|
"""Write metadata file.
|
||||||
|
|
||||||
|
:param Compose compose: Current compose.
|
||||||
|
:param str metadata_file: Path to reuse metadata file.
|
||||||
|
:param dict cmd: Dict of params for image-build.
|
||||||
|
:param dict image_infos: Dict contains image info.
|
||||||
|
:param dict external_repo_checksum: Dict contains checksum of repomd.xml
|
||||||
|
or None if can't get checksum.
|
||||||
|
"""
|
||||||
|
msg = "Writing reuse metadata file: %s" % metadata_file
|
||||||
|
self.pool.log_info(msg)
|
||||||
|
|
||||||
|
cmd_copy = copy.deepcopy(cmd)
|
||||||
|
del cmd_copy["image_conf"]["image-build"]["variant"]
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"cmd": cmd_copy,
|
||||||
|
"image_infos": image_infos,
|
||||||
|
"external_repo_checksum": external_repo_checksum,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
with open(metadata_file, "w") as f:
|
||||||
|
json.dump(data, f, indent=4)
|
||||||
|
except Exception as e:
|
||||||
|
self.pool.log_info("%s Failed: %s" % (msg, str(e)))
|
||||||
|
|
||||||
|
def _load_reuse_metadata(self, metadata_file):
|
||||||
|
"""Load metadata file.
|
||||||
|
|
||||||
|
:param str metadata_file: Path to reuse metadata file.
|
||||||
|
"""
|
||||||
|
with open(metadata_file, "r") as f:
|
||||||
|
return json.load(f)
|
||||||
|
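The reuse check above hinges on hashing an external repo's `repodata/repomd.xml`: that file is rewritten whenever repo metadata is regenerated, so equal digests imply an unchanged package list. A minimal standalone sketch of the same idea, using plain `urllib` instead of pungi's `as_local_file` helper (that substitution is an assumption of this example):

```python
import hashlib
from urllib.request import urlopen  # stand-in for pungi's as_local_file helper


def repomd_checksum(repo_url):
    """Return the SHA-256 digest of <repo>/repodata/repomd.xml, or None on failure."""
    url = repo_url.rstrip("/") + "/repodata/repomd.xml"
    try:
        with urlopen(url) as resp:
            return hashlib.sha256(resp.read()).hexdigest()
    except Exception:
        # If the checksum cannot be computed, the caller must assume the
        # repo changed and skip reuse, mirroring _try_to_reuse above.
        return None
```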
pungi/phases/image_checksum.py

@@ -3,6 +3,7 @@
 import os
 from kobo import shortcuts
 from collections import defaultdict
+import threading
 
 from .base import PhaseBase
 from ..util import get_format_substs, get_file_size
@@ -68,6 +69,7 @@ class ImageChecksumPhase(PhaseBase):
 
     def run(self):
         topdir = self.compose.paths.compose.topdir()
+
         make_checksums(
             topdir,
             self.compose.im,
@@ -87,6 +89,8 @@ def _compute_checksums(
     checksum_types,
     base_checksum_name_gen,
     one_file,
+    results_lock,
+    cache_lock,
 ):
     for image in images:
         filename = os.path.basename(image.path)
@@ -96,14 +100,21 @@ def _compute_checksums(
 
         filesize = image.size or get_file_size(full_path)
 
+        cache_lock.acquire()
         if full_path not in cache:
+            cache_lock.release()
             # Source ISO is listed under each binary architecture. There's no
             # point in checksumming it twice, so we can just remember the
             # digest from first run..
-            cache[full_path] = shortcuts.compute_file_checksums(
-                full_path, checksum_types
-            )
-        digests = cache[full_path]
+            checksum_value = shortcuts.compute_file_checksums(full_path, checksum_types)
+            with cache_lock:
+                cache[full_path] = checksum_value
+        else:
+            cache_lock.release()
+
+        with cache_lock:
+            digests = cache[full_path]
 
         for checksum, digest in digests.items():
             # Update metadata with the checksum
             image.add_checksum(None, checksum, digest)
@@ -112,7 +123,10 @@ def _compute_checksums(
             checksum_filename = os.path.join(
                 path, "%s.%sSUM" % (filename, checksum.upper())
             )
-            results[checksum_filename].add((filename, filesize, checksum, digest))
+            with results_lock:
+                results[checksum_filename].add(
+                    (filename, filesize, checksum, digest)
+                )
 
             if one_file:
                 dirname = os.path.basename(path)
@@ -125,24 +139,42 @@ def _compute_checksums(
                 checksum_filename = "%s%sSUM" % (base_checksum_name, checksum.upper())
                 checksum_path = os.path.join(path, checksum_filename)
 
-                results[checksum_path].add((filename, filesize, checksum, digest))
+                with results_lock:
+                    results[checksum_path].add((filename, filesize, checksum, digest))
 
 
 def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen):
     results = defaultdict(set)
     cache = {}
+    threads = []
+    results_lock = threading.Lock()  # lock to synchronize access to the results dict.
+    cache_lock = threading.Lock()  # lock to synchronize access to the cache dict.
+
+    # create all worker threads
     for (variant, arch, path), images in get_images(topdir, im).items():
-        _compute_checksums(
-            results,
-            cache,
-            variant,
-            arch,
-            path,
-            images,
-            checksum_types,
-            base_checksum_name_gen,
-            one_file,
+        threads.append(
+            threading.Thread(
+                target=_compute_checksums,
+                args=[
+                    results,
+                    cache,
+                    variant,
+                    arch,
+                    path,
+                    images,
+                    checksum_types,
+                    base_checksum_name_gen,
+                    one_file,
+                    results_lock,
+                    cache_lock,
+                ],
+            )
        )
+        threads[-1].start()
+
+    # wait for all worker threads to finish
+    for thread in threads:
+        thread.join()
 
     for file in results:
         dump_checksums(file, results[file])
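Note the locking scheme above deliberately releases `cache_lock` while the expensive checksum runs, so two threads may occasionally hash the same file; both then write identical values, which is harmless. A condensed sketch of that pattern (names hypothetical, not pungi API):

```python
import threading

cache = {}
cache_lock = threading.Lock()


def cached_value(key, compute):
    """Compute `compute(key)` roughly once per key, safely across threads."""
    with cache_lock:
        if key in cache:
            return cache[key]
    value = compute(key)  # expensive work happens outside the lock
    with cache_lock:
        # A racing thread may have stored the same value already; both
        # results are identical, so the duplicate work costs only time.
        cache[key] = value
        return cache[key]
```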
pungi/phases/image_container.py (new file, 122 lines)

@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+from kobo.threads import ThreadPool, WorkerThread
+
+from .base import ConfigGuardedPhase, PhaseLoggerMixin
+from .. import util
+from ..wrappers import kojiwrapper
+from ..phases.osbs import add_metadata
+
+
+class ImageContainerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
+    name = "image_container"
+
+    def __init__(self, compose):
+        super(ImageContainerPhase, self).__init__(compose)
+        self.pool = ThreadPool(logger=self.logger)
+        self.pool.metadata = {}
+
+    def run(self):
+        for variant in self.compose.get_variants():
+            for conf in self.get_config_block(variant):
+                self.pool.add(ImageContainerThread(self.pool))
+                self.pool.queue_put((self.compose, variant, conf))
+
+        self.pool.start()
+
+
+class ImageContainerThread(WorkerThread):
+    def process(self, item, num):
+        compose, variant, config = item
+        self.num = num
+        with util.failable(
+            compose,
+            bool(config.pop("failable", None)),
+            variant,
+            "*",
+            "osbs",
+            logger=self.pool._logger,
+        ):
+            self.worker(compose, variant, config)
+
+    def worker(self, compose, variant, config):
+        msg = "Image container task for variant %s" % variant.uid
+        self.pool.log_info("[BEGIN] %s" % msg)
+
+        source = config.pop("url")
+        target = config.pop("target")
+        priority = config.pop("priority", None)
+
+        config["yum_repourls"] = [
+            self._get_repo(
+                compose,
+                variant,
+                config.get("arch_override", "").split(),
+                config.pop("image_spec"),
+            )
+        ]
+
+        # Start task
+        koji = kojiwrapper.KojiWrapper(compose)
+        koji.login()
+        task_id = koji.koji_proxy.buildContainer(
+            source, target, config, priority=priority
+        )
+
+        koji.save_task_id(task_id)
+
+        # Wait for it to finish and capture the output into log file (even
+        # though there is not much there).
+        log_dir = os.path.join(compose.paths.log.topdir(), "image_container")
+        util.makedirs(log_dir)
+        log_file = os.path.join(
+            log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
+        )
+        if koji.watch_task(task_id, log_file) != 0:
+            raise RuntimeError(
+                "ImageContainer: task %s failed: see %s for details"
+                % (task_id, log_file)
+            )
+
+        add_metadata(variant, task_id, compose, config.get("scratch", False))
+
+        self.pool.log_info("[DONE ] %s" % msg)
+
+    def _get_repo(self, compose, variant, arches, image_spec):
+        """
+        Return a repo file that points baseurl to the image specified by
+        image_spec.
+        """
+        image_paths = set()
+
+        for arch in arches or compose.im.images[variant.uid].keys():
+            for image in compose.im.images[variant.uid].get(arch, []):
+                for key, value in image_spec.items():
+                    if not re.match(value, getattr(image, key)):
+                        break
+                else:
+                    image_paths.add(image.path.replace(arch, "$basearch"))
+
+        if len(image_paths) != 1:
+            raise RuntimeError(
+                "%d images matched specification. Only one was expected."
+                % len(image_paths)
+            )
+
+        image_path = image_paths.pop()
+        absolute_path = os.path.join(compose.paths.compose.topdir(), image_path)
+
+        repo_file = os.path.join(
+            compose.paths.work.tmp_dir(None, variant),
+            "image-container-%s-%s.repo" % (variant, self.num),
+        )
+        with open(repo_file, "w") as f:
+            f.write("[image-to-include]\n")
+            f.write("name=Location of image to embed\n")
+            f.write("baseurl=%s\n" % util.translate_path(compose, absolute_path))
+            f.write("enabled=0\n")
+            f.write("gpgcheck=0\n")
+
+        return util.translate_path(compose, repo_file)
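`_get_repo` selects the image to embed by treating every value in `image_spec` as a regular expression matched against the attribute of the same name, relying on Python's for/else. A self-contained sketch of that matching step (the `Image` stand-in here is hypothetical, not productmd's class):

```python
import re
from collections import namedtuple

Image = namedtuple("Image", "type format arch path")  # illustrative stand-in


def matches_spec(image, image_spec):
    """Return True if every attribute named in image_spec matches its regex."""
    for key, pattern in image_spec.items():
        if not re.match(pattern, getattr(image, key)):
            return False
    return True


img = Image("qcow2", "qcow2", "x86_64", "Server/x86_64/images/disk.qcow2")
print(matches_spec(img, {"type": "qcow2", "arch": "x86_64"}))  # True
print(matches_spec(img, {"format": "iso"}))                    # False
```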
pungi/phases/init.py

@@ -16,6 +16,7 @@
 
 import collections
 import os
+import glob
 import shutil
 
 from kobo.shortcuts import run
@@ -72,6 +73,10 @@ class InitPhase(PhaseBase):
                 self.compose.paths.work.module_defaults_dir(create_dir=False)
             )
 
+        # download module obsoletes
+        if self.compose.has_module_obsoletes:
+            write_module_obsoletes(self.compose)
+
         # write prepopulate file
         write_prepopulate_file(self.compose)
 
@@ -218,12 +223,33 @@ def write_module_defaults(compose):
         )
 
 
+def write_module_obsoletes(compose):
+    scm_dict = compose.conf["module_obsoletes_dir"]
+    if isinstance(scm_dict, dict):
+        if scm_dict["scm"] == "file":
+            scm_dict["dir"] = os.path.join(compose.config_dir, scm_dict["dir"])
+    else:
+        scm_dict = os.path.join(compose.config_dir, scm_dict)
+
+    with temp_dir(prefix="moduleobsoletes_") as tmp_dir:
+        get_dir_from_scm(scm_dict, tmp_dir, compose=compose)
+        compose.log_debug("Writing module obsoletes")
+        shutil.copytree(
+            tmp_dir,
+            compose.paths.work.module_obsoletes_dir(create_dir=False),
+            ignore=shutil.ignore_patterns(".git"),
+        )
+
+
 def validate_module_defaults(path):
-    """Make sure there are no conflicting defaults. Each module name can only
-    have one default stream.
+    """Make sure there are no conflicting defaults and every default can be loaded.
+    Each module name can only have one default stream.
 
     :param str path: directory with cloned module defaults
     """
+
+    defaults_num = len(glob.glob(os.path.join(path, "*.yaml")))
+
     seen_defaults = collections.defaultdict(set)
 
     for module_name, defaults in iter_module_defaults(path):
@@ -242,6 +268,11 @@ def validate_module_defaults(path):
             "There are duplicated module defaults:\n%s" % "\n".join(errors)
         )
 
+    # Make sure all defaults are valid, otherwise update_from_defaults_directory
+    # will return an empty object.
+    if defaults_num != len(seen_defaults):
+        raise RuntimeError("Defaults contain an invalid default file")
+
 
 def validate_comps(path):
     """Check that there are no whitespace issues in comps."""
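`write_module_obsoletes` accepts either an SCM dictionary or a plain path relative to the configuration directory, mirroring the existing module defaults handling. Illustrative config values (the repo URL is made up for the example):

```python
# As an SCM dictionary cloned at compose time:
module_obsoletes_dir = {
    "scm": "git",
    "repo": "https://example.com/releng/module-metadata.git",  # hypothetical URL
    "dir": "obsoletes",
}

# Or simply as a directory relative to the Pungi configuration directory:
module_obsoletes_dir = "module_obsoletes"
```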
pungi/phases/live_images.py

@@ -186,7 +186,7 @@ class CreateLiveImageThread(WorkerThread):
         )
         self.pool.log_info("[BEGIN] %s" % msg)
 
-        koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
+        koji_wrapper = KojiWrapper(compose)
         _, version = compose.compose_id.rsplit("-", 1)
         name = cmd["name"] or imgname
         version = cmd["version"] or version

pungi/phases/livemedia_phase.py

@@ -71,6 +71,7 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
             "ksurl": self.get_ksurl(image_conf),
             "ksversion": image_conf.get("ksversion"),
             "scratch": image_conf.get("scratch", False),
+            "nomacboot": image_conf.get("nomacboot", False),
             "release": self.get_release(image_conf),
             "skip_tag": image_conf.get("skip_tag"),
             "name": name,
@@ -140,7 +141,7 @@ class LiveMediaThread(WorkerThread):
         )
         self.pool.log_info("[BEGIN] %s" % msg)
 
-        koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
+        koji_wrapper = KojiWrapper(compose)
         cmd = self._get_cmd(koji_wrapper, config)
 
         log_file = self._get_log_file(compose, variant, subvariant, config)
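The new `nomacboot` knob is read straight from the variant's `live_media` config block. A minimal illustrative entry (other keys elided, values hypothetical):

```python
live_media = {
    "^Workstation$": [
        {
            "name": "Fedora-Workstation-Live",
            "kickstart": "fedora-live-workstation.ks",
            "nomacboot": True,  # skip creating macboot images in the livemedia task
        }
    ]
}
```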
pungi/phases/osbs.py

@@ -1,24 +1,29 @@
 # -*- coding: utf-8 -*-
 
+import copy
 import fnmatch
 import json
 import os
 from kobo.threads import ThreadPool, WorkerThread
 from kobo import shortcuts
+from productmd.rpms import Rpms
+from six.moves import configparser
 
 from .base import ConfigGuardedPhase, PhaseLoggerMixin
 from .. import util
 from ..wrappers import kojiwrapper
+from ..wrappers.scm import get_file_from_scm
 
 
 class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
     name = "osbs"
 
-    def __init__(self, compose):
+    def __init__(self, compose, pkgset_phase, buildinstall_phase):
         super(OSBSPhase, self).__init__(compose)
         self.pool = ThreadPool(logger=self.logger)
-        self.pool.metadata = {}
         self.pool.registries = {}
+        self.pool.pkgset_phase = pkgset_phase
+        self.pool.buildinstall_phase = buildinstall_phase
 
     def run(self):
         for variant in self.compose.get_variants():
@@ -28,15 +33,6 @@ class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
 
         self.pool.start()
 
-    def dump_metadata(self):
-        """Create a file with image metadata if the phase actually ran."""
-        if self._skipped:
-            return
-        with open(self.compose.paths.compose.metadata("osbs.json"), "w") as f:
-            json.dump(
-                self.pool.metadata, f, indent=4, sort_keys=True, separators=(",", ": ")
-            )
-
     def request_push(self):
         """Store configuration data about where to push the created images and
         then send the same data to message bus.
@@ -87,8 +83,8 @@ class OSBSThread(WorkerThread):
     def worker(self, compose, variant, config):
         msg = "OSBS task for variant %s" % variant.uid
         self.pool.log_info("[BEGIN] %s" % msg)
-        koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
-        koji.login()
+
+        original_config = copy.deepcopy(config)
 
         # Start task
         source = config.pop("url")
@@ -104,86 +100,98 @@ class OSBSThread(WorkerThread):
 
         config["yum_repourls"] = repos
 
-        task_id = koji.koji_proxy.buildContainer(
-            source, target, config, priority=priority
-        )
-
-        # Wait for it to finish and capture the output into log file (even
-        # though there is not much there).
         log_dir = os.path.join(compose.paths.log.topdir(), "osbs")
         util.makedirs(log_dir)
         log_file = os.path.join(
             log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
         )
+        reuse_file = log_file[:-4] + ".reuse.json"
+
+        try:
+            image_conf = self._get_image_conf(compose, original_config)
+        except Exception as e:
+            image_conf = None
+            self.pool.log_info(
+                "Can't get image-build.conf for variant: %s source: %s - %s"
+                % (variant.uid, source, str(e))
+            )
+
+        koji = kojiwrapper.KojiWrapper(compose)
+        koji.login()
+
+        task_id = self._try_to_reuse(
+            compose, variant, original_config, image_conf, reuse_file
+        )
+
+        if not task_id:
+            task_id = koji.koji_proxy.buildContainer(
+                source, target, config, priority=priority
+            )
+
+        koji.save_task_id(task_id)
+
+        # Wait for it to finish and capture the output into log file (even
+        # though there is not much there).
         if koji.watch_task(task_id, log_file) != 0:
             raise RuntimeError(
                 "OSBS: task %s failed: see %s for details" % (task_id, log_file)
             )
 
         scratch = config.get("scratch", False)
-        nvr = self._add_metadata(variant, task_id, compose, scratch)
+        nvr, archive_ids = add_metadata(variant, task_id, compose, scratch)
         if nvr:
             registry = get_registry(compose, nvr, registry)
             if registry:
                 self.pool.registries[nvr] = registry
 
+        self._write_reuse_metadata(
+            compose,
+            variant,
+            original_config,
+            image_conf,
+            task_id,
+            archive_ids,
+            reuse_file,
+        )
+
         self.pool.log_info("[DONE ] %s" % msg)
 
-    def _add_metadata(self, variant, task_id, compose, is_scratch):
-        # Create new Koji session. The task could take so long to finish that
-        # our session will expire. This second session does not need to be
-        # authenticated since it will only do reading operations.
-        koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
-
-        # Create metadata
-        metadata = {
-            "compose_id": compose.compose_id,
-            "koji_task": task_id,
-        }
-
-        result = koji.koji_proxy.getTaskResult(task_id)
-        if is_scratch:
-            metadata.update({"repositories": result["repositories"]})
-            # add a fake arch of 'scratch', so we can construct the metadata
-            # in same data structure as real builds.
-            self.pool.metadata.setdefault(variant.uid, {}).setdefault(
-                "scratch", []
-            ).append(metadata)
-            return None
-
+    def _get_image_conf(self, compose, config):
+        """Get image-build.conf from git repo.
+
+        :param Compose compose: Current compose.
+        :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+        """
+        tmp_dir = compose.mkdtemp(prefix="osbs_")
+
+        url = config["url"].split("#")
+        if len(url) == 1:
+            url.append(config["git_branch"])
+
+        filename = "image-build.conf"
+        get_file_from_scm(
+            {
+                "scm": "git",
+                "repo": url[0],
+                "branch": url[1],
+                "file": [filename],
+            },
+            tmp_dir,
+        )
+
+        c = configparser.ConfigParser()
+        c.read(os.path.join(tmp_dir, filename))
+        return c
+
+    def _get_ksurl(self, image_conf):
+        """Get ksurl from image-build.conf"""
+        ksurl = image_conf.get("image-build", "ksurl")
+
+        if ksurl:
+            resolver = util.GitUrlResolver(offline=False)
+            return resolver(ksurl)
         else:
-            build_id = int(result["koji_builds"][0])
-            buildinfo = koji.koji_proxy.getBuild(build_id)
-            archives = koji.koji_proxy.listArchives(build_id)
-
-            nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
-
-            metadata.update(
-                {
-                    "name": buildinfo["name"],
-                    "version": buildinfo["version"],
-                    "release": buildinfo["release"],
-                    "nvr": nvr,
-                    "creation_time": buildinfo["creation_time"],
-                }
-            )
-            for archive in archives:
-                data = {
-                    "filename": archive["filename"],
-                    "size": archive["size"],
-                    "checksum": archive["checksum"],
-                }
-                data.update(archive["extra"])
-                data.update(metadata)
-                arch = archive["extra"]["image"]["arch"]
-                self.pool.log_debug(
-                    "Created Docker base image %s-%s-%s.%s"
-                    % (metadata["name"], metadata["version"], metadata["release"], arch)
-                )
-                self.pool.metadata.setdefault(variant.uid, {}).setdefault(
-                    arch, []
-                ).append(data)
-            return nvr
+            return None
 
     def _get_repo(self, compose, repo, gpgkey=None):
         """
@@ -192,7 +200,7 @@ class OSBSThread(WorkerThread):
         file pointing to that location and return the URL to .repo file.
         """
         if "://" in repo:
-            return repo
+            return repo.replace("$COMPOSE_ID", compose.compose_id)
 
         if repo.startswith("/"):
             # The repo is an absolute path on the filesystem
@@ -211,6 +219,15 @@ class OSBSThread(WorkerThread):
             raise RuntimeError(
                 "There is no variant %s to get repo from to pass to OSBS." % repo
             )
+        cts_url = compose.conf.get("cts_url", None)
+        if cts_url:
+            return os.path.join(
+                cts_url,
+                "api/1/composes",
+                compose.compose_id,
+                "repo/?variant=%s" % variant,
+            )
+
         repo_path = compose.paths.compose.repository(
             "$basearch", variant, create_dir=False
         )
@@ -231,3 +248,209 @@ class OSBSThread(WorkerThread):
             f.write("gpgkey=%s\n" % gpgkey)
 
         return util.translate_path(compose, repo_file)
+
+    def _try_to_reuse(self, compose, variant, config, image_conf, reuse_file):
+        """Try to reuse results of old compose.
+
+        :param Compose compose: Current compose.
+        :param Variant variant: Current variant.
+        :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+        :param ConfigParser image_conf: ConfigParser obj of image-build.conf.
+        :param str reuse_file: Path to reuse metadata file
+        """
+        log_msg = "Cannot reuse old osbs phase results - %s"
+
+        if not compose.conf["osbs_allow_reuse"]:
+            self.pool.log_info(log_msg % "reuse of old osbs results is disabled.")
+            return False
+
+        old_reuse_file = compose.paths.old_compose_path(reuse_file)
+        if not old_reuse_file:
+            self.pool.log_info(log_msg % "Can't find old reuse metadata file")
+            return False
+
+        try:
+            with open(old_reuse_file) as f:
+                old_reuse_metadata = json.load(f)
+        except Exception as e:
+            self.pool.log_info(
+                log_msg % "Can't load old reuse metadata file: %s" % str(e)
+            )
+            return False
+
+        if old_reuse_metadata["config"] != config:
+            self.pool.log_info(log_msg % "osbs config changed")
+            return False
+
+        if not image_conf:
+            self.pool.log_info(log_msg % "Can't get image-build.conf")
+            return False
+
+        # Make sure ksurl did not change
+        try:
+            ksurl = self._get_ksurl(image_conf)
+        except Exception as e:
+            self.pool.log_info(
+                log_msg % "Can't get ksurl from image-build.conf - %s" % str(e)
+            )
+            return False
+
+        if not old_reuse_metadata["ksurl"]:
+            self.pool.log_info(
+                log_msg % "Can't get ksurl from old compose reuse metadata."
+            )
+            return False
+
+        if ksurl != old_reuse_metadata["ksurl"]:
+            self.pool.log_info(log_msg % "ksurl changed")
+            return False
+
+        # Make sure buildinstall phase is reused
+        try:
+            arches = image_conf.get("image-build", "arches").split(",")
+        except Exception as e:
+            self.pool.log_info(
+                log_msg % "Can't get arches from image-build.conf - %s" % str(e)
+            )
+            return False
+        for arch in arches:
+            if not self.pool.buildinstall_phase.reused(variant, arch):
+                self.pool.log_info(
+                    log_msg % "buildinstall phase changed %s.%s" % (variant, arch)
+                )
+                return False
+
+        # Make sure the rpms installed in the image exist in the current compose
+        rpm_manifest_file = compose.paths.compose.metadata("rpms.json")
+        rpm_manifest = Rpms()
+        rpm_manifest.load(rpm_manifest_file)
+        rpms = set()
+        for variant in rpm_manifest.rpms:
+            for arch in rpm_manifest.rpms[variant]:
+                for src in rpm_manifest.rpms[variant][arch]:
+                    for nevra in rpm_manifest.rpms[variant][arch][src]:
+                        rpms.add(nevra)
+
+        for nevra in old_reuse_metadata["rpmlist"]:
+            if nevra not in rpms:
+                self.pool.log_info(
+                    log_msg % "%s does not exist in current compose" % nevra
+                )
+                return False
+
+        self.pool.log_info(
+            "Reusing old OSBS task %d result" % old_reuse_metadata["task_id"]
+        )
+        return old_reuse_metadata["task_id"]
+
+    def _write_reuse_metadata(
+        self, compose, variant, config, image_conf, task_id, archive_ids, reuse_file
+    ):
+        """Write metadata to file for reusing.
+
+        :param Compose compose: Current compose.
+        :param Variant variant: Current variant.
+        :param dict config: One osbs config item of compose.conf["osbs"][$variant]
+        :param ConfigParser image_conf: ConfigParser obj of image-build.conf.
+        :param int task_id: Koji task id of osbs task.
+        :param list archive_ids: List of koji archive id
+        :param str reuse_file: Path to reuse metadata file.
+        """
+        msg = "Writing reuse metadata file %s" % reuse_file
+        compose.log_info(msg)
+
+        rpmlist = set()
+        koji = kojiwrapper.KojiWrapper(compose)
+        for archive_id in archive_ids:
+            rpms = koji.koji_proxy.listRPMs(imageID=archive_id)
+            for item in rpms:
+                if item["epoch"]:
+                    rpmlist.add(
+                        "%s:%s-%s-%s.%s"
+                        % (
+                            item["name"],
+                            item["epoch"],
+                            item["version"],
+                            item["release"],
+                            item["arch"],
+                        )
+                    )
+                else:
+                    rpmlist.add("%s.%s" % (item["nvr"], item["arch"]))
+
+        try:
+            ksurl = self._get_ksurl(image_conf)
+        except Exception:
+            ksurl = None
+
+        data = {
+            "config": config,
+            "ksurl": ksurl,
+            "rpmlist": sorted(rpmlist),
+            "task_id": task_id,
+        }
+        try:
+            with open(reuse_file, "w") as f:
+                json.dump(data, f, indent=4)
+        except Exception as e:
+            compose.log_info(msg + " failed - %s" % str(e))
+
+
+def add_metadata(variant, task_id, compose, is_scratch):
+    """Given a task ID, find details about the container and add it to global
+    metadata."""
+    # Create new Koji session. The task could take so long to finish that
+    # our session will expire. This second session does not need to be
+    # authenticated since it will only do reading operations.
+    koji = kojiwrapper.KojiWrapper(compose)
+
+    # Create metadata
+    metadata = {
+        "compose_id": compose.compose_id,
+        "koji_task": task_id,
+    }
+
+    result = koji.koji_proxy.getTaskResult(task_id)
+    if is_scratch:
+        metadata.update({"repositories": result["repositories"]})
+        # add a fake arch of 'scratch', so we can construct the metadata
+        # in same data structure as real builds.
+        compose.containers_metadata.setdefault(variant.uid, {}).setdefault(
+            "scratch", []
+        ).append(metadata)
+        return None, []
+
+    else:
+        build_id = int(result["koji_builds"][0])
+        buildinfo = koji.koji_proxy.getBuild(build_id)
+        archives = koji.koji_proxy.listArchives(build_id, type="image")
+
+        nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
+
+        metadata.update(
+            {
+                "name": buildinfo["name"],
+                "version": buildinfo["version"],
+                "release": buildinfo["release"],
+                "nvr": nvr,
+                "creation_time": buildinfo["creation_time"],
+            }
+        )
+        archive_ids = []
+        for archive in archives:
+            data = {
+                "filename": archive["filename"],
+                "size": archive["size"],
+                "checksum": archive["checksum"],
+            }
+            data.update(archive["extra"])
+            data.update(metadata)
+            arch = archive["extra"]["image"]["arch"]
+            compose.log_debug(
+                "Created Docker base image %s-%s-%s.%s"
+                % (metadata["name"], metadata["version"], metadata["release"], arch)
+            )
            compose.containers_metadata.setdefault(variant.uid, {}).setdefault(
                arch, []
            ).append(data)
            archive_ids.append(archive["id"])
        return nvr, archive_ids
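`_write_reuse_metadata` produces a small JSON document next to the task log, and `_try_to_reuse` later compares these fields against the new run. The shape of that file, with illustrative values (URLs, NEVRAs, and IDs are made up):

```python
reuse_metadata = {
    "config": {  # the original osbs config block, before url/target were popped
        "url": "git://example.com/containers/base.git#production",
        "target": "f34-container-candidate",
    },
    "ksurl": "git://example.com/kickstarts.git?#0abc123",  # resolved commit, or None
    "rpmlist": [  # NEVRAs installed in the image, from koji listRPMs
        "bash-5.1.0-2.fc34.x86_64",
        "glibc-2.33-5.fc34.x86_64",
    ],
    "task_id": 66374433,
}
```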
pungi/phases/osbuild.py

@@ -96,7 +96,12 @@ class RunOSBuildThread(WorkerThread):
         self.can_fail = can_fail
         self.num = num
         with util.failable(
-            compose, can_fail, variant, "*", "osbuild", logger=self.pool._logger,
+            compose,
+            can_fail,
+            variant,
+            "*",
+            "osbuild",
+            logger=self.pool._logger,
         ):
             self.worker(
                 compose, variant, config, arches, version, release, target, repo
@@ -105,11 +110,26 @@ class RunOSBuildThread(WorkerThread):
     def worker(self, compose, variant, config, arches, version, release, target, repo):
         msg = "OSBuild task for variant %s" % variant.uid
         self.pool.log_info("[BEGIN] %s" % msg)
-        koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+        koji = kojiwrapper.KojiWrapper(compose)
         koji.login()
 
+        ostree = {}
+        if config.get("ostree_url"):
+            ostree["url"] = config["ostree_url"]
+        if config.get("ostree_ref"):
+            ostree["ref"] = config["ostree_ref"]
+        if config.get("ostree_parent"):
+            ostree["parent"] = config["ostree_parent"]
+
         # Start task
         opts = {"repo": repo}
+        if ostree:
+            opts["ostree"] = ostree
+
+        upload_options = config.get("upload_options")
+        if upload_options:
+            opts["upload_options"] = upload_options
+
         if release:
             opts["release"] = release
         task_id = koji.koji_proxy.osbuildImage(
@@ -122,6 +142,8 @@ class RunOSBuildThread(WorkerThread):
             opts=opts,
         )
 
+        koji.save_task_id(task_id)
+
         # Wait for it to finish and capture the output into log file.
         log_dir = os.path.join(compose.paths.log.topdir(), "osbuild")
         util.makedirs(log_dir)
@@ -136,7 +158,7 @@ class RunOSBuildThread(WorkerThread):
         # Refresh koji session which may have timed out while the task was
         # running. Watching is done via a subprocess, so the session is
         # inactive.
-        koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
+        koji = kojiwrapper.KojiWrapper(compose)
 
         # Get build id via the task's result json data
         result = koji.koji_proxy.getTaskResult(task_id)
@@ -148,7 +170,7 @@ class RunOSBuildThread(WorkerThread):
         # architecture, but we don't verify that.
         build_info = koji.koji_proxy.getBuild(build_id)
         for archive in koji.koji_proxy.listArchives(buildID=build_id):
-            if archive["type_name"] not in config["image_types"]:
+            if archive["type_name"] not in EXTENSIONS:
                 # Ignore values that are not of required types.
                 continue
 
@@ -175,8 +197,11 @@ class RunOSBuildThread(WorkerThread):
 
                 linker.link(src_file, image_dest, link_type=compose.conf["link_type"])
 
-                suffix = archive["filename"].rsplit(".", 1)[-1]
-                if suffix not in EXTENSIONS[archive["type_name"]]:
+                for suffix in EXTENSIONS[archive["type_name"]]:
+                    if archive["filename"].endswith(suffix):
+                        break
+                else:
+                    # No suffix matched.
                     raise RuntimeError(
                         "Failed to generate metadata. Format %s doesn't match type %s"
                         % (suffix, archive["type_name"])
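The for/else replacement above picks the first registered suffix the filename actually ends with, which also handles multi-dot extensions like `raw.xz` that the old `rsplit(".", 1)` approach mishandled. Extracted as a function (the `EXTENSIONS` table here is abbreviated and illustrative, not the phase module's real table):

```python
EXTENSIONS = {
    "qcow2": ["qcow2"],
    "raw-xz": ["raw.xz"],  # a two-part suffix rsplit(".", 1) would have missed
}


def match_suffix(type_name, filename):
    """Return the extension registered for type_name that filename ends with."""
    for suffix in EXTENSIONS[type_name]:
        if filename.endswith(suffix):
            return suffix
    raise RuntimeError(
        "Failed to generate metadata. File %s doesn't match type %s"
        % (filename, type_name)
    )


print(match_suffix("raw-xz", "Fedora-34-x86_64.raw.xz"))  # "raw.xz"
```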
pungi/phases/ostree.py

@@ -165,6 +165,7 @@ class OSTreeThread(WorkerThread):
                 ("update-summary", config.get("update_summary", False)),
                 ("ostree-ref", config.get("ostree_ref")),
                 ("force-new-commit", config.get("force_new_commit", False)),
+                ("unified-core", config.get("unified_core", False)),
             ]
         )
         packages = ["pungi", "ostree", "rpm-ostree"]

pungi/phases/ostree_installer.py

@@ -272,6 +272,7 @@ class OstreeInstallerThread(WorkerThread):
             rootfs_size=config.get("rootfs_size"),
             is_final=compose.supported,
             log_dir=self.logdir,
+            skip_branding=config.get("skip_branding"),
         )
         cmd = "rm -rf %s && %s" % (
             shlex_quote(output_dir),
pungi/phases/pkgset/__init__.py

@@ -29,13 +29,10 @@ class PkgsetPhase(PhaseBase):
         self.path_prefix = None
 
     def run(self):
-        pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
-        from .source import PkgsetSourceContainer
         from . import sources
 
-        PkgsetSourceContainer.register_module(sources)
-        container = PkgsetSourceContainer()
-        SourceClass = container[pkgset_source]
+        SourceClass = sources.ALL_SOURCES[self.compose.conf["pkgset_source"].lower()]
         self.package_sets, self.path_prefix = SourceClass(self.compose)()
 
     def validate(self):

pungi/phases/pkgset/common.py

@@ -28,7 +28,11 @@ from pungi.util import (
     PartialFuncWorkerThread,
     PartialFuncThreadPool,
 )
-from pungi.module_util import Modulemd, collect_module_defaults
+from pungi.module_util import (
+    Modulemd,
+    collect_module_defaults,
+    collect_module_obsoletes,
+)
 from pungi.phases.createrepo import add_modular_metadata
 
 
@@ -159,6 +163,9 @@ def _create_arch_repo(worker_thread, args, task_num):
     mod_index = collect_module_defaults(
         compose.paths.work.module_defaults_dir(), names, overrides_dir=overrides_dir
     )
+    mod_index = collect_module_obsoletes(
+        compose.paths.work.module_obsoletes_dir(), names, mod_index
+    )
     for x in mmd:
         mod_index.add_module_stream(x)
     add_modular_metadata(
@ -22,6 +22,7 @@ It automatically finds a signed copies according to *sigkey_ordering*.
|
|||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
from six.moves import cPickle as pickle
|
from six.moves import cPickle as pickle
|
||||||
|
|
||||||
import kobo.log
|
import kobo.log
|
||||||
@ -30,9 +31,9 @@ import kobo.rpmlib
|
|||||||
|
|
||||||
from kobo.threads import WorkerThread, ThreadPool
|
from kobo.threads import WorkerThread, ThreadPool
|
||||||
|
|
||||||
import pungi.wrappers.kojiwrapper
|
|
||||||
from pungi.util import pkg_is_srpm, copy_all
|
from pungi.util import pkg_is_srpm, copy_all
|
||||||
from pungi.arch import get_valid_arches, is_excluded
|
from pungi.arch import get_valid_arches, is_excluded
|
||||||
|
from pungi.errors import UnsignedPackagesError
|
||||||
|
|
||||||
|
|
||||||
class ExtendedRpmWrapper(kobo.pkgset.SimpleRpmWrapper):
|
class ExtendedRpmWrapper(kobo.pkgset.SimpleRpmWrapper):
|
||||||
@ -144,7 +145,7 @@ class PackageSetBase(kobo.log.LoggingBase):
|
|||||||
|
|
||||||
def raise_invalid_sigkeys_exception(self, rpminfos):
|
def raise_invalid_sigkeys_exception(self, rpminfos):
|
||||||
"""
|
"""
|
||||||
Raises RuntimeError containing details of RPMs with invalid
|
Raises UnsignedPackagesError containing details of RPMs with invalid
|
||||||
sigkeys defined in `rpminfos`.
|
sigkeys defined in `rpminfos`.
|
||||||
"""
|
"""
|
||||||
|
|
@@ -166,7 +167,9 @@ class PackageSetBase(kobo.log.LoggingBase):
 
         if not isinstance(rpminfos, dict):
             rpminfos = {self.sigkey_ordering: rpminfos}
-        raise RuntimeError("\n".join(get_error(k, v) for k, v in rpminfos.items()))
+        raise UnsignedPackagesError(
+            "\n".join(get_error(k, v) for k, v in rpminfos.items())
+        )
 
     def read_packages(self, rpms, srpms):
         srpm_pool = ReaderPool(self, self._logger)
@@ -329,6 +332,8 @@ class KojiPackageSet(PackageSetBase):
         cache_region=None,
         extra_builds=None,
         extra_tasks=None,
+        signed_packages_retries=0,
+        signed_packages_wait=30,
     ):
         """
         Creates new KojiPackageSet.
@@ -361,6 +366,9 @@ class KojiPackageSet(PackageSetBase):
         :param list extra_tasks: Extra RPMs defined as Koji task IDs to get from Koji
             and include in the package set. Useful when building testing compose
             with RPM scratch builds.
+        :param int signed_packages_retries: How many times should a search for
+            signed package be repeated.
+        :param int signed_packages_wait: How long to wait between search attempts.
         """
         super(KojiPackageSet, self).__init__(
             name,
@@ -377,10 +385,11 @@ class KojiPackageSet(PackageSetBase):
         self.extra_builds = extra_builds or []
         self.extra_tasks = extra_tasks or []
         self.reuse = None
+        self.signed_packages_retries = signed_packages_retries
+        self.signed_packages_wait = signed_packages_wait
 
     def __getstate__(self):
         result = self.__dict__.copy()
-        result["koji_profile"] = self.koji_wrapper.profile
         del result["koji_wrapper"]
         del result["_logger"]
         if "cache_region" in result:
@@ -388,8 +397,6 @@ class KojiPackageSet(PackageSetBase):
         return result
 
     def __setstate__(self, data):
-        koji_profile = data.pop("koji_profile")
-        self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
        self._logger = None
        self.__dict__.update(data)
 
@@ -503,17 +510,28 @@ class KojiPackageSet(PackageSetBase):
 
         pathinfo = self.koji_wrapper.koji_module.pathinfo
         paths = []
-        for sigkey in self.sigkey_ordering:
-            if not sigkey:
-                # we're looking for *signed* copies here
-                continue
-            sigkey = sigkey.lower()
-            rpm_path = os.path.join(
-                pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey)
-            )
-            paths.append(rpm_path)
-            if os.path.isfile(rpm_path):
-                return rpm_path
+
+        attempts_left = self.signed_packages_retries + 1
+        while attempts_left > 0:
+            for sigkey in self.sigkey_ordering:
+                if not sigkey:
+                    # we're looking for *signed* copies here
+                    continue
+                sigkey = sigkey.lower()
+                rpm_path = os.path.join(
+                    pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey)
+                )
+                if rpm_path not in paths:
+                    paths.append(rpm_path)
+                if os.path.isfile(rpm_path):
+                    return rpm_path
+
+            # No signed copy was found, wait a little and try again.
+            attempts_left -= 1
+            if attempts_left > 0:
+                nvr = "%(name)s-%(version)s-%(release)s" % rpm_info
+                self.log_debug("Waiting for signed package to appear for %s", nvr)
+                time.sleep(self.signed_packages_wait)
 
         if None in self.sigkey_ordering or "" in self.sigkey_ordering:
             # use an unsigned copy (if allowed)
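Note: with the retry loop above, a missing signature no longer fails the compose on the first pass; the lookup walks the sigkey list up to signed_packages_retries + 1 times, sleeping signed_packages_wait seconds between passes. A minimal sketch of the matching compose configuration, with made-up values (only the key names come from the hunks above):

    # pungi compose configuration -- illustrative values only
    signed_packages_retries = 2  # 3 passes in total: 1 initial + 2 retries
    signed_packages_wait = 30    # seconds to sleep between passes
    # worst-case extra delay per unsigned package: 2 * 30 = 60 seconds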
@@ -725,20 +743,26 @@ class KojiPackageSet(PackageSetBase):
                 % (old_koji_event, koji_event)
             )
             changed = self.koji_proxy.queryHistory(
-                tables=["tag_listing"], tag=tag, afterEvent=old_koji_event
+                tables=["tag_listing", "tag_inheritance"],
+                tag=tag,
+                afterEvent=min(koji_event, old_koji_event),
+                beforeEvent=max(koji_event, old_koji_event) + 1,
             )
             if changed["tag_listing"]:
                 self.log_debug("Builds under tag %s changed. Can't reuse." % tag)
                 return False
+            if changed["tag_inheritance"]:
+                self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+                return False
 
             if inherit:
                 inherit_tags = self.koji_proxy.getFullInheritance(tag, koji_event)
                 for t in inherit_tags:
                     changed = self.koji_proxy.queryHistory(
-                        tables=["tag_listing"],
+                        tables=["tag_listing", "tag_inheritance"],
                         tag=t["name"],
-                        afterEvent=old_koji_event,
-                        beforeEvent=koji_event + 1,
+                        afterEvent=min(koji_event, old_koji_event),
+                        beforeEvent=max(koji_event, old_koji_event) + 1,
                     )
                     if changed["tag_listing"]:
                         self.log_debug(
@@ -746,6 +770,9 @@ class KojiPackageSet(PackageSetBase):
                             % t["name"]
                         )
                         return False
+                    if changed["tag_inheritance"]:
+                        self.log_debug("Tag inheritance %s changed. Can't reuse." % tag)
+                        return False
 
         repo_dir = compose.paths.work.pkgset_repo(tag, create_dir=False)
         old_repo_dir = compose.paths.old_compose_path(repo_dir)
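Note: querying with afterEvent=min(...) and beforeEvent=max(...)+1 makes the reuse check symmetric in the two events, so it also works when the current compose targets an event older than the one recorded in the previous compose. The window logic in isolation (event numbers are hypothetical):

    old_koji_event, koji_event = 1000, 990  # composing against an older event
    window = dict(
        afterEvent=min(koji_event, old_koji_event),       # 990
        beforeEvent=max(koji_event, old_koji_event) + 1,  # 1001
    )
    # any tag_listing or tag_inheritance change inside this window blocks reuse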
@@ -14,15 +14,6 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.
 
 
-import kobo.plugins
-
-
-class PkgsetSourceBase(kobo.plugins.Plugin):
+class PkgsetSourceBase(object):
     def __init__(self, compose):
         self.compose = compose
-
-
-class PkgsetSourceContainer(kobo.plugins.PluginContainer):
-    @classmethod
-    def normalize_name(cls, name):
-        return name.lower()
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+from .source_koji import PkgsetSourceKoji
+from .source_repos import PkgsetSourceRepos
+
+ALL_SOURCES = {
+    "koji": PkgsetSourceKoji,
+    "repos": PkgsetSourceRepos,
+}
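Note: the kobo.plugins machinery (Plugin base class, PluginContainer, enabled flags) is replaced by a plain base class plus the explicit ALL_SOURCES registry above. A sketch of how a caller could resolve a source by name under this scheme -- the lookup site itself is not part of this diff, so treat the helper as an assumption:

    from pungi.phases.pkgset.sources import ALL_SOURCES

    def get_pkgset_source(name, compose):
        # keys are the literal strings from the dict above: "koji" or "repos"
        SourceClass = ALL_SOURCES[name.lower()]
        return SourceClass(compose)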
@@ -29,7 +29,13 @@ from pungi.wrappers.comps import CompsWrapper
 from pungi.wrappers.mbs import MBSWrapper
 import pungi.phases.pkgset.pkgsets
 from pungi.arch import getBaseArch
-from pungi.util import retry, get_arch_variant_data, get_variant_data
+from pungi.util import (
+    retry,
+    get_arch_variant_data,
+    get_variant_data,
+    read_single_module_stream_from_file,
+    read_single_module_stream_from_string,
+)
 from pungi.module_util import Modulemd
 
 from pungi.phases.pkgset.common import MaterializedPackageSet, get_all_arches
@@ -184,12 +190,9 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
 
 
 class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
-    enabled = True
-
     def __call__(self):
         compose = self.compose
-        koji_profile = compose.conf["koji_profile"]
-        self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
+        self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
         # path prefix must contain trailing '/'
         path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
         package_sets = get_pkgset_from_koji(
@@ -204,7 +207,12 @@ def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
 
 
 def _add_module_to_variant(
-    koji_wrapper, variant, build, add_to_variant_modules=False, compose=None
+    koji_wrapper,
+    variant,
+    build,
+    add_to_variant_modules=False,
+    compose=None,
+    exclude_module_ns=None,
 ):
     """
     Adds module defined by Koji build info to variant.
@@ -214,6 +222,7 @@ def _add_module_to_variant(
    :param bool add_to_variant_modules: Adds the modules also to
        variant.modules.
    :param compose: Compose object to get filters from
+    :param list exclude_module_ns: Module name:stream which will be excluded.
    """
    mmds = {}
    archives = koji_wrapper.koji_proxy.listArchives(build["id"])
@@ -243,6 +252,10 @@ def _add_module_to_variant(
 
     info = build["extra"]["typeinfo"]["module"]
     nsvc = "%(name)s:%(stream)s:%(version)s:%(context)s" % info
+    ns = "%(name)s:%(stream)s" % info
+
+    if exclude_module_ns and ns in exclude_module_ns:
+        return
 
     added = False
 
@@ -251,17 +264,18 @@ def _add_module_to_variant(
             compose.log_debug("Module %s is filtered from %s.%s", nsvc, variant, arch)
             continue
 
-        try:
-            mmd = Modulemd.ModuleStream.read_file(
-                mmds["modulemd.%s.txt" % arch], strict=True
+        filename = "modulemd.%s.txt" % arch
+        if filename not in mmds:
+            raise RuntimeError(
+                "Module %s does not have metadata for arch %s and is not filtered "
+                "out via filter_modules option." % (nsvc, arch)
             )
-            variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
+        mod_stream = read_single_module_stream_from_file(
+            mmds[filename], compose, arch, build
+        )
+        if mod_stream:
             added = True
-        except KeyError:
-            # There is no modulemd for this arch. This could mean an arch was
-            # added to the compose after the module was built. We don't want to
-            # process this, let's skip this module.
-            pass
+            variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream
 
     if not added:
         # The module is filtered on all arches of this variant.
@@ -341,9 +355,7 @@ def _add_scratch_modules_to_variant(
         tag_to_mmd.setdefault(tag, {})
         for arch in variant.arches:
             try:
-                mmd = Modulemd.ModuleStream.read_string(
-                    final_modulemd[arch], strict=True
-                )
+                mmd = read_single_module_stream_from_string(final_modulemd[arch])
                 variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
             except KeyError:
                 continue
@@ -383,7 +395,7 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):
 
 
 def _get_modules_from_koji(
-    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
 ):
     """
     Loads modules for given `variant` from koji `session`, adds them to
@@ -394,6 +406,7 @@ def _get_modules_from_koji(
     :param Variant variant: Variant with modules to find.
     :param dict variant_tags: Dict populated by this method. Key is `variant`
         and value is list of Koji tags to get the RPMs from.
+    :param list exclude_module_ns: Module name:stream which will be excluded.
     """
 
     # Find out all modules in every variant and add their Koji tags
@@ -402,7 +415,11 @@ def _get_modules_from_koji(
         koji_modules = get_koji_modules(compose, koji_wrapper, event, module["name"])
         for koji_module in koji_modules:
             nsvc = _add_module_to_variant(
-                koji_wrapper, variant, koji_module, compose=compose
+                koji_wrapper,
+                variant,
+                koji_module,
+                compose=compose,
+                exclude_module_ns=exclude_module_ns,
             )
             if not nsvc:
                 continue
@@ -517,7 +534,13 @@ def filter_by_whitelist(compose, module_builds, input_modules, expected_modules)
 
 
 def _get_modules_from_koji_tags(
-    compose, koji_wrapper, event_id, variant, variant_tags, tag_to_mmd
+    compose,
+    koji_wrapper,
+    event_id,
+    variant,
+    variant_tags,
+    tag_to_mmd,
+    exclude_module_ns,
 ):
     """
     Loads modules for given `variant` from Koji, adds them to
@@ -529,6 +552,7 @@ def _get_modules_from_koji_tags(
     :param Variant variant: Variant with modules to find.
     :param dict variant_tags: Dict populated by this method. Key is `variant`
         and value is list of Koji tags to get the RPMs from.
+    :param list exclude_module_ns: Module name:stream which will be excluded.
     """
     # Compose tags from configuration
     compose_tags = [
@@ -595,21 +619,26 @@ def _get_modules_from_koji_tags(
         for build in latest_builds:
             # Get the Build from Koji to get modulemd and module_tag.
             build = koji_proxy.getBuild(build["build_id"])
+
+            nsvc = _add_module_to_variant(
+                koji_wrapper,
+                variant,
+                build,
+                True,
+                compose=compose,
+                exclude_module_ns=exclude_module_ns,
+            )
+            if not nsvc:
+                continue
+
             module_tag = (
                 build.get("extra", {})
                 .get("typeinfo", {})
                 .get("module", {})
                 .get("content_koji_tag", "")
             )
 
             variant_tags[variant].append(module_tag)
-
-            nsvc = _add_module_to_variant(
-                koji_wrapper, variant, build, True, compose=compose
-            )
-            if not nsvc:
-                continue
-
             tag_to_mmd.setdefault(module_tag, {})
             for arch in variant.arch_mmds:
                 try:
@@ -635,7 +664,7 @@ def _get_modules_from_koji_tags(
     if expected_modules:
         # There are some module names that were listed in configuration and not
         # found in any tag...
-        raise RuntimeError(
+        compose.log_warning(
             "Configuration specified patterns (%s) that don't match "
             "any modules in the configured tags." % ", ".join(expected_modules)
         )
@@ -695,23 +724,44 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
                 "modules."
             )
 
+        extra_modules = get_variant_data(
+            compose.conf, "pkgset_koji_module_builds", variant
+        )
+
+        # When adding extra modules, other modules of the same name:stream available
+        # in brew tag should be excluded.
+        exclude_module_ns = []
+        if extra_modules:
+            exclude_module_ns = [
+                ":".join(nsvc.split(":")[:2]) for nsvc in extra_modules
+            ]
+
         if modular_koji_tags or (
             compose.conf["pkgset_koji_module_tag"] and variant.modules
         ):
             # List modules tagged in particular tags.
             _get_modules_from_koji_tags(
-                compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+                compose,
+                koji_wrapper,
+                event,
+                variant,
+                variant_tags,
+                tag_to_mmd,
+                exclude_module_ns,
             )
         elif variant.modules:
             # Search each module in Koji separately. Tagging does not come into
             # play here.
             _get_modules_from_koji(
-                compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
+                compose,
+                koji_wrapper,
+                event,
+                variant,
+                variant_tags,
+                tag_to_mmd,
+                exclude_module_ns,
             )
 
-        extra_modules = get_variant_data(
-            compose.conf, "pkgset_koji_module_builds", variant
-        )
         if extra_modules:
             _add_extra_modules_to_variant(
                 compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
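Note: an NSVC identifier has the form name:stream:version:context, so keeping the first two ":"-separated fields yields the name:stream pair used for exclusion. A small illustration with a made-up module build:

    extra_modules = ["perl:5.30:3020200601:deadbeef"]  # hypothetical NSVC
    exclude_module_ns = [":".join(nsvc.split(":")[:2]) for nsvc in extra_modules]
    # exclude_module_ns == ["perl:5.30"]; tagged builds of perl:5.30 are now
    # skipped, so the explicitly listed build is the only one of that stream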
@@ -765,6 +815,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
             cache_region=compose.cache_region,
             extra_builds=extra_builds,
             extra_tasks=extra_tasks,
+            signed_packages_retries=compose.conf["signed_packages_retries"],
+            signed_packages_wait=compose.conf["signed_packages_wait"],
         )
 
         # Check if we have cache for this tag from previous compose. If so, use
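Note: both options are read with plain indexing rather than .get(), which assumes the config schema supplies defaults (the KojiPackageSet constructor above defaults to 0 retries and a 30 second wait). A sketch of what the corresponding schema entries would look like -- an assumption, since checks.py is not part of this section:

    # pungi/checks.py schema fragment (assumed, based on the defaults above)
    "signed_packages_retries": {"type": "number", "default": 0},
    "signed_packages_wait": {"type": "number", "default": 30},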
@@ -773,11 +825,16 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
             compose.paths.work.pkgset_file_cache(compose_tag)
         )
         if old_cache_path:
-            pkgset.set_old_file_cache(
-                pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
-                    old_cache_path
+            try:
+                pkgset.set_old_file_cache(
+                    pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
+                        old_cache_path
+                    )
+                )
+            except Exception as e:
+                compose.log_debug(
+                    "Failed to load old cache file %s : %s" % (old_cache_path, str(e))
                 )
-            )
 
         is_traditional = compose_tag in compose.conf.get("pkgset_koji_tag", [])
         should_inherit = inherit if is_traditional else inherit_modules
@@ -15,6 +15,7 @@
 
 
 import os
+import shutil
 
 from kobo.shortcuts import run
 
@@ -31,8 +32,6 @@ import pungi.phases.pkgset.source
 
 
 class PkgsetSourceRepos(pungi.phases.pkgset.source.PkgsetSourceBase):
-    enabled = True
-
     def __call__(self):
         package_sets, path_prefix = get_pkgset_from_repos(self.compose)
         return (package_sets, path_prefix)
@@ -112,6 +111,17 @@ def get_pkgset_from_repos(compose):
             flist.append(dst)
             pool.queue_put((src, dst))
 
+        # Clean up tmp dir
+        # Workaround for rpm not honoring sgid bit which only appears when yum is used.
+        yumroot_dir = os.path.join(pungi_dir, "work", arch, "yumroot")
+        if os.path.isdir(yumroot_dir):
+            try:
+                shutil.rmtree(yumroot_dir)
+            except Exception as e:
+                compose.log_warning(
+                    "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
+                )
+
     msg = "Linking downloaded pkgset packages"
     compose.log_info("[BEGIN] %s" % msg)
     pool.start()
@@ -18,6 +18,7 @@ import os
 
 from pungi.phases.base import PhaseBase
 from pungi.util import failable, get_arch_variant_data
+import productmd.compose
 
 
 class TestPhase(PhaseBase):
@@ -25,6 +26,7 @@ class TestPhase(PhaseBase):
 
     def run(self):
         check_image_sanity(self.compose)
+        check_image_metadata(self.compose)
 
 
 def check_image_sanity(compose):
@@ -45,6 +47,17 @@ def check_image_sanity(compose):
                 check_size_limit(compose, variant, arch, img)
 
 
+def check_image_metadata(compose):
+    """
+    Check the images metadata for entries that cannot be serialized.
+    Often caused by isos with duplicate metadata.
+    Accessing the `images` attribute will raise an exception if there's a problem
+    """
+    if compose.im.images:
+        compose = productmd.compose.Compose(compose.paths.compose.topdir())
+        return compose.images
+
+
 def check_sanity(compose, variant, arch, image):
     path = os.path.join(compose.paths.compose.topdir(), image.path)
     deliverable = getattr(image, "deliverable")
@@ -69,10 +69,13 @@ class Profiler(object):
 
     @classmethod
     def print_results(cls, stream=sys.stdout):
-        print("Profiling results:", file=sys.stdout)
+        # Ensure all data that was printed to stdout was already flushed. If
+        # the caller is redirecting stderr to stdout, and there's buffered
+        # data, we may end up in a situation where the stderr output printed
+        # below ends up mixed with the stdout lines.
+        sys.stdout.flush()
+        print("Profiling results:", file=stream)
         results = cls._data.items()
         results = sorted(results, key=lambda x: x[1]["time"], reverse=True)
         for name, data in results:
-            print(
-                "  %6.2f %5d %s" % (data["time"], data["calls"], name), file=sys.stdout
-            )
+            print("  %6.2f %5d %s" % (data["time"], data["calls"], name), file=stream)
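Note: print_results previously ignored its stream argument and wrote to sys.stdout; both call sites now honor stream, and the explicit flush keeps buffered stdout text from interleaving with the table when both streams end up in the same file. A minimal illustration of the failure mode being avoided (assuming the Profiler class above):

    import sys
    print("lots of compose output")            # block-buffered when redirected
    Profiler.print_results(stream=sys.stderr)  # stderr is unbuffered
    # without sys.stdout.flush(), the buffered line could land *after* the table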
@@ -15,6 +15,7 @@
 
 import os
 import re
+import six
 from six.moves import shlex_quote
 import kobo.log
 from kobo.shortcuts import run
@@ -110,7 +111,7 @@ class Runroot(kobo.log.LoggingBase):
         runroot_tag = self.compose.conf["runroot_tag"]
         log_dir = kwargs.pop("log_dir", None)
 
-        koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+        koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
         koji_cmd = koji_wrapper.get_runroot_cmd(
             runroot_tag,
             arch,
@@ -149,7 +150,11 @@ class Runroot(kobo.log.LoggingBase):
         """
         formatted_cmd = command.format(**fmt_dict) if fmt_dict else command
         ssh_cmd = ["ssh", "-oBatchMode=yes", "-n", "-l", user, hostname, formatted_cmd]
-        return run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
+        output = run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
+        if six.PY3 and isinstance(output, bytes):
+            return output.decode()
+        else:
+            return output
 
     def _log_file(self, base, suffix):
         return base.replace(".log", "." + suffix + ".log")
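Note: kobo's run() returns bytes on Python 3 while callers of _run_ssh expect text, hence the decode guard. The same check in isolation:

    import six

    def to_text(output):
        # bytes needs decoding only on Python 3; Python 2 str passes through
        if six.PY3 and isinstance(output, bytes):
            return output.decode()
        return output

    assert to_text(b"done\n") == "done\n"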
@@ -174,10 +179,13 @@ class Runroot(kobo.log.LoggingBase):
         # by the runroot task, so the Pungi user can access them.
         if chown_paths:
             paths = " ".join(shlex_quote(pth) for pth in chown_paths)
+            command += " ; EXIT_CODE=$?"
             # Make the files world readable
-            command += " && chmod -R a+r %s" % paths
+            command += " ; chmod -R a+r %s" % paths
             # and owned by the same user that is running the process
-            command += " && chown -R %d %s" % (os.getuid(), paths)
+            command += " ; chown -R %d %s" % (os.getuid(), paths)
+            # Exit with code of main command
+            command += " ; exit $EXIT_CODE"
 
         hostname = runroot_ssh_hostnames[arch]
         user = self.compose.conf.get("runroot_ssh_username", "root")
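Note: with "&&" the chmod/chown fixups were skipped whenever the main command failed, and the overall exit status could come from chown instead of the build. The rewritten chain always runs the fixups yet still reports the original status; roughly (main command and path are placeholders):

    command = "runroot-build-cmd"   # placeholder main command
    paths = "/mnt/compose/output"   # placeholder chown target
    command += " ; EXIT_CODE=$?"
    command += " ; chmod -R a+r %s" % paths
    command += " ; chown -R %d %s" % (1000, paths)
    command += " ; exit $EXIT_CODE"
    # -> runroot-build-cmd ; EXIT_CODE=$? ; chmod ... ; chown ... ; exit $EXIT_CODE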
@@ -300,7 +308,7 @@ class Runroot(kobo.log.LoggingBase):
         runroot_channel = self.compose.conf.get("runroot_channel")
         runroot_tag = self.compose.conf["runroot_tag"]
 
-        koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+        koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
         koji_cmd = koji_wrapper.get_pungi_buildinstall_cmd(
             runroot_tag,
             arch,
@@ -334,7 +342,7 @@ class Runroot(kobo.log.LoggingBase):
         runroot_channel = self.compose.conf.get("runroot_channel")
         runroot_tag = self.compose.conf["runroot_tag"]
 
-        koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
+        koji_wrapper = kojiwrapper.KojiWrapper(self.compose)
         koji_cmd = koji_wrapper.get_pungi_ostree_cmd(
             runroot_tag, arch, args, channel=runroot_channel, **kwargs
         )
@@ -96,7 +96,7 @@ def main():
     f.filter_environments(opts.arch, opts.variant, opts.arch_only_environments)
 
     if not opts.no_cleanup:
-        f.cleanup(opts.keep_empty_group, opts.lookaside_group)
+        f.cleanup(opts.arch, opts.keep_empty_group, opts.lookaside_group)
 
     if opts.remove_categories:
         f.remove_categories()
@@ -127,7 +127,7 @@ def run(config, topdir, has_old, offline, defined_variables, schema_overrides):
         pungi.phases.OstreeInstallerPhase(compose, buildinstall_phase),
         pungi.phases.OSTreePhase(compose),
         pungi.phases.CreateisoPhase(compose, buildinstall_phase),
-        pungi.phases.ExtraIsosPhase(compose),
+        pungi.phases.ExtraIsosPhase(compose, buildinstall_phase),
         pungi.phases.LiveImagesPhase(compose),
         pungi.phases.LiveMediaPhase(compose),
         pungi.phases.ImageBuildPhase(compose),
@@ -16,7 +16,10 @@ def parse_args():
     parser = argparse.ArgumentParser(add_help=True)
 
     parser.add_argument(
-        "compose", metavar="<compose-path>", nargs=1, help="path to compose",
+        "compose",
+        metavar="<compose-path>",
+        nargs=1,
+        help="path to compose",
     )
     parser.add_argument(
         "--arch",
@@ -476,14 +476,14 @@ def main():
     else:
         mypungi.downloadSRPMs()
 
-    print("RPM size: %s MiB" % (mypungi.size_packages() / 1024 ** 2))
+    print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
     if not opts.nodebuginfo:
         print(
             "DEBUGINFO size: %s MiB"
-            % (mypungi.size_debuginfo() / 1024 ** 2)
+            % (mypungi.size_debuginfo() / 1024**2)
         )
     if not opts.nosource:
-        print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024 ** 2))
+        print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
 
     # Furthermore (but without the yumlock...)
     if not opts.sourceisos:
@@ -18,13 +18,18 @@ from pungi.util import temp_dir
 def get_parser():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "--profiler", action="store_true",
+        "--profiler",
+        action="store_true",
     )
     parser.add_argument(
-        "--arch", required=True,
+        "--arch",
+        required=True,
     )
     parser.add_argument(
-        "--config", metavar="PATH", required=True, help="path to kickstart config file",
+        "--config",
+        metavar="PATH",
+        required=True,
+        help="path to kickstart config file",
     )
     parser.add_argument(
         "--download-to",
@@ -42,7 +47,9 @@ def get_parser():
 
     group = parser.add_argument_group("Gather options")
     group.add_argument(
-        "--nodeps", action="store_true", help="disable resolving dependencies",
+        "--nodeps",
+        action="store_true",
+        help="disable resolving dependencies",
     )
     group.add_argument(
         "--selfhosting",
@@ -61,7 +68,9 @@ def get_parser():
         choices=["none", "all", "build"],
     )
     group.add_argument(
-        "--multilib", metavar="[METHOD]", action="append",
+        "--multilib",
+        metavar="[METHOD]",
+        action="append",
     )
     group.add_argument(
         "--tempdir",
@@ -5,6 +5,7 @@ from __future__ import print_function
 
 import argparse
 import getpass
+import glob
 import json
 import locale
 import logging
@@ -20,6 +21,8 @@ from six.moves import shlex_quote
 
 from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
+from pungi.errors import UnsignedPackagesError
+from pungi.wrappers import kojiwrapper
 
 
 # force C locales
@@ -262,14 +265,12 @@ def main():
     # check if all requirements are met
     import pungi.checks
 
-    if not pungi.checks.check(conf):
-        sys.exit(1)
     pungi.checks.check_umask(logger)
     if not pungi.checks.check_skip_phases(
         logger, opts.skip_phase + conf.get("skip_phases", []), opts.just_phase
     ):
         sys.exit(1)
-    errors, warnings = pungi.checks.validate(conf)
+    errors, warnings = pungi.checks.validate(conf, offline=True)
 
     if not opts.quiet:
         # TODO: workaround for config files containing skip_phase = productimg
@@ -294,6 +295,9 @@ def main():
         fail_to_start("Config validation failed", errors=errors)
         sys.exit(1)
 
+    if not pungi.checks.check(conf):
+        sys.exit(1)
+
     if opts.target_dir:
         compose_dir = Compose.get_compose_dir(
             opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
@@ -325,14 +329,34 @@ def main():
             logger=logger,
             notifier=notifier,
         )
+
+    rv = Compose.update_compose_url(compose.compose_id, compose_dir, conf)
+    if rv and not rv.ok:
+        logger.error("CTS compose_url update failed with the error: %s" % rv.text)
+
+    errors, warnings = pungi.checks.validate(conf, offline=False)
+    if errors:
+        for error in errors:
+            logger.error("Config validation failed with the error: %s" % error)
+        fail_to_start("Config validation failed", errors=errors)
+        sys.exit(1)
+
     notifier.compose = compose
     COMPOSE = compose
-    run_compose(
-        compose,
-        create_latest_link=create_latest_link,
-        latest_link_status=latest_link_status,
-        latest_link_components=latest_link_components,
-    )
+    try:
+        run_compose(
+            compose,
+            create_latest_link=create_latest_link,
+            latest_link_status=latest_link_status,
+            latest_link_components=latest_link_components,
+        )
+    except UnsignedPackagesError:
+        # There was an unsigned package somewhere. It is not safe to reuse any
+        # package set from this compose (since we could leak the unsigned
+        # package). Let's make sure all reuse files are deleted.
+        for fp in glob.glob(compose.paths.work.pkgset_reuse_file("*")):
+            os.unlink(fp)
+        raise
 
 
 def run_compose(
@@ -354,6 +378,8 @@ def run_compose(
     )
     compose.log_info("Compose top directory: %s" % compose.topdir)
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
+    compose.log_info("COMPOSE_ID=%s" % compose.compose_id)
+
     compose.read_variants()
 
     # dump the config file
@@ -378,12 +404,13 @@ def run_compose(
     )
     ostree_phase = pungi.phases.OSTreePhase(compose, pkgset_phase)
     createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
-    extra_isos_phase = pungi.phases.ExtraIsosPhase(compose)
+    extra_isos_phase = pungi.phases.ExtraIsosPhase(compose, buildinstall_phase)
     liveimages_phase = pungi.phases.LiveImagesPhase(compose)
     livemedia_phase = pungi.phases.LiveMediaPhase(compose)
-    image_build_phase = pungi.phases.ImageBuildPhase(compose)
+    image_build_phase = pungi.phases.ImageBuildPhase(compose, buildinstall_phase)
     osbuild_phase = pungi.phases.OSBuildPhase(compose)
-    osbs_phase = pungi.phases.OSBSPhase(compose)
+    osbs_phase = pungi.phases.OSBSPhase(compose, pkgset_phase, buildinstall_phase)
+    image_container_phase = pungi.phases.ImageContainerPhase(compose)
     image_checksum_phase = pungi.phases.ImageChecksumPhase(compose)
     repoclosure_phase = pungi.phases.RepoclosurePhase(compose)
     test_phase = pungi.phases.TestPhase(compose)
@@ -407,6 +434,7 @@ def run_compose(
         extra_isos_phase,
         osbs_phase,
         osbuild_phase,
+        image_container_phase,
     ):
         if phase.skip():
             continue
@@ -506,9 +534,12 @@ def run_compose(
         livemedia_phase,
         osbuild_phase,
     )
+    post_image_phase = pungi.phases.WeaverPhase(
+        compose, (image_checksum_phase, image_container_phase)
+    )
    compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
    extra_phase_schema = (
-        (compose_images_phase, image_checksum_phase),
+        (compose_images_phase, post_image_phase),
        osbs_phase,
        repoclosure_phase,
    )
@@ -522,13 +553,14 @@ def run_compose(
         buildinstall_phase.skip()
         and ostree_installer_phase.skip()
         and createiso_phase.skip()
+        and extra_isos_phase.skip()
         and liveimages_phase.skip()
         and livemedia_phase.skip()
         and image_build_phase.skip()
         and osbuild_phase.skip()
     ):
         compose.im.dump(compose.paths.compose.metadata("images.json"))
-    osbs_phase.dump_metadata()
+    compose.dump_containers_metadata()
 
     test_phase.start()
     test_phase.stop()
@@ -600,9 +632,25 @@ def try_kill_children(signal):
         COMPOSE.log_warning("Failed to kill all subprocesses")
 
 
+def try_kill_koji_tasks():
+    try:
+        if COMPOSE:
+            koji_tasks_dir = COMPOSE.paths.log.koji_tasks_dir(create_dir=False)
+            if os.path.exists(koji_tasks_dir):
+                COMPOSE.log_warning("Trying to kill koji tasks")
+                koji = kojiwrapper.KojiWrapper(COMPOSE)
+                koji.login()
+                for task_id in os.listdir(koji_tasks_dir):
+                    koji.koji_proxy.cancelTask(int(task_id))
+    except Exception:
+        if COMPOSE:
+            COMPOSE.log_warning("Failed to kill koji tasks")
+
+
 def sigterm_handler(signum, frame):
     if COMPOSE:
         try_kill_children(signum)
+        try_kill_koji_tasks()
         COMPOSE.log_error("Compose run failed: signal %s" % signum)
         COMPOSE.log_error("Traceback:\n%s" % "\n".join(traceback.format_stack(frame)))
         COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
@@ -622,15 +670,10 @@ def cli_main():
         main()
     except (Exception, KeyboardInterrupt) as ex:
         if COMPOSE:
-            tb_path = COMPOSE.paths.log.log_file("global", "traceback")
             COMPOSE.log_error("Compose run failed: %s" % ex)
-            COMPOSE.log_error("Extended traceback in: %s" % tb_path)
+            COMPOSE.traceback()
             COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
             COMPOSE.write_status("DOOMED")
-            import kobo.tback
-
-            with open(tb_path, "wb") as f:
-                f.write(kobo.tback.Traceback().get_traceback())
         else:
             print("Exception: %s" % ex)
         raise
@@ -34,6 +34,7 @@ import kobo.conf
 from kobo.shortcuts import run, force_list
 from kobo.threads import WorkerThread, ThreadPool
 from productmd.common import get_major_version
+from pungi.module_util import Modulemd
 
 # Patterns that match all names of debuginfo packages
 DEBUG_PATTERNS = ["*-debuginfo", "*-debuginfo-*", "*-debugsource"]
@@ -287,8 +288,13 @@ def resolve_git_ref(repourl, ref):
     if re.match(r"^[a-f0-9]{40}$", ref):
         # This looks like a commit ID already.
         return ref
 
-    _, output = git_ls_remote(repourl, ref)
+    try:
+        _, output = git_ls_remote(repourl, ref)
+    except RuntimeError as e:
+        raise GitUrlResolveError(
+            "ref does not exist in remote repo %s with the error %s %s"
+            % (repourl, e, e.output)
+        )
 
     lines = []
     for line in output.split("\n"):
@@ -941,7 +947,7 @@ def get_repo_dicts(repos, logger=None):
 
 
 def version_generator(compose, gen):
-    """ If ``gen`` is a known generator, create a value. Otherwise return
+    """If ``gen`` is a known generator, create a value. Otherwise return
     the argument value unchanged.
     """
     if gen == "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN":
         return "%s.%s" % (compose.image_version, compose.image_release)
@@ -963,8 +969,8 @@ def version_generator(compose, gen):
 
 
 def retry(timeout=120, interval=30, wait_on=Exception):
-    """ A decorator that allows to retry a section of code until success or
+    """A decorator that allows to retry a section of code until success or
     timeout.
     """
 
     def wrapper(function):
@@ -1034,6 +1040,46 @@ def load_config(file_path, defaults={}):
     return conf
 
 
+def _read_single_module_stream(
+    file_or_string, compose=None, arch=None, build=None, is_file=True
+):
+    try:
+        mod_index = Modulemd.ModuleIndex.new()
+        if is_file:
+            mod_index.update_from_file(file_or_string, True)
+        else:
+            mod_index.update_from_string(file_or_string, True)
+        mod_names = mod_index.get_module_names()
+        emit_warning = False
+        if len(mod_names) > 1:
+            emit_warning = True
+        mod_streams = mod_index.get_module(mod_names[0]).get_all_streams()
+        if len(mod_streams) > 1:
+            emit_warning = True
+        if emit_warning and compose:
+            compose.log_warning(
+                "Multiple modules/streams for arch: %s. Build: %s. "
+                "Processing first module/stream only.",
+                arch,
+                build,
+            )
+        return mod_streams[0]
+    except (KeyError, IndexError):
+        # There is no modulemd for this arch. This could mean an arch was
+        # added to the compose after the module was built. We don't want to
+        # process this, let's skip this module.
+        if compose:
+            compose.log_info("Skipping arch: %s. Build: %s", arch, build)
+
+
+def read_single_module_stream_from_file(*args, **kwargs):
+    return _read_single_module_stream(*args, is_file=True, **kwargs)
+
+
+def read_single_module_stream_from_string(*args, **kwargs):
+    return _read_single_module_stream(*args, is_file=False, **kwargs)
+
+
 @contextlib.contextmanager
 def as_local_file(url):
     """If URL points to a file over HTTP, the file will be downloaded locally
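Note: both wrappers funnel into _read_single_module_stream, which returns the first stream found, or None when the metadata is missing (logging instead of raising, unlike the old Modulemd.ModuleStream.read_file call sites). Caller-side usage, with assumed inputs:

    from pungi.util import read_single_module_stream_from_file

    mod_stream = read_single_module_stream_from_file(
        "modulemd.x86_64.txt", compose, "x86_64", build  # hypothetical arguments
    )
    if mod_stream:  # None means the arch had no metadata; skip quietly
        variant.arch_mmds.setdefault("x86_64", {})[nsvc] = mod_stream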
@@ -1046,6 +1092,8 @@ def as_local_file(url):
             yield local_filename
         finally:
             os.remove(local_filename)
+    elif url.startswith("file://"):
+        yield url[7:]
     else:
         # Not a remote url, return unchanged.
         yield url
@@ -1083,3 +1131,9 @@ class PartialFuncThreadPool(ThreadPool):
     @property
     def results(self):
         return self._results
+
+
+def read_json_file(file_path):
+    """A helper function to read a JSON file."""
+    with open(file_path) as f:
+        return json.load(f)
@@ -177,9 +177,9 @@ class CompsFilter(object):
         for i in self.tree.xpath("//*[@xml:lang]"):
             i.getparent().remove(i)
 
-    def filter_environment_groups(self, lookaside_groups=[]):
+    def filter_environment_groups(self, arch, lookaside_groups=[]):
         """
-        Remove undefined groups from environments.
+        Remove undefined groups or groups not matching given arch from environments.
         """
         all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
         for environment in self.tree.xpath("/comps/environment"):
@@ -187,6 +187,12 @@ class CompsFilter(object):
                 if group.text not in all_groups:
                     group.getparent().remove(group)
 
+            for group in environment.xpath("grouplist/groupid[@arch]"):
+                value = group.attrib.get("arch")
+                values = [v for v in re.split(r"[, ]+", value) if v]
+                if arch not in values:
+                    group.getparent().remove(group)
+
     def remove_empty_environments(self):
         """
         Remove all environments without groups.
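Note: the new filter honors an arch attribute on environment group references, where the attribute may hold a comma- or space-separated list of arches. A hypothetical comps fragment that exercises it:

    <environment>
      <grouplist>
        <groupid>core</groupid>
        <groupid arch="x86_64,ppc64le">backup-client</groupid>
      </grouplist>
    </environment>

Filtering for s390x drops the second groupid; filtering for x86_64 keeps it.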
@@ -212,7 +218,7 @@ class CompsFilter(object):
         )
         file_obj.write(b"\n")
 
-    def cleanup(self, keep_groups=[], lookaside_groups=[]):
+    def cleanup(self, arch, keep_groups=[], lookaside_groups=[]):
         """
         Remove empty groups, categories and environment from the comps file.
         Groups given in ``keep_groups`` will be preserved even if empty.
@@ -223,7 +229,7 @@ class CompsFilter(object):
         self.remove_empty_groups(keep_groups)
         self.filter_category_groups()
         self.remove_empty_categories()
-        self.filter_environment_groups(lookaside_groups)
+        self.filter_environment_groups(arch, lookaside_groups)
         self.remove_empty_environments()
 
 
@@ -355,7 +361,10 @@ class CompsWrapper(object):
 
         if environment.option_ids:
             append_grouplist(
-                doc, env_node, set(environment.option_ids), "optionlist",
+                doc,
+                env_node,
+                set(environment.option_ids),
+                "optionlist",
             )
 
         if self.comps.langpacks:
@@ -26,7 +26,12 @@ Pungi).
 
 
 def get_cmd(
-    conf_file, arch, repos, lookasides, platform=None, filter_packages=None,
+    conf_file,
+    arch,
+    repos,
+    lookasides,
+    platform=None,
+    filter_packages=None,
 ):
     cmd = ["fus", "--verbose", "--arch", arch]
 
@@ -146,6 +146,7 @@ def get_mkisofs_cmd(
     input_charset="utf-8",
     graft_points=None,
     use_xorrisofs=False,
+    iso_level=None,
 ):
     # following options are always enabled
     untranslated_filenames = True
@@ -155,6 +156,10 @@ def get_mkisofs_cmd(
     rock = True
 
     cmd = ["/usr/bin/xorrisofs" if use_xorrisofs else "/usr/bin/genisoimage"]
+
+    if iso_level:
+        cmd.extend(["-iso-level", str(iso_level)])
+
     if appid:
         cmd.extend(["-appid", appid])
 
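Note: -iso-level picks the ISO9660 interchange level (level 4 lifts most file-name restrictions); leaving iso_level as None keeps the tool's default. A call sketch -- the positional arguments are assumptions about the wrapper's full signature, which sits outside this hunk:

    cmd = get_mkisofs_cmd(
        "Fedora.iso",        # assumed: output iso path
        ["/compose/tree"],   # assumed: input paths
        iso_level=4,         # emits: -iso-level 4
    )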
@@ -255,11 +260,21 @@ def get_isohybrid_cmd(iso_path, arch):
     return cmd
 
 
-def get_manifest_cmd(iso_name):
-    return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
-        shlex_quote(iso_name),
-        shlex_quote(iso_name),
-    )
+def get_manifest_cmd(iso_name, xorriso=False):
+    if xorriso:
+        return """xorriso -dev %s --find |
+        tail -n+2 |
+        tr -d "'" |
+        cut -c2- |
+        sort >> %s.manifest""" % (
+            shlex_quote(iso_name),
+            shlex_quote(iso_name),
+        )
+    else:
+        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
+            shlex_quote(iso_name),
+            shlex_quote(iso_name),
+        )
 
 
 def get_volume_id(path):
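Note: the xorriso branch reproduces the isoinfo manifest format from `xorriso --find` output: tail drops the leading device line, tr strips the quoting apostrophes, and cut removes the leading "." of each path before sorting. Usage sketch with a made-up image name:

    cmd = get_manifest_cmd("fedora.iso", xorriso=True)
    # xorriso -dev fedora.iso --find prints lines like  './EFI/BOOT/BOOTX64.EFI'
    # after tail/tr/cut each becomes  /EFI/BOOT/BOOTX64.EFI, appended sorted
    # to fedora.iso.manifest -- the same shape the isoinfo branch produces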
@@ -25,7 +25,7 @@ class JigdoWrapper(kobo.log.LoggingBase):
         self, image, files, output_dir, cache=None, no_servers=False, report=None
     ):
         """
         files: [{"path", "label", "uri"}]
         """
         cmd = ["jigdo-file", "make-template"]
 
@@ -36,10 +36,14 @@ KOJI_BUILD_DELETED = koji.BUILD_STATES["DELETED"]
 class KojiWrapper(object):
     lock = threading.Lock()
 
-    def __init__(self, profile):
-        self.profile = profile
+    def __init__(self, compose):
+        self.compose = compose
+        try:
+            self.profile = self.compose.conf["koji_profile"]
+        except KeyError:
+            raise RuntimeError("Koji profile must be configured")
         with self.lock:
-            self.koji_module = koji.get_profile_module(profile)
+            self.koji_module = koji.get_profile_module(self.profile)
             session_opts = {}
             for key in (
                 "timeout",
@@ -61,6 +65,9 @@ class KojiWrapper(object):
                 self.koji_module.config.server, session_opts
             )
 
+    # This retry should be removed once https://pagure.io/koji/issue/3170 is
+    # fixed and released.
+    @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
     def login(self):
         """Authenticate to the hub."""
         auth_type = self.koji_module.config.authtype
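Note: util.retry (its defaults were shown earlier: timeout=120, interval=30) re-invokes the wrapped callable whenever one of the wait_on exceptions escapes, so a transient hub error during login no longer kills the compose outright. The same pattern in isolation, with an illustrative exception type:

    from pungi import util

    @util.retry(timeout=120, interval=30, wait_on=(ConnectionError,))
    def login():
        ...  # a raised ConnectionError is retried every 30s, up to 120s total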
@@ -111,8 +118,6 @@ class KojiWrapper(object):
 
         if channel:
             cmd.append("--channel-override=%s" % channel)
-        else:
-            cmd.append("--channel-override=runroot-local")
 
         if weight:
             cmd.append("--weight=%s" % int(weight))
@@ -142,10 +147,13 @@ class KojiWrapper(object):
 
         if chown_paths:
             paths = " ".join(shlex_quote(pth) for pth in chown_paths)
+            command += " ; EXIT_CODE=$?"
             # Make the files world readable
-            command += " && chmod -R a+r %s" % paths
+            command += " ; chmod -R a+r %s" % paths
             # and owned by the same user that is running the process
-            command += " && chown -R %d %s" % (os.getuid(), paths)
+            command += " ; chown -R %d %s" % (os.getuid(), paths)
+            # Exit with code of main command
+            command += " ; exit $EXIT_CODE"
         cmd.append(command)
 
         return cmd
@@ -165,8 +173,6 @@ class KojiWrapper(object):
 
         if channel:
             cmd.append("--channel-override=%s" % channel)
-        else:
-            cmd.append("--channel-override=runroot-local")
 
         if weight:
             cmd.append("--weight=%s" % int(weight))
@@ -202,14 +208,19 @@ class KojiWrapper(object):
         return cmd
 
     def get_pungi_ostree_cmd(
-        self, target, arch, args, channel=None, packages=None, mounts=None, weight=None,
+        self,
+        target,
+        arch,
+        args,
+        channel=None,
+        packages=None,
+        mounts=None,
+        weight=None,
     ):
         cmd = self._get_cmd("pungi-ostree", "--nowait", "--task-id")
 
         if channel:
             cmd.append("--channel-override=%s" % channel)
-        else:
-            cmd.append("--channel-override=runroot-local")
 
         if weight:
             cmd.append("--weight=%s" % int(weight))
@@ -280,15 +291,22 @@ class KojiWrapper(object):
             universal_newlines=True,
         )
 
-        first_line = output.splitlines()[0]
-        match = re.search(r"^(\d+)$", first_line)
-        if not match:
+        # Look for first line that contains only a number. This is the ID of
+        # the new task. Usually this should be the first line, but there may be
+        # warnings before it.
+        for line in output.splitlines():
+            match = re.search(r"^(\d+)$", line)
+            if match:
+                task_id = int(match.groups()[0])
+                break
+
+        if not task_id:
             raise RuntimeError(
                 "Could not find task ID in output. Command '%s' returned '%s'."
                 % (" ".join(command), output)
             )
 
-        task_id = int(match.groups()[0])
+        self.save_task_id(task_id)
 
         retcode, output = self._wait_for_task(task_id, logfile=log_file)
 
@ -322,9 +340,11 @@ class KojiWrapper(object):
|
|||||||
"ksurl",
|
"ksurl",
|
||||||
"distro",
|
"distro",
|
||||||
)
|
)
|
||||||
assert set(min_options).issubset(set(config_options["image-build"].keys())), (
|
assert set(min_options).issubset(
|
||||||
"image-build requires at least %s got '%s'"
|
set(config_options["image-build"].keys())
|
||||||
% (", ".join(min_options), config_options)
|
), "image-build requires at least %s got '%s'" % (
|
||||||
|
", ".join(min_options),
|
||||||
|
config_options,
|
||||||
)
|
)
|
||||||
cfg_parser = configparser.ConfigParser()
|
cfg_parser = configparser.ConfigParser()
|
||||||
for section, opts in config_options.items():
|
for section, opts in config_options.items():
|
||||||
@ -379,6 +399,9 @@ class KojiWrapper(object):
|
|||||||
if "can_fail" in options:
|
if "can_fail" in options:
|
||||||
cmd.append("--can-fail=%s" % ",".join(options["can_fail"]))
|
cmd.append("--can-fail=%s" % ",".join(options["can_fail"]))
|
||||||
|
|
||||||
|
if options.get("nomacboot"):
|
||||||
|
cmd.append("--nomacboot")
|
||||||
|
|
||||||
if wait:
|
if wait:
|
||||||
cmd.append("--wait")
|
cmd.append("--wait")
|
||||||
|
|
||||||
@ -516,6 +539,7 @@ class KojiWrapper(object):
|
|||||||
retcode, output = run(
|
retcode, output = run(
|
||||||
command,
|
command,
|
||||||
can_fail=True,
|
can_fail=True,
|
||||||
|
show_cmd=True,
|
||||||
logfile=log_file,
|
logfile=log_file,
|
||||||
env=env,
|
env=env,
|
||||||
buffer_size=-1,
|
buffer_size=-1,
|
||||||
@ -530,6 +554,8 @@ class KojiWrapper(object):
|
|||||||
)
|
)
|
||||||
task_id = int(match.groups()[0])
|
task_id = int(match.groups()[0])
|
||||||
|
|
||||||
|
self.save_task_id(task_id)
|
||||||
|
|
||||||
if retcode != 0 and (
|
if retcode != 0 and (
|
||||||
self._has_connection_error(output) or self._has_offline_error(output)
|
self._has_connection_error(output) or self._has_offline_error(output)
|
||||||
):
|
):
|
||||||
@ -544,6 +570,19 @@ class KojiWrapper(object):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def watch_task(self, task_id, log_file=None, max_retries=None):
|
def watch_task(self, task_id, log_file=None, max_retries=None):
|
||||||
|
"""Watch and wait for a task to finish.
|
||||||
|
|
||||||
|
:param int task_id: ID of koji task.
|
||||||
|
:param str log_file: Path to log file.
|
||||||
|
:param int max_retries: Max times to retry when error occurs,
|
||||||
|
no limits by default.
|
||||||
|
"""
|
||||||
|
if log_file:
|
||||||
|
task_url = os.path.join(
|
||||||
|
self.koji_module.config.weburl, "taskinfo?taskID=%d" % task_id
|
||||||
|
)
|
||||||
|
with open(log_file, "a") as f:
|
||||||
|
f.write("Task URL: %s\n" % task_url)
|
||||||
retcode, _ = self._wait_for_task(
|
retcode, _ = self._wait_for_task(
|
||||||
task_id, logfile=log_file, max_retries=max_retries
|
task_id, logfile=log_file, max_retries=max_retries
|
||||||
)
|
)
|
||||||
@ -815,13 +854,22 @@ class KojiWrapper(object):
|
|||||||
"""
|
"""
|
||||||
return self.multicall_map(*args, **kwargs)
|
return self.multicall_map(*args, **kwargs)
|
||||||
|
|
||||||
|
def save_task_id(self, task_id):
|
||||||
|
"""Save task id by creating a file using task_id as file name
|
||||||
|
|
||||||
|
:param int task_id: ID of koji task
|
||||||
|
"""
|
||||||
|
log_dir = self.compose.paths.log.koji_tasks_dir()
|
||||||
|
with open(os.path.join(log_dir, str(task_id)), "w"):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def get_buildroot_rpms(compose, task_id):
|
def get_buildroot_rpms(compose, task_id):
|
||||||
"""Get build root RPMs - either from runroot or local"""
|
"""Get build root RPMs - either from runroot or local"""
|
||||||
result = []
|
result = []
|
||||||
if task_id:
|
if task_id:
|
||||||
# runroot
|
# runroot
|
||||||
koji = KojiWrapper(compose.conf["koji_profile"])
|
koji = KojiWrapper(compose)
|
||||||
buildroot_infos = koji.koji_proxy.listBuildroots(taskID=task_id)
|
buildroot_infos = koji.koji_proxy.listBuildroots(taskID=task_id)
|
||||||
if not buildroot_infos:
|
if not buildroot_infos:
|
||||||
children_tasks = koji.koji_proxy.getTaskChildren(task_id)
|
children_tasks = koji.koji_proxy.getTaskChildren(task_id)
|
||||||
|
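A note on the chown_paths hunk above: switching the separators from "&&" to ";" makes the chmod/chown cleanup run even when the main command fails, while the saved $EXIT_CODE keeps the task reporting the main command's real status. A minimal, self-contained sketch of the resulting command string, with an illustrative command and path rather than Pungi's actual values:

import os
from shlex import quote as shlex_quote  # the wrapper gets this via six.moves

command = "run-something --that --may --fail"   # illustrative main command
chown_paths = ["/mnt/compose/output dir"]       # illustrative path
paths = " ".join(shlex_quote(pth) for pth in chown_paths)
command += " ; EXIT_CODE=$?"                    # remember the real status first
# chmod/chown now run unconditionally, so files stay readable even on failure
command += " ; chmod -R a+r %s" % paths
command += " ; chown -R %d %s" % (os.getuid(), paths)
command += " ; exit $EXIT_CODE"                 # report the main command's status
print(command)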
@@ -40,9 +40,13 @@ def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
     # There are options that are not exposed here, because we don't need
     # them.
 
-    for i in force_list(arch or []):
+    arches = force_list(arch or [])
+    for i in arches:
         cmd.append("--arch=%s" % i)
 
+    if backend == "dnf" and arches:
+        cmd.append("--forcearch=%s" % arches[0])
+
     repos = repos or {}
     for repo_id, repo_path in repos.items():
         cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
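The hunk above collects the requested arches once, passes each as --arch, and for the dnf backend additionally pins --forcearch to the first one. A small sketch of that argument handling, with illustrative inputs (Pungi's force_list is approximated by a plain list):

def arch_options(backend, arch=None):
    arches = [arch] if isinstance(arch, str) else list(arch or [])
    opts = ["--arch=%s" % a for a in arches]
    if backend == "dnf" and arches:
        # dnf repoclosure needs --forcearch when resolving a foreign arch
        opts.append("--forcearch=%s" % arches[0])
    return opts

print(arch_options("dnf", ["ppc64le", "noarch"]))
# -> ['--arch=ppc64le', '--arch=noarch', '--forcearch=ppc64le']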
@@ -265,11 +265,7 @@ class RpmScmWrapper(ScmBase):
 class KojiScmWrapper(ScmBase):
     def __init__(self, *args, **kwargs):
         super(KojiScmWrapper, self).__init__(*args, **kwargs)
-        try:
-            profile = kwargs["compose"].conf["koji_profile"]
-        except KeyError:
-            raise RuntimeError("Koji profile must be configured")
-        wrapper = KojiWrapper(profile)
+        wrapper = KojiWrapper(kwargs["compose"])
         self.koji = wrapper.koji_module
         self.proxy = wrapper.koji_proxy
 
@@ -302,8 +302,7 @@ def block_on(parts, name):
 
 
 def check_finished_processes(processes):
-    """Walk through all active processes and check if something finished.
-    """
+    """Walk through all active processes and check if something finished."""
     for proc in processes.keys():
         proc.poll()
         if proc.returncode is not None:
setup.py (2 lines changed)
@@ -25,7 +25,7 @@ packages = sorted(packages)
 
 setup(
     name="pungi",
-    version="4.2.7",
+    version="4.3.6",
     description="Distribution compose tool",
     url="https://pagure.io/pungi",
     author="Dennis Gilmore",
tests/Dockerfile-test (new file, 24 lines)
@@ -0,0 +1,24 @@
+FROM fedora:33
+LABEL \
+    name="Pungi test" \
+    description="Run tests using tox with Python 3" \
+    vendor="Pungi developers" \
+    license="MIT"
+
+RUN dnf -y update && dnf -y install \
+    findutils \
+    libmodulemd \
+    git \
+    koji \
+    make \
+    python3-createrepo_c \
+    python3-gobject-base \
+    python3-tox \
+    python3-urlgrabber \
+    && dnf clean all
+
+WORKDIR /src
+
+COPY . .
+
+CMD ["tox", "-e", "flake8,black,py3"]
tests/Dockerfile-test-py2 (new file, 27 lines)
@@ -0,0 +1,27 @@
+FROM centos:7
+LABEL \
+    name="Pungi test" \
+    description="Run tests using tox with Python 2" \
+    vendor="Pungi developers" \
+    license="MIT"
+
+RUN yum -y update && yum -y install epel-release && yum -y install \
+    git \
+    libmodulemd2 \
+    make \
+    python3 \
+    python-createrepo_c \
+    python-gobject-base \
+    python-gssapi \
+    python-libcomps \
+    pykickstart \
+    && yum clean all
+
+# python-tox in yum repo is too old, let's install latest version
+RUN pip3 install tox
+
+WORKDIR /src
+
+COPY . .
+
+CMD ["tox", "-e", "py27"]
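(Both images expect the source tree mounted at /src; the Jenkinsfile below runs them that way, for example "podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit".)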
tests/Jenkinsfile (new file, vendored, 59 lines)
@@ -0,0 +1,59 @@
+def DUFFY_SESSION_ID
+
+pipeline {
+    agent {
+        label 'cico-workspace'
+    }
+
+    parameters {
+        string(name: 'REPO', defaultValue: '', description: 'Git repo URL where the pull request from')
+        string(name: 'BRANCH', defaultValue: '', description: 'Git branch where the pull request from')
+    }
+
+    stages {
+        stage('CI') {
+            steps {
+                script {
+                    if (params.REPO == "" || params.BRANCH == "") {
+                        error "Please supply both params (REPO and BRANCH)"
+                    }
+                    try {
+                        echo "Requesting duffy node ..."
+                        def session_str = sh returnStdout: true, script: "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY request-session pool=virt-ec2-t2-centos-9s-x86_64,quantity=1"
+                        def session = readJSON text: session_str
+                        DUFFY_SESSION_ID = session.session.id
+                        def hostname = session.session.nodes[0].hostname
+                        echo "duffy session id: $DUFFY_SESSION_ID hostname: $hostname"
+                        def remote_dir = "/tmp/$JENKINS_AGENT_NAME"
+                        echo "remote_dir: $remote_dir"
+                        writeFile file: 'job.sh', text: """
+set -xe
+dnf install -y git podman
+git config --global user.email "jenkins@localhost"
+git config --global user.name "jenkins"
+cd $remote_dir
+git clone https://pagure.io/pungi.git -b master
+cd pungi
+git remote rm proposed || true
+git remote add proposed "$params.REPO"
+git fetch proposed
+git checkout origin/master
+git merge --no-ff "proposed/$params.BRANCH" -m "Merge PR"
+podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit
+podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test-py2 tox -r -e py27
+"""
+                        sh "cat job.sh"
+                        sh "ssh -o StrictHostKeyChecking=no root@$hostname mkdir $remote_dir"
+                        sh "scp job.sh root@$hostname:$remote_dir"
+                        sh "ssh root@$hostname sh $remote_dir/job.sh"
+                    } finally {
+                        if (DUFFY_SESSION_ID) {
+                            echo "Release duffy node ..."
+                            sh "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY retire-session $DUFFY_SESSION_ID > /dev/null"
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
@@ -118,7 +118,7 @@
     <display_order>10</display_order>
     <grouplist>
       <groupid>core</groupid>
-      <groupid>standard</groupid>
+      <groupid arch="x86_64">standard</groupid>
       <groupid>basic-desktop</groupid>
     </grouplist>
     <optionlist>
@@ -110,4 +110,8 @@ extra_isos = {
     }]
 }
 
-create_jigdo = False
+iso_level = [
+    (".*", {
+        "src": 3,
+    }),
+]
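(This fixture swap tracks the jigdo removal seen elsewhere in this commit: the old create_jigdo flag is gone, and iso_level here uses Pungi's usual variant/arch mapping form, a list of (variant regex, {arch: value}) pairs, so (".*", {"src": 3}) presumably requests ISO9660 level 3 for source images in every variant.)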
tests/fixtures/invalid-image-metadata/compose/metadata/images.json (new file, vendored, 58 lines)
@@ -0,0 +1,58 @@
+{
+    "header": {
+        "type": "productmd.images",
+        "version": "1.2"
+    },
+    "payload": {
+        "compose": {
+            "date": "20181001",
+            "id": "Mixed-1.0-20181001.n.0",
+            "respin": 0,
+            "type": "nightly"
+        },
+        "images": {
+            "Server": {
+                "x86_64": [
+                    {
+                        "arch": "x86_64",
+                        "bootable": false,
+                        "checksums": {
+                            "md5": "c7977d67f6522bce7fb04c0818a3c744",
+                            "sha1": "c7d65673b2eb477016f9e09f321935bace545515",
+                            "sha256": "6d9cfc9be59cba96763dcca5d1b5759127d2f7920055b663dbcf29474bc368de"
+                        },
+                        "disc_count": 1,
+                        "disc_number": 1,
+                        "format": "iso",
+                        "implant_md5": "340b7dc15b9c74b8576b81c3b33fc3f2",
+                        "mtime": 1636012560,
+                        "path": "Server-Gluster/x86_64/iso/Gluster-2.3-DP-1-20211104.t.4-Server-x86_64-dvd1.iso",
+                        "size": 419840,
+                        "subvariant": "Server-Gluster",
+                        "type": "dvd",
+                        "volume_id": "Gluster-2.3 DP-1 Server.x86_64"
+                    },
+                    {
+                        "arch": "x86_64",
+                        "bootable": false,
+                        "checksums": {
+                            "md5": "a7977d67f6522bce7fb04c0818a3c744",
+                            "sha1": "a7d65673b2eb477016f9e09f321935bace545515",
+                            "sha256": "ad9cfc9be59cba96763dcca5d1b5759127d2f7920055b663dbcf29474bc368de"
+                        },
+                        "disc_count": 1,
+                        "disc_number": 1,
+                        "format": "iso",
+                        "implant_md5": "340b7dc15b9c74b8576b81c3b33fc3f2",
+                        "mtime": 1636012560,
+                        "path": "Server-Gluster/x86_64/iso/Gluster-2.3-DP-1-20211104.t.4-Server-x86_64-dvd1.iso",
+                        "size": 419840,
+                        "subvariant": "Server-Gluster",
+                        "type": "dvd",
+                        "volume_id": "Gluster-2.3 DP-1 Server.x86_64"
+                    }
+                ]
+            }
+        }
+    }
+}
tests/fixtures/mmds/m1.x86_64.txt (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+  name: m1
+  stream: latest
+  version: 20190101
+  context: cafe
+  arch: x86_64
+  summary: Dummy module
+  description: Dummy module
+  license:
+    module:
+    - Beerware
+    content:
+    - Beerware
+  artifacts:
+    rpms:
+    - foobar-0:1.0-1.noarch
+...
tests/fixtures/mmds/modulemd.armv7hl.txt (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+  name: module
+  stream: master
+  version: 20190318
+  context: abcdef
+  arch: armhfp
+  summary: Dummy module
+  description: Dummy module
+  license:
+    module:
+    - Beerware
+    content:
+    - Beerware
+  artifacts:
+    rpms:
+    - foobar-0:1.0-1.noarch
+...
tests/fixtures/mmds/modulemd.x86_64.txt (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+  name: module
+  stream: master
+  version: 20190318
+  context: abcdef
+  arch: x86_64
+  summary: Dummy module
+  description: Dummy module
+  license:
+    module:
+    - Beerware
+    content:
+    - Beerware
+  artifacts:
+    rpms:
+    - foobar-0:1.0-1.noarch
+...
tests/fixtures/mmds/scratch-module.x86_64.txt (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
+---
+document: modulemd
+version: 2
+data:
+  name: scratch-module
+  stream: master
+  version: 20200710
+  context: abcdef
+  arch: x86_64
+  summary: Dummy module
+  description: Dummy module
+  license:
+    module:
+    - Beerware
+    content:
+    - Beerware
+  artifacts:
+    rpms:
+    - foobar-0:1.0-1.noarch
+...
@@ -215,7 +215,10 @@ class DummyCompose(object):
         self.log_warning = mock.Mock()
         self.get_image_name = mock.Mock(return_value="image-name")
         self.image = mock.Mock(
-            path="Client/i386/iso/image.iso", can_fail=False, size=123, _max_size=None,
+            path="Client/i386/iso/image.iso",
+            can_fail=False,
+            size=123,
+            _max_size=None,
         )
         self.im = mock.Mock(images={"Client": {"amd64": [self.image]}})
         self.old_composes = []
@@ -226,6 +229,8 @@ class DummyCompose(object):
         self.require_deliverable = mock.Mock()
         self.should_create_yum_database = True
         self.cache_region = None
+        self.containers_metadata = {}
+        self.load_old_compose_config = mock.Mock(return_value=None)
 
     def setup_optional(self):
         self.all_variants["Server-optional"] = MockVariant(
@@ -301,7 +306,10 @@ def mk_boom(cls=Exception, msg="BOOM"):
     return b
 
 
-PKGSET_REPOS = dict(pkgset_source="repos", pkgset_repos={},)
+PKGSET_REPOS = dict(
+    pkgset_source="repos",
+    pkgset_repos={},
+)
 
 BASE_CONFIG = dict(
     release_short="test",
@@ -1920,7 +1920,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
     )
     def test_reuse_old_buildinstall_result_no_old_compose(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = None
@@ -1935,7 +1936,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
     )
     def test_reuse_old_buildinstall_result_different_cmd(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
 
@@ -1958,7 +1960,8 @@ class BuildinstallThreadTestCase(PungiTestCase):
         "pungi.phases.buildinstall.BuildinstallThread._load_old_buildinstall_metadata"
     )
     def test_reuse_old_buildinstall_result_different_installed_pkgs(
-        self, load_old_buildinstall_metadata,
+        self,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = {
@@ -1978,7 +1981,9 @@ class BuildinstallThreadTestCase(PungiTestCase):
     )
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     def test_reuse_old_buildinstall_result_different_buildroot_rpms(
-        self, KojiWrapperMock, load_old_buildinstall_metadata,
+        self,
+        KojiWrapperMock,
+        load_old_buildinstall_metadata,
     ):
         compose, pkgset_phase, cmd = self._prepare_buildinstall_reuse_test()
         load_old_buildinstall_metadata.return_value = {
@@ -2156,7 +2161,7 @@ class TestTweakConfigs(PungiTestCase):
         )
         for cfg in configs:
             self.assertFileContent(
-                cfg, ":LABEL=new\\x20volid ks=hd:LABEL=new\\x20volid:/ks.cfg\n"
+                cfg, ":LABEL=new\\x20volid inst.ks=hd:LABEL=new\\x20volid:/ks.cfg\n"
             )
 
     def test_tweak_configs_yaboot(self):
@@ -2168,5 +2173,5 @@ class TestTweakConfigs(PungiTestCase):
         tweak_configs(self.topdir, "new volid", os.path.join(self.topdir, "ks.cfg"))
         for cfg in configs:
             self.assertFileContent(
-                cfg, ":LABEL=new\\\\x20volid ks=hd:LABEL=new\\\\x20volid:/ks.cfg\n"
+                cfg, ":LABEL=new\\\\x20volid inst.ks=hd:LABEL=new\\\\x20volid:/ks.cfg\n"
             )
@@ -147,7 +147,7 @@ class CheckDependenciesTestCase(unittest.TestCase):
         with mock.patch("sys.stdout", new_callable=StringIO) as out:
             with mock.patch("os.path.exists") as exists:
                 exists.side_effect = self.dont_find(["/usr/bin/createrepo_c"])
-                result = checks.check({})
+                result = checks.check({"createrepo_c": True})
 
         self.assertIn("createrepo_c", out.getvalue())
         self.assertFalse(result)
@@ -13,7 +13,9 @@ import tempfile
 import shutil
 import json
 
-from pungi.compose import Compose
+from requests.exceptions import HTTPError
+
+from pungi.compose import Compose, retry_request
 
 
 class ConfigWrapper(dict):
@@ -608,8 +610,9 @@ class ComposeTestCase(unittest.TestCase):
         ci_json = json.loads(ci.dumps())
         self.assertEqual(ci_json, self.ci_json)
 
+    @mock.patch("pungi.compose.requests")
     @mock.patch("time.strftime", new=lambda fmt, time: "20200526")
-    def test_get_compose_info_cts(self):
+    def test_get_compose_info_cts(self, mocked_requests):
         conf = ConfigWrapper(
             release_name="Test",
             release_version="1.0",
@@ -626,7 +629,6 @@ class ComposeTestCase(unittest.TestCase):
         ci_copy["header"]["version"] = "1.2"
         mocked_response = mock.MagicMock()
         mocked_response.text = json.dumps(self.ci_json)
-        mocked_requests = mock.MagicMock()
         mocked_requests.post.return_value = mocked_response
 
         mocked_requests_kerberos = mock.MagicMock()
@@ -637,7 +639,6 @@ class ComposeTestCase(unittest.TestCase):
         # `import`.
         with mock.patch.dict(
             "sys.modules",
-            requests=mocked_requests,
             requests_kerberos=mocked_requests_kerberos,
         ):
             ci = Compose.get_compose_info(conf, respin_of="Fedora-Rawhide-20200517.n.1")
@@ -753,3 +754,76 @@ class StatusTest(unittest.TestCase):
         self.compose.conf["gather_backend"] = "yum"
         self.compose.conf["createrepo_database"] = False
         self.assertFalse(self.compose.should_create_yum_database)
+
+
+class DumpContainerMetadataTest(unittest.TestCase):
+    def setUp(self):
+        self.tmp_dir = tempfile.mkdtemp()
+        with mock.patch("pungi.compose.ComposeInfo"):
+            self.compose = Compose({}, self.tmp_dir)
+
+    def tearDown(self):
+        shutil.rmtree(self.tmp_dir)
+
+    def test_dump_metadata(self):
+        metadata = {"Server": {"x86_64": "Metadata"}}
+        self.compose.containers_metadata = metadata
+        self.compose.dump_containers_metadata()
+
+        with open(self.tmp_dir + "/compose/metadata/osbs.json") as f:
+            data = json.load(f)
+        self.assertEqual(data, metadata)
+
+    @mock.patch("pungi.phases.osbs.ThreadPool")
+    def test_dump_empty_metadata(self, ThreadPool):
+        self.compose.dump_containers_metadata()
+        self.assertFalse(os.path.isfile(self.tmp_dir + "/compose/metadata/osbs.json"))
+
+
+class TracebackTest(unittest.TestCase):
+    def setUp(self):
+        self.tmp_dir = tempfile.mkdtemp()
+        with mock.patch("pungi.compose.ComposeInfo"):
+            self.compose = Compose({}, self.tmp_dir)
+        self.patcher = mock.patch("kobo.tback.Traceback")
+        self.Traceback = self.patcher.start()
+        self.Traceback.return_value.get_traceback.return_value = b"traceback"
+
+    def tearDown(self):
+        shutil.rmtree(self.tmp_dir)
+        self.patcher.stop()
+
+    def assertTraceback(self, filename):
+        self.assertTrue(
+            os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
+        )
+        self.assertEqual(
+            self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
+        )
+
+    def test_traceback_default(self):
+        self.compose.traceback()
+        self.assertTraceback("traceback")
+
+    def test_with_detail(self):
+        self.compose.traceback("extra-info")
+        self.assertTraceback("traceback-extra-info")
+
+
+class RetryRequestTest(unittest.TestCase):
+    @mock.patch("pungi.compose.requests")
+    def test_retry_timeout(self, mocked_requests):
+        mocked_requests.post.side_effect = [
+            HTTPError("Gateway Timeout", response=mock.Mock(status_code=504)),
+            mock.Mock(status_code=200),
+        ]
+        url = "http://locahost/api/1/composes/"
+        rv = retry_request("post", url)
+        self.assertEqual(
+            mocked_requests.mock_calls,
+            [
+                mock.call.post(url, json=None, auth=None),
+                mock.call.post(url, json=None, auth=None),
+            ],
+        )
+        self.assertEqual(rv.status_code, 200)
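RetryRequestTest above pins down the behaviour of the new pungi.compose.retry_request helper: a failed POST (here a 504 gateway timeout) is retried with the same arguments until it succeeds. A hedged sketch of that behaviour, not the actual implementation, which may cap retries and inspect status codes differently:

import time
import requests
from requests.exceptions import HTTPError

def retry_request(method, url, json_data=None, auth=None, attempts=5, delay=30):
    for attempt in range(attempts):
        try:
            response = getattr(requests, method)(url, json=json_data, auth=auth)
            response.raise_for_status()
            return response
        except HTTPError:
            if attempt == attempts - 1:
                raise          # give up after the last attempt
            time.sleep(delay)  # back off before retrying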
@@ -196,22 +196,22 @@ class CompsFilterTest(unittest.TestCase):
         self.assertOutput(os.path.join(FIXTURE_DIR, "comps-removed-environments.xml"))
 
     def test_cleanup(self):
-        self.filter.cleanup()
+        self.filter.cleanup("ppc64le")
         self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup.xml"))
 
     def test_cleanup_after_filter(self):
         self.filter.filter_packages("ppc64le", None)
-        self.filter.cleanup()
+        self.filter.cleanup("ppc64le")
         self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-filter.xml"))
 
     def test_cleanup_after_filter_keep_group(self):
         self.filter.filter_packages("ppc64le", None)
-        self.filter.cleanup(["standard"])
+        self.filter.cleanup("ppc64le", ["standard"])
         self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-keep.xml"))
 
     def test_cleanup_all(self):
         self.filter.filter_packages("ppc64le", None)
         self.filter.filter_groups("ppc64le", None)
         self.filter.filter_environments("ppc64le", None)
-        self.filter.cleanup()
+        self.filter.cleanup("ppc64le")
         self.assertOutput(os.path.join(FIXTURE_DIR, "comps-cleanup-all.xml"))
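The cleanup() signature change above threads the architecture through, which lets arch-conditional comps elements (like the groupid arch="x86_64" entry in the comps fixture earlier) be resolved during cleanup. Assuming the same CompsFilter API these tests use, with a hypothetical input file, the call shape is:

from pungi.wrappers.comps import CompsFilter

f = CompsFilter("comps.xml", reindent=True)  # hypothetical construction
f.filter_packages("ppc64le", None)
f.cleanup("ppc64le", ["standard"])  # arch first, then optional groups to keep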
@@ -22,7 +22,9 @@ class ConfigTestCase(unittest.TestCase):
 
 class PkgsetConfigTestCase(ConfigTestCase):
     def test_validate_minimal_pkgset_koji(self):
-        cfg = load_config(pkgset_source="koji",)
+        cfg = load_config(
+            pkgset_source="koji",
+        )
 
         self.assertValidation(cfg)
 
@@ -36,7 +38,9 @@ class PkgsetConfigTestCase(ConfigTestCase):
 
     def test_pkgset_mismatch_repos(self):
         cfg = load_config(
-            pkgset_source="repos", pkgset_koji_tag="f25", pkgset_koji_inherit=False,
+            pkgset_source="repos",
+            pkgset_koji_tag="f25",
+            pkgset_koji_inherit=False,
         )
 
         self.assertValidation(
@@ -51,7 +55,10 @@ class PkgsetConfigTestCase(ConfigTestCase):
         )
 
     def test_pkgset_mismatch_koji(self):
-        cfg = load_config(pkgset_source="koji", pkgset_repos={"whatever": "/foo"},)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_repos={"whatever": "/foo"},
+        )
 
         self.assertValidation(
             cfg, [checks.CONFLICTS.format("pkgset_source", "koji", "pkgset_repos")]
@@ -78,7 +85,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
 
     def test_only_config_base_product_name(self):
-        cfg = load_config(PKGSET_REPOS, base_product_name="Prod",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_name="Prod",
+        )
 
         self.assertValidation(
             cfg,
@@ -99,7 +109,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
 
     def test_only_config_base_product_short(self):
-        cfg = load_config(PKGSET_REPOS, base_product_short="bp",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_short="bp",
+        )
 
         self.assertValidation(
             cfg,
@@ -118,7 +131,10 @@ class ReleaseConfigTestCase(ConfigTestCase):
         )
 
     def test_only_config_base_product_version(self):
-        cfg = load_config(PKGSET_REPOS, base_product_version="1.0",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            base_product_version="1.0",
+        )
 
         self.assertValidation(
             cfg,
@@ -141,19 +157,28 @@ class ReleaseConfigTestCase(ConfigTestCase):
 
 class ImageNameConfigTestCase(ConfigTestCase):
     def test_image_name_simple_string(self):
-        cfg = load_config(PKGSET_REPOS, image_name_format="foobar",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            image_name_format="foobar",
+        )
 
         self.assertValidation(cfg, [])
 
     def test_image_name_variant_mapping(self):
-        cfg = load_config(PKGSET_REPOS, image_name_format={"^Server$": "foobar"},)
+        cfg = load_config(
+            PKGSET_REPOS,
+            image_name_format={"^Server$": "foobar"},
+        )
 
         self.assertValidation(cfg, [])
 
 
 class RunrootConfigTestCase(ConfigTestCase):
     def test_set_runroot_true(self):
-        cfg = load_config(PKGSET_REPOS, runroot=True,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            runroot=True,
+        )
 
         self.assertValidation(
             cfg,
@@ -163,7 +188,10 @@ class RunrootConfigTestCase(ConfigTestCase):
         )
 
     def test_set_runroot_false(self):
-        cfg = load_config(PKGSET_REPOS, runroot=False,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            runroot=False,
+        )
 
         self.assertValidation(
             cfg,
@@ -175,7 +203,10 @@ class RunrootConfigTestCase(ConfigTestCase):
 
 class BuildinstallConfigTestCase(ConfigTestCase):
     def test_bootable_deprecated(self):
-        cfg = load_config(PKGSET_REPOS, bootable=True,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            bootable=True,
+        )
 
         self.assertValidation(
             cfg,
@@ -185,7 +216,10 @@ class BuildinstallConfigTestCase(ConfigTestCase):
         )
 
     def test_buildinstall_method_without_bootable(self):
-        cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax",)
+        cfg = load_config(
+            PKGSET_REPOS,
+            buildinstall_method="lorax",
+        )
 
         self.assertValidation(cfg, [])
 
@@ -231,7 +265,9 @@ class BuildinstallConfigTestCase(ConfigTestCase):
 class CreaterepoConfigTestCase(ConfigTestCase):
     def test_validate_minimal_pkgset_koji(self):
         cfg = load_config(
-            pkgset_source="koji", pkgset_koji_tag="f25", product_id_allow_missing=True,
+            pkgset_source="koji",
+            pkgset_koji_tag="f25",
+            product_id_allow_missing=True,
         )
 
         self.assertValidation(
@@ -242,14 +278,20 @@ class CreaterepoConfigTestCase(ConfigTestCase):
 
 class GatherConfigTestCase(ConfigTestCase):
     def test_dnf_backend_is_default_on_py3(self):
-        cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+        )
 
         with mock.patch("six.PY2", new=False):
             self.assertValidation(cfg, [])
         self.assertEqual(cfg["gather_backend"], "dnf")
 
     def test_yum_backend_is_default_on_py2(self):
-        cfg = load_config(pkgset_source="koji", pkgset_koji_tag="f27",)
+        cfg = load_config(
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+        )
 
         with mock.patch("six.PY2", new=True):
             self.assertValidation(cfg, [])
@@ -257,7 +299,9 @@ class GatherConfigTestCase(ConfigTestCase):
 
     def test_yum_backend_is_rejected_on_py3(self):
         cfg = load_config(
-            pkgset_source="koji", pkgset_koji_tag="f27", gather_backend="yum",
+            pkgset_source="koji",
+            pkgset_koji_tag="f27",
+            gather_backend="yum",
         )
 
         with mock.patch("six.PY2", new=False):
@@ -402,7 +446,10 @@ class LiveMediaConfigTestCase(ConfigTestCase):
         self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
 
     def test_global_config_null_release(self):
-        cfg = load_config(PKGSET_REPOS, live_media_release=None,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            live_media_release=None,
+        )
 
         self.assertValidation(cfg)
 
@@ -429,7 +476,8 @@ class TestRegexValidation(ConfigTestCase):
 class RepoclosureTestCase(ConfigTestCase):
     def test_invalid_backend(self):
         cfg = load_config(
-            PKGSET_REPOS, repoclosure_backend="fnd",  # Intentionally with a typo
+            PKGSET_REPOS,
+            repoclosure_backend="fnd",  # Intentionally with a typo
         )
 
         options = ["yum", "dnf"] if six.PY2 else ["dnf"]
@@ -445,7 +493,10 @@ class RepoclosureTestCase(ConfigTestCase):
 class VariantAsLookasideTestCase(ConfigTestCase):
     def test_empty(self):
         variant_as_lookaside = []
-        cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            variant_as_lookaside=variant_as_lookaside,
+        )
         self.assertValidation(cfg)
 
     def test_basic(self):
@@ -454,14 +505,20 @@ class VariantAsLookasideTestCase(ConfigTestCase):
             ("Server", "Client"),
             ("Everything", "Spin"),
         ]
-        cfg = load_config(PKGSET_REPOS, variant_as_lookaside=variant_as_lookaside,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            variant_as_lookaside=variant_as_lookaside,
+        )
         self.assertValidation(cfg)
 
 
 class SkipPhasesTestCase(ConfigTestCase):
     def test_empty(self):
         skip_phases = []
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertValidation(cfg)
 
     def test_basic(self):
@@ -469,7 +526,10 @@ class SkipPhasesTestCase(ConfigTestCase):
             "buildinstall",
             "gather",
         ]
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertValidation(cfg)
 
     def test_bad_phase_name(self):
@@ -477,5 +537,8 @@ class SkipPhasesTestCase(ConfigTestCase):
             "gather",
             "non-existing-phase_name",
         ]
-        cfg = load_config(PKGSET_REPOS, skip_phases=skip_phases,)
+        cfg = load_config(
+            PKGSET_REPOS,
+            skip_phases=skip_phases,
+        )
         self.assertNotEqual(checks.validate(cfg), ([], []))
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 
 
+import logging
 import mock
 import six
 
@@ -119,10 +120,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
                     graft_points="dummy-graft-points",
                     arch="x86_64",
                     supported=True,
-                    jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
-                    os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+                    jigdo_dir=None,
+                    os_tree=None,
                     hfs_compat=True,
                     use_xorrisofs=False,
+                    script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
                 )
             ],
         )
@@ -239,16 +241,20 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
             [
                 CreateIsoOpts(
                     output_dir="%s/compose/Server/x86_64/iso" % self.topdir,
+                    boot_iso=(
+                        "%s/compose/Server/x86_64/os/images/boot.iso" % self.topdir
+                    ),
                     iso_name="image-name",
                     volid="test-1.0 Server.x86_64",
                     graft_points="dummy-graft-points",
                     arch="x86_64",
                     buildinstall_method="lorax",
                     supported=True,
-                    jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
-                    os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+                    jigdo_dir=None,
+                    os_tree=None,
                     hfs_compat=True,
                     use_xorrisofs=False,
+                    script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
                 ),
                 CreateIsoOpts(
                     output_dir="%s/compose/Server/source/iso" % self.topdir,
@@ -257,10 +263,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
                     graft_points="dummy-graft-points",
                     arch="src",
                     supported=True,
-                    jigdo_dir="%s/compose/Server/source/jigdo" % self.topdir,
-                    os_tree="%s/compose/Server/source/tree" % self.topdir,
+                    jigdo_dir=None,
+                    os_tree=None,
                     hfs_compat=True,
                     use_xorrisofs=False,
+                    script_dir="%s/work/src/tmp-Server" % self.topdir,
                 ),
             ],
         )
@@ -389,10 +396,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
                     graft_points="dummy-graft-points",
                     arch="src",
                     supported=True,
-                    jigdo_dir="%s/compose/Server/source/jigdo" % self.topdir,
-                    os_tree="%s/compose/Server/source/tree" % self.topdir,
+                    jigdo_dir=None,
+                    os_tree=None,
                     hfs_compat=True,
                     use_xorrisofs=False,
+                    script_dir="%s/work/src/tmp-Server" % self.topdir,
                 )
             ],
         )
@@ -496,10 +504,11 @@ class CreateisoPhaseTest(helpers.PungiTestCase):
                    graft_points="dummy-graft-points",
                     arch="x86_64",
                     supported=True,
-                    jigdo_dir="%s/compose/Server/x86_64/jigdo" % self.topdir,
-                    os_tree="%s/compose/Server/x86_64/os" % self.topdir,
+                    jigdo_dir=None,
+                    os_tree=None,
                     hfs_compat=False,
                     use_xorrisofs=False,
+                    script_dir="%s/work/x86_64/tmp-Server" % self.topdir,
                 )
             ],
         )
@@ -579,7 +588,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
             cmd["cmd"],
             channel=None,
             mounts=[self.topdir],
-            packages=["coreutils", "genisoimage", "isomd5sum", "jigdo"],
+            packages=["coreutils", "genisoimage", "isomd5sum"],
             use_shell=True,
             weight=None,
         )
@@ -749,7 +758,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "coreutils",
                 "genisoimage",
                 "isomd5sum",
-                "jigdo",
                 "lorax",
                 "which",
             ],
@@ -1105,8 +1113,8 @@ class SplitIsoTest(helpers.PungiTestCase):
             os.path.join(self.topdir, "compose/Server/x86_64/os/n/media.repo")
         )
 
-        M = 1024 ** 2
-        G = 1024 ** 3
+        M = 1024**2
+        G = 1024**3
 
         with mock.patch(
             "os.path.getsize",
@@ -1157,8 +1165,8 @@ class SplitIsoTest(helpers.PungiTestCase):
             os.path.join(self.topdir, "compose/Server/x86_64/os/n/media.repo")
         )
 
-        M = 1024 ** 2
-        G = 1024 ** 3
+        M = 1024**2
+        G = 1024**3
 
         with mock.patch(
             "os.path.getsize",
@@ -1209,7 +1217,7 @@ class SplitIsoTest(helpers.PungiTestCase):
             os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
         )
 
-        M = 1024 ** 2
+        M = 1024**2
 
         # treeinfo has size 0, spacer leaves 11M of free space, so with 10M
         # reserve the padding package should be on second disk
@@ -1233,7 +1241,7 @@ class SplitIsoTest(helpers.PungiTestCase):
         )
 
     def test_can_customize_reserve(self):
-        compose = helpers.DummyCompose(self.topdir, {"split_iso_reserve": 1024 ** 2})
+        compose = helpers.DummyCompose(self.topdir, {"split_iso_reserve": 1024**2})
         helpers.touch(
             os.path.join(self.topdir, "compose/Server/x86_64/os/.treeinfo"), TREEINFO
         )
@@ -1244,7 +1252,7 @@ class SplitIsoTest(helpers.PungiTestCase):
             os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
         )
 
-        M = 1024 ** 2
+        M = 1024**2
 
         with mock.patch(
             "os.path.getsize", DummySize({"spacer": 4688465664, "pad": 5 * M})
@@ -1265,7 +1273,7 @@ class SplitIsoTest(helpers.PungiTestCase):
             os.path.join(self.topdir, "compose/Server/x86_64/os/Packages/x/pad.rpm")
         )
 
-        M = 1024 ** 2
+        M = 1024**2
 
         with mock.patch(
             "os.path.getsize", DummySize({"spacer": 4688465664, "pad": 5 * M})
@@ -1322,3 +1330,262 @@ class TweakTreeinfo(helpers.PungiTestCase):
         ti.dump(output)
 
         self.assertFilesEqual(output, expected)
+
+
+class CreateisoTryReusePhaseTest(helpers.PungiTestCase):
+    def setUp(self):
+        super(CreateisoTryReusePhaseTest, self).setUp()
+        self.logger = logging.getLogger()
+        self.logger.setLevel(logging.DEBUG)
+        self.logger.addHandler(logging.StreamHandler(os.devnull))
+
+    def test_disabled(self):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": False})
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+
+        self.assertFalse(phase.try_reuse(mock.Mock(), "Server", "x86_64", mock.Mock()))
+
+    def test_buildinstall_changed(self):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        phase.bi = mock.Mock()
+        phase.bi.reused.return_value = False
+        cmd = {"disc_num": 1, "disc_count": 1}
+        opts = CreateIsoOpts(buildinstall_method="lorax")
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    def test_no_old_config(self):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        cmd = {"disc_num": 1, "disc_count": 1}
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    def test_old_config_changed(self):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        old_config = compose.conf.copy()
+        old_config["release_version"] = "2"
+        compose.load_old_compose_config.return_value = old_config
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        cmd = {"disc_num": 1, "disc_count": 1}
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    def test_no_old_metadata(self):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        cmd = {"disc_num": 1, "disc_count": 1}
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    @mock.patch("pungi.phases.createiso.read_json_file")
+    def test_volume_id_differs(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        cmd = {"disc_num": 1, "disc_count": 1}
+
+        opts = CreateIsoOpts(volid="new-volid")
+
+        read_json_file.return_value = {"opts": {"volid": "old-volid"}}
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    @mock.patch("pungi.phases.createiso.read_json_file")
+    def test_packages_differ(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        cmd = {"disc_num": 1, "disc_count": 1}
+
+        new_graft_points = os.path.join(self.topdir, "new_graft_points")
+        helpers.touch(new_graft_points, "Packages/f/foo-1-1.x86_64.rpm\n")
+        opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+        old_graft_points = os.path.join(self.topdir, "old_graft_points")
+        helpers.touch(old_graft_points, "Packages/f/foo-1-2.x86_64.rpm\n")
+        read_json_file.return_value = {
+            "opts": {"graft_points": old_graft_points, "volid": "volid"}
+        }
+
+        self.assertFalse(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+
+    @mock.patch("pungi.phases.createiso.read_json_file")
+    def test_runs_perform_reuse(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        phase.logger = self.logger
+        phase.perform_reuse = mock.Mock()
+        cmd = {"disc_num": 1, "disc_count": 1}
+
+        new_graft_points = os.path.join(self.topdir, "new_graft_points")
+        helpers.touch(new_graft_points)
+        opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+        old_graft_points = os.path.join(self.topdir, "old_graft_points")
+        helpers.touch(old_graft_points)
+        dummy_iso_path = "dummy-iso-path"
+        read_json_file.return_value = {
+            "opts": {
+                "graft_points": old_graft_points,
+                "volid": "volid",
+            },
+            "cmd": {"iso_path": dummy_iso_path},
+        }
+
+        self.assertTrue(
+            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
+        )
+        self.assertEqual(
+            phase.perform_reuse.call_args_list,
+            [
+                mock.call(
+                    cmd,
+                    compose.variants["Server"],
+                    "x86_64",
+                    opts,
+                    dummy_iso_path,
+                )
+            ],
+        )
+
+
+@mock.patch("pungi.phases.createiso.OldFileLinker")
+@mock.patch("pungi.phases.createiso.add_iso_to_metadata")
+class CreateisoPerformReusePhaseTest(helpers.PungiTestCase):
+    def test_success(self, add_iso_to_metadata, OldFileLinker):
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        cmd = {
+            "iso_path": "target/image.iso",
+            "bootable": False,
+            "disc_num": 1,
+            "disc_count": 2,
+        }
+        opts = CreateIsoOpts()
+
+        phase.perform_reuse(
+            cmd,
+            compose.variants["Server"],
+            "x86_64",
+            opts,
+            "old/image.iso",
+        )
+
+        self.assertEqual(
+            add_iso_to_metadata.call_args_list,
+            [
+                mock.call(
+                    compose,
+                    compose.variants["Server"],
+                    "x86_64",
+                    cmd["iso_path"],
+                    bootable=False,
+                    disc_count=2,
+                    disc_num=1,
+                ),
+            ],
+        )
+        self.assertEqual(
+            OldFileLinker.return_value.mock_calls,
+            [
+                mock.call.link("old/image.iso", "target/image.iso"),
+                mock.call.link("old/image.iso.manifest", "target/image.iso.manifest"),
+                # The old log file doesn't exist in the test scenario.
+                mock.call.link(
+                    None,
+                    os.path.join(
+                        self.topdir, "logs/x86_64/createiso-image.iso.x86_64.log"
+                    ),
+                ),
+            ],
+        )
+
+    def test_failure(self, add_iso_to_metadata, OldFileLinker):
+        OldFileLinker.return_value.link.side_effect = helpers.mk_boom()
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
+        phase = createiso.CreateisoPhase(compose, mock.Mock())
+        cmd = {
+            "iso_path": "target/image.iso",
+            "bootable": False,
+            "disc_num": 1,
+            "disc_count": 2,
+        }
|
||||||
|
opts = CreateIsoOpts()
|
||||||
|
|
||||||
|
with self.assertRaises(Exception):
|
||||||
|
phase.perform_reuse(
|
||||||
|
cmd,
|
||||||
|
compose.variants["Server"],
|
||||||
|
"x86_64",
|
||||||
|
opts,
|
||||||
|
"old/image.iso",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(add_iso_to_metadata.call_args_list, [])
|
||||||
|
self.assertEqual(
|
||||||
|
OldFileLinker.return_value.mock_calls,
|
||||||
|
[
|
||||||
|
mock.call.link("old/image.iso", "target/image.iso"),
|
||||||
|
mock.call.abort(),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ComposeConfGetIsoLevelTest(helpers.PungiTestCase):
|
||||||
|
def test_global_config(self):
|
||||||
|
compose = helpers.DummyCompose(self.topdir, {"iso_level": 3})
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
createiso.get_iso_level_config(
|
||||||
|
compose, compose.variants["Server"], "x86_64"
|
||||||
|
),
|
||||||
|
3,
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_src_only_config(self):
|
||||||
|
compose = helpers.DummyCompose(
|
||||||
|
self.topdir,
|
||||||
|
{"iso_level": [(".*", {"src": 4})]},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
createiso.get_iso_level_config(compose, compose.variants["Server"], "src"),
|
||||||
|
4,
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_no_match(self):
|
||||||
|
compose = helpers.DummyCompose(
|
||||||
|
self.topdir,
|
||||||
|
{"iso_level": [("^Server$", {"*": 4})]},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertIsNone(
|
||||||
|
createiso.get_iso_level_config(
|
||||||
|
compose, compose.variants["Client"], "x86_64"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
@@ -1,24 +1,24 @@
 # -*- coding: utf-8 -*-

 try:
     import unittest2 as unittest
 except ImportError:
     import unittest
-import mock

 import glob
 import os

+import mock
 import six

+from pungi.module_util import Modulemd
 from pungi.phases.createrepo import (
     CreaterepoPhase,
+    ModulesMetadata,
     create_variant_repo,
     get_productids_from_scm,
-    ModulesMetadata,
 )
 from tests.helpers import DummyCompose, PungiTestCase, copy_fixture, touch
-from pungi.module_util import Modulemd


 class TestCreaterepoPhase(PungiTestCase):
@@ -141,7 +141,13 @@ class TestCreaterepoPhase(PungiTestCase):

         self.assertEqual(
             get_dir_from_scm.call_args_list,
-            [mock.call(scm, os.path.join(compose.topdir, "work/global/tmp-Server"))],
+            [
+                mock.call(
+                    scm,
+                    os.path.join(compose.topdir, "work/global/tmp-Server"),
+                    compose=compose,
+                )
+            ],
         )


@@ -158,7 +164,9 @@ def make_mocked_modifyrepo_cmd(tc, module_artifacts):
        for ms in module_streams:
            tc.assertIn(ms.get_stream_name(), module_artifacts)
            six.assertCountEqual(
-                tc, ms.get_rpm_artifacts(), module_artifacts[ms.get_stream_name()],
+                tc,
+                ms.get_rpm_artifacts(),
+                module_artifacts[ms.get_stream_name()],
            )

    return mocked_modifyrepo_cmd
@@ -174,19 +182,24 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.run")
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_rpms(self, CreaterepoWrapperCls, run):
-        compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+        compose = DummyCompose(
+            self.topdir,
+            {"createrepo_checksum": "sha256"},
+        )
         compose.has_comps = False

         repo = CreaterepoWrapperCls.return_value
         copy_fixture("server-rpms.json", compose.paths.compose.metadata("rpms.json"))

-        create_variant_repo(
-            compose, "x86_64", compose.variants["Server"], "rpm", self.pkgset
-        )
+        with mock.patch("pungi.phases.createrepo.CACHE_TOPDIR", self.topdir):
+            create_variant_repo(
+                compose, "x86_64", compose.variants["Server"], "rpm", self.pkgset
+            )

         list_file = (
             self.topdir + "/work/x86_64/repo_package_list/Server.x86_64.rpm.conf"
         )

         self.assertEqual(
             CreaterepoWrapperCls.mock_calls[0], mock.call(createrepo_c=True)
         )
@@ -208,6 +221,10 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=os.path.join(
+                        self.topdir,
+                        "%s-%s" % (compose.conf["release_short"], os.getuid()),
+                    ),
                 )
             ],
         )
@@ -217,7 +234,10 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.run")
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_rpms_without_database(self, CreaterepoWrapperCls, run):
-        compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+        compose = DummyCompose(
+            self.topdir,
+            {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+        )
         compose.should_create_yum_database = False
         compose.has_comps = False

@@ -252,6 +272,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -261,7 +282,10 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.run")
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_source(self, CreaterepoWrapperCls, run):
-        compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+        compose = DummyCompose(
+            self.topdir,
+            {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+        )
         compose.has_comps = False

         repo = CreaterepoWrapperCls.return_value
@@ -293,6 +317,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -302,7 +327,10 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.run")
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_debug(self, CreaterepoWrapperCls, run):
-        compose = DummyCompose(self.topdir, {"createrepo_checksum": "sha256"})
+        compose = DummyCompose(
+            self.topdir,
+            {"createrepo_checksum": "sha256", "createrepo_enable_cache": False},
+        )
         compose.has_comps = False

         repo = CreaterepoWrapperCls.return_value
@@ -337,6 +365,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -349,7 +378,12 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_no_createrepo_c(self, CreaterepoWrapperCls, run):
         compose = DummyCompose(
-            self.topdir, {"createrepo_c": False, "createrepo_checksum": "sha256"}
+            self.topdir,
+            {
+                "createrepo_c": False,
+                "createrepo_enable_cache": False,
+                "createrepo_checksum": "sha256",
+            },
         )
         compose.has_comps = False

@@ -384,6 +418,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -395,7 +430,11 @@ class TestCreateVariantRepo(PungiTestCase):
     def test_variant_repo_is_idepotent(self, CreaterepoWrapperCls, run):
         compose = DummyCompose(
             self.topdir,
-            {"createrepo_checksum": "sha256", "createrepo_num_workers": 10},
+            {
+                "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
+                "createrepo_num_workers": 10,
+            },
         )
         compose.has_comps = False

@@ -434,6 +473,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -444,7 +484,12 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_rpms_with_xz(self, CreaterepoWrapperCls, run):
         compose = DummyCompose(
-            self.topdir, {"createrepo_checksum": "sha256", "createrepo_use_xz": True}
+            self.topdir,
+            {
+                "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
+                "createrepo_use_xz": True,
+            },
         )
         compose.has_comps = False

@@ -479,6 +524,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=True,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -489,7 +535,12 @@ class TestCreateVariantRepo(PungiTestCase):
     @mock.patch("pungi.phases.createrepo.CreaterepoWrapper")
     def test_variant_repo_rpms_with_deltas(self, CreaterepoWrapperCls, run):
         compose = DummyCompose(
-            self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+            self.topdir,
+            {
+                "createrepo_checksum": "sha256",
+                "createrepo_deltas": True,
+                "createrepo_enable_cache": False,
+            },
         )
         compose.has_comps = False
         compose.old_composes = [self.topdir + "/old"]
@@ -534,6 +585,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     + "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages",
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -550,6 +602,7 @@ class TestCreateVariantRepo(PungiTestCase):
             {
                 "createrepo_checksum": "sha256",
                 "createrepo_deltas": [("^Server$", {"*": True})],
+                "createrepo_enable_cache": False,
             },
         )
         compose.has_comps = False
@@ -594,6 +647,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     + "/old/test-1.0-20151203.0/compose/Server/x86_64/os/Packages",
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -610,6 +664,7 @@ class TestCreateVariantRepo(PungiTestCase):
             {
                 "createrepo_checksum": "sha256",
                 "createrepo_deltas": [("^Everything$", {"x86_64": True})],
+                "createrepo_enable_cache": False,
             },
         )
         compose.has_comps = False
@@ -650,6 +705,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -666,6 +722,7 @@ class TestCreateVariantRepo(PungiTestCase):
             {
                 "createrepo_checksum": "sha256",
                 "createrepo_deltas": [("^Server$", {"s390x": True})],
+                "createrepo_enable_cache": False,
             },
         )
         compose.has_comps = False
@@ -706,6 +763,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -720,6 +778,7 @@ class TestCreateVariantRepo(PungiTestCase):
             {
                 "createrepo_checksum": "sha256",
                 "createrepo_deltas": True,
+                "createrepo_enable_cache": False,
                 "hashed_directories": True,
             },
         )
@@ -774,6 +833,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     ],
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -790,6 +850,7 @@ class TestCreateVariantRepo(PungiTestCase):
             {
                 "createrepo_checksum": "sha256",
                 "createrepo_deltas": True,
+                "createrepo_enable_cache": False,
                 "hashed_directories": True,
             },
         )
@@ -832,6 +893,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -843,7 +905,12 @@ class TestCreateVariantRepo(PungiTestCase):
     def test_variant_repo_source_with_deltas(self, CreaterepoWrapperCls, run):
         # This should not actually create deltas, only binary repos do.
         compose = DummyCompose(
-            self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+            self.topdir,
+            {
+                "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
+                "createrepo_deltas": True,
+            },
         )
         compose.has_comps = False
         compose.old_composes = [self.topdir + "/old"]
@@ -881,6 +948,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -892,7 +960,12 @@ class TestCreateVariantRepo(PungiTestCase):
     def test_variant_repo_debug_with_deltas(self, CreaterepoWrapperCls, run):
         # This should not actually create deltas, only binary repos do.
         compose = DummyCompose(
-            self.topdir, {"createrepo_checksum": "sha256", "createrepo_deltas": True}
+            self.topdir,
+            {
+                "createrepo_checksum": "sha256",
+                "createrepo_deltas": True,
+                "createrepo_enable_cache": False,
+            },
        )
         compose.has_comps = False
         compose.old_composes = [self.topdir + "/old"]
@@ -932,6 +1005,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -947,6 +1021,7 @@ class TestCreateVariantRepo(PungiTestCase):
             self.topdir,
             {
                 "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
                 "product_id": "yes",  # Truthy value is enough for this test
             },
         )
@@ -991,6 +1066,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -1007,6 +1083,7 @@ class TestCreateVariantRepo(PungiTestCase):
             self.topdir,
             {
                 "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
                 "product_id": "yes",  # Truthy value is enough for this test
             },
         )
@@ -1044,6 +1121,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -1059,6 +1137,7 @@ class TestCreateVariantRepo(PungiTestCase):
             self.topdir,
             {
                 "createrepo_checksum": "sha256",
+                "createrepo_enable_cache": False,
                 "product_id": "yes",  # Truthy value is enough for this test
             },
         )
@@ -1094,6 +1173,7 @@ class TestCreateVariantRepo(PungiTestCase):
                     oldpackagedirs=None,
                     use_xz=False,
                     extra_args=[],
+                    cachedir=None,
                 )
             ],
         )
@@ -1259,7 +1339,7 @@ class TestCreateVariantRepo(PungiTestCase):

 class TestGetProductIds(PungiTestCase):
     def mock_get(self, filenames):
-        def _mock_get(scm, dest):
+        def _mock_get(scm, dest, compose=None):
             for filename in filenames:
                 touch(os.path.join(dest, filename))

@@ -1305,7 +1385,10 @@ class TestGetProductIds(PungiTestCase):

         get_productids_from_scm(self.compose)

-        self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+        self.assertEqual(
+            get_dir_from_scm.call_args_list,
+            [mock.call(cfg, mock.ANY, compose=self.compose)],
+        )
         self.assertProductIds(
             {
                 "Client": ["amd64"],
@@ -1326,7 +1409,10 @@ class TestGetProductIds(PungiTestCase):

         get_productids_from_scm(self.compose)

-        self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+        self.assertEqual(
+            get_dir_from_scm.call_args_list,
+            [mock.call(cfg, mock.ANY, compose=self.compose)],
+        )
         self.assertProductIds({"Server": ["amd64", "x86_64"]})

     @mock.patch("pungi.phases.createrepo.get_dir_from_scm")
@@ -1340,7 +1426,10 @@ class TestGetProductIds(PungiTestCase):
         with self.assertRaises(RuntimeError) as ctx:
             get_productids_from_scm(self.compose)

-        self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+        self.assertEqual(
+            get_dir_from_scm.call_args_list,
+            [mock.call(cfg, mock.ANY, compose=self.compose)],
+        )
         self.assertRegex(
             str(ctx.exception),
             r"No product certificate found \(arch: amd64, variant: (Everything|Client)\)",  # noqa: E501
@@ -1364,5 +1453,8 @@ class TestGetProductIds(PungiTestCase):
         with self.assertRaises(RuntimeError) as ctx:
             get_productids_from_scm(self.compose)

-        self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, mock.ANY)])
+        self.assertEqual(
+            get_dir_from_scm.call_args_list,
+            [mock.call(cfg, mock.ANY, compose=self.compose)],
+        )
         self.assertRegex(str(ctx.exception), "Multiple product certificates found.+")
@@ -1,12 +1,13 @@
 # -*- coding: utf-8 -*-

+import logging
 import mock
 import six

 import os

 from tests import helpers
+from pungi.createiso import CreateIsoOpts
 from pungi.phases import extra_isos

@@ -19,7 +20,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
         }
         compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})

-        phase = extra_isos.ExtraIsosPhase(compose)
+        phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
         phase.validate()

         self.assertEqual(len(compose.log_warning.call_args_list), 1)
@@ -30,7 +31,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
         }
         compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})

-        phase = extra_isos.ExtraIsosPhase(compose)
+        phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
         phase.run()

         self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 3)
@@ -51,7 +52,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
         }
         compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})

-        phase = extra_isos.ExtraIsosPhase(compose)
+        phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
         phase.run()

         self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 2)
@@ -71,7 +72,7 @@ class ExtraIsosPhaseTest(helpers.PungiTestCase):
         }
         compose = helpers.DummyCompose(self.topdir, {"extra_isos": {"^Server$": [cfg]}})

-        phase = extra_isos.ExtraIsosPhase(compose)
+        phase = extra_isos.ExtraIsosPhase(compose, mock.Mock())
         phase.run()

         self.assertEqual(len(ThreadPool.return_value.add.call_args_list), 2)
@@ -106,7 +107,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gvi.return_value = "my volume id"
         gic.return_value = "/tmp/iso-graft-points"

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "x86_64"), 1)

@@ -147,7 +148,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    with_jigdo=True,
                 )
             ],
         )
@@ -182,7 +182,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gvi.return_value = "my volume id"
         gic.return_value = "/tmp/iso-graft-points"

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "x86_64"), 1)

@@ -223,7 +223,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    with_jigdo=False,
                 )
             ],
         )
@@ -256,7 +255,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gvi.return_value = "my volume id"
         gic.return_value = "/tmp/iso-graft-points"

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "x86_64"), 1)

@@ -297,7 +296,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    with_jigdo=True,
                 )
             ],
         )
@@ -330,7 +328,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gvi.return_value = "my volume id"
         gic.return_value = "/tmp/iso-graft-points"

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "x86_64"), 1)

@@ -373,7 +371,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    with_jigdo=True,
                 )
             ],
         )
@@ -405,7 +402,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gvi.return_value = "my volume id"
         gic.return_value = "/tmp/iso-graft-points"

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "src"), 1)

@@ -444,7 +441,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/src/extraiso-my.iso.src.log"
                     ),
-                    with_jigdo=True,
                 )
             ],
         )
@@ -476,7 +472,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gic.return_value = "/tmp/iso-graft-points"
         rcc.side_effect = helpers.mk_boom()

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with mock.patch("time.sleep"):
             t.process((compose, cfg, server, "x86_64"), 1)

@@ -494,7 +490,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
         gic.return_value = "/tmp/iso-graft-points"
         rcc.side_effect = helpers.mk_boom(RuntimeError)

-        t = extra_isos.ExtraIsosThread(mock.Mock())
+        t = extra_isos.ExtraIsosThread(mock.Mock(), mock.Mock())
         with self.assertRaises(RuntimeError):
             with mock.patch("time.sleep"):
                 t.process((compose, cfg, server, "x86_64"), 1)
@@ -596,7 +592,9 @@ class GetExtraFilesTest(helpers.PungiTestCase):
             get_file.call_args_list,
             [
                 mock.call(
-                    cfg1, os.path.join(self.dir, "legalese"), compose=self.compose,
+                    cfg1,
+                    os.path.join(self.dir, "legalese"),
+                    compose=self.compose,
                 ),
                 mock.call(cfg2, self.dir, compose=self.compose),
             ],
@@ -832,7 +830,8 @@ class GetIsoContentsTest(helpers.PungiTestCase):
                 ["Client"],
                 os.path.join(self.topdir, "compose/Server/source/tree/.treeinfo"),
                 os.path.join(
-                    self.topdir, "work/src/Server/extra-iso-extra-files/.treeinfo",
+                    self.topdir,
+                    "work/src/Server/extra-iso-extra-files/.treeinfo",
                 ),
             ),
         ],
@@ -1058,3 +1057,215 @@ class PrepareMetadataTest(helpers.PungiTestCase):
             ),
         ],
     )
+
+
+class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
+    def setUp(self):
+        super(ExtraisoTryReusePhaseTest, self).setUp()
+        self.logger = logging.getLogger()
+        self.logger.setLevel(logging.DEBUG)
+        self.logger.addHandler(logging.StreamHandler(os.devnull))
+
+    def test_disabled(self):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": False})
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    def test_buildinstall_changed(self):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+        thread.bi = mock.Mock()
+        thread.bi.reused.return_value = False
+        opts = CreateIsoOpts(buildinstall_method="lorax")
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    def test_no_old_config(self):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    def test_old_config_changed(self):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        old_config = compose.conf.copy()
+        old_config["release_version"] = "2"
+        compose.load_old_compose_config.return_value = old_config
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    def test_no_old_metadata(self):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+        opts = CreateIsoOpts()
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    @mock.patch("pungi.phases.extra_isos.read_json_file")
+    def test_volume_id_differs(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+
+        opts = CreateIsoOpts(volid="new-volid")
+
+        read_json_file.return_value = {"opts": {"volid": "old-volid"}}
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    @mock.patch("pungi.phases.extra_isos.read_json_file")
+    def test_packages_differ(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+
+        new_graft_points = os.path.join(self.topdir, "new_graft_points")
+        helpers.touch(new_graft_points, "Packages/f/foo-1-1.x86_64.rpm\n")
+        opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+        old_graft_points = os.path.join(self.topdir, "old_graft_points")
+        helpers.touch(old_graft_points, "Packages/f/foo-1-2.x86_64.rpm\n")
+        read_json_file.return_value = {
+            "opts": {"graft_points": old_graft_points, "volid": "volid"}
+        }
+
+        self.assertFalse(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+
+    @mock.patch("pungi.phases.extra_isos.read_json_file")
+    def test_runs_perform_reuse(self, read_json_file):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        compose.load_old_compose_config.return_value = compose.conf.copy()
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        thread.logger = self.logger
+        thread.perform_reuse = mock.Mock()
+
+        new_graft_points = os.path.join(self.topdir, "new_graft_points")
+        helpers.touch(new_graft_points)
+        opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
+
+        old_graft_points = os.path.join(self.topdir, "old_graft_points")
+        helpers.touch(old_graft_points)
+        dummy_iso_path = "dummy-iso-path/dummy.iso"
+        read_json_file.return_value = {
+            "opts": {
+                "graft_points": old_graft_points,
+                "volid": "volid",
+                "output_dir": os.path.dirname(dummy_iso_path),
+                "iso_name": os.path.basename(dummy_iso_path),
+            },
+        }
+
+        self.assertTrue(
+            thread.try_reuse(
+                compose, compose.variants["Server"], "x86_64", "abcdef", opts
+            )
+        )
+        self.assertEqual(
+            thread.perform_reuse.call_args_list,
+            [
+                mock.call(
+                    compose,
+                    compose.variants["Server"],
+                    "x86_64",
+                    opts,
+                    "dummy-iso-path",
+                    "dummy.iso",
+                )
+            ],
+        )
+
+
+@mock.patch("pungi.phases.extra_isos.OldFileLinker")
+class ExtraIsoPerformReusePhaseTest(helpers.PungiTestCase):
+    def test_success(self, OldFileLinker):
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        opts = CreateIsoOpts(output_dir="new/path", iso_name="new.iso")
+
+        thread.perform_reuse(
+            compose,
+            compose.variants["Server"],
+            "x86_64",
+            opts,
+            "old",
+            "image.iso",
+        )
+
+        self.assertEqual(
+            OldFileLinker.return_value.mock_calls,
+            [
+                mock.call.link("old/image.iso", "new/path/new.iso"),
+                mock.call.link("old/image.iso.manifest", "new/path/new.iso.manifest"),
+                # The old log file doesn't exist in the test scenario.
+                mock.call.link(
+                    None,
+                    os.path.join(
+                        self.topdir, "logs/x86_64/extraiso-new.iso.x86_64.log"
+                    ),
+                ),
+            ],
+        )
+
+    def test_failure(self, OldFileLinker):
+        OldFileLinker.return_value.link.side_effect = helpers.mk_boom()
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
+        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
+        opts = CreateIsoOpts(output_dir="new/path", iso_name="new.iso")
+
+        with self.assertRaises(Exception):
+            thread.perform_reuse(
+                compose,
+                compose.variants["Server"],
+                "x86_64",
+                opts,
+                "old",
+                "image.iso",
+            )
+
+        self.assertEqual(
+            OldFileLinker.return_value.mock_calls,
+            [
+                mock.call.link("old/image.iso", "new/path/new.iso"),
+                mock.call.abort(),
+            ],
+        )
@@ -147,7 +147,8 @@ class TestParseOutput(unittest.TestCase):
         touch(self.file, "*pkg-1.0-1.x86_64@repo-0\n")
         packages, modules = fus.parse_output(self.file)
         self.assertEqual(
-            packages, set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
+            packages,
+            set([("pkg-1.0-1", "x86_64", frozenset(["modular"]))]),
         )
         self.assertEqual(modules, set())

@@ -2620,5 +2620,7 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
         six.assertCountEqual(self, pkg_map["rpm"], [])
         six.assertCountEqual(self, pkg_map["srpm"], [])
         six.assertCountEqual(
-            self, pkg_map["debuginfo"], ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
+            self,
+            pkg_map["debuginfo"],
+            ["dummy-bash-debuginfo-4.2.37-6.x86_64.rpm"],
         )
@@ -350,7 +350,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             ],
         )
         self.assertEqual(
-            wc.call_args_list, [mock.call(self.config1, ["mod:master"], [])],
+            wc.call_args_list,
+            [mock.call(self.config1, ["mod:master"], [])],
         )
         self.assertEqual(
             gc.call_args_list,
@@ -390,7 +391,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             ),
         }
         po.return_value = ([("p-1-1", "x86_64", frozenset())], ["m1"])
-        self.phase.packages = {"p-1-1.x86_64": mock.Mock()}
+        self.phase.packages = {"p-1-1.x86_64": mock.Mock(rpm_sourcerpm="p-1-1.src.rpm")}

         res = self.phase.run_solver(
             self.compose.variants["Server"],
@@ -430,7 +431,9 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
         )

     def test_with_comps(self, run, gc, po, wc):
-        self.phase.packages = {"pkg-1.0-1.x86_64": mock.Mock()}
+        self.phase.packages = {
+            "pkg-1.0-1.x86_64": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm")
+        }
         self.phase.debuginfo = {"x86_64": {}}
         po.return_value = ([("pkg-1.0-1", "x86_64", frozenset())], [])
         res = self.phase.run_solver(
@@ -454,7 +457,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
             ],
         )
         self.assertEqual(
-            wc.call_args_list, [mock.call(self.config1, [], ["pkg"])],
+            wc.call_args_list,
+            [mock.call(self.config1, [], ["pkg"])],
         )
         self.assertEqual(
             gc.call_args_list,
@@ -471,11 +475,23 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
         )

     def test_with_comps_with_debuginfo(self, run, gc, po, wc):
-        dbg1 = NamedMock(name="pkg-debuginfo", arch="x86_64", sourcerpm="pkg.src.rpm")
-        dbg2 = NamedMock(name="pkg-debuginfo", arch="x86_64", sourcerpm="x.src.rpm")
+        # dbg1 and dbg2 mocks both package from Kobo (with sourcerpm) and from
+        # createrepo_c (with rpm_sourcerpm)
+        dbg1 = NamedMock(
+            name="pkg-debuginfo",
+            arch="x86_64",
+            sourcerpm="pkg-1.0-1.src.rpm",
+            rpm_sourcerpm="pkg-1.0-1.src.rpm",
+        )
+        dbg2 = NamedMock(
+            name="pkg-debuginfo",
+            arch="x86_64",
+            sourcerpm="pkg-1.0-2.src.rpm",
+            rpm_sourcerpm="pkg-1.0-2.src.rpm",
+        )
         self.phase.packages = {
             "pkg-1.0-1.x86_64": NamedMock(
-                name="pkg", arch="x86_64", rpm_sourcerpm="pkg.src.rpm"
+                name="pkg", arch="x86_64", rpm_sourcerpm="pkg-1.0-1.src.rpm"
             ),
             "pkg-debuginfo-1.0-1.x86_64": dbg1,
             "pkg-debuginfo-1.0-2.x86_64": dbg2,
@@ -556,8 +572,8 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
         ]
         po.side_effect = [([("pkg-1.0-1", "x86_64", frozenset())], []), (final, [])]
         self.phase.packages = {
-            "pkg-1.0-1.x86_64": mock.Mock(),
-            "pkg-en-1.0-1.noarch": mock.Mock(),
+            "pkg-1.0-1.x86_64": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm"),
+            "pkg-en-1.0-1.noarch": mock.Mock(rpm_sourcerpm="pkg-1.0-1.src.rpm"),
         }

         res = self.phase.run_solver(
@@ -626,9 +642,15 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
         cr.Metadata.return_value.keys.return_value = []
         self.phase.package_maps = {
             "x86_64": {
-                "pkg-devel-1.0-1.x86_64": NamedMock(name="pkg-devel"),
-                "pkg-devel-1.0-1.i686": NamedMock(name="pkg-devel"),
-                "foo-1.0-1.x86_64": NamedMock(name="foo"),
+                "pkg-devel-1.0-1.x86_64": NamedMock(
+                    name="pkg-devel", rpm_sourcerpm="pkg-1.0-1.src.rpm"
+                ),
+                "pkg-devel-1.0-1.i686": NamedMock(
+                    name="pkg-devel", rpm_sourcerpm="pkg-1.0-1.src.rpm"
+                ),
+                "foo-1.0-1.x86_64": NamedMock(
+                    name="foo", rpm_sourcerpm="foo-1.0-1.src.rpm"
+                ),
             }
         }
         self.phase.packages = self.phase.package_maps["x86_64"]
@@ -716,6 +738,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
                 release="1",
                 arch="x86_64",
                 provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+                rpm_sourcerpm="foo-1.0-1.src.rpm",
             ),
             "def": NamedMock(
                 name="foo",
@@ -724,6 +747,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
                 release="1",
                 arch="i686",
                 provides=[("/usr/lib/libfoo.1.so.1", None, None)],
+                rpm_sourcerpm="foo-1.0-1.src.rpm",
             ),
             "ghi": NamedMock(
                 name="pkg-devel",
@@ -732,6 +756,7 @@ class TestRunSolver(HelperMixin, helpers.PungiTestCase):
                 release="1",
                 arch="x86_64",
                 provides=[],
+                rpm_sourcerpm="pkg-devel-1.0-1.src.rpm",
             ),
         }
         cr.Metadata.return_value.keys.return_value = packages.keys()
@@ -932,20 +957,11 @@ class TestExpandPackages(helpers.PungiTestCase):
             },
         )

-    @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
-    def test_skip_lookaside_source(self, cr):
+    @mock.patch("pungi.phases.gather.methods.method_hybrid.get_repo_packages")
+    def test_skip_lookaside_source(self, get_repo_packages):
         nevra_to_pkg = self._mk_packages(src=True)
         lookasides = [mock.Mock()]
-        repo = {
-            "abc": NamedMock(
-                name="pkg",
-                arch="src",
-                location_base="file:///tmp/",
-                location_href="pkg.src.rpm",
-            ),
-        }
-        cr.Metadata.return_value.keys.return_value = repo.keys()
-        cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+        get_repo_packages.return_value = ["pkg.src.rpm"]

         res = hybrid.expand_packages(
             nevra_to_pkg, lookasides, [("pkg-3:1-2", "x86_64", [])], []
@@ -960,20 +976,11 @@ class TestExpandPackages(helpers.PungiTestCase):
             },
         )

-    @mock.patch("pungi.phases.gather.methods.method_hybrid.cr")
-    def test_skip_lookaside_packages(self, cr):
+    @mock.patch("pungi.phases.gather.methods.method_hybrid.get_repo_packages")
+    def test_skip_lookaside_packages(self, get_repo_packages):
         nevra_to_pkg = self._mk_packages(debug_arch="x86_64")
         lookasides = [mock.Mock()]
-        repo = {
-            "abc": NamedMock(
-                name="pkg",
-                arch="x86_64",
-                location_base="file:///tmp/",
-                location_href="pkg.rpm",
-            )
-        }
-        cr.Metadata.return_value.keys.return_value = repo.keys()
-        cr.Metadata.return_value.get.side_effect = lambda key: repo[key]
+        get_repo_packages.return_value = ["pkg.rpm"]

         res = hybrid.expand_packages(
             nevra_to_pkg, lookasides, [("pkg-3:1-2", "x86_64", [])], []
@ -1,9 +1,11 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import mock
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
import mock
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import unittest2 as unittest
|
import unittest2 as unittest
|
||||||
except ImportError:
|
except ImportError:
|
||||||
@ -12,8 +14,8 @@ except ImportError:
|
|||||||
import six
|
import six
|
||||||
|
|
||||||
from pungi.phases import gather
|
from pungi.phases import gather
|
||||||
from pungi.phases.pkgset.common import MaterializedPackageSet
|
|
||||||
from pungi.phases.gather import _mk_pkg_map
|
from pungi.phases.gather import _mk_pkg_map
|
||||||
|
from pungi.phases.pkgset.common import MaterializedPackageSet
|
||||||
from tests import helpers
|
from tests import helpers
|
||||||
from tests.helpers import MockPackageSet, MockPkg
|
from tests.helpers import MockPackageSet, MockPkg
|
||||||
|
|
||||||
@@ -1080,21 +1082,24 @@ class TestGatherPackages(helpers.PungiTestCase):
 
 
 class TestReuseOldGatherPackages(helpers.PungiTestCase):
+    def _save_config_dump(self, compose):
+        config_dump_full = compose.paths.log.log_file("global", "config-dump")
+        with open(config_dump_full, "w") as f:
+            json.dump(compose.conf, f, sort_keys=True, indent=4)
+
     @mock.patch("pungi.phases.gather.load_old_gather_result")
     def test_reuse_no_old_gather_result(self, load_old_gather_result):
         load_old_gather_result.return_value = None
 
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+        self._save_config_dump(compose)
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], []
+            compose, "x86_64", compose.variants["Server"], [], "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_no_old_compose_config(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_no_old_compose_config(self, load_old_gather_result):
         load_old_gather_result.return_value = {
             "rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
             "srpm": [],
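The new _save_config_dump helper writes compose.conf as the global config-dump
log file, imitating what a real compose records during its init phase; the reuse
code can then compare the current configuration against the dump left behind by
the previous compose. A hedged sketch of the reading side, assuming the loader
merely locates and parses that JSON dump (the real Compose.load_old_compose_config
may do more, and old_compose_path is assumed to map a path into the most recent
older compose):

import json
import os

def load_old_compose_config(compose):
    """Sketch only: fetch the config-dump written by the previous compose."""
    old_dump = compose.paths.old_compose_path(
        compose.paths.log.log_file("global", "config-dump")
    )
    if not old_dump or not os.path.exists(old_dump):
        return None  # no previous compose; reuse cannot proceed
    with open(old_dump) as f:
        return json.load(f)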
@@ -1102,18 +1107,15 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         }
 
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = None
+        self._save_config_dump(compose)
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], []
+            compose, "x86_64", compose.variants["Server"], [], "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_compose_config_different(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_compose_config_different(self, load_old_gather_result):
         load_old_gather_result.return_value = {
             "rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
             "srpm": [],
@@ -1121,20 +1123,18 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         }
 
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+        self._save_config_dump(compose)
         compose_conf_copy = dict(compose.conf)
         compose_conf_copy["gather_method"] = "nodeps"
-        load_old_compose_config.return_value = compose_conf_copy
+        compose.load_old_compose_config.return_value = compose_conf_copy
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], []
+            compose, "x86_64", compose.variants["Server"], [], "nodeps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_compose_config_different_whitelist(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_compose_config_different_whitelist(self, load_old_gather_result):
         for whitelist_opt in ["product_id", "pkgset_koji_builds"]:
             load_old_gather_result.return_value = {
                 "rpm": [{"path": "/build/bash-1.0.0-1.x86_64.rpm"}],
@@ -1143,12 +1143,13 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
             }
 
             compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+            self._save_config_dump(compose)
            compose_conf_copy = dict(compose.conf)
             compose_conf_copy[whitelist_opt] = "different"
-            load_old_compose_config.return_value = compose_conf_copy
+            compose.load_old_compose_config.return_value = compose_conf_copy
 
             result = gather.reuse_old_gather_packages(
-                compose, "x86_64", compose.variants["Server"], []
+                compose, "x86_64", compose.variants["Server"], [], "deps"
             )
             self.assertEqual(result, {"rpm": [], "srpm": [], "debuginfo": []})
 
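The whitelist test pins down that some options may differ between composes
without blocking reuse: when only product_id or pkgset_koji_builds changes,
reuse still succeeds and returns the (here empty) package map. A sketch of such
a whitelisted comparison; the helper name and exact whitelist handling are
assumptions, only the two option names come from the test:

def config_allows_reuse(old_conf, new_conf,
                        whitelist=("product_id", "pkgset_koji_builds")):
    """Sketch only: configs must match except in whitelisted options."""
    keys = set(old_conf) | set(new_conf)
    return all(
        old_conf.get(key) == new_conf.get(key)
        for key in keys
        if key not in whitelist
    )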
@@ -1173,16 +1174,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         return package_sets
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse(self, load_old_compose_config, load_old_gather_result):
+    def test_reuse(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(
             result,
@@ -1194,19 +1195,17 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         )
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_update_gather_lookaside_repos(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_update_gather_lookaside_repos(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
 
         gather._update_config(compose, "Server", "x86_64", compose.topdir)
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(
             result,
@@ -1218,49 +1217,46 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         )
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
     def test_reuse_update_gather_lookaside_repos_different_initial_repos(
-        self, load_old_compose_config, load_old_gather_result
+        self, load_old_gather_result
     ):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+        self._save_config_dump(compose)
         lookasides = compose.conf["gather_lookaside_repos"]
         lookasides.append(("^Server$", {"x86_64": "http://localhost/real.repo"}))
-        load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+        compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
 
         gather._update_config(compose, "Server", "x86_64", compose.topdir)
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
     def test_reuse_update_gather_lookaside_repos_different_initial_repos_list(
-        self, load_old_compose_config, load_old_gather_result
+        self, load_old_gather_result
     ):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
+        self._save_config_dump(compose)
         lookasides = compose.conf["gather_lookaside_repos"]
         repos = ["http://localhost/real1.repo", "http://localhost/real2.repo"]
         lookasides.append(("^Server$", {"x86_64": repos}))
-        load_old_compose_config.return_value = copy.deepcopy(compose.conf)
+        compose.load_old_compose_config.return_value = copy.deepcopy(compose.conf)
 
         gather._update_config(compose, "Server", "x86_64", compose.topdir)
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_no_old_file_cache(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_no_old_file_cache(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
@@ -1268,18 +1264,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
             "/build/foo-1-1.x86_64.rpm": MockPkg("foo-1-1.x86_64.rpm", sourcerpm="foo")
         }
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_two_rpms_from_same_source(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_two_rpms_from_same_source(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
@@ -1290,18 +1284,16 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         pkg_set.old_file_cache["/build/bash-1-2.x86_64.rpm"] = bash_pkg
         pkg_set.file_cache["/build/bash-1-2.x86_64.rpm"] = bash_pkg
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_rpm_added_removed(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_rpm_added_removed(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=[]
         )
@@ -1315,59 +1307,54 @@ class TestReuseOldGatherPackages(helpers.PungiTestCase):
         pkg_set.old_file_cache["/build/file-1-1.x86_64.rpm"] = file_pkg
         pkg_set.file_cache["/build/foo-1-1.x86_64.rpm"] = foo_pkg
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_different_packages(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_different_packages(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=["foo"]
         )
         package_sets[0]["global"].old_file_cache = None
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_requires_changed(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_requires_changed(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=["foo"], provides=[]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
        )
         self.assertEqual(result, None)
 
     @mock.patch("pungi.phases.gather.load_old_gather_result")
-    @mock.patch("pungi.phases.gather.load_old_compose_config")
-    def test_reuse_provides_changed(
-        self, load_old_compose_config, load_old_gather_result
-    ):
+    def test_reuse_provides_changed(self, load_old_gather_result):
         package_sets = self._prepare_package_sets(
             load_old_gather_result, requires=[], provides=["foo"]
         )
         compose = helpers.DummyCompose(self.topdir, {"gather_allow_reuse": True})
-        load_old_compose_config.return_value = compose.conf
+        self._save_config_dump(compose)
+        compose.load_old_compose_config.return_value = compose.conf
 
         result = gather.reuse_old_gather_packages(
-            compose, "x86_64", compose.variants["Server"], package_sets
+            compose, "x86_64", compose.variants["Server"], package_sets, "deps"
         )
         self.assertEqual(result, None)
 
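Two threads run through all of these hunks: reuse_old_gather_packages grows a
fifth positional argument carrying the gather method ("deps" or "nodeps"), and
load_old_compose_config moves from a patched module-level function to a method
stubbed on the compose object. The negative tests all expect None whenever the
package sets drifted since the old compose: the old file cache is missing, an
RPM was added or removed, or a package's requires/provides changed. A hedged
sketch of that comparison loop; the names are illustrative, not pungi's actual
internals:

def package_sets_changed(package_sets, old_paths):
    """Sketch only: detect drift between old and current file caches."""
    for pkgset in package_sets:
        global_set = pkgset["global"]
        if global_set.old_file_cache is None:
            return True  # nothing to compare against
        for path in old_paths:
            old_pkg = global_set.old_file_cache.get(path)
            new_pkg = global_set.file_cache.get(path)
            if old_pkg is None or new_pkg is None:
                return True  # RPM added or removed since the last compose
            if old_pkg.requires != new_pkg.requires:
                return True  # the dependency solution could change
            if old_pkg.provides != new_pkg.provides:
                return True
    return False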
@@ -1561,6 +1548,24 @@ class TestGatherPhase(helpers.PungiTestCase):
         phase = gather.GatherPhase(compose, pkgset_phase)
         phase.validate()
 
+    def test_validates_variants_requiring_is_not_subset_of_required(self):
+        pkgset_phase = mock.Mock()
+        compose = helpers.DummyCompose(
+            self.topdir, {"variant_as_lookaside": [("Everything", "Client")]}
+        )
+        phase = gather.GatherPhase(compose, pkgset_phase)
+        with self.assertRaises(ValueError) as ctx:
+            phase.validate()
+        self.assertIn("architectures of variant 'Client'", str(ctx.exception))
+
+    def test_validates_variants_requiring_is_subset_of_required(self):
+        pkgset_phase = mock.Mock()
+        compose = helpers.DummyCompose(
+            self.topdir, {"variant_as_lookaside": [("Client", "Everything")]}
+        )
+        phase = gather.GatherPhase(compose, pkgset_phase)
+        phase.validate()
+
 
 class TestGetPackagesToGather(helpers.PungiTestCase):
     def setUp(self):
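The two added tests cover validation of variant_as_lookaside pairs: a variant
may only serve as lookaside for another if it is built for at least all
architectures of the variant that uses it, otherwise GatherPhase.validate()
raises. A minimal sketch of such a subset check, assuming variants expose an
arches list (illustrative names, not the exact implementation):

def validate_variant_as_lookaside(compose):
    """Sketch only: each lookaside variant must cover the requiring arches."""
    for variant_uid, lookaside_uid in compose.conf.get("variant_as_lookaside", []):
        variant_arches = set(compose.all_variants[variant_uid].arches)
        lookaside_arches = set(compose.all_variants[lookaside_uid].arches)
        if not variant_arches <= lookaside_arches:
            raise ValueError(
                "variant_as_lookaside: architectures of variant '%s' "
                "do not cover architectures of variant '%s'"
                % (lookaside_uid, variant_uid)
            )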
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user