Compare commits
No commits in common. "master" and "pungi-4.3.7-3.alma" have entirely different histories.
master ... pungi-4.3.7-3.alma

1715.patch (deleted, 41 lines)
@@ -1,41 +0,0 @@
-From 432b0bce0401c4bbcd1a958a89305c475a794f26 Mon Sep 17 00:00:00 2001
-From: Adam Williamson <awilliam@redhat.com>
-Date: Jan 19 2024 07:25:09 +0000
-Subject: checks: don't require "repo" in the "ostree" schema
-
-
-Per @siosm in https://pagure.io/pungi-fedora/pull-request/1227
-this option "is deprecated and not needed anymore", so Pungi
-should not be requiring it.
-
-Merges: https://pagure.io/pungi/pull-request/1714
-Signed-off-by: Adam Williamson <awilliam@redhat.com>
-
----
-
-diff --git a/pungi/checks.py b/pungi/checks.py
-index a340f93..db8b297 100644
---- a/pungi/checks.py
-+++ b/pungi/checks.py
-@@ -1066,7 +1066,6 @@ def make_schema():
-                 "required": [
-                     "treefile",
-                     "config_url",
--                    "repo",
-                     "ostree_repo",
-                 ],
-                 "additionalProperties": False,
-diff --git a/pungi/phases/ostree.py b/pungi/phases/ostree.py
-index 90578ae..2649cdb 100644
---- a/pungi/phases/ostree.py
-+++ b/pungi/phases/ostree.py
-@@ -85,7 +85,7 @@ class OSTreeThread(WorkerThread):
-         comps_repo = compose.paths.work.comps_repo(
-             "$basearch", variant=variant, create_dir=False
-         )
--        repos = shortcuts.force_list(config["repo"]) + self.repos
-+        repos = shortcuts.force_list(config.get("repo", [])) + self.repos
-         if compose.has_comps:
-             repos.append(translate_path(compose, comps_repo))
-         repos = get_repo_dicts(repos, logger=self.pool)
-
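Note: the deleted patch above is the upstream change that makes the "repo" key optional for the ostree phase. A minimal sketch of the difference between the two lookups (this force_list is a simplified stand-in for kobo.shortcuts.force_list, and the repo values are made up for illustration):

    def force_list(value):
        # Simplified stand-in for kobo.shortcuts.force_list:
        # wrap a scalar in a list, pass a list through unchanged.
        return value if isinstance(value, list) else [value]

    extra_repos = ["http://example.com/compose/$basearch/os"]  # hypothetical self.repos
    config = {"treefile": "host.json", "config_url": "https://example.com/cfg.git"}

    # Old lookup: raises KeyError as soon as the deprecated "repo" key is absent.
    try:
        force_list(config["repo"]) + extra_repos
    except KeyError:
        pass  # this is where the compose used to crash

    # Patched lookup: a missing "repo" just contributes an empty list.
    repos = force_list(config.get("repo", [])) + extra_repos
    assert repos == extra_repos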
MANIFEST.in (1 line changed)
@@ -2,7 +2,6 @@ include AUTHORS
 include COPYING
 include GPL
 include pungi.spec
-include setup.cfg
 include tox.ini
 include share/*
 include share/multilib/*
TODO (1 line changed)
@@ -47,6 +47,7 @@ Split Pungi into smaller well-defined tools

 * create install images
     * lorax
+    * buildinstall

 * create isos
     * isos
(deleted file)
@@ -1,2 +0,0 @@
-# Clean up pungi cache
-d /var/cache/pungi/createrepo_c/ - - - 30d
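Note: the dropped rule uses systemd-tmpfiles syntax. Annotated with the standard tmpfiles.d column order (assuming the usual Type / Path / Mode / User / Group / Age fields), it asked systemd to create the directory and prune cache entries older than 30 days:

    #Type  Path                            Mode  User  Group  Age
    d      /var/cache/pungi/createrepo_c/  -     -     -      30d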
doc/_static/phases.svg (vendored, 130 lines changed)
@@ -1,22 +1,22 @@
 <?xml version="1.0" encoding="UTF-8" standalone="no"?>
 <svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
    width="610.46454"
-   height="327.16599"
-   viewBox="0 0 610.46457 327.16599"
+   height="301.1662"
+   viewBox="0 0 610.46457 301.1662"
    id="svg2"
    version="1.1"
-   inkscape:version="1.3.2 (091e20e, 2023-11-25)"
+   inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
    sodipodi:docname="phases.svg"
    inkscape:export-filename="/home/lsedlar/repos/pungi/doc/_static/phases.png"
    inkscape:export-xdpi="90"
-   inkscape:export-ydpi="90"
-   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-   xmlns="http://www.w3.org/2000/svg"
-   xmlns:svg="http://www.w3.org/2000/svg"
-   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-   xmlns:cc="http://creativecommons.org/ns#"
-   xmlns:dc="http://purl.org/dc/elements/1.1/">
+   inkscape:export-ydpi="90">
 <sodipodi:namedview
    id="base"
    pagecolor="#ffffff"
@@ -25,15 +25,15 @@
    inkscape:pageopacity="1"
    inkscape:pageshadow="2"
    inkscape:zoom="1.5"
-   inkscape:cx="268"
-   inkscape:cy="260.66667"
+   inkscape:cx="9.4746397"
+   inkscape:cy="58.833855"
    inkscape:document-units="px"
    inkscape:current-layer="layer1"
    showgrid="false"
-   inkscape:window-width="1920"
-   inkscape:window-height="1027"
+   inkscape:window-width="2560"
+   inkscape:window-height="1376"
    inkscape:window-x="0"
-   inkscape:window-y="25"
+   inkscape:window-y="0"
    inkscape:window-maximized="1"
    units="px"
    inkscape:document-rotation="0"
@@ -43,10 +43,7 @@
    fit-margin-left="7.4"
    fit-margin-right="7.4"
    fit-margin-bottom="7.4"
-   lock-margins="true"
-   inkscape:showpageshadow="2"
-   inkscape:pagecheckerboard="0"
-   inkscape:deskcolor="#d1d1d1" />
+   lock-margins="true" />
 <defs
    id="defs4">
  <marker
@@ -73,6 +70,7 @@
    <dc:format>image/svg+xml</dc:format>
    <dc:type
      rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+   <dc:title />
   </cc:Work>
  </rdf:RDF>
 </metadata>
@@ -105,7 +103,7 @@
    style="font-size:13.1479px;line-height:1.25">Pkgset</tspan></text>
  </g>
  <g
-   transform="translate(56.378954,-80.817124)"
+   transform="translate(58.253953,-80.817124)"
    id="g3398">
  <rect
    y="553.98242"
@@ -303,16 +301,13 @@
  </g>
  </g>
  </g>
- <g
-   id="g2"
-   transform="translate(-1.4062678e-8,9.3749966)">
  <rect
    transform="matrix(0,1,1,0,0,0)"
    style="fill:#e9b96e;fill-rule:evenodd;stroke:none;stroke-width:1.85901px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    id="rect3338-1"
-   width="103.12497"
+   width="90.874992"
    height="115.80065"
-   x="863.29883"
+   x="872.67383"
    y="486.55563" />
  <text
    id="text3384-0"
@@ -325,7 +320,6 @@
    sodipodi:role="line"
    x="489.56451"
    y="921.73846">ImageChecksum</tspan></text>
- </g>
  <g
    transform="translate(-42.209584,-80.817124)"
    id="g3458">
@@ -423,16 +417,16 @@
    id="rect290"
    width="26.295755"
    height="224.35098"
-   x="1091.7223"
+   x="1063.5973"
    y="378.43698"
    transform="matrix(0,1,1,0,0,0)" />
  <text
    xml:space="preserve"
    style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    x="380.74133"
-   y="1106.6223"
+   y="1080.3723"
    id="text294"><tspan
-   y="1106.6223"
+   y="1080.3723"
    x="380.74133"
    sodipodi:role="line"
    id="tspan301"
@@ -460,9 +454,32 @@
    y="1069.0087"
    id="tspan3812">ExtraIsos</tspan></text>
  </g>
+ <g
+   id="g1031"
+   transform="translate(-40.740337,29.23522)">
+  <rect
+   transform="matrix(0,1,1,0,0,0)"
+   style="fill:#5ed4ec;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+   id="rect206"
+   width="26.295755"
+   height="102.36562"
+   x="1066.8611"
+   y="418.66275" />
+  <text
+   id="text210"
+   y="1084.9105"
+   x="421.51923"
+   style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+   xml:space="preserve"><tspan
+   y="1084.9105"
+   x="421.51923"
+   id="tspan208"
+   sodipodi:role="line"
+   style="font-size:13.1479px;line-height:1.25">Repoclosure</tspan></text>
+ </g>
  <rect
    y="377.92242"
-   x="1122.3463"
+   x="1096.0963"
    height="224.24059"
    width="26.295755"
    id="rect87"
@@ -472,18 +489,17 @@
    xml:space="preserve"
    style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    x="380.7789"
-   y="1140.3958"
+   y="1114.1458"
    id="text91"><tspan
    style="font-size:13.1479px;line-height:1.25"
    sodipodi:role="line"
    id="tspan89"
    x="380.7789"
-   y="1140.3958">Repoclosure</tspan></text>
+   y="1114.1458">Repoclosure</tspan></text>
  <g
-   id="g206"
-   transform="translate(0,-1.8749994)">
+   id="g206">
  <rect
-   style="fill:#fcd9a4;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1.00033px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+   style="fill:#fcaf3e;fill-rule:evenodd;stroke:none;stroke-width:1.00033px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    id="rect290-6"
    width="26.295755"
    height="101.91849"
@@ -500,25 +516,19 @@
    x="380.23166"
    sodipodi:role="line"
    id="tspan301-5"
-   style="font-size:12px;line-height:0">KiwiBuild</tspan></text>
+   style="font-size:12px;line-height:0">OSBuild</tspan></text>
  </g>
- <g
-   id="g3">
-  <g
-   id="g1">
-   <g
-    id="g4">
  <rect
    transform="matrix(0,1,1,0,0,0)"
    style="fill:#729fcf;fill-rule:evenodd;stroke:none;stroke-width:1.83502px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    id="rect3338-1-3"
-   width="103.12497"
+   width="88.544876"
    height="115.80065"
-   x="983.44263"
+   x="970.31763"
    y="486.55563" />
  <text
    id="text3384-0-6"
-   y="1038.8422"
+   y="1018.2172"
    x="489.56451"
    style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
    xml:space="preserve"><tspan
@@ -526,32 +536,6 @@
    id="tspan3391-7"
    sodipodi:role="line"
    x="489.56451"
-   y="1038.8422">ImageContainer</tspan></text>
+   y="1018.2172">ImageContainer</tspan></text>
-   </g>
-  </g>
- </g>
- <g
-   id="g206-1"
-   transform="translate(-0.04628921,28.701853)">
-  <rect
-   style="fill:#fcaf3e;fill-rule:evenodd;stroke:none;stroke-width:1.00033px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
-   id="rect290-6-7"
-   width="26.295755"
-   height="101.91849"
-   x="1032.3469"
-   y="377.92731"
-   transform="matrix(0,1,1,0,0,0)" />
-  <text
-   xml:space="preserve"
-   style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
-   x="380.23166"
-   y="1049.1219"
-   id="text294-7-5"><tspan
-   y="1049.1219"
-   x="380.23166"
-   sodipodi:role="line"
-   id="tspan301-5-5"
-   style="font-size:12px;line-height:0">OSBuild</tspan></text>
- </g>
 </g>
 </svg>

(Image size: 23 KiB before, 22 KiB after)
doc/conf.py (142 lines changed)
@@ -18,12 +18,12 @@ import os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-# sys.path.insert(0, os.path.abspath('.'))
+#sys.path.insert(0, os.path.abspath('.'))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
+#needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -31,201 +31,207 @@ import os
 extensions = []

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
+templates_path = ['_templates']

 # The suffix of source filenames.
-source_suffix = ".rst"
+source_suffix = '.rst'

 # The encoding of source files.
-# source_encoding = 'utf-8-sig'
+#source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = "index"
+master_doc = 'index'

 # General information about the project.
-project = "Pungi"
-copyright = "2016, Red Hat, Inc."
+project = u'Pungi'
+copyright = u'2016, Red Hat, Inc.'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = "4.7"
+version = '4.3'
 # The full version, including alpha/beta/rc tags.
-release = "4.7.0"
+release = '4.3.7'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-# language = None
+#language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-# today = ''
+#today = ''
 # Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
+#today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = ['_build']

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-# default_role = None
+#default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
+#add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-# add_module_names = True
+#add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-# show_authors = False
+#show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
+pygments_style = 'sphinx'

 # A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
+#modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
+#keep_warnings = False


 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = "default"
+html_theme = 'default'

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-# html_theme_options = {}
+#html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
+#html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-# html_title = None
+#html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
+#html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-# html_logo = None
+#html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-# html_favicon = None
+#html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+html_static_path = ['_static']

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-# html_extra_path = []
+#html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
+#html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-# html_use_smartypants = True
+#html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
+#html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-# html_additional_pages = {}
+#html_additional_pages = {}

 # If false, no module index is generated.
-# html_domain_indices = True
+#html_domain_indices = True

 # If false, no index is generated.
-# html_use_index = True
+#html_use_index = True

 # If true, the index is split into individual pages for each letter.
-# html_split_index = False
+#html_split_index = False

 # If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
+#html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
+#html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
+#html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-# html_use_opensearch = ''
+#html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
+#html_file_suffix = None

 # Output file base name for HTML help builder.
-htmlhelp_basename = "Pungidoc"
+htmlhelp_basename = 'Pungidoc'


 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ("index", "Pungi.tex", "Pungi Documentation", "Daniel Mach", "manual"),
+    ('index', 'Pungi.tex', u'Pungi Documentation',
+     u'Daniel Mach', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-# latex_logo = None
+#latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-# latex_use_parts = False
+#latex_use_parts = False

 # If true, show page references after internal links.
-# latex_show_pagerefs = False
+#latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-# latex_show_urls = False
+#latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-# latex_appendices = []
+#latex_appendices = []

 # If false, no module index is generated.
-# latex_domain_indices = True
+#latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]
+man_pages = [
+    ('index', 'pungi', u'Pungi Documentation',
+     [u'Daniel Mach'], 1)
+]

 # If true, show URL addresses after external links.
-# man_show_urls = False
+#man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------
@@ -234,25 +240,19 @@ man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (
-        "index",
-        "Pungi",
-        "Pungi Documentation",
-        "Daniel Mach",
-        "Pungi",
-        "One line description of project.",
-        "Miscellaneous",
-    ),
+    ('index', 'Pungi', u'Pungi Documentation',
+     u'Daniel Mach', 'Pungi', 'One line description of project.',
+     'Miscellaneous'),
 ]

 # Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
+#texinfo_appendices = []

 # If false, no module index is generated.
-# texinfo_domain_indices = True
+#texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
+#texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
+#texinfo_no_detailmenu = False
|
@ -194,17 +194,6 @@ Options
|
|||||||
Tracking Service Kerberos authentication. If not defined, the default
|
Tracking Service Kerberos authentication. If not defined, the default
|
||||||
Kerberos principal is used.
|
Kerberos principal is used.
|
||||||
|
|
||||||
**cts_oidc_token_url**
|
|
||||||
(*str*) -- URL to the OIDC token endpoint.
|
|
||||||
For example ``https://oidc.example.com/openid-connect/token``.
|
|
||||||
This option can be overridden by the environment variable ``CTS_OIDC_TOKEN_URL``.
|
|
||||||
|
|
||||||
**cts_oidc_client_id*
|
|
||||||
(*str*) -- OIDC client ID.
|
|
||||||
This option can be overridden by the environment variable ``CTS_OIDC_CLIENT_ID``.
|
|
||||||
Note that environment variable ``CTS_OIDC_CLIENT_SECRET`` must be configured with
|
|
||||||
corresponding client secret to authenticate to CTS via OIDC.
|
|
||||||
|
|
||||||
**compose_type**
|
**compose_type**
|
||||||
(*str*) -- Allows to set default compose type. Type set via a command-line
|
(*str*) -- Allows to set default compose type. Type set via a command-line
|
||||||
option overwrites this.
|
option overwrites this.
|
||||||
@ -292,8 +281,8 @@ There a couple common format specifiers available for both the options:
|
|||||||
format string. The pattern should not overlap, otherwise it is undefined
|
format string. The pattern should not overlap, otherwise it is undefined
|
||||||
which one will be used.
|
which one will be used.
|
||||||
|
|
||||||
This format will be used for some phases generating images. Currently that
|
This format will be used for all phases generating images. Currently that
|
||||||
means ``createiso``, ``buildinstall`` and ``ostree_installer``.
|
means ``createiso``, ``live_images`` and ``buildinstall``.
|
||||||
|
|
||||||
Available extra keys are:
|
Available extra keys are:
|
||||||
* ``disc_num``
|
* ``disc_num``
|
||||||
@ -323,6 +312,7 @@ There a couple common format specifiers available for both the options:
|
|||||||
|
|
||||||
Available keys are:
|
Available keys are:
|
||||||
* ``boot`` -- for ``boot.iso`` images created in *buildinstall* phase
|
* ``boot`` -- for ``boot.iso`` images created in *buildinstall* phase
|
||||||
|
* ``live`` -- for images created by *live_images* phase
|
||||||
* ``dvd`` -- for images created by *createiso* phase
|
* ``dvd`` -- for images created by *createiso* phase
|
||||||
* ``ostree`` -- for ostree installer images
|
* ``ostree`` -- for ostree installer images
|
||||||
|
|
||||||
@ -350,10 +340,48 @@ Example
|
|||||||
|
|
||||||
disc_types = {
|
disc_types = {
|
||||||
'boot': 'netinst',
|
'boot': 'netinst',
|
||||||
|
'live': 'Live',
|
||||||
'dvd': 'DVD',
|
'dvd': 'DVD',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Signing
|
||||||
|
=======
|
||||||
|
|
||||||
|
If you want to sign deliverables generated during pungi run like RPM wrapped
|
||||||
|
images. You must provide few configuration options:
|
||||||
|
|
||||||
|
**signing_command** [optional]
|
||||||
|
(*str*) -- Command that will be run with a koji build as a single
|
||||||
|
argument. This command must not require any user interaction.
|
||||||
|
If you need to pass a password for a signing key to the command,
|
||||||
|
do this via command line option of the command and use string
|
||||||
|
formatting syntax ``%(signing_key_password)s``.
|
||||||
|
(See **signing_key_password_file**).
|
||||||
|
|
||||||
|
**signing_key_id** [optional]
|
||||||
|
(*str*) -- ID of the key that will be used for the signing.
|
||||||
|
This ID will be used when crafting koji paths to signed files
|
||||||
|
(``kojipkgs.fedoraproject.org/packages/NAME/VER/REL/data/signed/KEYID/..``).
|
||||||
|
|
||||||
|
**signing_key_password_file** [optional]
|
||||||
|
(*str*) -- Path to a file with password that will be formatted
|
||||||
|
into **signing_command** string via ``%(signing_key_password)s``
|
||||||
|
string format syntax (if used).
|
||||||
|
Because pungi config is usually stored in git and is part of compose
|
||||||
|
logs we don't want password to be included directly in the config.
|
||||||
|
Note: If ``-`` string is used instead of a filename, then you will be asked
|
||||||
|
for the password interactivelly right after pungi starts.
|
||||||
|
|
||||||
|
Example
|
||||||
|
-------
|
||||||
|
::
|
||||||
|
|
||||||
|
signing_command = '~/git/releng/scripts/sigulsign_unsigned.py -vv --password=%(signing_key_password)s fedora-24'
|
||||||
|
signing_key_id = '81b46521'
|
||||||
|
signing_key_password_file = '~/password_for_fedora-24_key'
|
||||||
|
|
||||||
|
|
||||||
.. _git-urls:
|
.. _git-urls:
|
||||||
|
|
||||||
Git URLs
|
Git URLs
|
||||||
@ -553,16 +581,6 @@ Options
|
|||||||
with everything. Set this option to ``False`` to ignore ``noarch`` in
|
with everything. Set this option to ``False`` to ignore ``noarch`` in
|
||||||
``ExclusiveArch`` and always consider only binary architectures.
|
``ExclusiveArch`` and always consider only binary architectures.
|
||||||
|
|
||||||
**pkgset_inherit_exclusive_arch_to_noarch** = True
|
|
||||||
(*bool*) -- When set to ``True``, the value of ``ExclusiveArch`` or
|
|
||||||
``ExcludeArch`` will be copied from source rpm to all its noarch packages.
|
|
||||||
That will than limit which architectures the noarch packages can be
|
|
||||||
included in.
|
|
||||||
|
|
||||||
By setting this option to ``False`` this step is skipped, and noarch
|
|
||||||
packages will by default land in all architectures. They can still be
|
|
||||||
excluded by listing them in a relevant section of ``filter_packages``.
|
|
||||||
|
|
||||||
**pkgset_allow_reuse** = True
|
**pkgset_allow_reuse** = True
|
||||||
(*bool*) -- When set to ``True``, *Pungi* will try to reuse pkgset data
|
(*bool*) -- When set to ``True``, *Pungi* will try to reuse pkgset data
|
||||||
from the old composes specified by ``--old-composes``. When enabled, this
|
from the old composes specified by ``--old-composes``. When enabled, this
|
||||||
@ -603,7 +621,7 @@ Options
|
|||||||
-------
|
-------
|
||||||
|
|
||||||
**buildinstall_method**
|
**buildinstall_method**
|
||||||
(*str*) -- "lorax" (f16+, rhel7+)
|
(*str*) -- "lorax" (f16+, rhel7+) or "buildinstall" (older releases)
|
||||||
**lorax_options**
|
**lorax_options**
|
||||||
(*list*) -- special options passed on to *lorax*.
|
(*list*) -- special options passed on to *lorax*.
|
||||||
|
|
||||||
@ -902,10 +920,6 @@ Options
|
|||||||
comps file can not be found in the package set. When disabled (the
|
comps file can not be found in the package set. When disabled (the
|
||||||
default), such cases are still reported as warnings in the log.
|
default), such cases are still reported as warnings in the log.
|
||||||
|
|
||||||
With ``dnf`` gather backend, this option will abort the compose on any
|
|
||||||
missing package no matter if it's listed in comps, ``additional_packages``
|
|
||||||
or prepopulate file.
|
|
||||||
|
|
||||||
**gather_source_mapping**
|
**gather_source_mapping**
|
||||||
(*str*) -- JSON mapping with initial packages for the compose. The value
|
(*str*) -- JSON mapping with initial packages for the compose. The value
|
||||||
should be a path to JSON file with following mapping: ``{variant: {arch:
|
should be a path to JSON file with following mapping: ``{variant: {arch:
|
||||||
@ -1329,8 +1343,8 @@ All non-``RC`` milestones from label get appended to the version. For release
|
|||||||
either label is used or date, type and respin.
|
either label is used or date, type and respin.
|
||||||
|
|
||||||
|
|
||||||
Common options for Live Media and Image Build
|
Common options for Live Images, Live Media and Image Build
|
||||||
=============================================
|
==========================================================
|
||||||
|
|
||||||
All images can have ``ksurl``, ``version``, ``release`` and ``target``
|
All images can have ``ksurl``, ``version``, ``release`` and ``target``
|
||||||
specified. Since this can create a lot of duplication, there are global options
|
specified. Since this can create a lot of duplication, there are global options
|
||||||
@ -1346,12 +1360,14 @@ The kickstart URL is configured by these options.
|
|||||||
* ``global_ksurl`` -- global fallback setting
|
* ``global_ksurl`` -- global fallback setting
|
||||||
* ``live_media_ksurl``
|
* ``live_media_ksurl``
|
||||||
* ``image_build_ksurl``
|
* ``image_build_ksurl``
|
||||||
|
* ``live_images_ksurl``
|
||||||
|
|
||||||
Target is specified by these settings.
|
Target is specified by these settings.
|
||||||
|
|
||||||
* ``global_target`` -- global fallback setting
|
* ``global_target`` -- global fallback setting
|
||||||
* ``live_media_target``
|
* ``live_media_target``
|
||||||
* ``image_build_target``
|
* ``image_build_target``
|
||||||
|
* ``live_images_target``
|
||||||
* ``osbuild_target``
|
* ``osbuild_target``
|
||||||
|
|
||||||
Version is specified by these options. If no version is set, a default value
|
Version is specified by these options. If no version is set, a default value
|
||||||
@ -1360,6 +1376,7 @@ will be provided according to :ref:`automatic versioning <auto-version>`.
|
|||||||
* ``global_version`` -- global fallback setting
|
* ``global_version`` -- global fallback setting
|
||||||
* ``live_media_version``
|
* ``live_media_version``
|
||||||
* ``image_build_version``
|
* ``image_build_version``
|
||||||
|
* ``live_images_version``
|
||||||
* ``osbuild_version``
|
* ``osbuild_version``
|
||||||
|
|
||||||
Release is specified by these options. If set to a magic value to
|
Release is specified by these options. If set to a magic value to
|
||||||
@ -1369,14 +1386,44 @@ to :ref:`automatic versioning <auto-version>`.
|
|||||||
* ``global_release`` -- global fallback setting
|
* ``global_release`` -- global fallback setting
|
||||||
* ``live_media_release``
|
* ``live_media_release``
|
||||||
* ``image_build_release``
|
* ``image_build_release``
|
||||||
|
* ``live_images_release``
|
||||||
* ``osbuild_release``
|
* ``osbuild_release``
|
||||||
|
|
||||||
Each configuration block can also optionally specify a ``failable`` key. It
|
Each configuration block can also optionally specify a ``failable`` key. For
|
||||||
|
live images it should have a boolean value. For live media and image build it
|
||||||
should be a list of strings containing architectures that are optional. If any
|
should be a list of strings containing architectures that are optional. If any
|
||||||
deliverable fails on an optional architecture, it will not abort the whole
|
deliverable fails on an optional architecture, it will not abort the whole
|
||||||
compose. If the list contains only ``"*"``, all arches will be substituted.
|
compose. If the list contains only ``"*"``, all arches will be substituted.
|
||||||
|
|
||||||
|
|
||||||
|
Live Images Settings
|
||||||
|
====================
|
||||||
|
|
||||||
|
**live_images**
|
||||||
|
(*list*) -- Configuration for the particular image. The elements of the
|
||||||
|
list should be tuples ``(variant_uid_regex, {arch|*: config})``. The config
|
||||||
|
should be a dict with these keys:
|
||||||
|
|
||||||
|
* ``kickstart`` (*str*)
|
||||||
|
* ``ksurl`` (*str*) [optional] -- where to get the kickstart from
|
||||||
|
* ``name`` (*str*)
|
||||||
|
* ``version`` (*str*)
|
||||||
|
* ``target`` (*str*)
|
||||||
|
* ``repo`` (*str|[str]*) -- repos specified by URL or variant UID
|
||||||
|
* ``specfile`` (*str*) -- for images wrapped in RPM
|
||||||
|
* ``scratch`` (*bool*) -- only RPM-wrapped images can use scratch builds,
|
||||||
|
but by default this is turned off
|
||||||
|
* ``type`` (*str*) -- what kind of task to start in Koji. Defaults to
|
||||||
|
``live`` meaning ``koji spin-livecd`` will be used. Alternative option
|
||||||
|
is ``appliance`` corresponding to ``koji spin-appliance``.
|
||||||
|
* ``sign`` (*bool*) -- only RPM-wrapped images can be signed
|
||||||
|
|
||||||
|
**live_images_no_rename**
|
||||||
|
(*bool*) -- When set to ``True``, filenames generated by Koji will be used.
|
||||||
|
When ``False``, filenames will be generated based on ``image_name_format``
|
||||||
|
configuration option.
|
||||||
|
|
||||||
|
|
||||||
Live Media Settings
|
Live Media Settings
|
||||||
===================
|
===================
|
||||||
|
|
||||||
@ -1532,61 +1579,6 @@ Example
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
KiwiBuild Settings
|
|
||||||
==================
|
|
||||||
|
|
||||||
**kiwibuild**
|
|
||||||
(*dict*) -- configuration for building images using kiwi by a Koji plugin.
|
|
||||||
Pungi will trigger a Koji task delegating to kiwi, which will build the image,
|
|
||||||
import it to Koji via content generators.
|
|
||||||
|
|
||||||
Format: ``{variant_uid_regex: [{...}]}``.
|
|
||||||
|
|
||||||
Required keys in the configuration dict:
|
|
||||||
|
|
||||||
* ``kiwi_profile`` -- (*str*) select profile from description file.
|
|
||||||
|
|
||||||
Description scm, description path and target have to be provided too, but
|
|
||||||
instead of specifying them for each image separately, you can use the
|
|
||||||
``kiwibuild_*`` options or ``global_target``.
|
|
||||||
|
|
||||||
Optional keys:
|
|
||||||
|
|
||||||
* ``description_scm`` -- (*str*) scm URL of description kiwi description.
|
|
||||||
* ``description_path`` -- (*str*) path to kiwi description inside the scm
|
|
||||||
repo.
|
|
||||||
* ``repos`` -- additional repos used to install RPMs in the image. The
|
|
||||||
compose repository for the enclosing variant is added automatically.
|
|
||||||
Either variant name or a URL is supported.
|
|
||||||
* ``target`` -- (*str*) which build target to use for the task. If not
|
|
||||||
provided, then either ``kiwibuild_target`` or ``global_target`` is
|
|
||||||
needed.
|
|
||||||
* ``release`` -- (*str*) release of the output image.
|
|
||||||
* ``arches`` -- (*[str]*) List of architectures to build for. If not
|
|
||||||
provided, all variant architectures will be built.
|
|
||||||
* ``failable`` -- (*[str]*) List of architectures for which this
|
|
||||||
deliverable is not release blocking.
|
|
||||||
* ``type`` -- (*str*) override default type from the bundle with this value.
|
|
||||||
* ``type_attr`` -- (*[str]*) override default attributes for the build type
|
|
||||||
from description.
|
|
||||||
* ``bundle_name_format`` -- (*str*) override default bundle format name.
|
|
||||||
|
|
||||||
**kiwibuild_description_scm**
|
|
||||||
(*str*) -- URL for scm containing the description files
|
|
||||||
|
|
||||||
**kiwibuild_description_path**
|
|
||||||
(*str*) -- path to a description file within the description scm
|
|
||||||
|
|
||||||
**kiwibuild_type**
|
|
||||||
(*str*) -- override default type from the bundle with this value.
|
|
||||||
|
|
||||||
**kiwibuild_type_attr**
|
|
||||||
(*[str]*) -- override default attributes for the build type from description.
|
|
||||||
|
|
||||||
**kiwibuild_bundle_name_format**
|
|
||||||
(*str*) -- override default bundle format name.
|
|
||||||
|
|
||||||
|
|
||||||
OSBuild Composer for building images
|
OSBuild Composer for building images
|
||||||
====================================
|
====================================
|
||||||
|
|
||||||
@ -1635,17 +1627,11 @@ OSBuild Composer for building images
|
|||||||
* ``arches`` -- list of architectures for which to build the image. By
|
* ``arches`` -- list of architectures for which to build the image. By
|
||||||
default, the variant arches are used. This option can only restrict it,
|
default, the variant arches are used. This option can only restrict it,
|
||||||
not add a new one.
|
not add a new one.
|
||||||
* ``manifest_type`` -- the image type that is put into the manifest by
|
|
||||||
pungi. If not supplied then it is autodetected from the Koji output.
|
|
||||||
* ``ostree_url`` -- URL of the repository that's used to fetch the parent
|
* ``ostree_url`` -- URL of the repository that's used to fetch the parent
|
||||||
commit from.
|
commit from.
|
||||||
* ``ostree_ref`` -- name of the ostree branch
|
* ``ostree_ref`` -- name of the ostree branch
|
||||||
* ``ostree_parent`` -- commit hash or a a branch-like reference to the
|
* ``ostree_parent`` -- commit hash or a a branch-like reference to the
|
||||||
parent commit.
|
parent commit.
|
||||||
* ``customizations`` -- a dictionary with customizations to use for the
|
|
||||||
image build. For the list of supported customizations, see the **hosted**
|
|
||||||
variants in the `Image Builder documentation
|
|
||||||
<https://osbuild.org/docs/user-guide/blueprint-reference#installation-device>`.
|
|
||||||
* ``upload_options`` -- a dictionary with upload options specific to the
|
* ``upload_options`` -- a dictionary with upload options specific to the
|
||||||
target cloud environment. If provided, the image will be uploaded to the
|
target cloud environment. If provided, the image will be uploaded to the
|
||||||
cloud environment, in addition to the Koji server. One can't combine
|
cloud environment, in addition to the Koji server. One can't combine
|
||||||
@ -1753,16 +1739,16 @@ another directory. Any new packages in the compose will be added to the
|
|||||||
repository with a new commit.
|
repository with a new commit.
|
||||||
|
|
||||||
**ostree**
|
**ostree**
|
||||||
(*dict*) -- a mapping of configuration for each variant. The format should
|
(*dict*) -- a mapping of configuration for each. The format should be
|
||||||
be ``{variant_uid_regex: config_dict}``. It is possible to use a list of
|
``{variant_uid_regex: config_dict}``. It is possible to use a list of
|
||||||
configuration dicts as well.
|
configuration dicts as well.
|
||||||
|
|
||||||
The configuration dict for each variant arch pair must have these keys:
|
The configuration dict for each variant arch pair must have these keys:
|
||||||
|
|
||||||
* ``treefile`` -- (*str*) Filename of configuration for ``rpm-ostree``.
|
* ``treefile`` -- (*str*) Filename of configuration for ``rpm-ostree``.
|
||||||
* ``config_url`` -- (*str*) URL for Git repository with the ``treefile``.
|
* ``config_url`` -- (*str*) URL for Git repository with the ``treefile``.
|
||||||
* ``repo`` -- (*str|dict|[str|dict]*) repos specified by URL or a dict of
|
* ``repo`` -- (*str|dict|[str|dict]*) repos specified by URL or variant UID
|
||||||
repo options, ``baseurl`` is required in the dict.
|
or a dict of repo options, ``baseurl`` is required in the dict.
|
||||||
* ``ostree_repo`` -- (*str*) Where to put the ostree repository
|
* ``ostree_repo`` -- (*str*) Where to put the ostree repository
|
||||||
|
|
||||||
These keys are optional:
|
These keys are optional:
|
||||||
@ -1793,8 +1779,6 @@ repository with a new commit.
|
|||||||
* ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
|
* ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
|
||||||
reference will not be created.
|
reference will not be created.
|
||||||
* ``ostree_ref`` -- (*str*) To override value ``ref`` from ``treefile``.
|
* ``ostree_ref`` -- (*str*) To override value ``ref`` from ``treefile``.
|
||||||
* ``runroot_packages`` -- (*list*) A list of additional package names to be
|
|
||||||
installed in the runroot environment in Koji.
|
|
||||||
|
|
||||||
Example config
|
Example config
|
||||||
--------------
|
--------------
|
||||||
@ -1804,11 +1788,13 @@ Example config
|
|||||||
"^Atomic$": {
|
"^Atomic$": {
|
||||||
"treefile": "fedora-atomic-docker-host.json",
|
"treefile": "fedora-atomic-docker-host.json",
|
||||||
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
|
"config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",
|
||||||
"keep_original_sources": True,
|
|
||||||
"repo": [
|
"repo": [
|
||||||
|
"Server",
|
||||||
"http://example.com/repo/x86_64/os",
|
"http://example.com/repo/x86_64/os",
|
||||||
|
{"baseurl": "Everything"},
|
||||||
{"baseurl": "http://example.com/linux/repo", "exclude": "systemd-container"},
|
{"baseurl": "http://example.com/linux/repo", "exclude": "systemd-container"},
|
||||||
],
|
],
|
||||||
|
"keep_original_sources": True,
|
||||||
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
|
"ostree_repo": "/mnt/koji/compose/atomic/Rawhide/",
|
||||||
"update_summary": True,
|
"update_summary": True,
|
||||||
# Automatically generate a reasonable version
|
# Automatically generate a reasonable version
|
||||||
@@ -1824,79 +1810,6 @@ Example config
 has the pungi_ostree plugin installed.
 
 
-OSTree Native Container Settings
-================================
-
-The ``ostree_container`` phase of *Pungi* can create an ostree native container
-image as an OCI archive. This is done by running ``rpm-ostree compose image``
-in a Koji runroot environment.
-
-While rpm-ostree can use information from previously built images to improve
-the split in container layers, we can not use that functionality until
-https://github.com/containers/skopeo/pull/2114 is resolved. Each invocation
-will thus create a new OCI archive image *from scratch*.
-
-**ostree_container**
-    (*dict*) -- a mapping of configuration for each variant. The format should
-    be ``{variant_uid_regex: config_dict}``. It is possible to use a list of
-    configuration dicts as well.
-
-    The configuration dict for each variant arch pair must have these keys:
-
-    * ``treefile`` -- (*str*) Filename of configuration for ``rpm-ostree``.
-    * ``config_url`` -- (*str*) URL for Git repository with the ``treefile``.
-
-    These keys are optional:
-
-    * ``repo`` -- (*str|dict|[str|dict]*) repos specified by URL or a dict of
-      repo options, ``baseurl`` is required in the dict.
-    * ``keep_original_sources`` -- (*bool*) Keep the existing source repos in
-      the tree config file. If not enabled, all the original source repos will
-      be removed from the tree config file.
-    * ``config_branch`` -- (*str*) Git branch of the repo to use. Defaults to
-      ``main``.
-    * ``arches`` -- (*[str]*) List of architectures for which to generate
-      ostree native container images. There will be one task per architecture.
-      By default all architectures in the variant are used.
-    * ``failable`` -- (*[str]*) List of architectures for which this
-      deliverable is not release blocking.
-    * ``version`` -- (*str*) Version string to be added to the OCI archive name.
-      If this option is set to ``!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN``,
-      a value will be generated automatically as ``$VERSION.$RELEASE``.
-      If this option is set to ``!VERSION_FROM_VERSION_DATE_RESPIN``,
-      a value will be generated automatically as ``$VERSION.$DATE.$RESPIN``.
-      :ref:`See how those values are created <auto-version>`.
-    * ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
-      reference will not be created.
-    * ``runroot_packages`` -- (*list*) A list of additional package names to be
-      installed in the runroot environment in Koji.
-
-Example config
---------------
-::
-
-    ostree_container = {
-        "^Sagano$": {
-            "treefile": "fedora-tier-0-38.yaml",
-            "config_url": "https://gitlab.com/CentOS/cloud/sagano.git",
-            "config_branch": "main",
-            "repo": [
-                "http://example.com/repo/x86_64/os",
-                {"baseurl": "http://example.com/linux/repo", "exclude": "systemd-container"},
-            ],
-            # Automatically generate a reasonable version
-            "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
-            # Only run this for x86_64 even if Sagano has more arches
-            "arches": ["x86_64"],
-        }
-    }
-
-**ostree_container_use_koji_plugin** = False
-    (*bool*) -- When set to ``True``, the Koji pungi_ostree task will be
-    used to execute rpm-ostree instead of runroot. Use only if the Koji instance
-    has the pungi_ostree plugin installed.
-
-
 Ostree Installer Settings
 =========================
 
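
For the magic ``version`` values documented in the removed section above, a
rough sketch of the substitution (field names are assumed; the real logic
lives in Pungi's utility code and reads these values from compose metadata)::

    def auto_version(magic, version, release, date, respin):
        """Expand the documented magic version strings; illustrative only."""
        if magic == "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN":
            return "%s.%s" % (version, release)
        if magic == "!VERSION_FROM_VERSION_DATE_RESPIN":
            return "%s.%s.%s" % (version, date, respin)
        return magic  # a literal version string is used as-is

    auto_version("!VERSION_FROM_VERSION_DATE_RESPIN", "38", "x", "20230101", 0)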
@@ -2247,9 +2160,9 @@ Miscellaneous Settings
 format string accepting ``%(variant_name)s`` and ``%(arch)s`` placeholders.
 
 **symlink_isos_to**
-    (*str*) -- If set, the ISO files from ``buildinstall`` and ``createiso``
-    phases will be put into this destination, and a symlink pointing to this
-    location will be created in actual compose directory.
+    (*str*) -- If set, the ISO files from ``buildinstall``, ``createiso`` and
+    ``live_images`` phases will be put into this destination, and a symlink
+    pointing to this location will be created in actual compose directory.
 
 **dogpile_cache_backend**
     (*str*) -- If set, Pungi will use the configured Dogpile cache backend to
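
A sketch of the ``symlink_isos_to`` behaviour described above (helper name
and path handling are illustrative assumptions, not Pungi's implementation)::

    import os

    def place_iso(iso_name, symlink_isos_to, compose_dir):
        real_path = os.path.join(symlink_isos_to, iso_name)
        link_path = os.path.join(compose_dir, iso_name)
        # The ISO itself lives in the configured destination; the compose
        # directory only holds a symlink pointing at it.
        os.symlink(real_path, link_path)
        return link_path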
@@ -294,6 +294,30 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
     })
 ]
 
+live_target = 'f32'
+live_images_no_rename = True
+live_images = [
+    ('^Workstation$', {
+        'armhfp': {
+            'kickstart': 'fedora-arm-workstation.ks',
+            'name': 'Fedora-Workstation-armhfp',
+            # Again workstation takes packages from Everything.
+            'repo': 'Everything',
+            'type': 'appliance',
+            'failable': True,
+        }
+    }),
+    ('^Server$', {
+        # But Server has its own repo.
+        'armhfp': {
+            'kickstart': 'fedora-arm-server.ks',
+            'name': 'Fedora-Server-armhfp',
+            'type': 'appliance',
+            'failable': True,
+        }
+    }),
+]
+
 ostree = {
     "^Silverblue$": {
         "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
@@ -319,20 +343,6 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
     }
 }
 
-ostree_container = {
-    "^Sagano$": {
-        "treefile": "fedora-tier-0-38.yaml",
-        "config_url": "https://gitlab.com/CentOS/cloud/sagano.git",
-        "config_branch": "main",
-        # Consume packages from Everything
-        "repo": "Everything",
-        # Automatically generate a reasonable version
-        "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
-        # Only run this for x86_64 even if Sagano has more arches
-        "arches": ["x86_64"],
-    }
-}
-
 ostree_installer = [
     ("^Silverblue$", {
         "x86_64": {
@@ -19,7 +19,7 @@ Contents:
    scm_support
    messaging
    gathering
-   koji
    comps
    contributing
    testing
+   multi_compose
105  doc/koji.rst
@@ -1,105 +0,0 @@
-======================
-Getting data from koji
-======================
-
-When Pungi is configured to get packages from a Koji tag, it somehow needs to
-access the actual RPM files.
-
-Historically, this required the storage used by Koji to be directly available
-on the host where Pungi was running. This was usually achieved by using NFS for
-the Koji volume, and mounting it on the compose host.
-
-The compose could be created directly on the same volume. In such case the
-packages would be hardlinked, significantly reducing space consumption.
-
-The compose could also be created on a different storage, in which case the
-packages would either need to be copied over or symlinked. Using symlinks
-requires that anything that accesses the compose (e.g. a download server) would
-also need to mount the Koji volume in the same location.
-
-There is also a risk with symlinks that the package in Koji can change (due to
-being resigned for example), which would invalidate composes linking to it.
-
-
-Using Koji without direct mount
-===============================
-
-It is now possible to run a compose from a Koji tag without direct access to
-Koji storage.
-
-Pungi can download the packages over HTTP protocol, store them in a local
-cache, and consume them from there.
-
-The local cache has similar structure to what is on the Koji volume.
-
-When Pungi needs some package, it has a path on Koji volume. It will replace
-the ``topdir`` with the cache location. If such file exists, it will be used.
-If it doesn't exist, it will be downloaded from Koji (by replacing the
-``topdir`` with ``topurl``).
-
-::
-
-    Koji path  /mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-    Koji URL   https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-    Local path /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-
-The packages can be hardlinked from this cache directory.
-
-
-Cleanup
--------
-
-While the approach above allows each RPM to be downloaded only once, it will
-eventually result in the Koji volume being mirrored locally. Most of the
-packages will however no longer be needed.
-
-There is a script ``pungi-cache-cleanup`` that can help with that. It can find
-and remove files from the cache that are no longer needed.
-
-A file is no longer needed if it has a single link (meaning it is only in the
-cache, not in any compose), and it has mtime older than a given threshold.
-
-It doesn't make sense to delete files that are hardlinked in an existing
-compose as it would not save any space anyway.
-
-The mtime check is meant to preserve files that are downloaded but not actually
-used in a compose, like a subpackage that is not included in any variant. Every
-time its existence in the local cache is checked, the mtime is updated.
-
-
-Race conditions?
-----------------
-
-It should be safe to have multiple compose hosts share the same storage volume
-for generated composes and local cache.
-
-If a cache file is accessed and it exists, there's no risk of race condition.
-
-If two composes need the same file at the same time and it is not present yet,
-one of them will take a lock on it and start downloading. The other will wait
-until the download is finished.
-
-The lock is only valid for a set amount of time (5 minutes) to avoid issues
-where the downloading process is killed in a way that blocks it from releasing
-the lock.
-
-If the file is large and network slow, the limit may not be enough to finish
-downloading. In that case the second process will steal the lock while the
-first process is still downloading. This will result in the same file being
-downloaded twice.
-
-When the first process finishes the download, it will put the file into the
-local cache location. When the second process finishes, it will atomically
-replace it, but since it is the same file, the content does not change.
-
-If the first compose already managed to hardlink the file before it gets
-replaced, there will be two copies of the file present locally.
-
-
-Integrity checking
-------------------
-
-There is minimal integrity checking. RPM packages belonging to real builds will
-be checked to match the checksum provided by Koji hub.
-
-There is no checking for scratch builds or any images.
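
The ``topdir``/``topurl`` substitution described in the removed document can
be sketched as follows; the function and variable names are assumptions, only
the path arithmetic follows the text and its example paths::

    def translate(koji_path, topdir, cache_dir, topurl):
        relative = koji_path[len(topdir):].lstrip("/")
        local_path = "%s/%s" % (cache_dir, relative)   # used if it exists
        download_url = "%s/%s" % (topurl, relative)    # fetched otherwise
        return local_path, download_url

    translate(
        "/mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm",
        "/mnt/koji",
        "/mnt/compose/cache",
        "https://kojipkgs.fedoraproject.org",
    )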
107  doc/multi_compose.rst  Normal file
@@ -0,0 +1,107 @@
+.. _multi_compose:
+
+Managing compose from multiple parts
+====================================
+
+There may be cases where it makes sense to split a big compose into separate
+parts, but create a compose output that links all output into one familiar
+structure.
+
+The `pungi-orchestrate` tool allows that.
+
+It works with an INI-style configuration file. The ``[general]`` section
+contains information about identity of the main compose. Other sections define
+individual parts.
+
+The parts are scheduled to run in parallel, with the minimal amount of
+serialization. The final compose directory will contain hard-links to the
+files.
+
+
+General settings
+----------------
+
+**target**
+    Path to directory where the final compose should be created.
+**compose_type**
+    Type of compose to make.
+**release_name**
+    Name of the product for the final compose.
+**release_short**
+    Short name of the product for the final compose.
+**release_version**
+    Version of the product for the final compose.
+**release_type**
+    Type of the product for the final compose.
+**extra_args**
+    Additional arguments that will be passed to the child Pungi processes.
+**koji_profile**
+    If specified, a current event will be retrieved from the Koji instance and
+    used for all parts.
+
+**kerberos**
+    If set to yes, a kerberos ticket will be automatically created at the start.
+    Set keytab and principal as well.
+**kerberos_keytab**
+    Path to keytab file used to create the kerberos ticket.
+**kerberos_principal**
+    Kerberos principal for the ticket.
+
+**pre_compose_script**
+    Commands to execute before the first part is started. Can contain multiple
+    commands on separate lines.
+**post_compose_script**
+    Commands to execute after the last part finishes and final status is
+    updated. Can contain multiple commands on separate lines. ::
+
+        post_compose_script =
+            compose-latest-symlink $COMPOSE_PATH
+            custom-post-compose-script.sh
+
+    Multiple environment variables are defined for the scripts:
+
+    * ``COMPOSE_PATH``
+    * ``COMPOSE_ID``
+    * ``COMPOSE_DATE``
+    * ``COMPOSE_TYPE``
+    * ``COMPOSE_RESPIN``
+    * ``COMPOSE_LABEL``
+    * ``RELEASE_ID``
+    * ``RELEASE_NAME``
+    * ``RELEASE_SHORT``
+    * ``RELEASE_VERSION``
+    * ``RELEASE_TYPE``
+    * ``RELEASE_IS_LAYERED`` – ``YES`` for layered products, empty otherwise
+    * ``BASE_PRODUCT_NAME`` – only set for layered products
+    * ``BASE_PRODUCT_SHORT`` – only set for layered products
+    * ``BASE_PRODUCT_VERSION`` – only set for layered products
+    * ``BASE_PRODUCT_TYPE`` – only set for layered products
+
+**notification_script**
+    Executable name (or path to a script) that will be used to send a message
+    once the compose is finished. In order for a valid URL to be included in the
+    message, at least one part must configure path translation that would apply
+    to the location of the main compose.
+
+    Only two messages will be sent, one for start and one for finish (either
+    successful or not).
+
+
+Partial compose settings
+------------------------
+
+Each part should have a separate section in the config file.
+
+It can specify these options:
+
+**config**
+    Path to configuration file that describes this part. If relative, it is
+    resolved relative to the file with parts configuration.
+**just_phase**, **skip_phase**
+    Customize which phases should run for this part.
+**depends_on**
+    A comma separated list of other parts that must be finished before this part
+    starts.
+**failable**
+    A boolean toggle to mark a part as failable. A failure in such part will
+    mark the final compose as incomplete, but still successful.
@@ -30,14 +30,17 @@ packages to architectures.
 Buildinstall
 ------------
 
-Spawns a bunch of threads, each of which runs the ``lorax`` command. The
+Spawns a bunch of threads, each of which runs either ``lorax`` or
+``buildinstall`` command (the latter coming from ``anaconda`` package). The
 commands create ``boot.iso`` and other boot configuration files. The image is
 finally linked into the ``compose/`` directory as netinstall media.
 
 The created images are also needed for creating live media or other images in
 later phases.
 
-With ``lorax`` this phase runs one task per variant.arch combination.
+With ``lorax`` this phase runs one task per variant.arch combination. For
+``buildinstall`` command there is only one task per architecture and
+``product.img`` should be used to customize the results.
 
 Gather
 ------
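
The method is selected with the ``buildinstall_method`` configuration option;
a minimal example (note that per the schema change later in this diff, the
``buildinstall`` value is only accepted on the right-hand side)::

    buildinstall_method = "lorax"           # one runroot task per variant.arch
    # buildinstall_method = "buildinstall"  # one task per arch, customized via product.img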
@@ -112,12 +115,6 @@ ImageBuild
 This phase wraps up ``koji image-build``. It also updates the metadata
 ultimately responsible for ``images.json`` manifest.
 
-KiwiBuild
----------
-
-Similarly to image build, this phase creates a koji `kiwiBuild` task. In the
-background it uses Kiwi to create images.
-
 OSBuild
 -------
 

@@ -41,14 +41,6 @@ which can contain following keys.
 * ``command`` -- defines a shell command to run after Git clone to generate the
   needed file (for example to run ``make``). Only supported in Git backend.
 
-* ``options`` -- a dictionary of additional configuration options. These are
-  specific to different backends.
-
-  Currently supported values for Git:
-
-  * ``credential_helper`` -- path to a credential helper used to supply
-    username/password for remotes that require authentication.
-
 
 Koji examples
 -------------
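
The removed ``options``/``credential_helper`` key (present only on the master
side of this diff) would sit inside an SCM dict roughly like this; the option
name, repository URL and helper path are illustrative assumptions::

    extra_files = [
        {
            "scm": "git",
            "repo": "https://git.example.com/private-config.git",
            "file": ["some-file.txt"],
            # Helper invoked by git to supply credentials for this remote.
            "options": {"credential_helper": "/usr/local/bin/git-cred-helper"},
        }
    ]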
265  pungi.spec
@@ -1,8 +1,8 @@
 %{?python_enable_dependency_generator}
 
 Name:           pungi
-Version:        4.7.0
-Release:        6%{?dist}.alma.1
+Version:        4.3.7
+Release:        3%{?dist}.alma
 Summary:        Distribution compose tool
 
 License:        GPL-2.0-only
@@ -11,14 +11,15 @@ Source0: %{name}-%{version}.tar.bz2
 
 BuildRequires:  make
 BuildRequires:  python3-pytest
-# replaced by unittest.mock
-# BuildRequires:  python3-mock
+BuildRequires:  python3-pyfakefs
+BuildRequires:  python3-ddt
 BuildRequires:  python3-devel
 BuildRequires:  python3-setuptools
 BuildRequires:  python3-productmd >= 1.33
 BuildRequires:  python3-kobo-rpmlib >= 0.18.0
 BuildRequires:  createrepo_c >= 0.20.1
 BuildRequires:  python3-lxml
+BuildRequires:  python3-ddt
 BuildRequires:  python3-kickstart
 BuildRequires:  python3-rpm
 BuildRequires:  python3-dnf
@@ -36,30 +37,28 @@ BuildRequires: python3-gobject
 BuildRequires:  python3-createrepo_c >= 0.20.1
 BuildRequires:  python3-dogpile-cache
 BuildRequires:  python3-parameterized
-BuildRequires:  python3-flufl-lock
-BuildRequires:  python3-ddt
-BuildRequires:  python3-distro
 BuildRequires:  python3-gobject-base
-BuildRequires:  python3-pgpy
-BuildRequires:  python3-pyfakefs
+BuildRequires:  python3-distro
 %if %{rhel} == 8
 BuildRequires:  python3-dataclasses
 %endif
+BuildRequires:  python3-pgpy
 
 #deps for doc building
 BuildRequires:  python3-sphinx
 
 Requires:       python3-kobo-rpmlib >= 0.18.0
+Requires:       python3-productmd >= 1.33
 Requires:       python3-kickstart
+Requires:       python3-requests
+%if %{rhel} == 8
+Requires:       python3-dataclasses
+%endif
 Requires:       createrepo_c >= 0.20.1
 Requires:       koji >= 1.10.1-13
 Requires:       python3-koji-cli-plugins
 Requires:       isomd5sum
-%if %{rhel} == 8 || %{rhel} == 9
 Requires:       genisoimage
-%else
-Recommends:     genisoimage
-%endif
 Requires:       git
 Requires:       python3-dnf
 Requires:       python3-multilib
@@ -69,22 +68,11 @@ Requires: python3-libmodulemd >= 2.8.0
 Requires:       python3-gobject
 Requires:       python3-createrepo_c >= 0.20.1
 Requires:       python3-PyYAML
-Requires:       python3-productmd >= 1.38
-Requires:       python3-flufl-lock
-%if %{rhel} == 10
-Requires:       xorriso
-%else
-Recommends:     xorriso
-%endif
-Requires:       python3-productmd >= 1.33
-Requires:       lorax
-Requires:       python3-distro
+Requires:       python3-productmd >= 1.28
 Requires:       python3-gobject-base
+Requires:       lorax
 Requires:       python3-pgpy
-Requires:       python3-requests
-%if %{rhel} == 8
-Requires:       python3-dataclasses
-%endif
+Requires:       python3-distro
 
 # This package is not available on i686, hence we cannot require it
 # See https://bugzilla.redhat.com/show_bug.cgi?id=1743421
@@ -100,7 +88,6 @@ A tool to create anaconda based installation trees/isos of a set of rpms.
 %package utils
 Summary:        Utilities for working with finished composes
 Requires:       pungi = %{version}-%{release}
-Requires:       python3-fedora-messaging
 
 %description utils
 These utilities work with finished composes produced by Pungi. They can be used
@@ -109,8 +96,8 @@ notification to Fedora Message Bus.
 
 %package -n python3-%{name}
 Summary:        Python 3 libraries for pungi
-Requires:       fus
 Requires:       python3-attrs
+Requires:       fus
 
 %description -n python3-%{name}
 Python library with code for Pungi. This is not a public library and there are
@@ -153,9 +140,7 @@ rm %{buildroot}%{_bindir}/pungi
 %{_bindir}/%{name}-make-ostree
 %{_mandir}/man1/pungi.1.gz
 %{_datadir}/pungi
-%{_localstatedir}/cache/pungi
-%dir %attr(1777, root, root) %{_localstatedir}/cache/pungi/createrepo_c
-%{_tmpfilesdir}/pungi-clean-cache.conf
+/var/cache/pungi
 
 %files -n python3-%{name}
 %{python3_sitelib}/%{name}
@@ -166,226 +151,14 @@ rm %{buildroot}%{_bindir}/pungi
 %{_bindir}/%{name}-create-unified-isos
 %{_bindir}/%{name}-config-dump
 %{_bindir}/%{name}-config-validate
-%{_bindir}/%{name}-fedmsg-notification
 %{_bindir}/%{name}-notification-report-progress
+%{_bindir}/%{name}-orchestrate
 %{_bindir}/%{name}-patch-iso
 %{_bindir}/%{name}-compare-depsolving
 %{_bindir}/%{name}-wait-for-signed-ostree-handler
-%{_bindir}/%{name}-cache-cleanup
 
 %changelog
-* Fri Sep 27 2024 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.7.0-6
-- Add x86_64_v2 to a list of exclusive arches if there is any arch with base `x86_64`
-
-* Mon Sep 16 2024 Eduard Abdullin <eabdullin@almalinux.org> - 4.7.0-5
-- Add x86_64_v2 to arch list if x86_64 in list
-
-* Fri Sep 06 2024 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.7.0-4
-- Truncate a volume ID to 32 bytes
-- Add new architecture `x86_64_v2`
-
-* Thu Sep 05 2024 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.7.0-2
-- Use xorriso as recommended package and genisoimage as required for RHEL8/9 and vice versa for RHEL10
-
-* Thu Aug 22 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.7.0-1
-- kiwibuild: Add support for type, type attr and bundle format (lsedlar)
-- createiso: Block reuse if unsigned packages are allowed (lsedlar)
-- Allow live_images phase to still be skipped (lsedlar)
-- createiso: Recompute .treeinfo checksums for images (lsedlar)
-- Drop support for signing rpm-wrapped artifacts (lsedlar)
-- Remove live_images.py (LiveImagesPhase) (awilliam)
-- Clean up requirements (lsedlar)
-- Update pungi.spec for py3 (hlin)
-
-* Fri Jul 19 2024 Fedora Release Engineering <releng@fedoraproject.org> - 4.6.3-2
-- Rebuilt for https://fedoraproject.org/wiki/Fedora_41_Mass_Rebuild
-
-* Fri Jul 12 2024 Haibo Lin <hlin@redhat.com> - 4.6.3-1
-- Fix formatting of long line (lsedlar)
-- unified-isos: Resolve symlinks (lsedlar)
-- gather: Skip lookaside packages from local lookaside repo (lsedlar)
-- pkgset: Avoid adding modules to unavailable arches (hlin)
-- iso: Extract volume id with xorriso if available (lsedlar)
-- De-duplicate log messages for ostree and ostree_container phases (awilliam)
-- Handle tracebacks as str or bytes (lsedlar)
-- ostree/container: add missing --version arg (awilliam)
-- Block pkgset reuse on module defaults change (lsedlar)
-- Include task ID in DONE message for OSBS phase (awilliam)
-- Various phases: consistent format of failure message (awilliam)
-- Update tests to exercise kiwi specific metadata (lsedlar)
-- Kiwi: translate virtualbox and azure productmd formats (awilliam)
-- kiwibuild: Add tests for the basic functionality (lsedlar)
-- kiwibuild: Remove repos as dicts (lsedlar)
-- Fix additional image metadata (lsedlar)
-- Drop kiwibuild_version option (lsedlar)
-- Update docs with kiwibuild options (lsedlar)
-- kiwibuild: allow setting description scm and path at phase level (awilliam)
-- Use latest Fedora for python 3 test environment (lsedlar)
-- Install unittest2 only on python 2 (lsedlar)
-- Fix 'failable' handling for kiwibuild phase (awilliam)
-- image_build: Accept Kiwi extension for Azure VHD images (jeremycline)
-- image_build: accept Kiwi vagrant image name format (awilliam)
-
-* Sun Jun 09 2024 Python Maint <python-maint@redhat.com> - 4.6.2-7
-- Rebuilt for Python 3.13
-
-* Fri May 31 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.2-6
-- Rebuild to bump release over f40-infra build
-
-* Fri May 31 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.2-2
-- Add dependency on xorriso, fixes rhbz#2278677
-
-* Tue Apr 30 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.2-1
-- Phases/osbuild: support passing 'customizations' for image builds (thozza)
-- dnf: Load filelists for actual solver too (lsedlar)
-- kiwibuild: Tell Koji which arches are allowed to fail (lsedlar)
-- kiwibuild: Update documentation with more details (lsedlar)
-- kiwibuild: Add kiwibuild global options (lsedlar)
-- kiwibuild: Process images same as image-build (lsedlar)
-- kiwibuild: Add subvariant configuration (lsedlar)
-- kiwibuild: Work around missing arch in build data (lsedlar)
-- Support KiwiBuild (hlin)
-- ostree/container: Set version in treefile 'automatic-version-prefix' (tim)
-- dnf: Explicitly load filelists (lsedlar)
-- Fix buildinstall reuse with pungi_buildinstall plugin (lsedlar)
-- Fix filters for DNF query (lsedlar)
-- gather-dnf: Support dotarch in filter_packages (lsedlar)
-- gather: Support dotarch notation for debuginfo packages (lsedlar)
-- Correctly set input and fultree_exclude flags for debuginfo (lsedlar)
-
-* Fri Feb 09 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.1-1
-- Make python3-mock dependency optional (lsedlar)
-- Make latest black happy (lsedlar)
-- Update tox configuration (lsedlar)
-- Fix scm tests to not use user configuration (lsedlar)
-- Add workaround for old requests in kojiwrapper (lsedlar)
-- Use pungi_buildinstall without NFS (lsedlar)
-- checks: don't require "repo" in the "ostree" schema (awilliam)
-- ostree_container: Use unique temporary directory (lsedlar)
-
-* Fri Jan 26 2024 Maxwell G <maxwell@gtmx.me> - 4.6.0-5
-- Remove python3-mock dependency
-
-* Fri Jan 26 2024 Fedora Release Engineering <releng@fedoraproject.org> - 4.6.0-4
-- Rebuilt for https://fedoraproject.org/wiki/Fedora_40_Mass_Rebuild
-
-* Sun Jan 21 2024 Fedora Release Engineering <releng@fedoraproject.org> - 4.6.0-3
-- Rebuilt for https://fedoraproject.org/wiki/Fedora_40_Mass_Rebuild
-
-* Fri Jan 19 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.0-3
-- Stop requiring repo option in ostree phase
-
-* Thu Jan 18 2024 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.0-2
-- ostree_container: Use unique temporary directory
-
-* Wed Dec 13 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.6.0-1
-- Add ostree container to image metadata (lsedlar)
-- Updates for ostree-container phase (lsedlar)
-- Add ostree native container support (tim)
-- Improve autodetection of productmd image type for osbuild images (awilliam)
-- pkgset: ignore events for modular content tags (lsedlar)
-- pkgset: Ignore duplicated module builds (lsedlar)
-- Drop buildinstall method (abisoi)
-- Add step to send UMB message (lzhuang)
-- Fix minor Ruff/flake8 warnings (tim)
-- osbuild: manifest type in config (cmdr)
-
-* Mon Nov 21 2023 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.5.0-3
-- Method `get_remote_file_content` is object's method now
-
-* Wed Nov 15 2023 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.5.0-2
-- Return empty list if a repo doesn't contain any module
-
-* Mon Sep 25 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.5.0-7
-- Backport patch for explicit setting of osbuild image type in metadata
-
-* Thu Aug 31 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.5.0-1
-- kojiwrapper: Stop being smart about local access (lsedlar)
-- Fix unittest errors (ounsal)
-- Add integrity checking for builds (lsedlar)
-- Add script for cleaning up the cache (lsedlar)
-- Add ability to download images (lsedlar)
-- Add support for not having koji volume mounted locally (lsedlar)
-- Remove repository cloning multiple times (abisoi)
-- Support require_all_comps_packages on DNF backend (lsedlar)
-- Fix new warnings from flake8 (lsedlar)
-
-* Tue Jul 25 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-8
-- Option `excluded-packages` for script `pungi-gather-rpms`
-
-* Tue Jul 25 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.1-1
-- ostree: Add configuration for custom runroot packages (lsedlar)
-- pkgset: Emit better error for missing modulemd file (lsedlar)
-- Add support for git-credential-helper (lsedlar)
-- Support OIDC Client Credentials authentication to CTS (hlin)
-
-* Fri Jul 21 2023 Fedora Release Engineering <releng@fedoraproject.org> - 4.4.0-4
-- Rebuilt for https://fedoraproject.org/wiki/Fedora_39_Mass_Rebuild
-
-* Wed Jul 19 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-3
-- Backport ostree runroot package additions
-
-* Wed Jul 19 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-2
-- Backport ostree runroot package additions
-
-* Mon Jun 19 2023 Python Maint <python-maint@redhat.com> - 4.4.0-2
-- Rebuilt for Python 3.12
-
-* Wed Jun 07 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-1
-- gather-dnf: Run latest() later (lsedlar)
-- iso: Support joliet long names (lsedlar)
-- Drop pungi-orchestrator code (lsedlar)
-- isos: Ensure proper file ownership and permissions (lsedlar)
-- gather: Always get latest packages (lsedlar)
-- Add back compatibility with jsonschema <3.0.0 (lsedlar)
-- Remove useless debug message (lsedlar)
-- Remove fedmsg from requirements (lsedlar)
-- gather: Support dotarch in DNF backend (lsedlar)
-- Fix compatibility with createrepo_c 0.21.1 (lsedlar)
-- comps: Apply arch filtering to environment/optionlist (lsedlar)
-- Add config file for cleaning up cache files (hlin)
-
-* Wed May 17 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.8-3
-- Rebuild without fedmsg dependency
-
-* Wed May 03 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.8-1
-- Set priority for Fedora messages
-
-* Thu Apr 13 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-7
-- gather-module can find modules through symlinks
-
-* Thu Apr 13 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-6
-- CLI option `--label` can be passed through a Pungi config file
-
-* Fri Mar 31 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-4
-- ALBS-1030: Generate Devel section in packages.json
-- Also the tool can combine (remove and add) packages in a variant from different sources according to an url's type of source
-- Some upstream changes to KojiMock part
-- Skip verifying an RPM signature if sigkeys are empty
-- ALBS-987: Generate i686 and dev repositories with pungi on building new distr. version automatically
-- [Generator of packages.json] Replace using CLI by config.yaml
-- [Gather RPMs] os.path is replaced by Pat
-
-* Thu Mar 30 2023 Haibo Lin <hlin@redhat.com> - 4.3.8-1
-- createiso: Update possibly changed file on DVD (lsedlar)
-- pkgset: Stop reuse if configuration changed (lsedlar)
-- Allow disabling inheriting ExcludeArch to noarch packages (lsedlar)
-- pkgset: Support extra builds with no tags (lsedlar)
-- buildinstall: Avoid pointlessly tweaking the boot images (lsedlar)
-- Prevent to reuse if unsigned packages are allowed (hlin)
-- Pass parent id/respin id to CTS (lsedlar)
-- Exclude existing files in boot.iso (hlin)
-- image-build/osbuild: Pull ISOs into the compose (lsedlar)
-- Retry 401 error from CTS (lsedlar)
-- gather: Better detection of debuginfo in lookaside (lsedlar)
-- Log versions of all installed packages (hlin)
-- Use authentication for all CTS calls (lsedlar)
-- Fix black complaints (lsedlar)
-- Add vhd.gz extension to compressed VHD images (lsedlar)
-- Add vhd-compressed image type (lsedlar)
-- Update to work with latest mock (lsedlar)
-- Default bztar format for sdist command (onosek)
-
 * Fri Mar 17 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-3
 - ALBS-987: Generate i686 repositories with pungi on building new distr. version automatically
 - KojiMock extracts all modules which are suitable for the variant's arches
@@ -93,11 +93,6 @@ def split_name_arch(name_arch):
 
 def is_excluded(package, arches, logger=None):
     """Check if package is excluded from given architectures."""
-    if any(
-        getBaseArch(exc_arch) == 'x86_64' for exc_arch in package.exclusivearch
-    ) and 'x86_64_v2' not in package.exclusivearch:
-        package.exclusivearch.append('x86_64_v2')
-
     if package.excludearch and set(package.excludearch) & set(arches):
         if logger:
             logger.debug(
@@ -34,8 +34,6 @@ arches = {
     "x86_64": "athlon",
     "amd64": "x86_64",
     "ia32e": "x86_64",
-    # x86-64-v2
-    "x86_64_v2": "noarch",
     # ppc64le
     "ppc64le": "noarch",
     # ppc
192  pungi/checks.py
@@ -39,6 +39,7 @@ from __future__ import print_function
 import multiprocessing
 import os.path
 import platform
+import distro
 import re
 
 import jsonschema
@@ -227,18 +228,8 @@ def validate(config, offline=False, schema=None):
     DefaultValidator = _extend_with_default_and_alias(
         jsonschema.Draft4Validator, offline=offline
     )
-
-    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
-        # jsonschema >= 3.0 has new interface for checking types
-        validator = DefaultValidator(schema)
-    else:
-        validator = DefaultValidator(
-            schema,
-            {
-                "array": (tuple, list),
-                "regex": six.string_types,
-                "url": six.string_types,
-            },
-        )
+    validator = DefaultValidator(
+        schema,
+    )
     errors = []
     warnings = []
@@ -387,7 +378,6 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 instance[property]["branch"] = resolver(
                     instance[property]["repo"],
                     instance[property].get("branch") or "HEAD",
-                    instance[property].get("options"),
                 )
 
         for error in _hook_errors(properties, instance, schema):
@@ -455,20 +445,16 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 context=all_errors,
             )
 
-    kwargs = {}
-    if hasattr(validator_class, "TYPE_CHECKER"):
-        # jsonschema >= 3
     def is_array(checker, instance):
         return isinstance(instance, (tuple, list))
 
     def is_string_type(checker, instance):
        return isinstance(instance, six.string_types)
 
-        kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
-            {"array": is_array, "regex": is_string_type, "url": is_string_type}
-        )
-
-    return jsonschema.validators.extend(
+    # RHEL9 has newer version of package jsonschema
+    # which has another way of working with validators
+    if float(distro.linux_distribution()[1]) < 9:
+        validator = jsonschema.validators.extend(
             validator_class,
             {
                 "properties": properties_validator,
@@ -478,8 +464,30 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 "additionalProperties": _validate_additional_properties,
                 "anyOf": _validate_any_of,
             },
-        **kwargs
-    )
+        )
+        validator.DEFAULT_TYPES.update({
+            "array": (list, tuple),
+            "regex": six.string_types,
+            "url": six.string_types,
+        })
+    else:
+        type_checker = validator_class.TYPE_CHECKER.redefine_many(
+            {"array": is_array, "regex": is_string_type, "url": is_string_type}
+        )
+
+        validator = jsonschema.validators.extend(
+            validator_class,
+            {
+                "properties": properties_validator,
+                "deprecated": error_on_deprecated,
+                "type": validate_regex_type,
+                "required": _validate_required,
+                "additionalProperties": _validate_additional_properties,
+                "anyOf": _validate_any_of,
+            },
+            type_checker=type_checker,
+        )
+    return validator
 
 
 class ConfigDeprecation(jsonschema.exceptions.ValidationError):
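
Both branches above deal with the jsonschema API split: version 3 replaced
``DEFAULT_TYPES`` with type checkers. A standalone sketch of the
jsonschema >= 3 pattern, independent of Pungi's own validators::

    import jsonschema

    def is_array(checker, instance):
        # Accept tuples as well as lists for the "array" type.
        return isinstance(instance, (tuple, list))

    type_checker = jsonschema.Draft4Validator.TYPE_CHECKER.redefine_many(
        {"array": is_array}
    )
    Validator = jsonschema.validators.extend(
        jsonschema.Draft4Validator, type_checker=type_checker
    )
    # Tuples now validate as arrays, which the stock Draft4Validator rejects.
    Validator({"type": "array"}).validate(("a", "b"))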
@ -521,13 +529,6 @@ def make_schema():
|
|||||||
"file": {"type": "string"},
|
"file": {"type": "string"},
|
||||||
"dir": {"type": "string"},
|
"dir": {"type": "string"},
|
||||||
"command": {"type": "string"},
|
"command": {"type": "string"},
|
||||||
"options": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"credential_helper": {"type": "string"},
|
|
||||||
},
|
|
||||||
"additionalProperties": False,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
},
|
},
|
||||||
@ -553,6 +554,27 @@ def make_schema():
|
|||||||
"list_of_strings": {"type": "array", "items": {"type": "string"}},
|
"list_of_strings": {"type": "array", "items": {"type": "string"}},
|
||||||
"strings": _one_or_list({"type": "string"}),
|
"strings": _one_or_list({"type": "string"}),
|
||||||
"optional_string": {"anyOf": [{"type": "string"}, {"type": "null"}]},
|
"optional_string": {"anyOf": [{"type": "string"}, {"type": "null"}]},
|
||||||
|
"live_image_config": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"kickstart": {"type": "string"},
|
||||||
|
"ksurl": {"type": "url"},
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"subvariant": {"type": "string"},
|
||||||
|
"target": {"type": "string"},
|
||||||
|
"version": {"type": "string"},
|
||||||
|
"repo": {"$ref": "#/definitions/repos"},
|
||||||
|
"specfile": {"type": "string"},
|
||||||
|
"scratch": {"type": "boolean"},
|
||||||
|
"type": {"type": "string"},
|
||||||
|
"sign": {"type": "boolean"},
|
||||||
|
"failable": {"type": "boolean"},
|
||||||
|
"release": {"$ref": "#/definitions/optional_string"},
|
||||||
|
},
|
||||||
|
"required": ["kickstart"],
|
||||||
|
"additionalProperties": False,
|
||||||
|
"type": "object",
|
||||||
|
},
|
||||||
"osbs_config": {
|
"osbs_config": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -588,7 +610,6 @@ def make_schema():
|
|||||||
"release_discinfo_description": {"type": "string"},
|
"release_discinfo_description": {"type": "string"},
|
||||||
"treeinfo_version": {"type": "string"},
|
"treeinfo_version": {"type": "string"},
|
||||||
"compose_type": {"type": "string", "enum": COMPOSE_TYPES},
|
"compose_type": {"type": "string", "enum": COMPOSE_TYPES},
|
||||||
"label": {"type": "string"},
|
|
||||||
"base_product_name": {"type": "string"},
|
"base_product_name": {"type": "string"},
|
||||||
"base_product_short": {"type": "string"},
|
"base_product_short": {"type": "string"},
|
||||||
"base_product_version": {"type": "string"},
|
"base_product_version": {"type": "string"},
|
||||||
@ -797,7 +818,7 @@ def make_schema():
|
|||||||
"buildinstall_allow_reuse": {"type": "boolean", "default": False},
|
"buildinstall_allow_reuse": {"type": "boolean", "default": False},
|
||||||
"buildinstall_method": {
|
"buildinstall_method": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"enum": ["lorax"],
|
"enum": ["lorax", "buildinstall"],
|
||||||
},
|
},
|
||||||
# In phase `buildinstall` we should add to compose only the
|
# In phase `buildinstall` we should add to compose only the
|
||||||
# images that will be used only as netinstall
|
# images that will be used only as netinstall
|
||||||
@ -824,11 +845,8 @@ def make_schema():
|
|||||||
"pdc_insecure": {"deprecated": "Koji is queried instead"},
|
"pdc_insecure": {"deprecated": "Koji is queried instead"},
|
||||||
"cts_url": {"type": "string"},
|
"cts_url": {"type": "string"},
|
||||||
"cts_keytab": {"type": "string"},
|
"cts_keytab": {"type": "string"},
|
||||||
"cts_oidc_token_url": {"type": "url"},
|
|
||||||
"cts_oidc_client_id": {"type": "string"},
|
|
||||||
"koji_profile": {"type": "string"},
|
"koji_profile": {"type": "string"},
|
||||||
"koji_event": {"type": "number"},
|
"koji_event": {"type": "number"},
|
||||||
"koji_cache": {"type": "string"},
|
|
||||||
"pkgset_koji_tag": {"$ref": "#/definitions/strings"},
|
"pkgset_koji_tag": {"$ref": "#/definitions/strings"},
|
||||||
"pkgset_koji_builds": {"$ref": "#/definitions/strings"},
|
"pkgset_koji_builds": {"$ref": "#/definitions/strings"},
|
||||||
"pkgset_koji_scratch_tasks": {"$ref": "#/definitions/strings"},
|
"pkgset_koji_scratch_tasks": {"$ref": "#/definitions/strings"},
|
||||||
@ -846,10 +864,6 @@ def make_schema():
|
|||||||
"type": "boolean",
|
"type": "boolean",
|
||||||
"default": True,
|
"default": True,
|
||||||
},
|
},
|
||||||
"pkgset_inherit_exclusive_arch_to_noarch": {
|
|
||||||
"type": "boolean",
|
|
||||||
"default": True,
|
|
||||||
},
|
|
||||||
"pkgset_scratch_modules": {
|
"pkgset_scratch_modules": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"patternProperties": {
|
"patternProperties": {
|
||||||
@ -862,10 +876,7 @@ def make_schema():
|
|||||||
"paths_module": {"type": "string"},
|
"paths_module": {"type": "string"},
|
||||||
"skip_phases": {
|
"skip_phases": {
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": {
|
"items": {"type": "string", "enum": PHASES_NAMES + ["productimg"]},
|
||||||
"type": "string",
|
|
||||||
"enum": PHASES_NAMES + ["productimg", "live_images"],
|
|
||||||
},
|
|
||||||
"default": [],
|
"default": [],
|
||||||
},
|
},
|
||||||
"image_name_format": {
|
"image_name_format": {
|
||||||
@ -899,6 +910,11 @@ def make_schema():
|
|||||||
},
|
},
|
||||||
"restricted_volid": {"type": "boolean", "default": False},
|
"restricted_volid": {"type": "boolean", "default": False},
|
||||||
"volume_id_substitutions": {"type": "object", "default": {}},
|
"volume_id_substitutions": {"type": "object", "default": {}},
|
||||||
|
"live_images_no_rename": {"type": "boolean", "default": False},
|
||||||
|
"live_images_ksurl": {"type": "url"},
|
||||||
|
"live_images_target": {"type": "string"},
|
||||||
|
"live_images_release": {"$ref": "#/definitions/optional_string"},
|
||||||
|
"live_images_version": {"type": "string"},
|
||||||
"image_build_ksurl": {"type": "url"},
|
"image_build_ksurl": {"type": "url"},
|
||||||
"image_build_target": {"type": "string"},
|
"image_build_target": {"type": "string"},
|
||||||
"image_build_release": {"$ref": "#/definitions/optional_string"},
|
"image_build_release": {"$ref": "#/definitions/optional_string"},
|
||||||
@ -931,6 +947,8 @@ def make_schema():
|
|||||||
"product_id": {"$ref": "#/definitions/str_or_scm_dict"},
|
"product_id": {"$ref": "#/definitions/str_or_scm_dict"},
|
||||||
"product_id_allow_missing": {"type": "boolean", "default": False},
|
"product_id_allow_missing": {"type": "boolean", "default": False},
|
||||||
"product_id_allow_name_prefix": {"type": "boolean", "default": True},
|
"product_id_allow_name_prefix": {"type": "boolean", "default": True},
|
||||||
|
# Deprecated in favour of regular local/phase/global setting.
|
||||||
|
"live_target": {"type": "string"},
|
||||||
"tree_arches": {"$ref": "#/definitions/list_of_strings", "default": []},
|
"tree_arches": {"$ref": "#/definitions/list_of_strings", "default": []},
|
||||||
"tree_variants": {"$ref": "#/definitions/list_of_strings", "default": []},
|
"tree_variants": {"$ref": "#/definitions/list_of_strings", "default": []},
|
||||||
"translate_paths": {"$ref": "#/definitions/string_pairs", "default": []},
|
"translate_paths": {"$ref": "#/definitions/string_pairs", "default": []},
|
||||||
@ -1048,13 +1066,11 @@ def make_schema():
|
|||||||
"config_branch": {"type": "string"},
|
"config_branch": {"type": "string"},
|
||||||
"tag_ref": {"type": "boolean"},
|
"tag_ref": {"type": "boolean"},
|
||||||
"ostree_ref": {"type": "string"},
|
"ostree_ref": {"type": "string"},
|
||||||
"runroot_packages": {
|
|
||||||
"$ref": "#/definitions/list_of_strings",
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
"required": [
|
"required": [
|
||||||
"treefile",
|
"treefile",
|
||||||
"config_url",
|
"config_url",
|
||||||
|
"repo",
|
||||||
"ostree_repo",
|
"ostree_repo",
|
||||||
],
|
],
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
@ -1092,39 +1108,6 @@ def make_schema():
|
|||||||
),
|
),
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"ostree_container": {
|
|
||||||
"type": "object",
|
|
||||||
"patternProperties": {
|
|
||||||
# Warning: this pattern is a variant uid regex, but the
|
|
||||||
# format does not let us validate it as there is no regular
|
|
||||||
# expression to describe all regular expressions.
|
|
||||||
".+": _one_or_list(
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"treefile": {"type": "string"},
|
|
||||||
"config_url": {"type": "string"},
|
|
||||||
"repo": {"$ref": "#/definitions/repos"},
|
|
||||||
"keep_original_sources": {"type": "boolean"},
|
|
||||||
"config_branch": {"type": "string"},
|
|
||||||
"arches": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"failable": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"version": {"type": "string"},
|
|
||||||
"tag_ref": {"type": "boolean"},
|
|
||||||
"runroot_packages": {
|
|
||||||
"$ref": "#/definitions/list_of_strings",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": [
|
|
||||||
"treefile",
|
|
||||||
"config_url",
|
|
||||||
],
|
|
||||||
"additionalProperties": False,
|
|
||||||
}
|
|
||||||
),
|
|
||||||
},
|
|
||||||
"additionalProperties": False,
|
|
||||||
},
|
|
||||||
"ostree_installer": _variant_arch_mapping(
|
"ostree_installer": _variant_arch_mapping(
|
||||||
{
|
{
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1149,9 +1132,11 @@ def make_schema():
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
"ostree_use_koji_plugin": {"type": "boolean", "default": False},
|
"ostree_use_koji_plugin": {"type": "boolean", "default": False},
|
||||||
"ostree_container_use_koji_plugin": {"type": "boolean", "default": False},
|
|
||||||
"ostree_installer_use_koji_plugin": {"type": "boolean", "default": False},
|
"ostree_installer_use_koji_plugin": {"type": "boolean", "default": False},
|
||||||
"ostree_installer_overwrite": {"type": "boolean", "default": False},
|
"ostree_installer_overwrite": {"type": "boolean", "default": False},
|
||||||
|
"live_images": _variant_arch_mapping(
|
||||||
|
_one_or_list({"$ref": "#/definitions/live_image_config"})
|
||||||
|
),
|
||||||
"image_build_allow_reuse": {"type": "boolean", "default": False},
|
"image_build_allow_reuse": {"type": "boolean", "default": False},
|
||||||
"image_build": {
|
"image_build": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1202,50 +1187,6 @@ def make_schema():
|
|||||||
},
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
},
|
},
|
||||||
"kiwibuild": {
|
|
||||||
"type": "object",
|
|
||||||
"patternProperties": {
|
|
||||||
# Warning: this pattern is a variant uid regex, but the
|
|
||||||
# format does not let us validate it as there is no regular
|
|
||||||
# expression to describe all regular expressions.
|
|
||||||
".+": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"target": {"type": "string"},
|
|
||||||
"description_scm": {"type": "url"},
|
|
||||||
"description_path": {"type": "string"},
|
|
||||||
"kiwi_profile": {"type": "string"},
|
|
||||||
"release": {"type": "string"},
|
|
||||||
"arches": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"repos": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"failable": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"subvariant": {"type": "string"},
|
|
||||||
"type": {"type": "string"},
|
|
||||||
"type_attr": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"bundle_name_format": {"type": "string"},
|
|
||||||
},
|
|
||||||
"required": [
|
|
||||||
# description_scm and description_path
|
|
||||||
# are really required, but as they can
|
|
||||||
# be set at the phase level we cannot
|
|
||||||
# enforce that here
|
|
||||||
"kiwi_profile",
|
|
||||||
],
|
|
||||||
"additionalProperties": False,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"additionalProperties": False,
|
|
||||||
},
|
|
||||||
"kiwibuild_description_scm": {"type": "url"},
|
|
||||||
"kiwibuild_description_path": {"type": "string"},
|
|
||||||
"kiwibuild_target": {"type": "string"},
|
|
||||||
"kiwibuild_release": {"$ref": "#/definitions/optional_string"},
|
|
||||||
"kiwibuild_type": {"type": "string"},
|
|
||||||
"kiwibuild_type_attr": {"$ref": "#/definitions/list_of_strings"},
|
|
||||||
"kiwibuild_bundle_name_format": {"type": "string"},
|
|
||||||
"osbuild_target": {"type": "string"},
|
"osbuild_target": {"type": "string"},
|
||||||
"osbuild_release": {"$ref": "#/definitions/optional_string"},
|
"osbuild_release": {"$ref": "#/definitions/optional_string"},
|
||||||
"osbuild_version": {"type": "string"},
|
"osbuild_version": {"type": "string"},
|
||||||
@@ -1306,11 +1247,6 @@ def make_schema():
 "ostree_url": {"type": "string"},
 "ostree_ref": {"type": "string"},
 "ostree_parent": {"type": "string"},
-"manifest_type": {"type": "string"},
-"customizations": {
-    "type": "object",
-    "additionalProperties": True,
-},
 "upload_options": {
     # this should be really 'oneOf', but the minimal
     # required properties in AWSEC2 and GCP options
@@ -1427,6 +1363,9 @@ def make_schema():
     {"$ref": "#/definitions/strings"}
 ),
 "lorax_use_koji_plugin": {"type": "boolean", "default": False},
+"signing_key_id": {"type": "string"},
+"signing_key_password_file": {"type": "string"},
+"signing_command": {"type": "string"},
 "productimg": {
     "deprecated": "remove it. Productimg phase has been removed"
 },
@@ -1561,6 +1500,7 @@ def get_num_cpus():
 CONFIG_DEPS = {
     "buildinstall_method": {
         "conflicts": (
+            (lambda val: val == "buildinstall", ["lorax_options"]),
             (lambda val: not val, ["lorax_options", "buildinstall_kickstart"]),
         ),
     },
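The CONFIG_DEPS hunk above restores a conflict rule for the legacy "buildinstall" method. A minimal sketch of how such (predicate, conflicting options) tuples can be evaluated follows; check_conflicts() is a hypothetical helper written for illustration, not Pungi's actual validator.

CONFIG_DEPS = {
    "buildinstall_method": {
        "conflicts": (
            (lambda val: val == "buildinstall", ["lorax_options"]),
            (lambda val: not val, ["lorax_options", "buildinstall_kickstart"]),
        ),
    },
}


def check_conflicts(config):
    """Yield (option, conflicting_option) pairs found in config."""
    for option, rules in CONFIG_DEPS.items():
        value = config.get(option)
        for matches, conflicting_options in rules.get("conflicts", ()):
            if matches(value):
                for conflict in conflicting_options:
                    if conflict in config:
                        yield option, conflict


# Example: the "buildinstall" method conflicts with lorax_options.
conf = {"buildinstall_method": "buildinstall", "lorax_options": []}
print(list(check_conflicts(conf)))  # [('buildinstall_method', 'lorax_options')]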
157 pungi/compose.py
@@ -17,7 +17,6 @@
 __all__ = ("Compose",)


-import contextlib
 import errno
 import logging
 import os
@@ -39,7 +38,6 @@ from dogpile.cache import make_region
 from pungi.graph import SimpleAcyclicOrientedGraph
 from pungi.wrappers.variants import VariantsXmlParser
 from pungi.paths import Paths
-from pungi.wrappers.kojiwrapper import KojiDownloadProxy
 from pungi.wrappers.scm import get_file_from_scm
 from pungi.util import (
     makedirs,
@@ -59,101 +57,20 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]


-def is_status_fatal(status_code):
-    """Check if status code returned from CTS reports an error that is unlikely
-    to be fixed by retrying. Generally client errors (4XX) are fatal, with the
-    exception of 401 Unauthorized which could be caused by transient network
-    issue between compose host and KDC.
-    """
-    if status_code == 401:
-        return False
-    return status_code >= 400 and status_code < 500
-
-
 @retry(wait_on=RequestException)
-def retry_request(method, url, data=None, json_data=None, auth=None):
-    """
-    :param str method: Reqest method.
-    :param str url: Target URL.
-    :param dict data: form-urlencoded data to send in the body of the request.
-    :param dict json_data: json data to send in the body of the request.
-    """
+def retry_request(method, url, data=None, auth=None):
     request_method = getattr(requests, method)
-    rv = request_method(url, data=data, json=json_data, auth=auth)
-    if is_status_fatal(rv.status_code):
+    rv = request_method(url, json=data, auth=auth)
+    if rv.status_code >= 400 and rv.status_code < 500:
         try:
-            error = rv.json()
+            error = rv.json()["message"]
         except ValueError:
             error = rv.text
-        raise RuntimeError("%s responded with %d: %s" % (url, rv.status_code, error))
+        raise RuntimeError("CTS responded with %d: %s" % (rv.status_code, error))
     rv.raise_for_status()
     return rv


-class BearerAuth(requests.auth.AuthBase):
-    def __init__(self, token):
-        self.token = token
-
-    def __call__(self, r):
-        r.headers["authorization"] = "Bearer " + self.token
-        return r
-
-
-@contextlib.contextmanager
-def cts_auth(pungi_conf):
-    """
-    :param dict pungi_conf: dict obj of pungi.json config.
-    """
-    auth = None
-    token = None
-    cts_keytab = pungi_conf.get("cts_keytab")
-    cts_oidc_token_url = os.environ.get("CTS_OIDC_TOKEN_URL", "") or pungi_conf.get(
-        "cts_oidc_token_url"
-    )
-
-    try:
-        if cts_keytab:
-            # requests-kerberos cannot accept custom keytab, we need to use
-            # environment variable for this. But we need to change environment
-            # only temporarily just for this single requests.post.
-            # So at first backup the current environment and revert to it
-            # after the requests call.
-            from requests_kerberos import HTTPKerberosAuth
-
-            auth = HTTPKerberosAuth()
-            environ_copy = dict(os.environ)
-            if "$HOSTNAME" in cts_keytab:
-                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
-            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
-            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
-        elif cts_oidc_token_url:
-            cts_oidc_client_id = os.environ.get(
-                "CTS_OIDC_CLIENT_ID", ""
-            ) or pungi_conf.get("cts_oidc_client_id", "")
-            token = retry_request(
-                "post",
-                cts_oidc_token_url,
-                data={
-                    "grant_type": "client_credentials",
-                    "client_id": cts_oidc_client_id,
-                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
-                },
-            ).json()["access_token"]
-            auth = BearerAuth(token)
-            del token
-
-        yield auth
-    except Exception as e:
-        # Avoid leaking client secret in trackback
-        e.show_locals = False
-        raise e
-    finally:
-        if cts_keytab:
-            shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
-            os.environ.clear()
-            os.environ.update(environ_copy)
-
-
 def get_compose_info(
     conf,
     compose_type="production",
@@ -183,19 +100,38 @@ def get_compose_info(
     ci.compose.type = compose_type
     ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
     ci.compose.respin = compose_respin or 0
-    ci.compose.id = ci.create_compose_id()

-    cts_url = conf.get("cts_url")
+    cts_url = conf.get("cts_url", None)
     if cts_url:
+        # Requests-kerberos cannot accept custom keytab, we need to use
+        # environment variable for this. But we need to change environment
+        # only temporarily just for this single requests.post.
+        # So at first backup the current environment and revert to it
+        # after the requests.post call.
+        cts_keytab = conf.get("cts_keytab", None)
+        authentication = get_authentication(conf)
+        if cts_keytab:
+            environ_copy = dict(os.environ)
+            if "$HOSTNAME" in cts_keytab:
+                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
+            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
+
+        try:
             # Create compose in CTS and get the reserved compose ID.
+            ci.compose.id = ci.create_compose_id()
             url = os.path.join(cts_url, "api/1/composes/")
             data = {
                 "compose_info": json.loads(ci.dumps()),
                 "parent_compose_ids": parent_compose_ids,
                 "respin_of": respin_of,
             }
-        with cts_auth(conf) as authentication:
-            rv = retry_request("post", url, json_data=data, auth=authentication)
+            rv = retry_request("post", url, data=data, auth=authentication)
+        finally:
+            if cts_keytab:
+                shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
+                os.environ.clear()
+                os.environ.update(environ_copy)

         # Update local ComposeInfo with received ComposeInfo.
         cts_ci = ComposeInfo()
@@ -203,9 +139,22 @@ def get_compose_info(
         ci.compose.respin = cts_ci.compose.respin
         ci.compose.id = cts_ci.compose.id

+    else:
+        ci.compose.id = ci.create_compose_id()
+
     return ci


+def get_authentication(conf):
+    authentication = None
+    cts_keytab = conf.get("cts_keytab", None)
+    if cts_keytab:
+        from requests_kerberos import HTTPKerberosAuth
+
+        authentication = HTTPKerberosAuth()
+    return authentication
+
+
 def write_compose_info(compose_dir, ci):
     """
     Write ComposeInfo `ci` to `compose_dir` subdirectories.
@@ -219,6 +168,7 @@ def write_compose_info(compose_dir, ci):


 def update_compose_url(compose_id, compose_dir, conf):
+    authentication = get_authentication(conf)
     cts_url = conf.get("cts_url", None)
     if cts_url:
         url = os.path.join(cts_url, "api/1/composes", compose_id)
@@ -231,8 +181,7 @@ def update_compose_url(compose_id, compose_dir, conf):
             "action": "set_url",
             "compose_url": compose_url,
         }
-        with cts_auth(conf) as authentication:
-            return retry_request("patch", url, json_data=data, auth=authentication)
+        return retry_request("patch", url, data=data, auth=authentication)


 def get_compose_dir(
@@ -243,19 +192,11 @@ def get_compose_dir(
     compose_respin=None,
     compose_label=None,
     already_exists_callbacks=None,
-    parent_compose_ids=None,
-    respin_of=None,
 ):
     already_exists_callbacks = already_exists_callbacks or []

     ci = get_compose_info(
-        conf,
-        compose_type,
-        compose_date,
-        compose_respin,
-        compose_label,
-        parent_compose_ids,
-        respin_of,
+        conf, compose_type, compose_date, compose_respin, compose_label
     )

     cts_url = conf.get("cts_url", None)
@@ -410,8 +351,6 @@ class Compose(kobo.log.LoggingBase):
         else:
             self.cache_region = make_region().configure("dogpile.cache.null")

-        self.koji_downloader = KojiDownloadProxy.from_config(self.conf, self._logger)
-
     get_compose_info = staticmethod(get_compose_info)
     write_compose_info = staticmethod(write_compose_info)
     get_compose_dir = staticmethod(get_compose_dir)
@@ -707,7 +646,7 @@ class Compose(kobo.log.LoggingBase):
             separators=(",", ": "),
         )

-    def traceback(self, detail=None, show_locals=True):
+    def traceback(self, detail=None):
         """Store an extended traceback. This method should only be called when
         handling an exception.

@@ -718,10 +657,8 @@ class Compose(kobo.log.LoggingBase):
             basename += "-" + detail
         tb_path = self.paths.log.log_file("global", basename)
         self.log_error("Extended traceback in: %s", tb_path)
-        tback = kobo.tback.Traceback(show_locals=show_locals).get_traceback()
-        # Kobo 0.36.0 returns traceback as str, older versions return bytes
-        with open(tb_path, "wb" if isinstance(tback, bytes) else "w") as f:
-            f.write(tback)
+        with open(tb_path, "wb") as f:
+            f.write(kobo.tback.Traceback().get_traceback())

     def load_old_compose_config(self):
         """
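The master side of the compose.py changes replaces get_authentication() with a cts_auth() context manager plus the BearerAuth class. For reference, a requests.auth.AuthBase subclass like BearerAuth plugs into requests as below; the URL and token here are placeholders, not real endpoints.

import requests


class BearerAuth(requests.auth.AuthBase):
    """Attach a Bearer token to every request, as on the master side."""

    def __init__(self, token):
        self.token = token

    def __call__(self, r):
        r.headers["authorization"] = "Bearer " + self.token
        return r


# Hypothetical usage; the URL and token are placeholders.
# requests.get("https://cts.example.com/api/1/composes/", auth=BearerAuth("token"))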
@@ -5,14 +5,11 @@ from __future__ import print_function
 import os
 import six
 from collections import namedtuple
-from kobo.shortcuts import run
 from six.moves import shlex_quote

 from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper

-from .phases.buildinstall import BOOT_CONFIGS, BOOT_IMAGES
-

 CreateIsoOpts = namedtuple(
     "CreateIsoOpts",
@@ -67,6 +64,10 @@ def make_image(f, opts):
             os.path.join("$TEMPLATE", "config_files/ppc"),
             hfs_compat=opts.hfs_compat,
         )
+    elif opts.buildinstall_method == "buildinstall":
+        mkisofs_kwargs["boot_args"] = iso.get_boot_options(
+            opts.arch, "/usr/lib/anaconda-runtime/boot"
+        )

     # ppc(64) doesn't seem to support utf-8
     if opts.arch in ("ppc", "ppc64", "ppc64le"):
@@ -117,65 +118,25 @@ def make_jigdo(f, opts):
     emit(f, cmd)


-def _get_perms(fs_path):
-    """Compute proper permissions for a file.
-
-    This mimicks what -rational-rock option of genisoimage does. All read bits
-    are set, so that files and directories are globally readable. If any
-    execute bit is set for a file, set them all. No writes are allowed and
-    special bits are erased too.
-    """
-    statinfo = os.stat(fs_path)
-    perms = 0o444
-    if statinfo.st_mode & 0o111:
-        perms |= 0o111
-    return perms
-
-
 def write_xorriso_commands(opts):
-    # Create manifest for the boot.iso listing all contents
-    boot_iso_manifest = "%s.manifest" % os.path.join(
-        opts.script_dir, os.path.basename(opts.boot_iso)
-    )
-    run(
-        iso.get_manifest_cmd(
-            opts.boot_iso, opts.use_xorrisofs, output_file=boot_iso_manifest
-        )
-    )
-    # Find which files may have been updated by pungi. This only includes a few
-    # files from tweaking buildinstall and .discinfo metadata. There's no good
-    # way to detect whether the boot config files actually changed, so we may
-    # be updating files in the ISO with the same data.
-    UPDATEABLE_FILES = set(BOOT_IMAGES + BOOT_CONFIGS + [".discinfo"])
-    updated_files = set()
-    excluded_files = set()
-    with open(boot_iso_manifest) as f:
-        for line in f:
-            path = line.lstrip("/").rstrip("\n")
-            if path in UPDATEABLE_FILES:
-                updated_files.add(path)
-            else:
-                excluded_files.add(path)
-
     script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
     with open(script, "w") as f:
-        for cmd in iso.xorriso_commands(
-            opts.arch, opts.boot_iso, os.path.join(opts.output_dir, opts.iso_name)
-        ):
-            emit(f, " ".join(cmd))
+        emit(f, "-indev %s" % opts.boot_iso)
+        emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
+        emit(f, "-boot_image any replay")
         emit(f, "-volid %s" % opts.volid)
+        # isoinfo -J uses the Joliet tree, and it's used by virt-install
+        emit(f, "-joliet on")

         with open(opts.graft_points) as gp:
             for line in gp:
                 iso_path, fs_path = line.strip().split("=", 1)
-                if iso_path in excluded_files:
-                    continue
-                cmd = "-update" if iso_path in updated_files else "-map"
-                emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
-                emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))
+                emit(f, "-map %s %s" % (fs_path, iso_path))

-        emit(f, "-chown_r 0 /")
-        emit(f, "-chgrp_r 0 /")
+        if opts.arch == "ppc64le":
+            # This is needed for the image to be bootable.
+            emit(f, "-as mkisofs -U --")
+
         emit(f, "-end")
     return script

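The removed _get_perms() helper mirrors the -rational-rock normalization of genisoimage. A standalone sketch of the same rule follows: all read bits on, execute bits all-or-nothing, write and special bits dropped.

import os


def rationalize_perms(fs_path):
    """Mimic genisoimage -rational-rock: start from 0444 and add 0111
    only when any execute bit is already set on the file."""
    perms = 0o444
    if os.stat(fs_path).st_mode & 0o111:
        perms |= 0o111
    return perms

# e.g. a 0o750 script maps to 0o555, a 0o640 data file maps to 0o444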
@@ -1118,6 +1118,7 @@ class Pungi(PungiBase):
         self.logger.info("Finished gathering package objects.")

     def gather(self):
+
         # get package objects according to the input list
         self.getPackageObjects()
         if self.is_sources:
@@ -15,21 +15,17 @@


 from enum import Enum
-from functools import cmp_to_key
-from itertools import count, groupby
-import errno
+from itertools import count
 import logging
 import os
 import re

 from kobo.rpmlib import parse_nvra
-import rpm

 import pungi.common
 import pungi.dnf_wrapper
 import pungi.multilib_dnf
 import pungi.util
-from pungi import arch_utils
 from pungi.linker import Linker
 from pungi.profiler import Profiler
 from pungi.util import DEBUG_PATTERNS
@@ -40,20 +36,6 @@ def get_source_name(pkg):
     return pkg.sourcerpm.rsplit("-", 2)[0]


-def filter_dotarch(queue, pattern, **kwargs):
-    """Filter queue for packages matching the pattern. If pattern matches the
-    dotarch format of <name>.<arch>, it is processed as such. Otherwise it is
-    treated as just a name.
-    """
-    kwargs["name__glob"] = pattern
-    if "." in pattern:
-        name, arch = pattern.split(".", 1)
-        if arch in arch_utils.arches or arch == "noarch":
-            kwargs["name__glob"] = name
-            kwargs["arch"] = arch
-    return queue.filter(**kwargs).apply()
-
-
 class GatherOptions(pungi.common.OptionsBase):
     def __init__(self, **kwargs):
         super(GatherOptions, self).__init__()
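The removed filter_dotarch() helper splits a "<name>.<arch>" pattern before querying the package set. Here is a standalone sketch of just the pattern handling; the ARCHES set is a trimmed stand-in for pungi.arch_utils.arches.

ARCHES = {"x86_64", "aarch64", "ppc64le", "s390x", "i686"}  # trimmed stand-in


def split_dotarch(pattern):
    """Return (name_glob, arch); arch is None when the pattern has no dotarch."""
    if "." in pattern:
        name, arch = pattern.split(".", 1)
        if arch in ARCHES or arch == "noarch":
            return name, arch
    return pattern, None


print(split_dotarch("kernel.x86_64"))  # ('kernel', 'x86_64')
print(split_dotarch("kernel-devel"))   # ('kernel-devel', None)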
@@ -263,37 +245,13 @@ class Gather(GatherBase):
         # from lookaside. This can be achieved by removing any package that is
         # also in lookaside from the list.
         lookaside_pkgs = set()
-
-        if self.opts.lookaside_repos:
-            # We will call `latest()` to get the highest version packages only.
-            # However, that is per name and architecture. If a package switches
-            # from arched to noarch or the other way, it is possible that the
-            # package_list contains different versions in main repos and in
-            # lookaside repos.
-            # We need to manually filter the latest version.
-            def vercmp(x, y):
-                return rpm.labelCompare(x[1], y[1])
-
-            # Annotate the packages with their version.
-            versioned_packages = [
-                (pkg, (str(pkg.epoch) or "0", pkg.version, pkg.release))
-                for pkg in package_list
-            ]
-            # Sort the packages newest first.
-            sorted_packages = sorted(
-                versioned_packages, key=cmp_to_key(vercmp), reverse=True
-            )
-            # Group packages by version, take the first group and discard the
-            # version info from the tuple.
-            package_list = list(
-                x[0] for x in next(groupby(sorted_packages, key=lambda x: x[1]))[1]
-            )
-
-        # Now we can decide what is used from lookaside.
         for pkg in package_list:
             if pkg.repoid in self.opts.lookaside_repos:
                 lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))

+        if self.opts.greedy_method == "all":
+            return list(package_list)
+
         all_pkgs = []
         for pkg in package_list:
             # Remove packages that are also in lookaside
@@ -305,22 +263,17 @@ class Gather(GatherBase):

         if not debuginfo:
             native_pkgs = set(
-                self.q_native_binary_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
             )
             multilib_pkgs = set(
-                self.q_multilib_binary_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
             )
         else:
-            native_pkgs = set(
-                self.q_native_debug_packages.filter(pkg=all_pkgs).latest().apply()
-            )
+            native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
             multilib_pkgs = set(
-                self.q_multilib_debug_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
             )

-        if self.opts.greedy_method == "all":
-            return list(native_pkgs | multilib_pkgs)
-
         result = set()

         # try seen native packages first
@@ -439,7 +392,9 @@ class Gather(GatherBase):
         """Given an name of a queue (stored as attribute in `self`), exclude
         all given packages and keep only the latest per package name and arch.
         """
-        setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).apply())
+        setattr(
+            self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
+        )

     @Profiler("Gather._apply_excludes()")
     def _apply_excludes(self, excludes):
@@ -465,16 +420,12 @@ class Gather(GatherBase):
                     name__glob=pattern[:-4], reponame__neq=self.opts.lookaside_repos
                 )
             elif pungi.util.pkg_is_debug(pattern):
-                pkgs = filter_dotarch(
-                    self.q_debug_packages,
-                    pattern,
-                    reponame__neq=self.opts.lookaside_repos,
-                )
+                pkgs = self.q_debug_packages.filter(
+                    name__glob=pattern, reponame__neq=self.opts.lookaside_repos
+                )
             else:
-                pkgs = filter_dotarch(
-                    self.q_binary_packages,
-                    pattern,
-                    reponame__neq=self.opts.lookaside_repos,
-                )
+                pkgs = self.q_binary_packages.filter(
+                    name__glob=pattern, reponame__neq=self.opts.lookaside_repos
+                )

             exclude.update(pkgs)
@@ -540,19 +491,21 @@ class Gather(GatherBase):
                     name__glob=pattern[:-2]
                 ).apply()
             else:
-                pkgs = filter_dotarch(self.q_debug_packages, pattern)
+                pkgs = self.q_debug_packages.filter(
+                    name__glob=pattern
+                ).apply()
         else:
             if pattern.endswith(".+"):
                 pkgs = self.q_multilib_binary_packages.filter(
                     name__glob=pattern[:-2]
                 ).apply()
             else:
-                pkgs = filter_dotarch(self.q_binary_packages, pattern)
+                pkgs = self.q_binary_packages.filter(
+                    name__glob=pattern
+                ).apply()

         if not pkgs:
-            self.logger.error(
-                "Could not find a match for %s in any configured repo", pattern
-            )
+            self.logger.error("No package matches pattern %s" % pattern)

         # The pattern could have been a glob. In that case we want to
         # group the packages by name and get best match in those
@@ -663,6 +616,7 @@ class Gather(GatherBase):
             return added

         for pkg in self.result_debug_packages.copy():
+
             if pkg not in self.finished_add_debug_package_deps:
                 deps = self._get_package_deps(pkg, debuginfo=True)
                 for i, req in deps:
@@ -830,6 +784,7 @@ class Gather(GatherBase):
                 continue

             debug_pkgs = []
+            pkg_in_lookaside = pkg.repoid in self.opts.lookaside_repos
             for i in candidates:
                 if pkg.arch != i.arch:
                     continue
@@ -837,14 +792,8 @@ class Gather(GatherBase):
                     # If it's not debugsource package or does not match name of
                    # the package, we don't want it in.
                     continue
-                if self.is_from_lookaside(i):
+                if i.repoid in self.opts.lookaside_repos or pkg_in_lookaside:
                     self._set_flag(i, PkgFlag.lookaside)
-                srpm_name = i.sourcerpm.rsplit("-", 2)[0]
-                if srpm_name in self.opts.fulltree_excludes:
-                    self._set_flag(i, PkgFlag.fulltree_exclude)
-                if PkgFlag.input in self.result_package_flags.get(srpm_name, set()):
-                    # If src rpm is marked as input, mark debuginfo as input too
-                    self._set_flag(i, PkgFlag.input)
                 if i not in self.result_debug_packages:
                     added.add(i)
                     debug_pkgs.append(i)
@@ -1081,11 +1030,8 @@ class Gather(GatherBase):
             # Link downloaded package in (or link package from file repo)
             try:
                 linker.link(pkg.localPkg(), target)
-            except Exception as ex:
-                if ex.errno == errno.EEXIST:
-                    self.logger.warning("Downloaded package exists in %s", target)
-                else:
-                    self.logger.error("Unable to link %s from the yum cache.", pkg.name)
+            except Exception:
+                self.logger.error("Unable to link %s from the yum cache." % pkg.name)
                 raise

     def log_count(self, msg, method, *args):
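The master-only block in the lookaside hunk above keeps only the newest EVR before deciding what comes from lookaside. A self-contained sketch of the same newest-first grouping follows; the Pkg namedtuple is a stand-in for DNF/hawkey package objects, while rpm is the real python3-rpm binding.

from collections import namedtuple
from functools import cmp_to_key
from itertools import groupby

import rpm  # python3-rpm bindings

Pkg = namedtuple("Pkg", "name epoch version release")  # stand-in for a DNF package


def newest_only(packages):
    """Keep only packages whose (epoch, version, release) is the highest."""
    versioned = [(p, (str(p.epoch or 0), p.version, p.release)) for p in packages]
    ordered = sorted(
        versioned,
        key=cmp_to_key(lambda x, y: rpm.labelCompare(x[1], y[1])),
        reverse=True,
    )
    # The first groupby group holds every package sharing the top EVR.
    return [p for p, _ in next(groupby(ordered, key=lambda x: x[1]))[1]]


pkgs = [Pkg("foo", 0, "1.2", "1"), Pkg("foo", 0, "1.3", "1"), Pkg("foo", 0, "1.3", "2")]
print(newest_only(pkgs))  # only the 1.3-2 package survives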
@@ -306,6 +306,11 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
     if variant.type in ("addon",) or variant.is_empty:
         return

+    compose.log_debug(
+        "on arch '%s' looking at variant '%s' of type '%s'"
+        % (arch, variant, variant.type)
+    )
+
     if not timestamp:
         timestamp = int(time.time())
     else:
@@ -19,7 +19,6 @@ import logging

 from .tree import Tree
 from .installer import Installer
-from .container import Container


 def main(args=None):
@@ -72,43 +71,6 @@ def main(args=None):
         help="use unified core mode in rpm-ostree",
     )

-    container = subparser.add_parser(
-        "container", help="Compose OSTree native container"
-    )
-    container.set_defaults(_class=Container, func="run")
-    container.add_argument(
-        "--name",
-        required=True,
-        help="the name of the the OCI archive (required)",
-    )
-    container.add_argument(
-        "--path",
-        required=True,
-        help="where to output the OCI archive (required)",
-    )
-    container.add_argument(
-        "--treefile",
-        metavar="FILE",
-        required=True,
-        help="treefile for rpm-ostree (required)",
-    )
-    container.add_argument(
-        "--log-dir",
-        metavar="DIR",
-        required=True,
-        help="where to log output (required).",
-    )
-    container.add_argument(
-        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
-    )
-    container.add_argument(
-        "-v",
-        "--version",
-        metavar="VERSION",
-        required=True,
-        help="version identifier (required)",
-    )
-
     installerp = subparser.add_parser(
         "installer", help="Create an OSTree installer image"
     )
@@ -1,86 +0,0 @@
-# -*- coding: utf-8 -*-
-
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; version 2 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, see <https://gnu.org/licenses/>.
-
-
-import os
-import json
-import six
-from six.moves import shlex_quote
-
-
-from .base import OSTree
-from .utils import tweak_treeconf
-
-
-def emit(cmd):
-    """Print line of shell code into the stream."""
-    if isinstance(cmd, six.string_types):
-        print(cmd)
-    else:
-        print(" ".join([shlex_quote(x) for x in cmd]))
-
-
-class Container(OSTree):
-    def _make_container(self):
-        """Compose OSTree Container Native image"""
-        stamp_file = os.path.join(self.logdir, "%s.stamp" % self.name)
-        cmd = [
-            "rpm-ostree",
-            "compose",
-            "image",
-            # Always initialize for now
-            "--initialize",
-            # Touch the file if a new commit was created. This can help us tell
-            # if the commitid file is missing because no commit was created or
-            # because something went wrong.
-            "--touch-if-changed=%s" % stamp_file,
-            self.treefile,
-        ]
-        fullpath = os.path.join(self.path, "%s.ociarchive" % self.name)
-        cmd.append(fullpath)
-
-        # Set the umask to be more permissive so directories get group write
-        # permissions. See https://pagure.io/releng/issue/8811#comment-629051
-        emit("umask 0002")
-        emit(cmd)
-
-    def run(self):
-        self.name = self.args.name
-        self.path = self.args.path
-        self.treefile = self.args.treefile
-        self.logdir = self.args.log_dir
-        self.extra_config = self.args.extra_config
-
-        if self.extra_config:
-            self.extra_config = json.load(open(self.extra_config, "r"))
-            repos = self.extra_config.get("repo", [])
-            keep_original_sources = self.extra_config.get(
-                "keep_original_sources", False
-            )
-        else:
-            # missing extra_config mustn't affect tweak_treeconf call
-            repos = []
-            keep_original_sources = True
-
-        update_dict = {"automatic-version-prefix": self.args.version}
-
-        self.treefile = tweak_treeconf(
-            self.treefile,
-            source_repos=repos,
-            keep_original_sources=keep_original_sources,
-            update_dict=update_dict,
-        )
-
-        self._make_container()
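The deleted file above (the ostree "container" subcommand's Container class) boils down to emitting one rpm-ostree invocation. The sketch below reconstructs the shell lines it would print; every path and the name are hypothetical placeholders.

import shlex

name = "exampleos"          # --name (placeholder)
logdir = "/tmp/logs"        # --log-dir (placeholder)
path = "/tmp/out"           # --path (placeholder)
treefile = "/tmp/os.yaml"   # --treefile (placeholder)

cmd = [
    "rpm-ostree",
    "compose",
    "image",
    "--initialize",
    "--touch-if-changed=%s/%s.stamp" % (logdir, name),
    treefile,
    "%s/%s.ociarchive" % (path, name),
]
print("umask 0002")
print(" ".join(shlex.quote(x) for x in cmd))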
@@ -25,9 +25,9 @@ from .buildinstall import BuildinstallPhase  # noqa
 from .extra_files import ExtraFilesPhase  # noqa
 from .createiso import CreateisoPhase  # noqa
 from .extra_isos import ExtraIsosPhase  # noqa
+from .live_images import LiveImagesPhase  # noqa
 from .image_build import ImageBuildPhase  # noqa
 from .image_container import ImageContainerPhase  # noqa
-from .kiwibuild import KiwiBuildPhase  # noqa
 from .osbuild import OSBuildPhase  # noqa
 from .repoclosure import RepoclosurePhase  # noqa
 from .test import TestPhase  # noqa
@@ -35,7 +35,6 @@ from .image_checksum import ImageChecksumPhase  # noqa
 from .livemedia_phase import LiveMediaPhase  # noqa
 from .ostree import OSTreePhase  # noqa
 from .ostree_installer import OstreeInstallerPhase  # noqa
-from .ostree_container import OSTreeContainerPhase  # noqa
 from .osbs import OSBSPhase  # noqa
 from .phases_metadata import gather_phases_metadata  # noqa

@@ -31,14 +31,14 @@ from six.moves import shlex_quote
 from pungi.arch import get_valid_arches
 from pungi.util import get_volid, get_arch_variant_data
 from pungi.util import get_file_size, get_mtime, failable, makedirs
-from pungi.util import copy_all, translate_path
+from pungi.util import copy_all, translate_path, move_all
 from pungi.wrappers.lorax import LoraxWrapper
 from pungi.wrappers import iso
 from pungi.wrappers.scm import get_file
 from pungi.wrappers.scm import get_file_from_scm
 from pungi.wrappers import kojiwrapper
 from pungi.phases.base import PhaseBase
-from pungi.runroot import Runroot, download_and_extract_archive
+from pungi.runroot import Runroot


 class BuildinstallPhase(PhaseBase):
@@ -144,7 +144,7 @@ class BuildinstallPhase(PhaseBase):
         )
         if self.compose.has_comps:
             comps_repo = self.compose.paths.work.comps_repo(arch, variant)
-            if final_output_dir != output_dir or self.lorax_use_koji_plugin:
+            if final_output_dir != output_dir:
                 comps_repo = translate_path(self.compose, comps_repo)
             repos.append(comps_repo)

@@ -169,6 +169,7 @@ class BuildinstallPhase(PhaseBase):
             "rootfs-size": rootfs_size,
             "dracut-args": dracut_args,
             "skip_branding": skip_branding,
+            "outputdir": output_dir,
             "squashfs_only": squashfs_only,
             "configuration_file": configuration_file,
         }
@@ -218,6 +219,10 @@ class BuildinstallPhase(PhaseBase):
         return repos

     def run(self):
+        lorax = LoraxWrapper()
+        product = self.compose.conf["release_name"]
+        version = self.compose.conf["release_version"]
+        release = self.compose.conf["release_version"]
         disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")

         # Prepare kickstart file for final images.
@@ -234,7 +239,7 @@ class BuildinstallPhase(PhaseBase):
             )
             makedirs(final_output_dir)
             repo_baseurls = self.get_repos(arch)
-            if final_output_dir != output_dir or self.lorax_use_koji_plugin:
+            if final_output_dir != output_dir:
                 repo_baseurls = [translate_path(self.compose, r) for r in repo_baseurls]

             if self.buildinstall_method == "lorax":
@@ -270,12 +275,29 @@ class BuildinstallPhase(PhaseBase):
                             ),
                         )
                     )
+            elif self.buildinstall_method == "buildinstall":
+                volid = get_volid(self.compose, arch, disc_type=disc_type)
+                commands.append(
+                    (
+                        None,
+                        lorax.get_buildinstall_cmd(
+                            product,
+                            version,
+                            release,
+                            repo_baseurls,
+                            output_dir,
+                            is_final=self.compose.supported,
+                            buildarch=arch,
+                            volid=volid,
+                        ),
+                    )
+                )
             else:
                 raise ValueError(
                     "Unsupported buildinstall method: %s" % self.buildinstall_method
                 )

-        for variant, cmd in commands:
+        for (variant, cmd) in commands:
             self.pool.add(BuildinstallThread(self.pool))
             self.pool.queue_put(
                 (self.compose, arch, variant, cmd, self.pkgset_phase)
@@ -342,17 +364,9 @@ BOOT_CONFIGS = [
     "EFI/BOOT/BOOTX64.conf",
     "EFI/BOOT/grub.cfg",
 ]
-BOOT_IMAGES = [
-    "images/efiboot.img",
-]


 def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
-    """
-    Put escaped volume ID and possibly kickstart file into the boot
-    configuration files.
-    :returns: list of paths to modified config files
-    """
     volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
     volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
     found_configs = []
@@ -360,6 +374,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         config_path = os.path.join(path, config)
         if not os.path.exists(config_path):
             continue
+        found_configs.append(config)

         with open(config_path, "r") as f:
             data = original_data = f.read()
@@ -379,12 +394,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         with open(config_path, "w") as f:
             f.write(data)

-        if data != original_data:
-            found_configs.append(config)
-            if logger:
-                # Generally lorax should create file with correct volume id
-                # already. If we don't have a kickstart, this function should
-                # be a no-op.
-                logger.info("Boot config %s changed" % config_path)
+        if logger and data != original_data:
+            logger.info("Boot config %s changed" % config_path)

     return found_configs
@@ -424,8 +434,9 @@ def tweak_buildinstall(
     if kickstart_file and found_configs:
         shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))

-    images = [os.path.join(tmp_dir, img) for img in BOOT_IMAGES]
-    if found_configs:
+    images = [
+        os.path.join(tmp_dir, "images", "efiboot.img"),
+    ]
     for image in images:
         if not os.path.isfile(image):
             continue
@@ -435,9 +446,7 @@ def tweak_buildinstall(
             logger=compose._logger,
             use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
         ) as mount_tmp_dir:
-            for config in found_configs:
-                # Put each modified config file into the image (overwriting the
-                # original).
+            for config in BOOT_CONFIGS:
                 config_path = os.path.join(tmp_dir, config)
                 config_in_image = os.path.join(mount_tmp_dir, config)

@@ -521,10 +530,7 @@ def link_boot_iso(compose, arch, variant, can_fail):
     setattr(img, "can_fail", can_fail)
     setattr(img, "deliverable", "buildinstall")
     try:
-        img.volume_id = iso.get_volume_id(
-            new_boot_iso_path,
-            compose.conf.get("createiso_use_xorrisofs"),
-        )
+        img.volume_id = iso.get_volume_id(new_boot_iso_path)
     except RuntimeError:
         pass
     # In this phase we should add to compose only the images that
@@ -719,8 +725,8 @@ class BuildinstallThread(WorkerThread):
         # input on RPM level.
         cmd_copy = copy(cmd)
         for key in ["outputdir", "sources"]:
-            cmd_copy.pop(key, None)
-            old_metadata["cmd"].pop(key, None)
+            del cmd_copy[key]
+            del old_metadata["cmd"][key]

         # Do not reuse if command line arguments are not the same.
         if old_metadata["cmd"] != cmd_copy:
@@ -815,6 +821,8 @@ class BuildinstallThread(WorkerThread):
         if buildinstall_method == "lorax":
             packages += ["lorax"]
             chown_paths.append(_get_log_dir(compose, variant, arch))
+        elif buildinstall_method == "buildinstall":
+            packages += ["anaconda"]
         packages += get_arch_variant_data(
             compose.conf, "buildinstall_packages", arch, variant
         )
@@ -835,13 +843,13 @@ class BuildinstallThread(WorkerThread):

         # Start the runroot task.
         runroot = Runroot(compose, phase="buildinstall")
-        task_id = None
         if buildinstall_method == "lorax" and lorax_use_koji_plugin:
-            task_id = runroot.run_pungi_buildinstall(
+            runroot.run_pungi_buildinstall(
                 cmd,
                 log_file=log_file,
                 arch=arch,
                 packages=packages,
+                mounts=[compose.topdir],
                 weight=compose.conf["runroot_weights"].get("buildinstall"),
             )
         else:
@@ -874,17 +882,19 @@ class BuildinstallThread(WorkerThread):
             log_dir = os.path.join(output_dir, "logs")
             copy_all(log_dir, final_log_dir)
         elif lorax_use_koji_plugin:
-            # If Koji pungi-buildinstall is used, then the buildinstall results
-            # are attached as outputs to the Koji task. Download and unpack
-            # them to the correct location.
-            download_and_extract_archive(
-                compose, task_id, "results.tar.gz", final_output_dir
-            )
+            # If Koji pungi-buildinstall is used, then the buildinstall results are
+            # not stored directly in `output_dir` dir, but in "results" and "logs"
+            # subdirectories. We need to move them to final_output_dir.
+            results_dir = os.path.join(output_dir, "results")
+            move_all(results_dir, final_output_dir, rm_src_dir=True)

-            # Download the logs into proper location too.
+            # Get the log_dir into which we should copy the resulting log files.
             log_fname = "buildinstall-%s-logs/dummy" % variant.uid
             final_log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
-            download_and_extract_archive(compose, task_id, "logs.tar.gz", final_log_dir)
+            if not os.path.exists(final_log_dir):
+                makedirs(final_log_dir)
+            log_dir = os.path.join(output_dir, "logs")
+            move_all(log_dir, final_log_dir, rm_src_dir=True)

         rpms = runroot.get_buildroot_rpms()
         self._write_buildinstall_metadata(
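The reuse-metadata hunk above (pop versus del) exists so that a key missing from either command dict no longer raises KeyError during comparison. A sketch of that comparison with hypothetical dicts:

from copy import copy


def commands_match(cmd, old_cmd, volatile=("outputdir", "sources")):
    """Compare two command dicts, ignoring per-compose volatile keys.

    Uses .pop(key, None) as on the master side, so a key absent from
    either dict does not raise KeyError.
    """
    cmd_copy, old_copy = copy(cmd), copy(old_cmd)
    for key in volatile:
        cmd_copy.pop(key, None)
        old_copy.pop(key, None)
    return cmd_copy == old_copy


print(commands_match(
    {"product": "F", "outputdir": "/a"},
    {"product": "F", "outputdir": "/b"},
))  # True: only the volatile key differed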
@@ -14,7 +14,6 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-import itertools
 import os
 import random
 import shutil
@@ -24,7 +23,7 @@ import json
 import productmd.treeinfo
 from productmd.images import Image
 from kobo.threads import ThreadPool, WorkerThread
-from kobo.shortcuts import run, relative_path, compute_file_checksums
+from kobo.shortcuts import run, relative_path
 from six.moves import shlex_quote

 from pungi.wrappers import iso
@@ -155,13 +154,6 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
             disc_num=cmd["disc_num"],
             disc_count=cmd["disc_count"],
         )
-        if self.compose.notifier:
-            self.compose.notifier.send(
-                "createiso-imagedone",
-                file=cmd["iso_path"],
-                arch=arch,
-                variant=str(variant),
-            )

     def try_reuse(self, cmd, variant, arch, opts):
         """Try to reuse image from previous compose.
@@ -189,14 +181,6 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
         if not old_config:
             self.logger.info("%s - no config for old compose", log_msg)
             return False

-        # Disable reuse if unsigned packages are allowed. The older compose
-        # could have unsigned packages, and those may have been signed since
-        # then. We want to regenerate the ISO to have signatures.
-        if None in self.compose.conf["sigkeys"]:
-            self.logger.info("%s - unsigned packages are allowed", log_msg)
-            return False
-
         # Convert current configuration to JSON and back to encode it similarly
         # to the old one
         config = json.loads(json.dumps(self.compose.conf))
@@ -385,7 +369,7 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
         if self.compose.notifier:
             self.compose.notifier.send("createiso-targets", deliverables=deliverables)

-        for cmd, variant, arch in commands:
+        for (cmd, variant, arch) in commands:
             self.pool.add(CreateIsoThread(self.pool))
             self.pool.queue_put((self.compose, cmd, variant, arch))

@@ -466,14 +450,7 @@ class CreateIsoThread(WorkerThread):

         try:
             run_createiso_command(
-                num,
-                compose,
-                bootable,
-                arch,
-                cmd["cmd"],
-                mounts,
-                log_file,
-                cmd["iso_path"],
+                num, compose, bootable, arch, cmd["cmd"], mounts, log_file
             )
         except Exception:
             self.fail(compose, cmd, variant, arch)
@@ -540,10 +517,7 @@ def add_iso_to_metadata(
     setattr(img, "can_fail", compose.can_fail(variant, arch, "iso"))
     setattr(img, "deliverable", "iso")
     try:
-        img.volume_id = iso.get_volume_id(
-            iso_path,
-            compose.conf.get("createiso_use_xorrisofs"),
-        )
+        img.volume_id = iso.get_volume_id(iso_path)
     except RuntimeError:
         pass
     if arch == "src":
@@ -554,9 +528,7 @@ def add_iso_to_metadata(
     return img


-def run_createiso_command(
-    num, compose, bootable, arch, cmd, mounts, log_file, iso_path
-):
+def run_createiso_command(num, compose, bootable, arch, cmd, mounts, log_file):
     packages = [
         "coreutils",
         "xorriso" if compose.conf.get("createiso_use_xorrisofs") else "genisoimage",
@@ -567,6 +539,7 @@ def run_createiso_command(
     if bootable:
         extra_packages = {
             "lorax": ["lorax", "which"],
+            "buildinstall": ["anaconda"],
         }
         packages.extend(extra_packages[compose.conf["buildinstall_method"]])

@@ -598,76 +571,6 @@ def run_createiso_command(
         weight=compose.conf["runroot_weights"].get("createiso"),
     )

-    if bootable and compose.conf.get("createiso_use_xorrisofs"):
-        fix_treeinfo_checksums(compose, iso_path, arch)
-
-
-def fix_treeinfo_checksums(compose, iso_path, arch):
-    """It is possible for the ISO to contain a .treefile with incorrect
-    checksums. By modifying the ISO (adding files) some of the images may
-    change.
-
-    This function fixes that after the fact by looking for incorrect checksums,
-    recalculating them and updating the .treeinfo file. Since the size of the
-    file doesn't change, this seems to not change any images.
-    """
-    modified = False
-    with iso.mount(iso_path, compose._logger) as mountpoint:
-        ti = productmd.TreeInfo()
-        ti.load(os.path.join(mountpoint, ".treeinfo"))
-        for image, (type_, expected) in ti.checksums.checksums.items():
-            checksums = compute_file_checksums(os.path.join(mountpoint, image), [type_])
-            actual = checksums[type_]
-            if actual == expected:
-                # Everything fine here, skip to next image.
-                continue
-
-            compose.log_debug("%s: %s: checksum mismatch", iso_path, image)
-            # Update treeinfo with correct checksum
-            ti.checksums.checksums[image] = (type_, actual)
-            modified = True
-
-    if not modified:
-        compose.log_debug("%s: All checksums match, nothing to do.", iso_path)
-        return
-
-    try:
-        tmpdir = compose.mkdtemp(arch, prefix="fix-checksum-")
-        # Write modified .treeinfo
-        ti_path = os.path.join(tmpdir, ".treeinfo")
-        compose.log_debug("Storing modified .treeinfo in %s", ti_path)
-        ti.dump(ti_path)
-        # Write a modified DVD into a temporary path, that is atomically moved
-        # over the original file.
-        fixed_path = os.path.join(tmpdir, "fixed-checksum-dvd.iso")
-        cmd = ["xorriso"]
-        cmd.extend(
-            itertools.chain.from_iterable(
-                iso.xorriso_commands(arch, iso_path, fixed_path)
-            )
-        )
-        cmd.extend(["-map", ti_path, ".treeinfo"])
-        run(
-            cmd,
-            logfile=compose.paths.log.log_file(
-                arch, "checksum-fix_generate_%s" % os.path.basename(iso_path)
-            ),
-        )
-        # The modified ISO no longer has implanted MD5, so that needs to be
-        # fixed again.
-        compose.log_debug("Implanting new MD5 to %s", fixed_path)
-        run(
-            iso.get_implantisomd5_cmd(fixed_path, compose.supported),
-            logfile=compose.paths.log.log_file(
-                arch, "checksum-fix_implantisomd5_%s" % os.path.basename(iso_path)
-            ),
-        )
-        # All done, move the updated image to the final location.
-        compose.log_debug("Updating %s", iso_path)
-        os.rename(fixed_path, iso_path)
-    finally:
-        shutil.rmtree(tmpdir)
-
-
 def split_iso(compose, arch, variant, no_split=False, logger=None):
     """
|
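The removed fix_treeinfo_checksums helper exists because appending files to an ISO with xorriso can alter images whose checksums are recorded in .treeinfo, so the recorded values must be recomputed afterwards. A minimal standalone sketch of the same idea using only the standard library (Pungi's real code goes through productmd.TreeInfo and kobo's compute_file_checksums; the plain-INI "[checksums]" layout with "type:hexdigest" values assumed here is a simplification):

```python
import configparser
import hashlib
import os


def fix_checksums(tree_root):
    """Recompute checksums recorded in .treeinfo under tree_root.

    Returns {image_path: new_checksum} for entries that had to change.
    """
    ti_path = os.path.join(tree_root, ".treeinfo")
    parser = configparser.ConfigParser()
    parser.read(ti_path)
    updated = {}
    # Entries look like "images/boot.iso = sha256:abcd...".
    for image, value in parser.items("checksums"):
        type_, _, expected = value.partition(":")
        h = hashlib.new(type_)
        with open(os.path.join(tree_root, image), "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                h.update(chunk)
        actual = h.hexdigest()
        if actual != expected:
            parser.set("checksums", image, "%s:%s" % (type_, actual))
            updated[image] = actual
    if updated:
        with open(ti_path, "w") as f:
            parser.write(f)
    return updated
```

Because the rewritten .treeinfo has the same size as the original, mapping it back into the ISO does not invalidate the other images, which is what makes the after-the-fact fix safe.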
@@ -76,7 +76,7 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
             for arch in sorted(arches):
                 commands.append((config, variant, arch))

-        for config, variant, arch in commands:
+        for (config, variant, arch) in commands:
             self.pool.add(ExtraIsosThread(self.pool, self.bi))
             self.pool.queue_put((self.compose, config, variant, arch))

@@ -166,7 +166,6 @@ class ExtraIsosThread(WorkerThread):
             log_file=compose.paths.log.log_file(
                 arch, "extraiso-%s" % os.path.basename(iso_path)
             ),
-            iso_path=iso_path,
         )

         img = add_iso_to_metadata(
@@ -205,14 +204,6 @@ class ExtraIsosThread(WorkerThread):
         if not old_config:
             self.pool.log_info("%s - no config for old compose", log_msg)
             return False
-
-        # Disable reuse if unsigned packages are allowed. The older compose
-        # could have unsigned packages, and those may have been signed since
-        # then. We want to regenerate the ISO to have signatures.
-        if None in compose.conf["sigkeys"]:
-            self.pool.log_info("%s - unsigned packages are allowed", log_msg)
-            return False
-
         # Convert current configuration to JSON and back to encode it similarly
         # to the old one
         config = json.loads(json.dumps(compose.conf))
@@ -91,7 +91,7 @@ class GatherPhase(PhaseBase):

         # check whether variants from configuration value
         # 'variant_as_lookaside' are correct
-        for requiring, required in variant_as_lookaside:
+        for (requiring, required) in variant_as_lookaside:
             if requiring in all_variants and required not in all_variants:
                 errors.append(
                     "variant_as_lookaside: variant %r doesn't exist but is "
@@ -100,7 +100,7 @@ class GatherPhase(PhaseBase):

         # check whether variants from configuration value
         # 'variant_as_lookaside' have same architectures
-        for requiring, required in variant_as_lookaside:
+        for (requiring, required) in variant_as_lookaside:
             if (
                 requiring in all_variants
                 and required in all_variants
@@ -236,7 +236,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
     if not hasattr(compose, "_gather_reused_variant_arch"):
         setattr(compose, "_gather_reused_variant_arch", [])
     variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
-    for requiring, required in variant_as_lookaside:
+    for (requiring, required) in variant_as_lookaside:
         if (
             requiring == variant.uid
             and (required, arch) not in compose._gather_reused_variant_arch
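All three of these loops walk the same (requiring, required) pairs from the variant_as_lookaside option; the first two validate that the required variant exists and covers the requiring variant's architectures. A toy standalone version of both checks, with variants reduced to a plain dict of arch sets rather than Pungi's Variant objects (an assumption for brevity):

```python
def check_variant_as_lookaside(all_variants, variant_as_lookaside):
    """Return error strings for unknown variants or arch mismatches.

    all_variants: dict mapping variant uid -> set of arches.
    """
    errors = []
    for requiring, required in variant_as_lookaside:
        if requiring in all_variants and required not in all_variants:
            errors.append(
                "variant_as_lookaside: variant %r doesn't exist but is "
                "required by %r" % (required, requiring)
            )
        elif requiring in all_variants and required in all_variants:
            missing = all_variants[requiring] - all_variants[required]
            if missing:
                errors.append(
                    "variant_as_lookaside: %r lacks arches %s needed by %r"
                    % (required, sorted(missing), requiring)
                )
    return errors


print(check_variant_as_lookaside(
    {"Server": {"x86_64", "aarch64"}, "Everything": {"x86_64"}},
    [("Server", "Everything"), ("Server", "Client")],
))
```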
@@ -469,7 +469,9 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
             )

     else:
+
         for source_name in ("module", "comps", "json"):
+
             packages, groups, filter_packages = get_variant_packages(
                 compose, arch, variant, source_name, package_sets
             )
@@ -574,6 +576,7 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
     move_to_parent_pkgs = _mk_pkg_map()
     removed_pkgs = _mk_pkg_map()
     for pkg_type, pkgs in pkg_map.items():
+
         new_pkgs = []
         for pkg in pkgs:
             pkg_path = pkg["path"]
@@ -645,10 +648,9 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
             compose.paths.work.topdir(arch="global"), "download"
         )
         + "/",
-        "koji": lambda: compose.conf.get(
-            "koji_cache",
-            pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
-        ).rstrip("/")
+        "koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
+            compose
+        ).koji_module.config.topdir.rstrip("/")
         + "/",
         "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
             compose,
@@ -666,11 +668,6 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
                 # we need a union of all SRPMs.
                 if pkg_type == "srpm" or pkg_arch == arch:
                     for pkg in packages:
-                        if "lookaside" in pkg.get("flags", []):
-                            # We want to ignore lookaside packages, those will
-                            # be visible to the depending variants from the
-                            # lookaside repo directly.
-                            continue
                         pkg = pkg["path"]
                         if path_prefix and pkg.startswith(path_prefix):
                             pkg = pkg[len(path_prefix) :]
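The master branch skips packages carrying the "lookaside" flag when building the per-variant lookaside repo, since those are already reachable through the depending variant's own lookaside. Reduced to plain data, the filter amounts to:

```python
packages = [
    {"path": "/mnt/koji/packages/a-1-1.x86_64.rpm"},
    {"path": "/mnt/koji/packages/b-2-1.x86_64.rpm", "flags": ["lookaside"]},
]

# Keep only packages that must be physically present in this repo; anything
# flagged "lookaside" is served by the parent variant's repo instead.
own_packages = [
    pkg["path"] for pkg in packages if "lookaside" not in pkg.get("flags", [])
]
print(own_packages)  # ['/mnt/koji/packages/a-1-1.x86_64.rpm']
```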
@@ -47,15 +47,9 @@ class FakePackage(object):

     @property
     def files(self):
-        paths = []
-        # createrepo_c.Package.files is a tuple, but its length differs across
-        # versions. The constants define index at which the related value is
-        # located.
-        for entry in self.pkg.files:
-            paths.append(
-                os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
-            )
-        return paths
+        return [
+            os.path.join(dirname, basename) for (_, dirname, basename) in self.pkg.files
+        ]

     @property
     def provides(self):
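The master-side rewrite exists because createrepo_c changed the shape of Package.files entries between versions, which is why indexing via the cr.FILE_ENTRY_* constants is safer than positional unpacking. A version-agnostic fallback that avoids the constants could unpack from the right instead; this is purely illustrative and assumes the directory and basename are always the last two elements:

```python
import os


def file_paths(entries):
    """Join file entries into paths regardless of tuple length.

    Takes the last two elements of each entry, so it tolerates both
    shorter and longer entry layouts.
    """
    paths = []
    for entry in entries:
        dirname, basename = entry[-2], entry[-1]
        paths.append(os.path.join(dirname, basename))
    return paths


print(file_paths([(None, "/usr/bin", "bash"), ("/etc", "hosts")]))
# ['/usr/bin/bash', '/etc/hosts']
```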
|
@ -25,7 +25,6 @@ from productmd.rpms import Rpms
|
|||||||
# results will be pulled into the compose.
|
# results will be pulled into the compose.
|
||||||
EXTENSIONS = {
|
EXTENSIONS = {
|
||||||
"docker": ["tar.gz", "tar.xz"],
|
"docker": ["tar.gz", "tar.xz"],
|
||||||
"iso": ["iso"],
|
|
||||||
"liveimg-squashfs": ["liveimg.squashfs"],
|
"liveimg-squashfs": ["liveimg.squashfs"],
|
||||||
"qcow": ["qcow"],
|
"qcow": ["qcow"],
|
||||||
"qcow2": ["qcow2"],
|
"qcow2": ["qcow2"],
|
||||||
@@ -40,7 +39,6 @@ EXTENSIONS = {
     "vdi": ["vdi"],
     "vmdk": ["vmdk"],
     "vpc": ["vhd"],
-    "vhd-compressed": ["vhd.gz", "vhd.xz"],
     "vsphere-ova": ["vsphere.ova"],
 }

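EXTENSIONS maps an image type to the filename suffixes Koji may produce for it, and classifying an output file is a simple suffix scan over that table. A minimal sketch (the table below is a trimmed copy, not the full mapping):

```python
EXTENSIONS = {
    "docker": ["tar.gz", "tar.xz"],
    "qcow2": ["qcow2"],
    "vpc": ["vhd"],
}


def classify(path):
    """Return (type, matched suffix) for a Koji output filename, or None."""
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    return None


print(classify("Fedora-Cloud-Base-39.x86_64.qcow2"))  # ('qcow2', 'qcow2')
```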
@@ -346,9 +344,7 @@ class CreateImageBuildThread(WorkerThread):
         # let's not change filename of koji outputs
         image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

-        src_file = compose.koji_downloader.get_file(
-            os.path.realpath(image_info["path"])
-        )
+        src_file = os.path.realpath(image_info["path"])
         linker.link(src_file, image_dest, link_type=cmd["link_type"])

         # Update image manifest
@@ -76,7 +76,7 @@ class ImageContainerThread(WorkerThread):
         )
         if koji.watch_task(task_id, log_file) != 0:
             raise RuntimeError(
-                "ImageContainer task failed: %s. See %s for details"
+                "ImageContainer: task %s failed: see %s for details"
                 % (task_id, log_file)
             )

@@ -1,229 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-from kobo.threads import ThreadPool, WorkerThread
-from kobo import shortcuts
-from productmd.images import Image
-
-from . import base
-from .. import util
-from ..linker import Linker
-from ..wrappers import kojiwrapper
-from .image_build import EXTENSIONS
-
-KIWIEXTENSIONS = [
-    ("vhd-compressed", ["vhdfixed.xz"], "vhd.xz"),
-    ("vagrant-libvirt", ["vagrant.libvirt.box"], "vagrant-libvirt.box"),
-    ("vagrant-virtualbox", ["vagrant.virtualbox.box"], "vagrant-virtualbox.box"),
-]
-
-
-class KiwiBuildPhase(
-    base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
-):
-    name = "kiwibuild"
-
-    def __init__(self, compose):
-        super(KiwiBuildPhase, self).__init__(compose)
-        self.pool = ThreadPool(logger=self.logger)
-
-    def _get_arches(self, image_conf, arches):
-        """Get an intersection of arches in the config dict and the given ones."""
-        if "arches" in image_conf:
-            arches = set(image_conf["arches"]) & arches
-        return sorted(arches)
-
-    @staticmethod
-    def _get_repo_urls(compose, repos, arch="$basearch"):
-        """
-        Get list of repos with resolved repo URLs. Preserve repos defined
-        as dicts.
-        """
-        resolved_repos = []
-
-        for repo in repos:
-            repo = util.get_repo_url(compose, repo, arch=arch)
-            if repo is None:
-                raise RuntimeError("Failed to resolve repo URL for %s" % repo)
-            resolved_repos.append(repo)
-
-        return resolved_repos
-
-    def _get_repo(self, image_conf, variant):
-        """
-        Get a list of repos. First included are those explicitly listed in
-        config, followed by by repo for current variant if it's not included in
-        the list already.
-        """
-        repos = shortcuts.force_list(image_conf.get("repos", []))
-
-        if not variant.is_empty and variant.uid not in repos:
-            repos.append(variant.uid)
-
-        return KiwiBuildPhase._get_repo_urls(self.compose, repos, arch="$arch")
-
-    def run(self):
-        for variant in self.compose.get_variants():
-            arches = set([x for x in variant.arches if x != "src"])
-
-            for image_conf in self.get_config_block(variant):
-                build_arches = self._get_arches(image_conf, arches)
-                if not build_arches:
-                    self.log_debug("skip: no arches")
-                    continue
-
-                # these properties can be set per-image *or* as e.g.
-                # kiwibuild_description_scm or global_release in the config
-                generics = {
-                    "release": self.get_release(image_conf),
-                    "target": self.get_config(image_conf, "target"),
-                    "descscm": self.get_config(image_conf, "description_scm"),
-                    "descpath": self.get_config(image_conf, "description_path"),
-                    "type": self.get_config(image_conf, "type"),
-                    "type_attr": self.get_config(image_conf, "type_attr"),
-                    "bundle_name_format": self.get_config(
-                        image_conf, "bundle_name_format"
-                    ),
-                }
-
-                repo = self._get_repo(image_conf, variant)
-
-                failable_arches = image_conf.pop("failable", [])
-                if failable_arches == ["*"]:
-                    failable_arches = image_conf["arches"]
-
-                self.pool.add(RunKiwiBuildThread(self.pool))
-                self.pool.queue_put(
-                    (
-                        self.compose,
-                        variant,
-                        image_conf,
-                        build_arches,
-                        generics,
-                        repo,
-                        failable_arches,
-                    )
-                )
-
-        self.pool.start()
-
-
-class RunKiwiBuildThread(WorkerThread):
-    def process(self, item, num):
-        (compose, variant, config, arches, generics, repo, failable_arches) = item
-        self.failable_arches = failable_arches
-        # the Koji task as a whole can only fail if *all* arches are failable
-        can_task_fail = set(failable_arches).issuperset(set(arches))
-        self.num = num
-        with util.failable(
-            compose,
-            can_task_fail,
-            variant,
-            "*",
-            "kiwibuild",
-            logger=self.pool._logger,
-        ):
-            self.worker(compose, variant, config, arches, generics, repo)
-
-    def worker(self, compose, variant, config, arches, generics, repo):
-        msg = "kiwibuild task for variant %s" % variant.uid
-        self.pool.log_info("[BEGIN] %s" % msg)
-        koji = kojiwrapper.KojiWrapper(compose)
-        koji.login()
-
-        task_id = koji.koji_proxy.kiwiBuild(
-            generics["target"],
-            arches,
-            generics["descscm"],
-            generics["descpath"],
-            profile=config["kiwi_profile"],
-            release=generics["release"],
-            repos=repo,
-            type=generics["type"],
-            type_attr=generics["type_attr"],
-            result_bundle_name_format=generics["bundle_name_format"],
-            # this ensures the task won't fail if only failable arches fail
-            optional_arches=self.failable_arches,
-        )
-
-        koji.save_task_id(task_id)
-
-        # Wait for it to finish and capture the output into log file.
-        log_dir = os.path.join(compose.paths.log.topdir(), "kiwibuild")
-        util.makedirs(log_dir)
-        log_file = os.path.join(
-            log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
-        )
-        if koji.watch_task(task_id, log_file) != 0:
-            raise RuntimeError(
-                "kiwiBuild task failed: %s. See %s for details" % (task_id, log_file)
-            )
-
-        # Refresh koji session which may have timed out while the task was
-        # running. Watching is done via a subprocess, so the session is
-        # inactive.
-        koji = kojiwrapper.KojiWrapper(compose)
-
-        linker = Linker(logger=self.pool._logger)
-
-        # Process all images in the build. There should be one for each
-        # architecture, but we don't verify that.
-        paths = koji.get_image_paths(task_id)
-
-        for arch, paths in paths.items():
-            for path in paths:
-                type_, format_ = _find_type_and_format(path)
-                if not format_:
-                    # Path doesn't match any known type.
-                    continue
-
-                # image_dir is absolute path to which the image should be copied.
-                # We also need the same path as relative to compose directory for
-                # including in the metadata.
-                image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
-                rel_image_dir = compose.paths.compose.image_dir(
-                    variant, relative=True
-                ) % {"arch": arch}
-                util.makedirs(image_dir)
-
-                filename = os.path.basename(path)
-
-                image_dest = os.path.join(image_dir, filename)
-
-                src_file = compose.koji_downloader.get_file(path)
-
-                linker.link(src_file, image_dest, link_type=compose.conf["link_type"])
-
-                # Update image manifest
-                img = Image(compose.im)
-
-                # Get the manifest type from the config if supplied, otherwise we
-                # determine the manifest type based on the koji output
-                img.type = type_
-                img.format = format_
-                img.path = os.path.join(rel_image_dir, filename)
-                img.mtime = util.get_mtime(image_dest)
-                img.size = util.get_file_size(image_dest)
-                img.arch = arch
-                img.disc_number = 1  # We don't expect multiple disks
-                img.disc_count = 1
-                img.bootable = False
-                img.subvariant = config.get("subvariant", variant.uid)
-                setattr(img, "can_fail", arch in self.failable_arches)
-                setattr(img, "deliverable", "kiwibuild")
-                compose.im.add(variant=variant.uid, arch=arch, image=img)
-
-        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, task_id))
-
-
-def _find_type_and_format(path):
-    for type_, suffixes in EXTENSIONS.items():
-        for suffix in suffixes:
-            if path.endswith(suffix):
-                return type_, suffix
-    # these are our kiwi-exclusive mappings for images whose extensions
-    # aren't quite the same as imagefactory
-    for type_, suffixes, format_ in KIWIEXTENSIONS:
-        if any(path.endswith(suffix) for suffix in suffixes):
-            return type_, format_
-    return None, None
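One detail of the deleted phase worth calling out: the Koji task as a whole is treated as failable only when every architecture it builds is failable, which is the issuperset check in RunKiwiBuildThread.process. Standalone:

```python
def task_can_fail(failable_arches, build_arches):
    # The whole task may fail only if *all* build arches are failable;
    # otherwise a failure on any required arch must abort the compose.
    return set(failable_arches).issuperset(set(build_arches))


print(task_can_fail(["aarch64"], ["x86_64", "aarch64"]))            # False
print(task_can_fail(["x86_64", "aarch64"], ["x86_64", "aarch64"]))  # True
```

Per-arch failures are still tolerated separately through the optional_arches argument passed to the kiwiBuild call above.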
pungi/phases/live_images.py (new file, 406 lines)
@@ -0,0 +1,406 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+
+import os
+import sys
+import time
+import shutil
+
+from kobo.threads import ThreadPool, WorkerThread
+from kobo.shortcuts import run, save_to_file, force_list
+from productmd.images import Image
+from six.moves import shlex_quote
+
+from pungi.wrappers.kojiwrapper import KojiWrapper
+from pungi.wrappers import iso
+from pungi.phases import base
+from pungi.util import makedirs, get_mtime, get_file_size, failable
+from pungi.util import get_repo_urls
+
+
+# HACK: define cmp in python3
+if sys.version_info[0] == 3:
+
+    def cmp(a, b):
+        return (a > b) - (a < b)
+
+
+class LiveImagesPhase(
+    base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
+):
+    name = "live_images"
+
+    def __init__(self, compose):
+        super(LiveImagesPhase, self).__init__(compose)
+        self.pool = ThreadPool(logger=self.logger)
+
+    def _get_repos(self, arch, variant, data):
+        repos = []
+        if not variant.is_empty:
+            repos.append(variant.uid)
+        repos.extend(force_list(data.get("repo", [])))
+        return get_repo_urls(self.compose, repos, arch=arch)
+
+    def run(self):
+        symlink_isos_to = self.compose.conf.get("symlink_isos_to")
+        commands = []
+
+        for variant in self.compose.all_variants.values():
+            for arch in variant.arches + ["src"]:
+                for data in self.get_config_block(variant, arch):
+                    subvariant = data.get("subvariant", variant.uid)
+                    type = data.get("type", "live")
+
+                    if type == "live":
+                        dest_dir = self.compose.paths.compose.iso_dir(
+                            arch, variant, symlink_to=symlink_isos_to
+                        )
+                    elif type == "appliance":
+                        dest_dir = self.compose.paths.compose.image_dir(
+                            variant, symlink_to=symlink_isos_to
+                        )
+                        dest_dir = dest_dir % {"arch": arch}
+                        makedirs(dest_dir)
+                    else:
+                        raise RuntimeError("Unknown live image type %s" % type)
+                    if not dest_dir:
+                        continue
+
+                    cmd = {
+                        "name": data.get("name"),
+                        "version": self.get_version(data),
+                        "release": self.get_release(data),
+                        "dest_dir": dest_dir,
+                        "build_arch": arch,
+                        "ks_file": data["kickstart"],
+                        "ksurl": self.get_ksurl(data),
+                        # Used for images wrapped in RPM
+                        "specfile": data.get("specfile", None),
+                        # Scratch (only taken in consideration if specfile
+                        # specified) For images wrapped in rpm is scratch
+                        # disabled by default For other images is scratch
+                        # always on
+                        "scratch": data.get("scratch", False),
+                        "sign": False,
+                        "type": type,
+                        "label": "",  # currently not used
+                        "subvariant": subvariant,
+                        "failable_arches": data.get("failable", []),
+                        # First see if live_target is specified, then fall back
+                        # to regular setup of local, phase and global setting.
+                        "target": self.compose.conf.get("live_target")
+                        or self.get_config(data, "target"),
+                    }
+
+                    cmd["repos"] = self._get_repos(arch, variant, data)
+
+                    # Signing of the rpm wrapped image
+                    if not cmd["scratch"] and data.get("sign"):
+                        cmd["sign"] = True
+
+                    cmd["filename"] = self._get_file_name(
+                        arch, variant, cmd["name"], cmd["version"]
+                    )
+
+                    commands.append((cmd, variant, arch))
+
+        for (cmd, variant, arch) in commands:
+            self.pool.add(CreateLiveImageThread(self.pool))
+            self.pool.queue_put((self.compose, cmd, variant, arch))
+
+        self.pool.start()
+
+    def _get_file_name(self, arch, variant, name=None, version=None):
+        if self.compose.conf["live_images_no_rename"]:
+            return None
+
+        disc_type = self.compose.conf["disc_types"].get("live", "live")
+
+        format = (
+            "%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
+        )
+        # Custom name (prefix)
+        if name:
+            custom_iso_name = name
+            if version:
+                custom_iso_name += "-%s" % version
+            format = (
+                custom_iso_name
+                + "-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
+            )
+
+        # XXX: hardcoded disc_num
+        return self.compose.get_image_name(
+            arch, variant, disc_type=disc_type, disc_num=None, format=format
+        )
+
+
+class CreateLiveImageThread(WorkerThread):
+    EXTS = (".iso", ".raw.xz")
+
+    def process(self, item, num):
+        compose, cmd, variant, arch = item
+        self.failable_arches = cmd.get("failable_arches", [])
+        self.can_fail = bool(self.failable_arches)
+        with failable(
+            compose,
+            self.can_fail,
+            variant,
+            arch,
+            "live",
+            cmd.get("subvariant"),
+            logger=self.pool._logger,
+        ):
+            self.worker(compose, cmd, variant, arch, num)
+
+    def worker(self, compose, cmd, variant, arch, num):
+        self.basename = "%(name)s-%(version)s-%(release)s" % cmd
+        log_file = compose.paths.log.log_file(arch, "liveimage-%s" % self.basename)
+
+        subvariant = cmd.pop("subvariant")
+
+        imgname = "%s-%s-%s-%s" % (
+            compose.ci_base.release.short,
+            subvariant,
+            "Live" if cmd["type"] == "live" else "Disk",
+            arch,
+        )
+
+        msg = "Creating ISO (arch: %s, variant: %s): %s" % (
+            arch,
+            variant,
+            self.basename,
+        )
+        self.pool.log_info("[BEGIN] %s" % msg)
+
+        koji_wrapper = KojiWrapper(compose)
+        _, version = compose.compose_id.rsplit("-", 1)
+        name = cmd["name"] or imgname
+        version = cmd["version"] or version
+        archive = False
+        if cmd["specfile"] and not cmd["scratch"]:
+            # Non scratch build are allowed only for rpm wrapped images
+            archive = True
+        koji_cmd = koji_wrapper.get_create_image_cmd(
+            name,
+            version,
+            cmd["target"],
+            cmd["build_arch"],
+            cmd["ks_file"],
+            cmd["repos"],
+            image_type=cmd["type"],
+            wait=True,
+            archive=archive,
+            specfile=cmd["specfile"],
+            release=cmd["release"],
+            ksurl=cmd["ksurl"],
+        )
+
+        # avoid race conditions?
+        # Kerberos authentication failed:
+        # Permission denied in replay cache code (-1765328215)
+        time.sleep(num * 3)
+
+        output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
+        if output["retcode"] != 0:
+            raise RuntimeError(
+                "LiveImage task failed: %s. See %s for more details."
+                % (output["task_id"], log_file)
+            )
+
+        # copy finished image to isos/
+        image_path = [
+            path
+            for path in koji_wrapper.get_image_path(output["task_id"])
+            if self._is_image(path)
+        ]
+        if len(image_path) != 1:
+            raise RuntimeError(
+                "Got %d images from task %d, expected 1."
+                % (len(image_path), output["task_id"])
+            )
+        image_path = image_path[0]
+        filename = cmd.get("filename") or os.path.basename(image_path)
+        destination = os.path.join(cmd["dest_dir"], filename)
+        shutil.copy2(image_path, destination)
+
+        # copy finished rpm to isos/ (if rpm wrapped ISO was built)
+        if cmd["specfile"]:
+            rpm_paths = koji_wrapper.get_wrapped_rpm_path(output["task_id"])
+
+            if cmd["sign"]:
+                # Sign the rpm wrapped images and get their paths
+                self.pool.log_info(
+                    "Signing rpm wrapped images in task_id: %s (expected key ID: %s)"
+                    % (output["task_id"], compose.conf.get("signing_key_id"))
+                )
+                signed_rpm_paths = self._sign_image(
+                    koji_wrapper, compose, cmd, output["task_id"]
+                )
+                if signed_rpm_paths:
+                    rpm_paths = signed_rpm_paths
+
+            for rpm_path in rpm_paths:
+                shutil.copy2(rpm_path, cmd["dest_dir"])
+
+        if cmd["type"] == "live":
+            # ISO manifest only makes sense for live images
+            self._write_manifest(destination)
+
+        self._add_to_images(
+            compose,
+            variant,
+            subvariant,
+            arch,
+            cmd["type"],
+            self._get_format(image_path),
+            destination,
+        )
+
+        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
+
+    def _add_to_images(self, compose, variant, subvariant, arch, type, format, path):
+        """Adds the image to images.json"""
+        img = Image(compose.im)
+        img.type = "raw-xz" if type == "appliance" else type
+        img.format = format
+        img.path = os.path.relpath(path, compose.paths.compose.topdir())
+        img.mtime = get_mtime(path)
+        img.size = get_file_size(path)
+        img.arch = arch
+        img.disc_number = 1  # We don't expect multiple disks
+        img.disc_count = 1
+        img.bootable = True
+        img.subvariant = subvariant
+        setattr(img, "can_fail", self.can_fail)
+        setattr(img, "deliverable", "live")
+        compose.im.add(variant=variant.uid, arch=arch, image=img)
+
+    def _is_image(self, path):
+        for ext in self.EXTS:
+            if path.endswith(ext):
+                return True
+        return False
+
+    def _get_format(self, path):
+        """Get format based on extension."""
+        for ext in self.EXTS:
+            if path.endswith(ext):
+                return ext[1:]
+        raise RuntimeError("Getting format for unknown image %s" % path)
+
+    def _write_manifest(self, iso_path):
+        """Generate manifest for ISO at given path.
+
+        :param iso_path: (str) absolute path to the ISO
+        """
+        dir, filename = os.path.split(iso_path)
+        run("cd %s && %s" % (shlex_quote(dir), iso.get_manifest_cmd(filename)))
+
+    def _sign_image(self, koji_wrapper, compose, cmd, koji_task_id):
+        signing_key_id = compose.conf.get("signing_key_id")
+        signing_command = compose.conf.get("signing_command")
+
+        if not signing_key_id:
+            self.pool.log_warning(
+                "Signing is enabled but signing_key_id is not specified"
+            )
+            self.pool.log_warning("Signing skipped")
+            return None
+        if not signing_command:
+            self.pool.log_warning(
+                "Signing is enabled but signing_command is not specified"
+            )
+            self.pool.log_warning("Signing skipped")
+            return None
+
+        # Prepare signing log file
+        signing_log_file = compose.paths.log.log_file(
+            cmd["build_arch"], "live_images-signing-%s" % self.basename
+        )
+
+        # Sign the rpm wrapped images
+        try:
+            sign_builds_in_task(
+                koji_wrapper,
+                koji_task_id,
+                signing_command,
+                log_file=signing_log_file,
+                signing_key_password=compose.conf.get("signing_key_password"),
+            )
+        except RuntimeError:
+            self.pool.log_error(
+                "Error while signing rpm wrapped images. See log: %s" % signing_log_file
+            )
+            raise
+
+        # Get pats to the signed rpms
+        signing_key_id = signing_key_id.lower()  # Koji uses lowercase in paths
+        rpm_paths = koji_wrapper.get_signed_wrapped_rpms_paths(
+            koji_task_id, signing_key_id
+        )
+
+        # Wait until files are available
+        if wait_paths(rpm_paths, 60 * 15):
+            # Files are ready
+            return rpm_paths
+
+        # Signed RPMs are not available
+        self.pool.log_warning("Signed files are not available: %s" % rpm_paths)
+        self.pool.log_warning("Unsigned files will be used")
+        return None
+
+
+def wait_paths(paths, timeout=60):
+    started = time.time()
+    remaining = paths[:]
+    while True:
+        for path in remaining[:]:
+            if os.path.exists(path):
+                remaining.remove(path)
+        if not remaining:
+            break
+        time.sleep(1)
+        if timeout >= 0 and (time.time() - started) > timeout:
+            return False
+    return True
+
+
+def sign_builds_in_task(
+    koji_wrapper, task_id, signing_command, log_file=None, signing_key_password=None
+):
+    # Get list of nvrs that should be signed
+    nvrs = koji_wrapper.get_build_nvrs(task_id)
+    if not nvrs:
+        # No builds are available (scratch build, etc.?)
+        return
+
+    # Append builds to sign_cmd
+    for nvr in nvrs:
+        signing_command += " '%s'" % nvr
+
+    # Log signing command before password is filled in it
+    if log_file:
+        save_to_file(log_file, signing_command, append=True)
+
+    # Fill password into the signing command
+    if signing_key_password:
+        signing_command = signing_command % {
+            "signing_key_password": signing_key_password
+        }
+
+    # Sign the builds
+    run(signing_command, can_fail=False, show_cmd=False, logfile=log_file)
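The wait_paths helper added with this file is a plain existence poll; the signing path uses it to wait up to fifteen minutes (60 * 15 seconds) for signed RPMs to appear. A usage sketch, assuming the module is importable from the restored location:

```python
import os
import tempfile
import threading

from pungi.phases.live_images import wait_paths  # as defined above

tmp = tempfile.mkdtemp()
target = os.path.join(tmp, "signed.rpm")
# Simulate the signing service dropping the file in after two seconds.
threading.Timer(2, lambda: open(target, "w").close()).start()
print(wait_paths([target], timeout=10))  # True once the file appears
print(wait_paths([os.path.join(tmp, "missing.rpm")], 3))  # False after ~3s
```

Note the fallback above: if the signed copies never materialize, the phase logs a warning and proceeds with the unsigned RPMs rather than failing the compose.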
@@ -182,9 +182,7 @@ class LiveMediaThread(WorkerThread):
         # let's not change filename of koji outputs
         image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

-        src_file = compose.koji_downloader.get_file(
-            os.path.realpath(image_info["path"])
-        )
+        src_file = os.path.realpath(image_info["path"])
         linker.link(src_file, image_dest, link_type=link_type)

         # Update image manifest
@@ -134,7 +134,7 @@ class OSBSThread(WorkerThread):
         # though there is not much there).
         if koji.watch_task(task_id, log_file) != 0:
             raise RuntimeError(
-                "OSBS task failed: %s. See %s for details" % (task_id, log_file)
+                "OSBS: task %s failed: see %s for details" % (task_id, log_file)
             )

         scratch = config.get("scratch", False)
@@ -154,7 +154,7 @@ class OSBSThread(WorkerThread):
             reuse_file,
         )

-        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, task_id))
+        self.pool.log_info("[DONE ] %s" % msg)

     def _get_image_conf(self, compose, config):
         """Get image-build.conf from git repo.
@@ -159,10 +159,6 @@ class RunOSBuildThread(WorkerThread):
         if upload_options:
             opts["upload_options"] = upload_options

-        customizations = config.get("customizations")
-        if customizations:
-            opts["customizations"] = customizations
-
         if release:
             opts["release"] = release
         task_id = koji.koji_proxy.osbuildImage(
@@ -185,7 +181,7 @@ class RunOSBuildThread(WorkerThread):
         )
         if koji.watch_task(task_id, log_file) != 0:
             raise RuntimeError(
-                "OSBuild task failed: %s. See %s for details" % (task_id, log_file)
+                "OSBuild: task %s failed: see %s for details" % (task_id, log_file)
             )

         # Refresh koji session which may have timed out while the task was
@@ -216,27 +212,16 @@ class RunOSBuildThread(WorkerThread):
         # image_dir is absolute path to which the image should be copied.
         # We also need the same path as relative to compose directory for
         # including in the metadata.
-        if archive["type_name"] == "iso":
-            # If the produced image is actually an ISO, it should go to
-            # iso/ subdirectory.
-            image_dir = compose.paths.compose.iso_dir(arch, variant)
-            rel_image_dir = compose.paths.compose.iso_dir(
-                arch, variant, relative=True
-            )
-        else:
-            image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
-            rel_image_dir = compose.paths.compose.image_dir(
-                variant, relative=True
-            ) % {"arch": arch}
+        image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
+        rel_image_dir = compose.paths.compose.image_dir(variant, relative=True) % {
+            "arch": arch
+        }
         util.makedirs(image_dir)

         image_dest = os.path.join(image_dir, archive["filename"])

-        src_file = compose.koji_downloader.get_file(
-            os.path.join(
-                koji.koji_module.pathinfo.imagebuild(build_info),
-                archive["filename"],
-            ),
-        )
+        src_file = os.path.join(
+            koji.koji_module.pathinfo.imagebuild(build_info), archive["filename"]
+        )

         linker.link(src_file, image_dest, link_type=compose.conf["link_type"])
@@ -253,24 +238,7 @@ class RunOSBuildThread(WorkerThread):

         # Update image manifest
         img = Image(compose.im)
-
-        # Get the manifest type from the config if supplied, otherwise we
-        # determine the manifest type based on the koji output
-        img.type = config.get("manifest_type")
-        if not img.type:
-            if archive["type_name"] != "iso":
-                img.type = archive["type_name"]
-            else:
-                fn = archive["filename"].lower()
-                if "ostree" in fn:
-                    img.type = "dvd-ostree-osbuild"
-                elif "live" in fn:
-                    img.type = "live-osbuild"
-                elif "netinst" in fn or "boot" in fn:
-                    img.type = "boot"
-                else:
-                    img.type = "dvd"
-
+        img.type = archive["type_name"]
         img.format = suffix
         img.path = os.path.join(rel_image_dir, archive["filename"])
         img.mtime = util.get_mtime(image_dest)
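The block removed here inferred the productmd image type from the Koji filename whenever no manifest_type was configured; the heuristic is a plain substring cascade over the lowercased name. Isolated for clarity:

```python
def guess_image_type(filename, type_name):
    """Mirror of the removed heuristic: ISO outputs are subtyped by name."""
    if type_name != "iso":
        return type_name
    fn = filename.lower()
    if "ostree" in fn:
        return "dvd-ostree-osbuild"
    if "live" in fn:
        return "live-osbuild"
    if "netinst" in fn or "boot" in fn:
        return "boot"
    return "dvd"


print(guess_image_type("Fedora-IoT-ostree-x86_64.iso", "iso"))  # dvd-ostree-osbuild
print(guess_image_type("image.qcow2", "qcow2"))                 # qcow2
```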
@@ -85,7 +85,7 @@ class OSTreeThread(WorkerThread):
         comps_repo = compose.paths.work.comps_repo(
             "$basearch", variant=variant, create_dir=False
         )
-        repos = shortcuts.force_list(config.get("repo", [])) + self.repos
+        repos = shortcuts.force_list(config["repo"]) + self.repos
         if compose.has_comps:
             repos.append(translate_path(compose, comps_repo))
         repos = get_repo_dicts(repos, logger=self.pool)
@@ -168,9 +168,7 @@ class OSTreeThread(WorkerThread):
                 ("unified-core", config.get("unified_core", False)),
             ]
         )
-        default_packages = ["pungi", "ostree", "rpm-ostree"]
-        additional_packages = config.get("runroot_packages", [])
-        packages = default_packages + additional_packages
+        packages = ["pungi", "ostree", "rpm-ostree"]
         log_file = os.path.join(self.logdir, "runroot.log")
         mounts = [compose.topdir, config["ostree_repo"]]
         runroot = Runroot(compose, phase="ostree")
@@ -1,190 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import copy
-import json
-import os
-from kobo import shortcuts
-from kobo.threads import ThreadPool, WorkerThread
-
-from productmd.images import Image
-
-from pungi.runroot import Runroot
-from .base import ConfigGuardedPhase
-from .. import util
-from ..util import get_repo_dicts, translate_path
-from ..wrappers import scm
-
-
-class OSTreeContainerPhase(ConfigGuardedPhase):
-    name = "ostree_container"
-
-    def __init__(self, compose, pkgset_phase=None):
-        super(OSTreeContainerPhase, self).__init__(compose)
-        self.pool = ThreadPool(logger=self.compose._logger)
-        self.pkgset_phase = pkgset_phase
-
-    def get_repos(self):
-        return [
-            translate_path(
-                self.compose,
-                self.compose.paths.work.pkgset_repo(
-                    pkgset.name, "$basearch", create_dir=False
-                ),
-            )
-            for pkgset in self.pkgset_phase.package_sets
-        ]
-
-    def _enqueue(self, variant, arch, conf):
-        self.pool.add(OSTreeContainerThread(self.pool, self.get_repos()))
-        self.pool.queue_put((self.compose, variant, arch, conf))
-
-    def run(self):
-        if isinstance(self.compose.conf.get(self.name), dict):
-            for variant in self.compose.get_variants():
-                for conf in self.get_config_block(variant):
-                    for arch in conf.get("arches", []) or variant.arches:
-                        self._enqueue(variant, arch, conf)
-        else:
-            # Legacy code path to support original configuration.
-            for variant in self.compose.get_variants():
-                for arch in variant.arches:
-                    for conf in self.get_config_block(variant, arch):
-                        self._enqueue(variant, arch, conf)
-
-        self.pool.start()
-
-
-class OSTreeContainerThread(WorkerThread):
-    def __init__(self, pool, repos):
-        super(OSTreeContainerThread, self).__init__(pool)
-        self.repos = repos
-
-    def process(self, item, num):
-        compose, variant, arch, config = item
-        self.num = num
-        failable_arches = config.get("failable", [])
-        self.can_fail = util.can_arch_fail(failable_arches, arch)
-        with util.failable(compose, self.can_fail, variant, arch, "ostree-container"):
-            self.worker(compose, variant, arch, config)
-
-    def worker(self, compose, variant, arch, config):
-        msg = "OSTree container phase for variant %s, arch %s" % (variant.uid, arch)
-        self.pool.log_info("[BEGIN] %s" % msg)
-        workdir = compose.paths.work.topdir("ostree-container-%d" % self.num)
-        self.logdir = compose.paths.log.topdir(
-            "%s/%s/ostree-container-%d" % (arch, variant.uid, self.num)
-        )
-        repodir = os.path.join(workdir, "config_repo")
-        self._clone_repo(
-            compose,
-            repodir,
-            config["config_url"],
-            config.get("config_branch", "main"),
-        )
-
-        repos = shortcuts.force_list(config.get("repo", [])) + self.repos
-        repos = get_repo_dicts(repos, logger=self.pool)
-
-        # copy the original config and update before save to a json file
-        new_config = copy.copy(config)
-
-        # repos in configuration can have repo url set to variant UID,
-        # update it to have the actual url that we just translated.
-        new_config.update({"repo": repos})
-
-        # remove unnecessary (for 'pungi-make-ostree container' script ) elements
-        # from config, it doesn't hurt to have them, however remove them can
-        # reduce confusion
-        for k in [
-            "treefile",
-            "config_url",
-            "config_branch",
-            "failable",
-            "version",
-        ]:
-            new_config.pop(k, None)
-
-        # write a json file to save the configuration, so 'pungi-make-ostree tree'
-        # can take use of it
-        extra_config_file = os.path.join(workdir, "extra_config.json")
-        with open(extra_config_file, "w") as f:
-            json.dump(new_config, f, indent=4)
-
-        self._run_ostree_container_cmd(
-            compose, variant, arch, config, repodir, extra_config_file=extra_config_file
-        )
-
-        self.pool.log_info("[DONE ] %s" % (msg))
-
-    def _run_ostree_container_cmd(
-        self, compose, variant, arch, config, config_repo, extra_config_file=None
-    ):
-        target_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
-        util.makedirs(target_dir)
-        version = util.version_generator(compose, config.get("version"))
-        archive_name = "%s-%s-%s" % (
-            compose.conf["release_short"],
-            variant.uid,
-            version,
-        )
-
-        # Run the pungi-make-ostree command locally to create a script to
-        # execute in runroot environment.
-        cmd = [
-            "pungi-make-ostree",
-            "container",
-            "--log-dir=%s" % self.logdir,
-            "--name=%s" % archive_name,
-            "--path=%s" % target_dir,
-            "--treefile=%s" % os.path.join(config_repo, config["treefile"]),
-            "--extra-config=%s" % extra_config_file,
-            "--version=%s" % version,
-        ]
-
-        _, runroot_script = shortcuts.run(cmd, universal_newlines=True)
-
-        default_packages = ["ostree", "rpm-ostree", "selinux-policy-targeted"]
-        additional_packages = config.get("runroot_packages", [])
-        packages = default_packages + additional_packages
-        log_file = os.path.join(self.logdir, "runroot.log")
-        # TODO: Use to get previous build
-        mounts = [compose.topdir]
-
-        runroot = Runroot(compose, phase="ostree_container")
-        runroot.run(
-            " && ".join(runroot_script.splitlines()),
-            log_file=log_file,
-            arch=arch,
-            packages=packages,
-            mounts=mounts,
-            new_chroot=True,
-            weight=compose.conf["runroot_weights"].get("ostree"),
-        )
-
-        fullpath = os.path.join(target_dir, "%s.ociarchive" % archive_name)
-
-        # Update image manifest
-        img = Image(compose.im)
-
-        # Get the manifest type from the config if supplied, otherwise we
-        # determine the manifest type based on the koji output
-        img.type = "ociarchive"
-        img.format = "ociarchive"
-        img.path = os.path.relpath(fullpath, compose.paths.compose.topdir())
-        img.mtime = util.get_mtime(fullpath)
-        img.size = util.get_file_size(fullpath)
-        img.arch = arch
-        img.disc_number = 1
-        img.disc_count = 1
-        img.bootable = False
-        img.subvariant = config.get("subvariant", variant.uid)
-        setattr(img, "can_fail", self.can_fail)
-        setattr(img, "deliverable", "ostree-container")
-        compose.im.add(variant=variant.uid, arch=arch, image=img)
-
-    def _clone_repo(self, compose, repodir, url, branch):
-        scm.get_dir_from_scm(
-            {"scm": "git", "repo": url, "branch": branch, "dir": "."},
-            repodir,
-            compose=compose,
-        )
@@ -38,17 +38,12 @@ from pungi.phases.createrepo import add_modular_metadata

 def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
     result = {}
+    exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
     for arch in compose.get_arches():
         compose.log_info("Populating package set for arch: %s", arch)
         is_multilib = is_arch_multilib(compose.conf, arch)
         arches = get_valid_arches(arch, is_multilib, add_src=True)
-        pkgset = global_pkgset.subset(
-            arch,
-            arches,
-            exclusive_noarch=compose.conf["pkgset_exclusive_arch_considers_noarch"],
-            inherit_to_noarch=compose.conf["pkgset_inherit_exclusive_arch_to_noarch"],
-        )
+        pkgset = global_pkgset.subset(arch, arches, exclusive_noarch=exclusive_noarch)
         pkgset.save_file_list(
             compose.paths.work.package_list(arch=arch, pkgset=global_pkgset),
             remove_path_prefix=path_prefix,
@@ -26,12 +26,10 @@ import time
 import pgpy
 import rpm
 from six.moves import cPickle as pickle
-from functools import partial

 import kobo.log
 import kobo.pkgset
 import kobo.rpmlib
-from kobo.shortcuts import compute_file_checksums

 from kobo.threads import WorkerThread, ThreadPool

@@ -154,15 +152,9 @@ class PackageSetBase(kobo.log.LoggingBase):
         """

         def nvr_formatter(package_info):
-            epoch_suffix = ''
-            if package_info['epoch'] is not None:
-                epoch_suffix = ':' + package_info['epoch']
-            return (
-                f"{package_info['name']}"
-                f"{epoch_suffix}-"
-                f"{package_info['version']}-"
-                f"{package_info['release']}."
-                f"{package_info['arch']}"
+            # joins NVR parts of the package with '-' character.
+            return "-".join(
+                (package_info["name"], package_info["version"], package_info["release"])
             )

         def get_error(sigkeys, infos):
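The master branch extends nvr_formatter to a full NEVRA (with optional epoch) so error messages identify the exact package, while the alma branch keeps plain NVR. The epoch handling from the master side, isolated and exercised:

```python
def format_nevra(package_info):
    """name[:epoch]-version-release.arch; epoch included only when set."""
    epoch_suffix = ""
    if package_info["epoch"] is not None:
        epoch_suffix = ":" + package_info["epoch"]
    return (
        f"{package_info['name']}"
        f"{epoch_suffix}-"
        f"{package_info['version']}-"
        f"{package_info['release']}."
        f"{package_info['arch']}"
    )


print(format_nevra({"name": "bash", "epoch": None, "version": "5.2",
                    "release": "1.fc39", "arch": "x86_64"}))
# bash-5.2-1.fc39.x86_64
print(format_nevra({"name": "openssl", "epoch": "1", "version": "3.0.8",
                    "release": "1.el9", "arch": "x86_64"}))
# openssl:1-3.0.8-1.el9.x86_64
```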
@@ -213,31 +205,16 @@ class PackageSetBase(kobo.log.LoggingBase):

         return self.rpms_by_arch

-    def subset(
-        self, primary_arch, arch_list, exclusive_noarch=True, inherit_to_noarch=True
-    ):
+    def subset(self, primary_arch, arch_list, exclusive_noarch=True):
         """Create a subset of this package set that only includes
         packages compatible with"""
         pkgset = PackageSetBase(
             self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
         )
-        pkgset.merge(
-            self,
-            primary_arch,
-            arch_list,
-            exclusive_noarch=exclusive_noarch,
-            inherit_to_noarch=inherit_to_noarch,
-        )
+        pkgset.merge(self, primary_arch, arch_list, exclusive_noarch=exclusive_noarch)
         return pkgset

-    def merge(
-        self,
-        other,
-        primary_arch,
-        arch_list,
-        exclusive_noarch=True,
-        inherit_to_noarch=True,
-    ):
+    def merge(self, other, primary_arch, arch_list, exclusive_noarch=True):
         """
         Merge ``other`` package set into this instance.
         """
|
|||||||
if i.file_path in self.file_cache:
|
if i.file_path in self.file_cache:
|
||||||
# TODO: test if it really works
|
# TODO: test if it really works
|
||||||
continue
|
continue
|
||||||
if inherit_to_noarch and exclusivearch_list and arch == "noarch":
|
if exclusivearch_list and arch == "noarch":
|
||||||
if is_excluded(i, exclusivearch_list, logger=self._logger):
|
if is_excluded(i, exclusivearch_list, logger=self._logger):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
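The inherit_to_noarch flag (master only) gates whether a noarch package is dropped when its ExclusiveArch does not cover the tree being built. A rough standalone version of the decision, with packages reduced to dicts and a simplified is_excluded that only approximates the real ExclusiveArch/ExcludeArch semantics:

```python
def is_excluded(pkg, arches):
    """True if ExclusiveArch/ExcludeArch rules the package out for arches."""
    if pkg.get("exclusivearch") and not set(pkg["exclusivearch"]) & set(arches):
        return True
    if set(pkg.get("excludearch", [])) >= set(arches):
        return True
    return False


# A noarch package built only for x86_64 content must not leak into aarch64
# composes when inherit-to-noarch behaviour is enabled.
pkg = {"name": "shim-unsigned-data", "arch": "noarch", "exclusivearch": ["x86_64"]}
for inherit_to_noarch in (True, False):
    dropped = (
        inherit_to_noarch and pkg["arch"] == "noarch" and is_excluded(pkg, ["aarch64"])
    )
    print(inherit_to_noarch, "->", "dropped" if dropped else "kept")
```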
@@ -343,11 +320,6 @@ class FilelistPackageSet(PackageSetBase):
         return result


-# This is a marker to indicate package set with only extra builds/tasks and no
-# tasks.
-MISSING_KOJI_TAG = object()
-
-
 class KojiPackageSet(PackageSetBase):
     def __init__(
         self,
@@ -364,7 +336,6 @@ class KojiPackageSet(PackageSetBase):
         extra_tasks=None,
         signed_packages_retries=0,
         signed_packages_wait=30,
-        downloader=None,
     ):
         """
         Creates new KojiPackageSet.
@@ -402,7 +373,7 @@ class KojiPackageSet(PackageSetBase):
        :param int signed_packages_wait: How long to wait between search attemts.
        """
        super(KojiPackageSet, self).__init__(
-            name if name != MISSING_KOJI_TAG else "no-tag",
+            name,
            sigkey_ordering=sigkey_ordering,
            arches=arches,
            logger=logger,
@@ -419,8 +390,6 @@ class KojiPackageSet(PackageSetBase):
         self.signed_packages_retries = signed_packages_retries
         self.signed_packages_wait = signed_packages_wait

-        self.downloader = downloader
-
     def __getstate__(self):
         result = self.__dict__.copy()
         del result["koji_wrapper"]
@ -542,28 +511,11 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
# Check if this RPM is coming from scratch task. In this case, we already
|
# Check if this RPM is coming from scratch task. In this case, we already
|
||||||
# know the path.
|
# know the path.
|
||||||
if "path_from_task" in rpm_info:
|
if "path_from_task" in rpm_info:
|
||||||
return self.downloader.get_file(rpm_info["path_from_task"])
|
return rpm_info["path_from_task"]
|
||||||
|
|
||||||
pathinfo = self.koji_wrapper.koji_module.pathinfo
|
pathinfo = self.koji_wrapper.koji_module.pathinfo
|
||||||
paths = []
|
paths = []
|
||||||
|
|
||||||
if "getRPMChecksums" in self.koji_proxy.system.listMethods():
|
|
||||||
|
|
||||||
def checksum_validator(keyname, pkg_path):
|
|
||||||
checksums = self.koji_proxy.getRPMChecksums(
|
|
||||||
rpm_info["id"], checksum_types=("sha256",)
|
|
||||||
)
|
|
||||||
if "sha256" in checksums.get(keyname, {}):
|
|
||||||
computed = compute_file_checksums(pkg_path, ("sha256",))
|
|
||||||
if computed["sha256"] != checksums[keyname]["sha256"]:
|
|
||||||
raise RuntimeError("Checksum mismatch for %s" % pkg_path)
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
def checksum_validator(keyname, pkg_path):
|
|
||||||
# Koji doesn't support checksums yet
|
|
||||||
pass
|
|
||||||
|
|
||||||
attempts_left = self.signed_packages_retries + 1
|
attempts_left = self.signed_packages_retries + 1
|
||||||
while attempts_left > 0:
|
while attempts_left > 0:
|
||||||
for sigkey in self.sigkey_ordering:
|
for sigkey in self.sigkey_ordering:
|
||||||
@ -576,11 +528,8 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
)
|
)
|
||||||
if rpm_path not in paths:
|
if rpm_path not in paths:
|
||||||
paths.append(rpm_path)
|
paths.append(rpm_path)
|
||||||
path = self.downloader.get_file(
|
if os.path.isfile(rpm_path):
|
||||||
rpm_path, partial(checksum_validator, sigkey)
|
return rpm_path
|
||||||
)
|
|
||||||
if path:
|
|
||||||
return path
|
|
||||||
|
|
||||||
# No signed copy was found, wait a little and try again.
|
# No signed copy was found, wait a little and try again.
|
||||||
attempts_left -= 1
|
attempts_left -= 1
|
||||||
@ -593,18 +542,16 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
# use an unsigned copy (if allowed)
|
# use an unsigned copy (if allowed)
|
||||||
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
||||||
paths.append(rpm_path)
|
paths.append(rpm_path)
|
||||||
path = self.downloader.get_file(rpm_path, partial(checksum_validator, ""))
|
if os.path.isfile(rpm_path):
|
||||||
if path:
|
return rpm_path
|
||||||
return path
|
|
||||||
|
|
||||||
if self._allow_invalid_sigkeys and rpm_info["name"] not in self.packages:
|
if self._allow_invalid_sigkeys and rpm_info["name"] not in self.packages:
|
||||||
# use an unsigned copy (if allowed)
|
# use an unsigned copy (if allowed)
|
||||||
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
||||||
paths.append(rpm_path)
|
paths.append(rpm_path)
|
||||||
path = self.downloader.get_file(rpm_path)
|
if os.path.isfile(rpm_path):
|
||||||
if path:
|
|
||||||
self._invalid_sigkey_rpms.append(rpm_info)
|
self._invalid_sigkey_rpms.append(rpm_info)
|
||||||
return path
|
return rpm_path
|
||||||
|
|
||||||
self._invalid_sigkey_rpms.append(rpm_info)
|
self._invalid_sigkey_rpms.append(rpm_info)
|
||||||
self.log_error(
|
self.log_error(
|
||||||
@ -625,7 +572,7 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
result_srpms = []
|
result_srpms = []
|
||||||
include_packages = set(include_packages or [])
|
include_packages = set(include_packages or [])
|
||||||
|
|
||||||
if isinstance(event, dict):
|
if type(event) is dict:
|
||||||
event = event["id"]
|
event = event["id"]
|
||||||
|
|
||||||
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (
|
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (
|
||||||
@ -634,8 +581,6 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
inherit,
|
inherit,
|
||||||
)
|
)
|
||||||
self.log_info("[BEGIN] %s" % msg)
|
self.log_info("[BEGIN] %s" % msg)
|
||||||
rpms, builds = [], []
|
|
||||||
if tag != MISSING_KOJI_TAG:
|
|
||||||
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
||||||
extra_rpms, extra_builds = self.get_extra_rpms()
|
extra_rpms, extra_builds = self.get_extra_rpms()
|
||||||
rpms += extra_rpms
|
rpms += extra_rpms
|
||||||
@ -741,15 +686,6 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
:param include_packages: an iterable of tuples (package name, arch) that should
|
:param include_packages: an iterable of tuples (package name, arch) that should
|
||||||
be included.
|
be included.
|
||||||
"""
|
"""
|
||||||
if len(self.sigkey_ordering) > 1 and (
|
|
||||||
None in self.sigkey_ordering or "" in self.sigkey_ordering
|
|
||||||
):
|
|
||||||
self.log_warning(
|
|
||||||
"Stop writing reuse file as unsigned packages are allowed "
|
|
||||||
"in the compose."
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
reuse_file = compose.paths.work.pkgset_reuse_file(self.name)
|
reuse_file = compose.paths.work.pkgset_reuse_file(self.name)
|
||||||
self.log_info("Writing pkgset reuse file: %s" % reuse_file)
|
self.log_info("Writing pkgset reuse file: %s" % reuse_file)
|
||||||
try:
|
try:
|
||||||
@ -766,13 +702,6 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
"srpms_by_name": self.srpms_by_name,
|
"srpms_by_name": self.srpms_by_name,
|
||||||
"extra_builds": self.extra_builds,
|
"extra_builds": self.extra_builds,
|
||||||
"include_packages": include_packages,
|
"include_packages": include_packages,
|
||||||
"inherit_to_noarch": compose.conf[
|
|
||||||
"pkgset_inherit_exclusive_arch_to_noarch"
|
|
||||||
],
|
|
||||||
"exclusive_noarch": compose.conf[
|
|
||||||
"pkgset_exclusive_arch_considers_noarch"
|
|
||||||
],
|
|
||||||
"module_defaults_dir": compose.conf.get("module_defaults_dir"),
|
|
||||||
},
|
},
|
||||||
f,
|
f,
|
||||||
protocol=pickle.HIGHEST_PROTOCOL,
|
protocol=pickle.HIGHEST_PROTOCOL,
|
||||||
@ -867,9 +796,6 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
self.log_debug("Failed to load reuse file: %s" % str(e))
|
self.log_debug("Failed to load reuse file: %s" % str(e))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
inherit_to_noarch = compose.conf["pkgset_inherit_exclusive_arch_to_noarch"]
|
|
||||||
exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
|
|
||||||
module_defaults_dir = compose.conf.get("module_defaults_dir")
|
|
||||||
if (
|
if (
|
||||||
reuse_data["allow_invalid_sigkeys"] == self._allow_invalid_sigkeys
|
reuse_data["allow_invalid_sigkeys"] == self._allow_invalid_sigkeys
|
||||||
and reuse_data["packages"] == self.packages
|
and reuse_data["packages"] == self.packages
|
||||||
@ -877,11 +803,6 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
and reuse_data["extra_builds"] == self.extra_builds
|
and reuse_data["extra_builds"] == self.extra_builds
|
||||||
and reuse_data["sigkeys"] == self.sigkey_ordering
|
and reuse_data["sigkeys"] == self.sigkey_ordering
|
||||||
and reuse_data["include_packages"] == include_packages
|
and reuse_data["include_packages"] == include_packages
|
||||||
# If the value is not present in reuse data, the compose was
|
|
||||||
# generated with older version of Pungi. Best to not reuse.
|
|
||||||
and reuse_data.get("inherit_to_noarch") == inherit_to_noarch
|
|
||||||
and reuse_data.get("exclusive_noarch") == exclusive_noarch
|
|
||||||
and reuse_data.get("module_defaults_dir") == module_defaults_dir
|
|
||||||
):
|
):
|
||||||
self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
|
self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
|
||||||
copy_all(old_repo_dir, repo_dir)
|
copy_all(old_repo_dir, repo_dir)
|
||||||
@ -897,6 +818,69 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
|
|
||||||
|
|
||||||
class KojiMockPackageSet(KojiPackageSet):
|
class KojiMockPackageSet(KojiPackageSet):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
name,
|
||||||
|
koji_wrapper,
|
||||||
|
sigkey_ordering,
|
||||||
|
arches=None,
|
||||||
|
logger=None,
|
||||||
|
packages=None,
|
||||||
|
allow_invalid_sigkeys=False,
|
||||||
|
populate_only_packages=False,
|
||||||
|
cache_region=None,
|
||||||
|
extra_builds=None,
|
||||||
|
extra_tasks=None,
|
||||||
|
signed_packages_retries=0,
|
||||||
|
signed_packages_wait=30,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Creates new KojiPackageSet.
|
||||||
|
|
||||||
|
:param list sigkey_ordering: Ordered list of sigkey strings. When
|
||||||
|
getting package from Koji, KojiPackageSet tries to get the package
|
||||||
|
signed by sigkey from this list. If None or "" appears in this
|
||||||
|
list, unsigned package is used.
|
||||||
|
:param list arches: List of arches to get the packages for.
|
||||||
|
:param logging.Logger logger: Logger instance to use for logging.
|
||||||
|
:param list packages: List of package names to be used when
|
||||||
|
`allow_invalid_sigkeys` or `populate_only_packages` is set.
|
||||||
|
:param bool allow_invalid_sigkeys: When True, packages *not* listed in
|
||||||
|
the `packages` list are added to KojiPackageSet even if they have
|
||||||
|
invalid sigkey. This is useful in case Koji tag contains some
|
||||||
|
unsigned packages, but we know they won't appear in a compose.
|
||||||
|
When False, all packages in Koji tag must have valid sigkey as
|
||||||
|
defined in `sigkey_ordering`.
|
||||||
|
:param bool populate_only_packages. When True, only packages in
|
||||||
|
`packages` list are added to KojiPackageSet. This can save time
|
||||||
|
when generating compose from predefined list of packages from big
|
||||||
|
Koji tag.
|
||||||
|
When False, all packages from Koji tag are added to KojiPackageSet.
|
||||||
|
:param dogpile.cache.CacheRegion cache_region: If set, the CacheRegion
|
||||||
|
will be used to cache the list of RPMs per Koji tag, so next calls
|
||||||
|
of the KojiPackageSet.populate(...) method won't try fetching it
|
||||||
|
again.
|
||||||
|
:param list extra_builds: Extra builds NVRs to get from Koji and include
|
||||||
|
in the package set.
|
||||||
|
:param list extra_tasks: Extra RPMs defined as Koji task IDs to get from Koji
|
||||||
|
and include in the package set. Useful when building testing compose
|
||||||
|
with RPM scratch builds.
|
||||||
|
"""
|
||||||
|
super(KojiMockPackageSet, self).__init__(
|
||||||
|
name,
|
||||||
|
koji_wrapper=koji_wrapper,
|
||||||
|
sigkey_ordering=sigkey_ordering,
|
||||||
|
arches=arches,
|
||||||
|
logger=logger,
|
||||||
|
packages=packages,
|
||||||
|
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
||||||
|
populate_only_packages=populate_only_packages,
|
||||||
|
cache_region=cache_region,
|
||||||
|
extra_builds=extra_builds,
|
||||||
|
extra_tasks=extra_tasks,
|
||||||
|
signed_packages_retries=signed_packages_retries,
|
||||||
|
signed_packages_wait=signed_packages_wait,
|
||||||
|
)
|
||||||
|
|
||||||
def _is_rpm_signed(self, rpm_path) -> bool:
|
def _is_rpm_signed(self, rpm_path) -> bool:
|
||||||
ts = rpm.TransactionSet()
|
ts = rpm.TransactionSet()
|
||||||
@ -905,8 +889,6 @@ class KojiMockPackageSet(KojiPackageSet):
|
|||||||
sigkey.lower() for sigkey in self.sigkey_ordering
|
sigkey.lower() for sigkey in self.sigkey_ordering
|
||||||
if sigkey is not None
|
if sigkey is not None
|
||||||
]
|
]
|
||||||
if not sigkeys:
|
|
||||||
return True
|
|
||||||
with open(rpm_path, 'rb') as fd:
|
with open(rpm_path, 'rb') as fd:
|
||||||
header = ts.hdrFromFdno(fd)
|
header = ts.hdrFromFdno(fd)
|
||||||
signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
|
signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
|
||||||
|
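The hunks above swap master's downloader-based get_package_path (which fetches each RPM through compose.koji_downloader and, when the hub offers getRPMChecksums, verifies the downloaded file against the Koji-reported SHA-256) back to the older behaviour of returning the first signed path that already exists on disk. A minimal sketch of the verification idea, with fetch() as a hypothetical stand-in for the downloader object (not pungi's real API):

    import hashlib


    def sha256_of(path, chunk_size=1024 * 1024):
        # Stream the file so large RPMs are not loaded into memory at once.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()


    def get_validated_rpm(rpm_path, expected_sha256, fetch):
        # fetch() is assumed to return a local path, or None when the remote
        # copy does not exist; it stands in for the real downloader.
        local_path = fetch(rpm_path)
        if local_path and expected_sha256 and sha256_of(local_path) != expected_sha256:
            raise RuntimeError("Checksum mismatch for %s" % local_path)
        return local_path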
@@ -193,13 +193,17 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
     def __call__(self):
         compose = self.compose
         self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
-        package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper)
-        return (package_sets, self.compose.koji_downloader.path_prefix)
+        # path prefix must contain trailing '/'
+        path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
+        package_sets = get_pkgset_from_koji(
+            self.compose, self.koji_wrapper, path_prefix
+        )
+        return (package_sets, path_prefix)


-def get_pkgset_from_koji(compose, koji_wrapper):
+def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
     event_info = get_koji_event_info(compose, koji_wrapper)
-    return populate_global_pkgset(compose, koji_wrapper, event_info)
+    return populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)


 def _add_module_to_variant(
@@ -222,23 +226,20 @@ def _add_module_to_variant(
     """
     mmds = {}
     archives = koji_wrapper.koji_proxy.listArchives(build["id"])
-    available_arches = set()
     for archive in archives:
         if archive["btype"] != "module":
             # Skip non module archives
             continue
         typedir = koji_wrapper.koji_module.pathinfo.typedir(build, archive["btype"])
         filename = archive["filename"]
-        file_path = compose.koji_downloader.get_file(os.path.join(typedir, filename))
+        file_path = os.path.join(typedir, filename)
         try:
             # If there are two dots, the arch is in the middle. MBS uploads
             # files with actual architecture in the filename, but Pungi deals
             # in basearch. This assumes that each arch in the build maps to a
             # unique basearch.
             _, arch, _ = filename.split(".")
-            basearch = getBaseArch(arch)
-            filename = "modulemd.%s.txt" % basearch
-            available_arches.add(basearch)
+            filename = "modulemd.%s.txt" % getBaseArch(arch)
         except ValueError:
             pass
         mmds[filename] = file_path
@@ -263,26 +264,15 @@ def _add_module_to_variant(
             compose.log_debug("Module %s is filtered from %s.%s", nsvc, variant, arch)
             continue

-        if arch not in available_arches:
-            compose.log_debug(
-                "Module %s is not available for arch %s.%s", nsvc, variant, arch
-            )
-            continue
-
         filename = "modulemd.%s.txt" % arch
         if filename not in mmds:
             raise RuntimeError(
                 "Module %s does not have metadata for arch %s and is not filtered "
                 "out via filter_modules option." % (nsvc, arch)
             )
-        try:
-            mod_stream = read_single_module_stream_from_file(
-                mmds[filename], compose, arch, build
-            )
-        except Exception as exc:
-            # libmodulemd raises various GLib exceptions with not very helpful
-            # messages. Let's replace it with something more useful.
-            raise RuntimeError("Failed to read %s: %s", mmds[filename], str(exc))
+        mod_stream = read_single_module_stream_from_file(
+            mmds[filename], compose, arch, build
+        )
         if mod_stream:
             added = True
             variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream
@@ -405,13 +395,7 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):


 def _get_modules_from_koji(
-    compose,
-    koji_wrapper,
-    event,
-    variant,
-    variant_tags,
-    tag_to_mmd,
-    exclude_module_ns,
+    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
 ):
     """
     Loads modules for given `variant` from koji `session`, adds them to
@@ -496,16 +480,7 @@ def filter_inherited(koji_proxy, event, module_builds, top_tag):
         # And keep only builds from that topmost tag
         result.extend(build for build in builds if build["tag_name"] == tag)

-    # If the same module was inherited multiple times, it will be in result
-    # multiple times. We need to deduplicate.
-    deduplicated_result = []
-    included_nvrs = set()
-    for build in result:
-        if build["nvr"] not in included_nvrs:
-            deduplicated_result.append(build)
-            included_nvrs.add(build["nvr"])
-
-    return deduplicated_result
+    return result


 def filter_by_whitelist(compose, module_builds, input_modules, expected_modules):
@@ -695,7 +670,7 @@ def _get_modules_from_koji_tags(
     )


-def populate_global_pkgset(compose, koji_wrapper, event):
+def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
     all_arches = get_all_arches(compose)

     # List of compose tags from which we create this compose
@@ -789,12 +764,7 @@ def populate_global_pkgset(compose, koji_wrapper, event):

         if extra_modules:
             _add_extra_modules_to_variant(
-                compose,
-                koji_wrapper,
-                variant,
-                extra_modules,
-                variant_tags,
-                tag_to_mmd,
+                compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
             )

         variant_scratch_modules = get_variant_data(
@@ -821,23 +791,17 @@ def populate_global_pkgset(compose, koji_wrapper, event):

     pkgsets = []

-    extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
-    extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
-
-    if not pkgset_koji_tags and (extra_builds or extra_tasks):
-        # We have extra packages to pull in, but no tag to merge them with.
-        compose_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
-        pkgset_koji_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
-
     # Get package set for each compose tag and merge it to global package
     # list. Also prepare per-variant pkgset, because we do not have list
     # of binary RPMs in module definition - there is just list of SRPMs.
     for compose_tag in compose_tags:
         compose.log_info("Loading package set for tag %s", compose_tag)
-        kwargs = {}
         if compose_tag in pkgset_koji_tags:
-            kwargs["extra_builds"] = extra_builds
-            kwargs["extra_tasks"] = extra_tasks
+            extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
+            extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
+        else:
+            extra_builds = []
+            extra_tasks = []

         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
             compose_tag,
@@ -849,10 +813,10 @@ def populate_global_pkgset(compose, koji_wrapper, event):
             allow_invalid_sigkeys=allow_invalid_sigkeys,
             populate_only_packages=populate_only_packages_to_gather,
             cache_region=compose.cache_region,
+            extra_builds=extra_builds,
+            extra_tasks=extra_tasks,
             signed_packages_retries=compose.conf["signed_packages_retries"],
             signed_packages_wait=compose.conf["signed_packages_wait"],
-            downloader=compose.koji_downloader,
-            **kwargs
         )

         # Check if we have cache for this tag from previous compose. If so, use
@@ -910,18 +874,13 @@ def populate_global_pkgset(compose, koji_wrapper, event):
         if pkgset.reuse is None:
             pkgset.populate(
                 compose_tag,
-                # We care about packages as they existed on the specified
-                # event. However, modular content tags are not expected to
-                # change, so the event doesn't matter there. If an exact NSVC
-                # of a module is specified, the code above would happily find
-                # its content tag, but fail here if the content tag doesn't
-                # exist at the given event.
-                event=event if is_traditional else None,
+                event,
                 inherit=should_inherit,
                 include_packages=modular_packages,
             )
         for variant in compose.all_variants.values():
             if compose_tag in variant_tags[variant]:

                 # If it's a modular tag, store the package set for the module.
                 for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
                     if compose_tag == koji_tag:
@@ -944,7 +903,7 @@ def populate_global_pkgset(compose, koji_wrapper, event):
                 MaterializedPackageSet.create,
                 compose,
                 pkgset,
-                compose.koji_downloader.path_prefix,
+                path_prefix,
                 mmd=tag_to_mmd.get(pkgset.name),
             )
         )
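Note the filter_inherited change above: master deduplicates the collected module builds by NVR, because the same build can be reached through several branches of the tag inheritance chain, while this branch returns the list as collected. The removed logic is a plain order-preserving dedup keyed on the build NVR; a standalone sketch:

    def dedup_by_nvr(builds):
        # Keep only the first occurrence of each NVR, preserving input order.
        seen = set()
        result = []
        for build in builds:
            if build["nvr"] not in seen:
                seen.add(build["nvr"])
                result.append(build)
        return result


    builds = [{"nvr": "perl-5.30-1"}, {"nvr": "perl-5.30-1"}, {"nvr": "git-2.39-1"}]
    assert dedup_by_nvr(builds) == [{"nvr": "perl-5.30-1"}, {"nvr": "git-2.39-1"}]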
@@ -35,12 +35,10 @@ import pungi.wrappers.kojiwrapper
 from pungi.wrappers.comps import CompsWrapper
 from pungi.wrappers.mbs import MBSWrapper
 import pungi.phases.pkgset.pkgsets

 from pungi.util import (
     retry,
     get_arch_variant_data,
     get_variant_data,

     read_single_module_stream_from_string,
     read_single_module_stream_from_file,
 )
@@ -103,12 +101,12 @@ def variant_dict_from_str(compose, module_str):
     release_regex = re.compile(r"^(\d){14}$")

     section_start = module_str.rfind("-")
-    module_str_first_part = module_str[section_start + 1:]
+    module_str_first_part = module_str[section_start + 1 :]
     if release_regex.match(module_str_first_part):
         module_info["version"] = module_str_first_part
         module_str = module_str[:section_start]
         section_start = module_str.rfind("-")
-        module_info["stream"] = module_str[section_start + 1:]
+        module_info["stream"] = module_str[section_start + 1 :]
     else:
         module_info["stream"] = module_str_first_part
     module_info["name"] = module_str[:section_start]
@@ -162,16 +160,14 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
             # Store module versioning information into the dict, but make sure
             # not to overwrite any existing keys.
             md["module_stream"] = md["extra"]["typeinfo"]["module"]["stream"]
-            md["module_version"] = int(
-                md["extra"]["typeinfo"]["module"]["version"])
+            md["module_version"] = int(md["extra"]["typeinfo"]["module"]["version"])
             md["module_context"] = md["extra"]["typeinfo"]["module"]["context"]
         except KeyError:
             continue

         if md["state"] == pungi.wrappers.kojiwrapper.KOJI_BUILD_DELETED:
             compose.log_debug(
-                "Module build %s has been deleted, ignoring it." % build[
-                    "name"]
+                "Module build %s has been deleted, ignoring it." % build["name"]
             )
             continue

@@ -193,8 +189,7 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
     )
     latest_version = sorted_modules[0]["module_version"]
     modules = [
-        module for module in modules
-        if latest_version == module["module_version"]
+        module for module in modules if latest_version == module["module_version"]
     ]

     return modules
@@ -210,8 +205,7 @@ class PkgsetSourceKojiMock(pungi.phases.pkgset.source.PkgsetSourceBase):
             get_all_arches(compose),
         )
         # path prefix must contain trailing '/'
-        path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip(
-            "/") + "/"
+        path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
         package_sets = get_pkgset_from_koji(
             self.compose, self.koji_wrapper, path_prefix
         )
@@ -220,8 +214,7 @@ class PkgsetSourceKojiMock(pungi.phases.pkgset.source.PkgsetSourceBase):

 def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
     event_info = get_koji_event_info(compose, koji_wrapper)
-    return populate_global_pkgset(compose, koji_wrapper, path_prefix,
-                                  event_info)
+    return populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)


 def _add_module_to_variant(
@@ -248,16 +241,13 @@ def _add_module_to_variant(
         if archive["btype"] != "module":
             # Skip non module archives
             continue

         filename = archive["filename"]
         file_path = os.path.join(
             koji_wrapper.koji_module.pathinfo.topdir,
             'modules',
             build['arch'],
             build['extra']['typeinfo']['module']['content_koji_tag']
         )

         mmds[filename] = file_path

     if len(mmds) <= 1:
@@ -276,22 +266,17 @@ def _add_module_to_variant(
     added = False

     for arch in variant.arches:
-        if _is_filtered_out(compose, variant, arch, info["name"],
-                            info["stream"]):
-            compose.log_debug("Module %s is filtered from %s.%s", nsvc,
-                              variant, arch)
+        if _is_filtered_out(compose, variant, arch, info["name"], info["stream"]):
+            compose.log_debug("Module %s is filtered from %s.%s", nsvc, variant, arch)
             continue

         filename = "modulemd.%s.txt" % arch
         try:
             mod_stream = read_single_module_stream_from_file(
                 mmds[filename], compose, arch, build
             )
             if mod_stream:
                 added = True
                 variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream

             added = True
         except KeyError:
             # There is no modulemd for this arch. This could mean an arch was
@@ -313,8 +298,7 @@ def _add_extra_modules_to_variant(
     compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
 ):
     for nsvc in extra_modules:
-        msg = "Adding extra module build '%s' to variant '%s'" % (
-            nsvc, variant)
+        msg = "Adding extra module build '%s' to variant '%s'" % (nsvc, variant)
         compose.log_info(msg)

         nsvc_info = nsvc.split(":")
@@ -360,8 +344,7 @@ def _add_scratch_modules_to_variant(
     compose, variant, scratch_modules, variant_tags, tag_to_mmd
 ):
     if compose.compose_type != "test" and scratch_modules:
-        compose.log_warning(
-            "Only test composes could include scratch module builds")
+        compose.log_warning("Only test composes could include scratch module builds")
         return

     mbs = MBSWrapper(compose.conf["mbs_api_url"])
@@ -372,8 +355,7 @@ def _add_scratch_modules_to_variant(
         try:
             final_modulemd = mbs.final_modulemd(module_build["id"])
         except Exception:
-            compose.log_error(
-                "Unable to get modulemd for build %s" % module_build)
+            compose.log_error("Unable to get modulemd for build %s" % module_build)
             raise
         tag = module_build["koji_tag"]
         variant_tags[variant].append(tag)
@@ -381,7 +363,8 @@ def _add_scratch_modules_to_variant(
         for arch in variant.arches:
             try:
                 mmd = read_single_module_stream_from_string(
-                    final_modulemd[arch])
+                    final_modulemd[arch]
+                )
                 variant.arch_mmds.setdefault(arch, {})[nsvc] = mmd
             except KeyError:
                 continue
@@ -407,24 +390,21 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):
     if not compose:
         return False

-    for filter in get_arch_variant_data(compose.conf, "filter_modules", arch,
-                                        variant):
+    for filter in get_arch_variant_data(compose.conf, "filter_modules", arch, variant):
         if ":" not in filter:
             name_filter = filter
             stream_filter = "*"
         else:
             name_filter, stream_filter = filter.split(":", 1)

-        if fnmatch(module_name, name_filter) and fnmatch(module_stream,
-                                                         stream_filter):
+        if fnmatch(module_name, name_filter) and fnmatch(module_stream, stream_filter):
             return True

     return False


 def _get_modules_from_koji(
-    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd,
-    exclude_module_ns
+    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
 ):
     """
     Loads modules for given `variant` from koji `session`, adds them to
@@ -435,21 +415,15 @@ def _get_modules_from_koji(
     :param Variant variant: Variant with modules to find.
     :param dict variant_tags: Dict populated by this method. Key is `variant`
         and value is list of Koji tags to get the RPMs from.
-    :param list exclude_module_ns: Module name:stream which will be excluded.
     """

     # Find out all modules in every variant and add their Koji tags
     # to variant and variant_tags list.
     for module in variant.get_modules():
-        koji_modules = get_koji_modules(compose, koji_wrapper, event,
-                                        module["name"])
+        koji_modules = get_koji_modules(compose, koji_wrapper, event, module["name"])
         for koji_module in koji_modules:
             nsvc = _add_module_to_variant(
-                koji_wrapper,
-                variant,
-                koji_module,
-                compose=compose,
-                exclude_module_ns=exclude_module_ns,
+                koji_wrapper, variant, koji_module, compose=compose
             )
             if not nsvc:
                 continue
@@ -488,8 +462,7 @@ def filter_inherited(koji_proxy, event, module_builds, top_tag):
     does not understand streams, so we have to reimplement it here.
     """
     inheritance = [
-        tag["name"] for tag in
-        koji_proxy.getFullInheritance(top_tag, event=event["id"])
+        tag["name"] for tag in koji_proxy.getFullInheritance(top_tag, event=event["id"])
     ]

     def keyfunc(mb):
@@ -514,8 +487,7 @@ def filter_inherited(koji_proxy, event, module_builds, top_tag):
     return result


-def filter_by_whitelist(compose, module_builds, input_modules,
-                        expected_modules):
+def filter_by_whitelist(compose, module_builds, input_modules, expected_modules):
     """
     Exclude modules from the list that do not match any pattern specified in
     input_modules. Order may not be preserved. The last argument is a set of
@@ -539,7 +511,6 @@ def filter_by_whitelist(compose, module_builds, input_modules,
             info.get("context"),
         )
         nvr_patterns.add((pattern, spec["name"]))

     modules_to_keep = []

     for mb in sorted(module_builds, key=lambda i: i['name']):
@@ -604,13 +575,7 @@ def _filter_expected_modules(


 def _get_modules_from_koji_tags(
-    compose,
-    koji_wrapper,
-    event_id,
-    variant,
-    variant_tags,
-    tag_to_mmd,
-    exclude_module_ns,
+    compose, koji_wrapper, event_id, variant, variant_tags, tag_to_mmd
 ):
     """
     Loads modules for given `variant` from Koji, adds them to
@@ -622,12 +587,10 @@ def _get_modules_from_koji_tags(
     :param Variant variant: Variant with modules to find.
     :param dict variant_tags: Dict populated by this method. Key is `variant`
         and value is list of Koji tags to get the RPMs from.
-    :param list exclude_module_ns: Module name:stream which will be excluded.
     """
     # Compose tags from configuration
     compose_tags = [
-        {"name": tag} for tag in
-        force_list(compose.conf["pkgset_koji_module_tag"])
+        {"name": tag} for tag in force_list(compose.conf["pkgset_koji_module_tag"])
     ]
     # Get set of configured module names for this variant. If nothing is
     # configured, the set is empty.
@@ -654,8 +617,7 @@ def _get_modules_from_koji_tags(
         )

         # Filter out builds inherited from non-top tag
-        module_builds = filter_inherited(koji_proxy, event_id, module_builds,
-                                         tag)
+        module_builds = filter_inherited(koji_proxy, event_id, module_builds, tag)

         # Apply whitelist of modules if specified.
         variant_modules = variant.get_modules()
@@ -663,7 +625,6 @@ def _get_modules_from_koji_tags(
         module_builds = filter_by_whitelist(
             compose, module_builds, variant_modules, expected_modules
         )

         # Find the latest builds of all modules. This does following:
         # - Sorts the module_builds descending by Koji NVR (which maps to NSV
         #   for modules). Split release into modular version and context, and
@@ -701,18 +662,6 @@ def _get_modules_from_koji_tags(
         for build in latest_builds:
             # Get the Build from Koji to get modulemd and module_tag.
             build = koji_proxy.getBuild(build["build_id"])
-
-            nsvc = _add_module_to_variant(
-                koji_wrapper,
-                variant,
-                build,
-                True,
-                compose=compose,
-                exclude_module_ns=exclude_module_ns,
-            )
-            if not nsvc:
-                continue

             module_tag = (
                 build.get("extra", {})
                 .get("typeinfo", {})
@@ -722,6 +671,12 @@ def _get_modules_from_koji_tags(

             variant_tags[variant].append(module_tag)

+            nsvc = _add_module_to_variant(
+                koji_wrapper, variant, build, True, compose=compose
+            )
+            if not nsvc:
+                continue
+
             tag_to_mmd.setdefault(module_tag, {})
             for arch in variant.arch_mmds:
                 try:
@@ -753,9 +708,8 @@ def _get_modules_from_koji_tags(
         # There are some module names that were listed in configuration and not
         # found in any tag...
         raise RuntimeError(
-            f"Configuration specified patterns ({', '.join(expected_modules)})"
-            " that don't match any modules in "
-            f"the configured tags for variant '{variant.name}'"
+            "Configuration specified patterns (%s) that don't match "
+            "any modules in the configured tags." % ", ".join(expected_modules)
         )
@@ -813,48 +767,26 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
                 "modules."
             )

-        extra_modules = get_variant_data(
-            compose.conf, "pkgset_koji_module_builds", variant
-        )
-
-        # When adding extra modules, other modules of the same name:stream available
-        # in brew tag should be excluded.
-        exclude_module_ns = []
-        if extra_modules:
-            exclude_module_ns = [
-                ":".join(nsvc.split(":")[:2]) for nsvc in extra_modules
-            ]
-
         if modular_koji_tags or (
             compose.conf["pkgset_koji_module_tag"] and variant.modules
         ):
             # List modules tagged in particular tags.
             _get_modules_from_koji_tags(
-                compose,
-                koji_wrapper,
-                event,
-                variant,
-                variant_tags,
-                tag_to_mmd,
-                exclude_module_ns,
+                compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
             )
         elif variant.modules:
             # Search each module in Koji separately. Tagging does not come into
             # play here.
             _get_modules_from_koji(
-                compose,
-                koji_wrapper,
-                event,
-                variant,
-                variant_tags,
-                tag_to_mmd,
-                exclude_module_ns,
+                compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
             )

+        extra_modules = get_variant_data(
+            compose.conf, "pkgset_koji_module_builds", variant
+        )
         if extra_modules:
             _add_extra_modules_to_variant(
-                compose, koji_wrapper, variant, extra_modules, variant_tags,
-                tag_to_mmd
+                compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
             )

         variant_scratch_modules = get_variant_data(
@@ -862,8 +794,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
         )
         if variant_scratch_modules:
             _add_scratch_modules_to_variant(
-                compose, variant, variant_scratch_modules, variant_tags,
-                tag_to_mmd
+                compose, variant, variant_scratch_modules, variant_tags, tag_to_mmd
             )

         # Ensure that every tag added to `variant_tags` is added also to
@@ -888,10 +819,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
     for compose_tag in compose_tags:
         compose.log_info("Loading package set for tag %s", compose_tag)
         if compose_tag in pkgset_koji_tags:
-            extra_builds = force_list(
-                compose.conf.get("pkgset_koji_builds", []))
-            extra_tasks = force_list(
-                compose.conf.get("pkgset_koji_scratch_tasks", []))
+            extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
+            extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
         else:
             extra_builds = []
             extra_tasks = []
@@ -997,8 +926,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):


 def get_koji_event_info(compose, koji_wrapper):
-    event_file = os.path.join(compose.paths.work.topdir(arch="global"),
-                              "koji-event")
+    event_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-event")

     compose.log_info("Getting koji event")
     result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file)
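The _is_filtered_out hunks above interpret each filter_modules entry as a name:stream pair matched with fnmatch, where a bare name means any stream. A self-contained illustration of that matching rule (the pattern values are made up for the example):

    from fnmatch import fnmatch


    def is_filtered(module_name, module_stream, patterns):
        # Return True when name:stream matches any configured pattern.
        for pattern in patterns:
            if ":" not in pattern:
                name_filter, stream_filter = pattern, "*"
            else:
                name_filter, stream_filter = pattern.split(":", 1)
            if fnmatch(module_name, name_filter) and fnmatch(
                module_stream, stream_filter
            ):
                return True
        return False


    assert is_filtered("perl", "5.30", ["perl"])            # bare name, any stream
    assert is_filtered("perl-DBI", "1.6", ["perl-*:1.*"])   # globs on both parts
    assert not is_filtered("nodejs", "12", ["perl:*"])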
@@ -13,19 +13,13 @@
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, see <https://gnu.org/licenses/>.

-import contextlib
 import os
 import re
-import shutil
-import tarfile
-
-import requests
 import six
 from six.moves import shlex_quote
 import kobo.log
 from kobo.shortcuts import run

-from pungi import util
 from pungi.wrappers import kojiwrapper


@@ -236,9 +230,9 @@ class Runroot(kobo.log.LoggingBase):
         fmt_dict["runroot_key"] = runroot_key
         self._ssh_run(hostname, user, run_template, fmt_dict, log_file=log_file)

-        fmt_dict["command"] = (
-            "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'"
-        )
+        fmt_dict[
+            "command"
+        ] = "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'"
         buildroot_rpms = self._ssh_run(
             hostname,
             user,
@@ -320,8 +314,7 @@ class Runroot(kobo.log.LoggingBase):
             arch,
             args,
             channel=runroot_channel,
-            # We want to change owner only if shared NFS directory is used.
-            chown_uid=os.getuid() if kwargs.get("mounts") else None,
+            chown_uid=os.getuid(),
             **kwargs
         )

@@ -332,7 +325,6 @@ class Runroot(kobo.log.LoggingBase):
                 % (output["task_id"], log_file)
             )
         self._result = output
-        return output["task_id"]

     def run_pungi_ostree(self, args, log_file=None, arch=None, **kwargs):
         """
@@ -389,72 +381,3 @@ class Runroot(kobo.log.LoggingBase):
             return self._result
         else:
             raise ValueError("Unknown runroot_method %r." % self.runroot_method)
-
-
-@util.retry(wait_on=requests.exceptions.RequestException)
-def _download_file(url, dest):
-    # contextlib.closing is only needed in requests<2.18
-    with contextlib.closing(requests.get(url, stream=True, timeout=5)) as r:
-        if r.status_code == 404:
-            raise RuntimeError("Archive %s not found" % url)
-        r.raise_for_status()
-        with open(dest, "wb") as f:
-            shutil.copyfileobj(r.raw, f)
-
-
-def _download_archive(task_id, fname, archive_url, dest_dir):
-    """Download file from URL to a destination, with retries."""
-    temp_file = os.path.join(dest_dir, fname)
-    _download_file(archive_url, temp_file)
-    return temp_file
-
-
-def _extract_archive(task_id, fname, archive_file, dest_path):
-    """Extract the archive into given destination.
-
-    All items of the archive must match the name of the archive, i.e. all
-    paths in foo.tar.gz must start with foo/.
-    """
-    basename = os.path.basename(fname).split(".")[0]
-    strip_prefix = basename + "/"
-    with tarfile.open(archive_file, "r") as archive:
-        for member in archive.getmembers():
-            # Check if each item is either the root directory or is within it.
-            if member.name != basename and not member.name.startswith(strip_prefix):
-                raise RuntimeError(
-                    "Archive %s from task %s contains file without expected prefix: %s"
-                    % (fname, task_id, member)
-                )
-            dest = os.path.join(dest_path, member.name[len(strip_prefix) :])
-            if member.isdir():
-                # Create directories where needed...
-                util.makedirs(dest)
-            elif member.isfile():
-                # ... and extract files into them.
-                with open(dest, "wb") as dest_obj:
-                    shutil.copyfileobj(archive.extractfile(member), dest_obj)
-            elif member.islnk():
-                # We have a hardlink. Let's also link it.
-                linked_file = os.path.join(
-                    dest_path, member.linkname[len(strip_prefix) :]
-                )
-                os.link(linked_file, dest)
-            else:
-                # Any other file type is an error.
-                raise RuntimeError(
-                    "Unexpected file type in %s from task %s: %s"
-                    % (fname, task_id, member)
-                )
-
-
-def download_and_extract_archive(compose, task_id, fname, destination):
-    """Download a tar archive from task outputs and extract it to the destination."""
-    koji = kojiwrapper.KojiWrapper(compose).koji_module
-    # Koji API provides downloadTaskOutput method, but it's not usable as it
-    # will attempt to load the entire file into memory.
-    # So instead let's generate a patch and attempt to convert it to a URL.
-    server_path = os.path.join(koji.pathinfo.task(task_id), fname)
-    archive_url = server_path.replace(koji.config.topdir, koji.config.topurl)
-    with util.temp_dir(prefix="buildinstall-download") as tmp_dir:
-        local_path = _download_archive(task_id, fname, archive_url, tmp_dir)
-        _extract_archive(task_id, fname, local_path, destination)
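The removed _download_file above relies on util.retry(wait_on=requests.exceptions.RequestException) to ride out transient network failures. A generic sketch of that decorator shape, assuming time-based retries (the timeout and interval defaults here are illustrative, not necessarily pungi's):

    import functools
    import time


    def retry(timeout=120, interval=30, wait_on=Exception):
        # Re-invoke the wrapped callable on `wait_on` until `timeout` elapses.
        def wrapper(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                start = time.time()
                while True:
                    try:
                        return func(*args, **kwargs)
                    except wait_on:
                        if time.time() - start >= timeout:
                            raise
                        time.sleep(interval)
            return inner
        return wrapper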
@@ -1,63 +0,0 @@
-import argparse
-import os
-import re
-import time
-
-from pungi.util import format_size
-
-
-LOCK_RE = re.compile(r".*\.lock(\|[A-Za-z0-9]+)*$")
-
-
-def should_be_cleaned_up(path, st, threshold):
-    if st.st_nlink == 1 and st.st_mtime < threshold:
-        # No other instances, older than limit
-        return True
-
-    if LOCK_RE.match(path) and st.st_mtime < threshold:
-        # Suspiciously old lock
-        return True
-
-    return False
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument("CACHE_DIR")
-    parser.add_argument("-n", "--dry-run", action="store_true")
-    parser.add_argument("--verbose", action="store_true")
-    parser.add_argument(
-        "--max-age",
-        help="how old files should be considered for deletion",
-        default=7,
-        type=int,
-    )
-
-    args = parser.parse_args()
-
-    topdir = os.path.abspath(args.CACHE_DIR)
-    max_age = args.max_age * 24 * 3600
-
-    cleaned_up = 0
-
-    threshold = time.time() - max_age
-    for dirpath, dirnames, filenames in os.walk(topdir):
-        for f in filenames:
-            filepath = os.path.join(dirpath, f)
-            st = os.stat(filepath)
-            if should_be_cleaned_up(filepath, st, threshold):
-                if args.verbose:
-                    print("RM %s" % filepath)
-                cleaned_up += st.st_size
-                if not args.dry_run:
-                    os.remove(filepath)
-        if not dirnames and not filenames:
-            if args.verbose:
-                print("RMDIR %s" % dirpath)
-            if not args.dry_run:
-                os.rmdir(dirpath)
-
-    if args.dry_run:
-        print("Would reclaim %s bytes." % format_size(cleaned_up))
-    else:
-        print("Reclaimed %s bytes." % format_size(cleaned_up))
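The deleted script above treats a cached file as reclaimable once its hardlink count drops to 1 (meaning no compose directory still links to it) and it is older than the age threshold; suspiciously old .lock files age out by the same clock. The core predicate, restated on its own for clarity:

    import os
    import time


    def reclaimable(path, max_age_days=7):
        # A cache entry can go when nothing else hardlinks it and it is stale.
        st = os.stat(path)
        threshold = time.time() - max_age_days * 24 * 3600
        return st.st_nlink == 1 and st.st_mtime < threshold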
@@ -171,11 +171,32 @@ def main():
     group.add_argument(
         "--offline", action="store_true", help="Do not resolve git references."
     )
+    parser.add_argument(
+        "--multi",
+        metavar="DIR",
+        help=(
+            "Treat source as config for pungi-orchestrate and store dump into "
+            "given directory."
+        ),
+    )
 
     args = parser.parse_args()
 
     defines = config_utils.extract_defines(args.define)
 
+    if args.multi:
+        if len(args.sources) > 1:
+            parser.error("Only one multi config can be specified.")
+
+        return dump_multi_config(
+            args.sources[0],
+            dest=args.multi,
+            defines=defines,
+            just_dump=args.just_dump,
+            event=args.freeze_event,
+            offline=args.offline,
+        )
+
     return process_file(
         args.sources,
         defines=defines,
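Note: the 4.3.7 side accepts exactly one source when --multi is given. A self-contained argparse reproduction of that check (prog name and sample arguments are illustrative only):

    import argparse

    parser = argparse.ArgumentParser(prog="pungi-config-dump")
    parser.add_argument("sources", nargs="+")
    parser.add_argument("--multi", metavar="DIR")

    args = parser.parse_args(["--multi", "/tmp/dump", "a.conf", "b.conf"])
    try:
        # Mirrors the check added above: --multi takes a single config.
        if args.multi and len(args.sources) > 1:
            parser.error("Only one multi config can be specified.")
    except SystemExit:
        print("rejected: --multi takes a single pungi-orchestrate config")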
@@ -128,6 +128,7 @@ def run(config, topdir, has_old, offline, defined_variables, schema_overrides):
         pungi.phases.OSTreePhase(compose),
         pungi.phases.CreateisoPhase(compose, buildinstall_phase),
         pungi.phases.ExtraIsosPhase(compose, buildinstall_phase),
+        pungi.phases.LiveImagesPhase(compose),
         pungi.phases.LiveMediaPhase(compose),
         pungi.phases.ImageBuildPhase(compose),
         pungi.phases.ImageChecksumPhase(compose),
@@ -5,43 +5,35 @@ import os
 import subprocess
 import tempfile
 from shutil import rmtree
-from typing import (
-    AnyStr,
-    List,
-    Dict,
-    Optional,
-)
+from typing import AnyStr, List, Dict, Optional
 
 import createrepo_c as cr
 import requests
 import yaml
 from dataclasses import dataclass, field
 
-from .create_packages_json import (
-    PackagesGenerator,
-    RepoInfo,
-    VariantInfo,
-)
+from .create_packages_json import PackagesGenerator, RepoInfo
 
 
 @dataclass
-class ExtraVariantInfo(VariantInfo):
+class ExtraRepoInfo(RepoInfo):
 
     modules: List[AnyStr] = field(default_factory=list)
     packages: List[AnyStr] = field(default_factory=list)
+    is_remote: bool = True
 
 
 class CreateExtraRepo(PackagesGenerator):
 
     def __init__(
         self,
-        variants: List[ExtraVariantInfo],
+        repos: List[ExtraRepoInfo],
         bs_auth_token: AnyStr,
         local_repository_path: AnyStr,
         clear_target_repo: bool = True,
     ):
-        self.variants = []  # type: List[ExtraVariantInfo]
-        super().__init__(variants, [], [])
+        self.repos = []  # type: List[ExtraRepoInfo]
+        super().__init__(repos, [], [])
         self.auth_headers = {
             'Authorization': f'Bearer {bs_auth_token}',
         }
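Note: the 4.3.7 side flattens what master models as a variant owning nested repos into one ExtraRepoInfo record per repo. A self-contained mirror of the 4.3.7 shape, with the field set taken from the hunks in this file (the URL value is hypothetical):

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class ExtraRepoInfoSketch:  # mirrors the 4.3.7 ExtraRepoInfo fields above
        path: str
        folder: str
        name: str
        arch: str
        is_remote: bool = True
        packages: List[str] = field(default_factory=list)
        modules: List[str] = field(default_factory=list)

    repo = ExtraRepoInfoSketch(
        path='http://build.example/build_repos/123/platform',  # hypothetical
        folder='x86_64',
        name='123-platform-x86_64',
        arch='x86_64',
    )
    print(repo.is_remote)  # True by default, as added in this hunk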
@@ -100,7 +92,7 @@ class CreateExtraRepo(PackagesGenerator):
             arch: AnyStr,
             packages: Optional[List[AnyStr]] = None,
             modules: Optional[List[AnyStr]] = None,
-    ) -> List[ExtraVariantInfo]:
+    ) -> List[ExtraRepoInfo]:
         """
         Get info about a BS repo and save it to
         an object of class ExtraRepoInfo
@@ -118,7 +110,7 @@ class CreateExtraRepo(PackagesGenerator):
         api_uri = 'api/v1'
         bs_repo_suffix = 'build_repos'
 
-        variants_info = []
+        repos_info = []
 
         # get the full info about a BS repo
         repo_request = requests.get(
@@ -140,13 +132,7 @@ class CreateExtraRepo(PackagesGenerator):
             # skip repo with unsuitable architecture
             if architecture != arch:
                 continue
-            variant_info = ExtraVariantInfo(
-                name=f'{build_id}-{platform_name}-{architecture}',
-                arch=architecture,
-                packages=packages,
-                modules=modules,
-                repos=[
-                    RepoInfo(
-                        path=os.path.join(
+            repo_info = ExtraRepoInfo(
+                path=os.path.join(
                     bs_url,
                     bs_repo_suffix,
@@ -154,12 +140,14 @@ class CreateExtraRepo(PackagesGenerator):
                     platform_name,
                 ),
                 folder=architecture,
+                name=f'{build_id}-{platform_name}-{architecture}',
+                arch=architecture,
                 is_remote=True,
+                packages=packages,
+                modules=modules,
             )
-                ]
-            )
-            variants_info.append(variant_info)
-        return variants_info
+            repos_info.append(repo_info)
+        return repos_info
 
     def _create_local_extra_repo(self):
         """
@@ -196,7 +184,7 @@ class CreateExtraRepo(PackagesGenerator):
     def _download_rpm_to_local_repo(
             self,
             package_location: AnyStr,
-            repo_info: RepoInfo,
+            repo_info: ExtraRepoInfo,
     ) -> None:
         """
         Download a rpm package from a remote repo and save it to a local repo
@@ -224,22 +212,21 @@ class CreateExtraRepo(PackagesGenerator):
     def _download_packages(
             self,
             packages: Dict[AnyStr, cr.Package],
-            variant_info: ExtraVariantInfo
+            repo_info: ExtraRepoInfo
     ):
         """
         Download all defined packages from a remote repo
         :param packages: information about all packages (including
                          modularity) in a remote repo
-        :param variant_info: information about a remote variant
+        :param repo_info: information about a remote repo
         """
         for package in packages.values():
             package_name = package.name
             # Skip a current package from a remote repo if we defined
             # the list packages and a current package doesn't belong to it
-            if variant_info.packages and \
-                    package_name not in variant_info.packages:
+            if repo_info.packages and \
+                    package_name not in repo_info.packages:
                 continue
-            for repo_info in variant_info.repos:
             self._download_rpm_to_local_repo(
                 package_location=package.location_href,
                 repo_info=repo_info,
@@ -248,14 +235,14 @@ class CreateExtraRepo(PackagesGenerator):
     def _download_modules(
             self,
             modules_data: List[Dict],
-            variant_info: ExtraVariantInfo,
+            repo_info: ExtraRepoInfo,
             packages: Dict[AnyStr, cr.Package]
     ):
         """
         Download all defined modularity packages and their data from
         a remote repo
         :param modules_data: information about all modules in a remote repo
-        :param variant_info: information about a remote variant
+        :param repo_info: information about a remote repo
         :param packages: information about all packages (including
                          modularity) in a remote repo
         """
@@ -263,8 +250,8 @@ class CreateExtraRepo(PackagesGenerator):
             module_data = module['data']
             # Skip a current module from a remote repo if we defined
             # the list modules and a current module doesn't belong to it
-            if variant_info.modules and \
-                    module_data['name'] not in variant_info.modules:
+            if repo_info.modules and \
+                    module_data['name'] not in repo_info.modules:
                 continue
             # we should add info about a module if the local repodata
             # doesn't have it
@@ -279,12 +266,11 @@ class CreateExtraRepo(PackagesGenerator):
                     # Empty repo_info.packages means that we will download
                     # all packages from repo including
                     # the modularity packages
-                    if not variant_info.packages:
+                    if not repo_info.packages:
                         break
                     # skip a rpm if it doesn't belong to a processed repo
                     if rpm not in packages:
                         continue
-                    for repo_info in variant_info.repos:
                     self._download_rpm_to_local_repo(
                         package_location=packages[rpm].location_href,
                         repo_info=repo_info,
@@ -298,12 +284,23 @@ class CreateExtraRepo(PackagesGenerator):
        3. Call `createrepo_c` which creates a local repo
           with the right repodata
        """
-        for variant_info in self.variants:
-            for repo_info in variant_info.repos:
+        for repo_info in self.repos:
+            packages = {}  # type: Dict[AnyStr, cr.Package]
             repomd_records = self._get_repomd_records(
                 repo_info=repo_info,
             )
-            packages_iterator = self.get_packages_iterator(repo_info)
+            repomd_records_dict = {}  # type: Dict[str, str]
+            self._download_repomd_records(
+                repo_info=repo_info,
+                repomd_records=repomd_records,
+                repomd_records_dict=repomd_records_dict,
+            )
+            packages_iterator = cr.PackageIterator(
+                primary_path=repomd_records_dict['primary'],
+                filelists_path=repomd_records_dict['filelists'],
+                other_path=repomd_records_dict['other'],
+                warningcb=self._warning_callback,
+            )
             # parse the repodata (including modules.yaml.gz)
             modules_data = self._parse_module_repomd_record(
                 repo_info=repo_info,
@@ -319,12 +316,12 @@ class CreateExtraRepo(PackagesGenerator):
             }
             self._download_modules(
                 modules_data=modules_data,
-                variant_info=variant_info,
+                repo_info=repo_info,
                 packages=packages,
             )
             self._download_packages(
                 packages=packages,
-                variant_info=variant_info,
+                repo_info=repo_info,
             )
 
         self._dump_local_modules_yaml()
@@ -336,6 +333,7 @@ def create_parser():
     parser.add_argument(
         '--bs-auth-token',
         help='Auth token for Build System',
+        required=True,
     )
     parser.add_argument(
         '--local-repo-path',
@@ -404,16 +402,11 @@ def cli_main():
             packages = packages.split()
         if repo.startswith('http://'):
             repos_info.append(
-                ExtraVariantInfo(
-                    name=repo_folder,
-                    arch=repo_arch,
-                    repos=[
-                        RepoInfo(
+                ExtraRepoInfo(
                     path=repo,
                     folder=repo_folder,
-                            is_remote=True,
-                        )
-                    ],
+                    name=repo_folder,
+                    arch=repo_arch,
+                    is_remote=True,
                     modules=modules,
                     packages=packages,
                 )
@@ -429,7 +422,7 @@ def cli_main():
         )
     )
     cer = CreateExtraRepo(
-        variants=repos_info,
+        repos=repos_info,
         bs_auth_token=args.bs_auth_token,
         local_repository_path=args.local_repo_path,
         clear_target_repo=args.clear_local_repo,
@@ -9,41 +9,22 @@ https://github.com/rpm-software-management/createrepo_c/blob/master/examples/pyt
 import argparse
 import gzip
 import json
-import logging
 import lzma
 import os
 import re
 import tempfile
 from collections import defaultdict
-from itertools import tee
-from pathlib import Path
-from typing import (
-    AnyStr,
-    Dict,
-    List,
-    Any,
-    Iterator,
-    Optional,
-    Tuple,
-    Union,
-)
+from typing import AnyStr, Dict, List, Optional, Any, Iterator
 
 import binascii
-from urllib.parse import urljoin
+import createrepo_c as cr
+import dnf.subject
+import hawkey
 import requests
 import rpm
 import yaml
-from createrepo_c import (
-    Package,
-    PackageIterator,
-    Repomd,
-    RepomdRecord,
-)
-from dataclasses import dataclass, field
-from kobo.rpmlib import parse_nvra
-
-logging.basicConfig(level=logging.INFO)
+from createrepo_c import Package, PackageIterator
+from dataclasses import dataclass
 
 
 def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
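Note: _is_compressed_file above dispatches on leading magic bytes; its exact body is not shown in this compare. A self-contained sketch of the same idea — the constants are the standard gzip and xz signatures, not copied from this file:

    import gzip
    import lzma

    def sniff_decompress(blob: bytes) -> bytes:
        # Standard magic numbers: gzip starts with 1f 8b, xz with fd 37 7a 58 5a 00.
        if blob[:2] == b'\x1f\x8b':
            return gzip.decompress(blob)
        if blob[:6] == b'\xfd7zXZ\x00':
            return lzma.decompress(blob)
        return blob

    data = gzip.compress(b'document: modulemd')
    print(sniff_decompress(data))  # b'document: modulemd'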
@@ -70,31 +51,21 @@ class RepoInfo:
     # 'appstream', 'baseos', etc.
     # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
     # using remote repo
-    path: str
+    path: AnyStr
     # name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc
-    folder: str
+    folder: AnyStr
+    # name of repo. E.g. 'BaseOS', 'AppStream', etc
+    name: AnyStr
+    # architecture of repo. E.g. 'x86_64', 'i686', etc
+    arch: AnyStr
     # Is a repo remote or local
     is_remote: bool
     # Is a reference repository (usually it's a RHEL repo)
     # Layout of packages from such repository will be taken as example
-    # Only layout of specific package (which doesn't exist
+    # Only layout of specific package (which don't exist
     # in a reference repository) will be taken as example
     is_reference: bool = False
-    # The packages from 'present' repo will be added to a variant.
-    # The packages from 'absent' repo will be removed from a variant.
-    repo_type: str = 'present'
-
-
-@dataclass
-class VariantInfo:
-    # name of variant. E.g. 'BaseOS', 'AppStream', etc
-    name: AnyStr
-    # architecture of variant. E.g. 'x86_64', 'i686', etc
-    arch: AnyStr
-    # The packages which will be not added to a variant
-    excluded_packages: List[str] = field(default_factory=list)
-    # Repos of a variant
-    repos: List[RepoInfo] = field(default_factory=list)
+    strict_arch: bool = False
 
 
 class PackagesGenerator:
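Note: master restructures this file around variants — each VariantInfo owns a list of RepoInfo entries — while 4.3.7 keeps one flat record per repo. A self-contained mirror of master's shape, field sets taken from the minus lines above:

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class RepoInfoSketch:  # mirrors master's RepoInfo fields above
        path: str
        folder: str
        is_remote: bool
        is_reference: bool = False
        repo_type: str = 'present'

    @dataclass
    class VariantInfoSketch:  # mirrors master's VariantInfo fields above
        name: str
        arch: str
        excluded_packages: List[str] = field(default_factory=list)
        repos: List[RepoInfoSketch] = field(default_factory=list)

    baseos = VariantInfoSketch(
        name='BaseOS',
        arch='x86_64',
        repos=[RepoInfoSketch(path='/var/repos', folder='baseos', is_remote=False)],
    )
    print(baseos.repos[0].folder)  # baseos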
@@ -110,36 +81,22 @@ class PackagesGenerator:
 
     def __init__(
             self,
-            variants: List[VariantInfo],
+            repos: List[RepoInfo],
             excluded_packages: List[AnyStr],
             included_packages: List[AnyStr],
     ):
-        self.variants = variants
-        self.pkgs = dict()
+        self.repos = repos
         self.excluded_packages = excluded_packages
         self.included_packages = included_packages
-        self.tmp_files = []  # type: list[Path]
+        self.tmp_files = []
         for arch, arch_list in self.addon_repos.items():
             self.repo_arches[arch].extend(arch_list)
             self.repo_arches[arch].append(arch)
 
     def __del__(self):
         for tmp_file in self.tmp_files:
-            if tmp_file.exists():
-                tmp_file.unlink()
+            if os.path.exists(tmp_file):
+                os.remove(tmp_file)
 
-    @staticmethod
-    def _get_full_repo_path(repo_info: RepoInfo):
-        result = os.path.join(
-            repo_info.path,
-            repo_info.folder
-        )
-        if repo_info.is_remote:
-            result = urljoin(
-                repo_info.path + '/',
-                repo_info.folder,
-            )
-        return result
-
     @staticmethod
     def _warning_callback(warning_type, message):
@@ -149,7 +106,8 @@ class PackagesGenerator:
         print(f'Warning message: "{message}"; warning type: "{warning_type}"')
         return True
 
-    def get_remote_file_content(self, file_url: AnyStr) -> AnyStr:
+    @staticmethod
+    def get_remote_file_content(file_url: AnyStr) -> AnyStr:
         """
         Get content from a remote file and write it to a temp file
         :param file_url: url of a remote file
@@ -162,16 +120,15 @@ class PackagesGenerator:
         file_request.raise_for_status()
         with tempfile.NamedTemporaryFile(delete=False) as file_stream:
             file_stream.write(file_request.content)
-            self.tmp_files.append(Path(file_stream.name))
             return file_stream.name
 
     @staticmethod
-    def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
+    def _parse_repomd(repomd_file_path: AnyStr) -> cr.Repomd:
         """
         Parse file repomd.xml and create object Repomd
         :param repomd_file_path: path to local repomd.xml
         """
-        return Repomd(repomd_file_path)
+        return cr.Repomd(repomd_file_path)
 
     @classmethod
     def _parse_modules_file(
@@ -182,7 +139,7 @@ class PackagesGenerator:
         """
         Parse modules.yaml.gz and returns parsed data
         :param modules_file_path: path to local modules.yaml.gz
-        :return: List of dict for each module in a repo
+        :return: List of dict for each modules in a repo
         """
 
         with open(modules_file_path, 'rb') as modules_file:
@@ -199,7 +156,7 @@ class PackagesGenerator:
     def _get_repomd_records(
             self,
             repo_info: RepoInfo,
-    ) -> List[RepomdRecord]:
+    ) -> List[cr.RepomdRecord]:
         """
         Get, parse file repomd.xml and extract from it repomd records
         :param repo_info: structure which contains info about a current repo
@@ -212,15 +169,9 @@ class PackagesGenerator:
             'repomd.xml',
         )
         if repo_info.is_remote:
-            repomd_file_path = urljoin(
-                urljoin(
-                    repo_info.path + '/',
-                    repo_info.folder
-                ) + '/',
-                'repodata/repomd.xml'
-            )
             repomd_file_path = self.get_remote_file_content(repomd_file_path)
+        else:
+            repomd_file_path = repomd_file_path
         repomd_object = self._parse_repomd(repomd_file_path)
         if repo_info.is_remote:
             os.remove(repomd_file_path)
@@ -229,7 +180,7 @@ class PackagesGenerator:
     def _download_repomd_records(
             self,
             repo_info: RepoInfo,
-            repomd_records: List[RepomdRecord],
+            repomd_records: List[cr.RepomdRecord],
             repomd_records_dict: Dict[str, str],
     ):
         """
@@ -253,17 +204,19 @@ class PackagesGenerator:
             if repo_info.is_remote:
                 repomd_record_file_path = self.get_remote_file_content(
                     repomd_record_file_path)
+                self.tmp_files.append(repomd_record_file_path)
             repomd_records_dict[repomd_record.type] = repomd_record_file_path
 
     def _parse_module_repomd_record(
             self,
             repo_info: RepoInfo,
-            repomd_records: List[RepomdRecord],
+            repomd_records: List[cr.RepomdRecord],
     ) -> List[Dict]:
         """
         Download repomd records
         :param repo_info: structure which contains info about a current repo
         :param repomd_records: list with repomd records
+        :param repomd_records_dict: dict with paths to repodata files
         """
         for repomd_record in repomd_records:
             if repomd_record.type != 'modules':
@@ -276,10 +229,10 @@ class PackagesGenerator:
             if repo_info.is_remote:
                 repomd_record_file_path = self.get_remote_file_content(
                     repomd_record_file_path)
+                self.tmp_files.append(repomd_record_file_path)
             return list(self._parse_modules_file(
                 repomd_record_file_path,
             ))
-        return []
 
     @staticmethod
     def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
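Note: compare_pkgs_version delegates to rpm.labelCompare, which compares (epoch, version, release) triples and returns -1, 0, or 1. A minimal demonstration (requires the rpm Python bindings):

    import rpm

    # (epoch, version, release) triples, the form labelCompare expects.
    older = ('0', '8.0.1', '1.el8')
    newer = ('0', '8.0.1', '2.el8')
    print(rpm.labelCompare(older, newer))  # -1: first argument is older
    print(rpm.labelCompare(newer, older))  # 1
    print(rpm.labelCompare(older, older))  # 0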
@@ -295,13 +248,21 @@ class PackagesGenerator:
         )
         return rpm.labelCompare(version_tuple_1, version_tuple_2)
 
-    def get_packages_iterator(
-            self,
-            repo_info: RepoInfo,
-    ) -> Union[PackageIterator, Iterator]:
-        full_repo_path = self._get_full_repo_path(repo_info)
-        pkgs_iterator = self.pkgs.get(full_repo_path)
-        if pkgs_iterator is None:
+    def generate_packages_json(
+            self
+    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
+        """
+        Generate packages.json
+        """
+        packages_json = defaultdict(
+            lambda: defaultdict(
+                lambda: defaultdict(
+                    list,
+                )
+            )
+        )
+        all_packages = defaultdict(lambda: {'variants': list()})
+        for repo_info in self.repos:
             repomd_records = self._get_repomd_records(
                 repo_info=repo_info,
             )
@@ -311,146 +272,157 @@ class PackagesGenerator:
                 repomd_records=repomd_records,
                 repomd_records_dict=repomd_records_dict,
             )
-            pkgs_iterator = PackageIterator(
+            packages_iterator = PackageIterator(
                 primary_path=repomd_records_dict['primary'],
                 filelists_path=repomd_records_dict['filelists'],
                 other_path=repomd_records_dict['other'],
                 warningcb=self._warning_callback,
             )
-            pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
-        return pkgs_iterator
-
-    def get_package_arch(
-            self,
-            package: Package,
-            variant_arch: str,
-    ) -> str:
-        result = variant_arch
-        if package.arch in self.repo_arches[variant_arch]:
-            result = package.arch
-        return result
-
-    def is_skipped_module_package(
-            self,
-            package: Package,
-            variant_arch: str,
-    ) -> bool:
-        package_key = self.get_package_key(package, variant_arch)
-        # Even a module package will be added to packages.json if
-        # it presents in the list of included packages
-        return 'module' in package.release and not any(
-            re.search(
-                f'^{included_pkg}$',
-                package_key,
-            ) or included_pkg in (package.name, package_key)
-            for included_pkg in self.included_packages
-        )
-
-    def is_excluded_package(
-            self,
-            package: Package,
-            variant_arch: str,
-            excluded_packages: List[str],
-    ) -> bool:
-        package_key = self.get_package_key(package, variant_arch)
-        return any(
-            re.search(
-                f'^{excluded_pkg}$',
-                package_key,
-            ) or excluded_pkg in (package.name, package_key)
-            for excluded_pkg in excluded_packages
-        )
-
-    @staticmethod
-    def get_source_rpm_name(package: Package) -> str:
-        source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
-        return source_rpm_nvra['name']
-
-    def get_package_key(self, package: Package, variant_arch: str) -> str:
-        return (
-            f'{package.name}.'
-            f'{self.get_package_arch(package, variant_arch)}'
-        )
-
-    def generate_packages_json(
-            self
-    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
-        """
-        Generate packages.json
-        """
-        packages = defaultdict(lambda: defaultdict(lambda: {
-            'variants': list(),
-        }))
-        for variant_info in self.variants:
-            for repo_info in variant_info.repos:
-                is_reference = repo_info.is_reference
-                for package in self.get_packages_iterator(repo_info=repo_info):
-                    if self.is_skipped_module_package(
-                        package=package,
-                        variant_arch=variant_info.arch,
-                    ):
-                        continue
-                    if self.is_excluded_package(
-                        package=package,
-                        variant_arch=variant_info.arch,
-                        excluded_packages=self.excluded_packages,
-                    ):
-                        continue
-                    if self.is_excluded_package(
-                        package=package,
-                        variant_arch=variant_info.arch,
-                        excluded_packages=variant_info.excluded_packages,
-                    ):
-                        continue
-                    package_key = self.get_package_key(
-                        package,
-                        variant_info.arch,
-                    )
-                    source_rpm_name = self.get_source_rpm_name(package)
-                    package_info = packages[source_rpm_name][package_key]
-                    if 'is_reference' not in package_info:
-                        package_info['variants'].append(variant_info.name)
-                        package_info['is_reference'] = is_reference
-                        package_info['package'] = package
-                    elif not package_info['is_reference'] or \
-                            package_info['is_reference'] == is_reference and \
-                            self.compare_pkgs_version(
-                                package_1=package,
-                                package_2=package_info['package'],
-                            ) > 0:
-                        package_info['variants'] = [variant_info.name]
-                        package_info['is_reference'] = is_reference
-                        package_info['package'] = package
-                    elif self.compare_pkgs_version(
-                        package_1=package,
-                        package_2=package_info['package'],
-                    ) == 0 and repo_info.repo_type != 'absent':
-                        package_info['variants'].append(variant_info.name)
-        result = defaultdict(lambda: defaultdict(
-            lambda: defaultdict(list),
-        ))
-        for variant_info in self.variants:
-            for source_rpm_name, packages_info in packages.items():
-                for package_key, package_info in packages_info.items():
-                    variant_pkgs = result[variant_info.name][variant_info.arch]
-                    if variant_info.name not in package_info['variants']:
-                        continue
-                    variant_pkgs[source_rpm_name].append(package_key)
-        return result
+            for package in packages_iterator:
+                if package.arch not in self.repo_arches[repo_info.arch]:
+                    package_arch = repo_info.arch
+                else:
+                    package_arch = package.arch
+                package_key = f'{package.name}.{package_arch}'
+                # Even a module package will be added to packages.json if
+                # it presents in the list of included packages
+                if 'module' in package.release and not any(
+                        re.search(included_package, package.name)
+                        for included_package in self.included_packages
+                ):
+                    continue
+                if package_key not in all_packages:
+                    all_packages[package_key]['variants'].append(
+                        (repo_info.name, repo_info.arch)
+                    )
+                    all_packages[package_key]['arch'] = package_arch
+                    all_packages[package_key]['package'] = package
+                    all_packages[package_key]['type'] = repo_info.is_reference
+                # replace an older package if it's not reference or
+                # a newer package is from reference repo
+                elif (not all_packages[package_key]['type'] or
+                        all_packages[package_key]['type'] ==
+                        repo_info.is_reference) and \
+                        self.compare_pkgs_version(
+                            package,
+                            all_packages[package_key]['package']
+                        ) > 0:
+                    all_packages[package_key]['variants'] = [
+                        (repo_info.name, repo_info.arch)
+                    ]
+                    all_packages[package_key]['arch'] = package_arch
+                    all_packages[package_key]['package'] = package
+                elif self.compare_pkgs_version(
+                        package,
+                        all_packages[package_key]['package']
+                ) == 0:
+                    all_packages[package_key]['variants'].append(
+                        (repo_info.name, repo_info.arch)
+                    )
+
+        for package_dict in all_packages.values():
+            for variant_name, variant_arch in package_dict['variants']:
+                package_arch = package_dict['arch']
+                package = package_dict['package']
+                package_name = package.name
+                if any(re.search(excluded_package, package_name)
+                       for excluded_package in self.excluded_packages):
+                    continue
+                src_package_name = dnf.subject.Subject(
+                    package.rpm_sourcerpm,
+                ).get_nevra_possibilities(
+                    forms=hawkey.FORM_NEVRA,
+                )
+                if len(src_package_name) > 1:
+                    # We should stop utility if we can't get exact name of srpm
+                    raise ValueError(
+                        'We can\'t get exact name of srpm '
+                        f'by its NEVRA "{package.rpm_sourcerpm}"'
+                    )
+                else:
+                    src_package_name = src_package_name[0].name
+                # TODO: for x86_64 + i686 in one packages.json
+                # don't remove!
+                # if package.arch in self.addon_repos[variant_arch]:
+                #     arches = self.addon_repos[variant_arch] + [variant_arch]
+                # else:
+                #     arches = [variant_arch]
+                # for arch in arches:
+                #     pkgs_list = packages_json[variant_name][
+                #         arch][src_package_name]
+                #     added_pkg = f'{package_name}.{package_arch}'
+                #     if added_pkg not in pkgs_list:
+                #         pkgs_list.append(added_pkg)
+                pkgs_list = packages_json[variant_name][
+                    variant_arch][src_package_name]
+                added_pkg = f'{package_name}.{package_arch}'
+                if added_pkg not in pkgs_list:
+                    pkgs_list.append(added_pkg)
+        return packages_json
 
 
 def create_parser():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        '-c',
-        '--config',
-        type=Path,
-        default=Path('config.yaml'),
-        required=False,
-        help='Path to a config',
+        '--repo-path',
+        action='append',
+        help='Path to a folder with repofolders. E.g. "/var/repos" or '
+             '"http://koji.cloudlinux.com/mirrors/rhel_mirror"',
+        required=True,
+    )
+    parser.add_argument(
+        '--repo-folder',
+        action='append',
+        help='A folder which contains folder repodata . E.g. "baseos-stream"',
+        required=True,
+    )
+    parser.add_argument(
+        '--repo-arch',
+        action='append',
+        help='What architecture packages a repository contains. E.g. "x86_64"',
+        required=True,
+    )
+    parser.add_argument(
+        '--repo-name',
+        action='append',
+        help='Name of a repository. E.g. "AppStream"',
+        required=True,
+    )
+    parser.add_argument(
+        '--is-remote',
+        action='append',
+        type=str,
+        help='A repository is remote or local',
+        choices=['yes', 'no'],
+        required=True,
+    )
+    parser.add_argument(
+        '--is-reference',
+        action='append',
+        type=str,
+        help='A repository is used as reference for packages layout',
+        choices=['yes', 'no'],
+        required=True,
+    )
+    parser.add_argument(
+        '--excluded-packages',
+        nargs='+',
+        type=str,
+        default=[],
+        help='A list of globally excluded packages from generated json.'
+             'All of list elements should be separated by space',
+        required=False,
+    )
+    parser.add_argument(
+        '--included-packages',
+        nargs='+',
+        type=str,
+        default=[],
+        help='A list of globally included packages from generated json.'
+             'All of list elements should be separated by space',
+        required=False,
     )
     parser.add_argument(
-        '-o',
         '--json-output-path',
         type=str,
        help='Full path to output json file',
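Note: both sides ultimately emit the same nested mapping — variant name, then arch, then source RPM name, then a list of "name.arch" binary packages. A minimal illustration of that output shape using the same defaultdict nesting (the entries are hypothetical):

    import json
    from collections import defaultdict

    # variant -> arch -> srpm name -> ["name.arch", ...], as generate_packages_json returns.
    packages_json = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    packages_json['BaseOS']['x86_64']['bash'].append('bash.x86_64')
    packages_json['BaseOS']['x86_64']['bash'].append('bash-doc.noarch')  # hypothetical
    print(json.dumps(packages_json, indent=2))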
@@ -460,45 +432,30 @@ def create_parser():
     return parser
 
 
-def read_config(config_path: Path) -> Optional[Dict]:
-    if not config_path.exists():
-        logging.error('A config by path "%s" does not exist', config_path)
-        exit(1)
-    with config_path.open('r') as config_fd:
-        return yaml.safe_load(config_fd)
-
-
-def process_config(config_data: Dict) -> Tuple[
-    List[VariantInfo],
-    List[str],
-    List[str],
-]:
-    excluded_packages = config_data.get('excluded_packages', [])
-    included_packages = config_data.get('included_packages', [])
-    variants = [VariantInfo(
-        name=variant_name,
-        arch=variant_info['arch'],
-        excluded_packages=variant_info.get('excluded_packages', []),
-        repos=[RepoInfo(
-            path=variant_repo['path'],
-            folder=variant_repo['folder'],
-            is_remote=variant_repo['remote'],
-            is_reference=variant_repo['reference'],
-            repo_type=variant_repo.get('repo_type', 'present'),
-        ) for variant_repo in variant_info['repos']]
-    ) for variant_name, variant_info in config_data['variants'].items()]
-    return variants, excluded_packages, included_packages
-
-
 def cli_main():
     args = create_parser().parse_args()
-    variants, excluded_packages, included_packages = process_config(
-        config_data=read_config(args.config)
-    )
+    repos = []
+    for repo_path, repo_folder, repo_name, \
+            repo_arch, is_remote, is_reference in zip(
+                args.repo_path,
+                args.repo_folder,
+                args.repo_name,
+                args.repo_arch,
+                args.is_remote,
+                args.is_reference,
+            ):
+        repos.append(RepoInfo(
+            path=repo_path,
+            folder=repo_folder,
+            name=repo_name,
+            arch=repo_arch,
+            is_remote=True if is_remote == 'yes' else False,
+            is_reference=True if is_reference == 'yes' else False
+        ))
     pg = PackagesGenerator(
-        variants=variants,
-        excluded_packages=excluded_packages,
-        included_packages=included_packages,
+        repos=repos,
+        excluded_packages=args.excluded_packages,
+        included_packages=args.included_packages,
     )
     result = pg.generate_packages_json()
     with open(args.json_output_path, 'w') as packages_file:
@@ -14,9 +14,6 @@ def send(cmd, data):
     topic = "compose.%s" % cmd.replace("-", ".").lower()
     try:
         msg = fedora_messaging.api.Message(topic="pungi.{}".format(topic), body=data)
-        if cmd == "ostree":
-            # https://pagure.io/fedora-infrastructure/issue/10899
-            msg.priority = 3
         fedora_messaging.api.publish(msg)
     except fedora_messaging.exceptions.PublishReturned as e:
         print("Fedora Messaging broker rejected message %s: %s" % (msg.id, e))
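Note: the message topic is derived mechanically from the command name. A quick check of the transformation used in send() above:

    def topic_for(cmd: str) -> str:
        # Same transformation as send(): dashes to dots, lowercased,
        # then prefixed into the fedora-messaging topic namespace.
        return "pungi.compose.%s" % cmd.replace("-", ".").lower()

    print(topic_for("phase-start"))  # pungi.compose.phase.start
    print(topic_for("ostree"))       # pungi.compose.ostree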
@@ -2,7 +2,6 @@ import gzip
 import lzma
 import os
 from argparse import ArgumentParser, FileType
-from glob import iglob
 from io import BytesIO
 from pathlib import Path
 from typing import List, AnyStr, Iterable, Union, Optional
@@ -31,11 +30,8 @@ def grep_list_of_modules_yaml(repos_path: AnyStr) -> Iterable[BytesIO]:
     """
 
     return (
-        read_modules_yaml_from_specific_repo(repo_path=Path(path).parent)
-        for path in iglob(
-            str(Path(repos_path).joinpath('**/repodata')),
-            recursive=True
-        )
+        read_modules_yaml_from_specific_repo(repo_path=path.parent)
+        for path in Path(repos_path).rglob('repodata')
     )
 
 
@@ -59,12 +55,7 @@ def read_modules_yaml_from_specific_repo(
             repo_path + '/',
             'repodata/repomd.xml',
         )
-        packages_generator = PackagesGenerator(
-            variants=[],
-            excluded_packages=[],
-            included_packages=[],
-        )
-        repomd_file_path = packages_generator.get_remote_file_content(
+        repomd_file_path = PackagesGenerator.get_remote_file_content(
             file_url=repomd_url
         )
     else:
@@ -82,12 +73,7 @@ def read_modules_yaml_from_specific_repo(
             repo_path + '/',
             record.location_href,
         )
-        packages_generator = PackagesGenerator(
-            variants=[],
-            excluded_packages=[],
-            included_packages=[],
-        )
-        modules_yaml_path = packages_generator.get_remote_file_content(
+        modules_yaml_path = PackagesGenerator.get_remote_file_content(
            file_url=modules_yaml_url
        )
    else:
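Note: gather_modules walks repodata directories and inflates modules.yaml.gz streams into modulemd documents. A self-contained sketch of the YAML-documents-in-gzip part (the sample document is a made-up minimal modulemd):

    import gzip
    import yaml

    raw = gzip.compress(b"---\ndocument: modulemd\ndata:\n  name: perl\n")
    docs = list(yaml.safe_load_all(gzip.decompress(raw)))
    print(docs[0]['data']['name'])  # perl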
@@ -1,53 +1,39 @@
-import re
 from argparse import ArgumentParser
 
 import os
-from glob import iglob
 from typing import List
-from pathlib import Path
 
-from dataclasses import dataclass
+from attr import dataclass
 from productmd.common import parse_nvra
 
 
 @dataclass
 class Package:
-    nvra: dict
-    path: Path
+    nvra: str
+    path: str
 
 
-def search_rpms(top_dir: Path) -> List[Package]:
+def search_rpms(top_dir) -> List[Package]:
     """
     Search for all *.rpm files recursively
     in given top directory
     Returns:
         list: list of paths
     """
-    return [Package(
-        nvra=parse_nvra(Path(path).stem),
-        path=Path(path),
-    ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]
-
-
-def is_excluded_package(
-        package: Package,
-        excluded_packages: List[str],
-) -> bool:
-    package_key = f'{package.nvra["name"]}.{package.nvra["arch"]}'
-    return any(
-        re.search(
-            f'^{excluded_pkg}$',
-            package_key,
-        ) or excluded_pkg in (package.nvra['name'], package_key)
-        for excluded_pkg in excluded_packages
-    )
+    rpms = []
+    for root, dirs, files in os.walk(top_dir):
+        path = root.split(os.sep)
+        for file in files:
+            if not file.endswith('.rpm'):
+                continue
+            nvra, _ = os.path.splitext(file)
+            rpms.append(
+                Package(nvra=nvra, path=os.path.join('/', *path, file))
+            )
+    return rpms
 
 
-def copy_rpms(
-        packages: List[Package],
-        target_top_dir: Path,
-        excluded_packages: List[str],
-):
+def copy_rpms(packages: List[Package], target_top_dir: str):
     """
     Search synced repos for rpms and prepare
     koji-like structure for pungi
@@ -59,37 +45,30 @@ def copy_rpms(
         Nothing:
     """
     for package in packages:
-        if is_excluded_package(package, excluded_packages):
-            continue
-        target_arch_dir = target_top_dir.joinpath(package.nvra['arch'])
-        target_file = target_arch_dir.joinpath(package.path.name)
+        info = parse_nvra(package.nvra)
+        target_arch_dir = os.path.join(target_top_dir, info['arch'])
         os.makedirs(target_arch_dir, exist_ok=True)
 
-        if not target_file.exists():
+        target_file = os.path.join(target_arch_dir, os.path.basename(package.path))
+
+        if not os.path.exists(target_file):
             try:
                 os.link(package.path, target_file)
             except OSError:
                 # hardlink failed, try symlinking
-                package.path.symlink_to(target_file)
+                os.symlink(package.path, target_file)
 
 
 def cli_main():
     parser = ArgumentParser()
-    parser.add_argument('-p', '--path', required=True, type=Path)
-    parser.add_argument('-t', '--target', required=True, type=Path)
-    parser.add_argument(
-        '-e',
-        '--excluded-packages',
-        required=False,
-        nargs='+',
-        type=str,
-        default=[],
-    )
+    parser.add_argument('-p', '--path', required=True)
+    parser.add_argument('-t', '--target', required=True)
 
     namespace = parser.parse_args()
 
     rpms = search_rpms(namespace.path)
-    copy_rpms(rpms, namespace.target, namespace.excluded_packages)
+    copy_rpms(rpms, namespace.target)
 
 
 if __name__ == '__main__':
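Note: copy_rpms builds the koji-like tree by hardlinking each package into a per-arch directory and falls back to a symlink when hardlinking fails (e.g. across filesystems). A self-contained sketch of that fallback, mirroring the 4.3.7 side above:

    import os
    import tempfile

    def link_or_symlink(src: str, dst: str) -> None:
        # Prefer a hardlink; fall back to a symlink, as copy_rpms does.
        try:
            os.link(src, dst)
        except OSError:
            os.symlink(src, dst)

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, 'pkg.rpm')
        open(src, 'w').close()
        link_or_symlink(src, os.path.join(tmp, 'x86_64-pkg.rpm'))
        print(sorted(os.listdir(tmp)))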
@@ -319,6 +319,7 @@ def get_arguments(config):
 
 
 def main():
+
     config = pungi.config.Config()
     opts = get_arguments(config)
 
@@ -478,7 +479,8 @@ def main():
         print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
         if not opts.nodebuginfo:
             print(
-                "DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2)
+                "DEBUGINFO size: %s MiB"
+                % (mypungi.size_debuginfo() / 1024**2)
             )
         if not opts.nosource:
             print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
@@ -97,7 +97,6 @@ def main(ns, persistdir, cachedir):
     dnf_conf = Conf(ns.arch)
     dnf_conf.persistdir = persistdir
     dnf_conf.cachedir = cachedir
-    dnf_conf.optional_metadata_types = ["filelists"]
     dnf_obj = DnfWrapper(dnf_conf)
 
     gather_opts = GatherOptions()
@@ -23,7 +23,6 @@ from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
 from pungi.errors import UnsignedPackagesError
 from pungi.wrappers import kojiwrapper
-from pungi.util import rmtree
 
 
 # force C locales
@@ -252,15 +251,9 @@ def main():
     kobo.log.add_stderr_logger(logger)
 
     conf = util.load_config(opts.config)
-    compose_type = opts.compose_type or conf.get("compose_type", "production")
-    label = opts.label or conf.get("label")
-    if label:
-        try:
-            productmd.composeinfo.verify_label(label)
-        except ValueError as ex:
-            abort(str(ex))
 
-    if compose_type == "production" and not label and not opts.no_label:
+    compose_type = opts.compose_type or conf.get("compose_type", "production")
+    if compose_type == "production" and not opts.label and not opts.no_label:
         abort("must specify label for a production compose")
 
     if (
@@ -307,12 +300,7 @@ def main():
 
     if opts.target_dir:
         compose_dir = Compose.get_compose_dir(
-            opts.target_dir,
-            conf,
-            compose_type=compose_type,
-            compose_label=label,
-            parent_compose_ids=opts.parent_compose_id,
-            respin_of=opts.respin_of,
+            opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
         )
     else:
         compose_dir = opts.compose_dir
@@ -321,7 +309,7 @@ def main():
     ci = Compose.get_compose_info(
         conf,
         compose_type=compose_type,
-        compose_label=label,
+        compose_label=opts.label,
         parent_compose_ids=opts.parent_compose_id,
         respin_of=opts.respin_of,
     )
@@ -392,14 +380,6 @@ def run_compose(
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
     compose.log_info("COMPOSE_ID=%s" % compose.compose_id)
 
-    installed_pkgs_log = compose.paths.log.log_file("global", "installed-pkgs")
-    compose.log_info("Logging installed packages to %s" % installed_pkgs_log)
-    try:
-        with open(installed_pkgs_log, "w") as f:
-            subprocess.Popen(["rpm", "-qa"], stdout=f)
-    except Exception as e:
-        compose.log_warning("Failed to log installed packages: %s" % str(e))
-
     compose.read_variants()
 
     # dump the config file
@@ -423,12 +403,11 @@ def run_compose(
         compose, buildinstall_phase, pkgset_phase
     )
     ostree_phase = pungi.phases.OSTreePhase(compose, pkgset_phase)
-    ostree_container_phase = pungi.phases.OSTreeContainerPhase(compose, pkgset_phase)
     createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
     extra_isos_phase = pungi.phases.ExtraIsosPhase(compose, buildinstall_phase)
+    liveimages_phase = pungi.phases.LiveImagesPhase(compose)
     livemedia_phase = pungi.phases.LiveMediaPhase(compose)
     image_build_phase = pungi.phases.ImageBuildPhase(compose, buildinstall_phase)
-    kiwibuild_phase = pungi.phases.KiwiBuildPhase(compose)
     osbuild_phase = pungi.phases.OSBuildPhase(compose)
     osbs_phase = pungi.phases.OSBSPhase(compose, pkgset_phase, buildinstall_phase)
     image_container_phase = pungi.phases.ImageContainerPhase(compose)
@@ -445,18 +424,17 @@ def run_compose(
         gather_phase,
         extrafiles_phase,
         createiso_phase,
+        liveimages_phase,
         livemedia_phase,
         image_build_phase,
         image_checksum_phase,
         test_phase,
         ostree_phase,
         ostree_installer_phase,
-        ostree_container_phase,
         extra_isos_phase,
         osbs_phase,
         osbuild_phase,
         image_container_phase,
-        kiwibuild_phase,
     ):
         if phase.skip():
             continue
@@ -471,6 +449,50 @@ def run_compose(
             print(i)
         raise RuntimeError("Configuration is not valid")
 
+    # PREP
+
+    # Note: This may be put into a new method of phase classes (e.g. .prep())
+    # in same way as .validate() or .run()
+
+    # Prep for liveimages - Obtain a password for signing rpm wrapped images
+    if (
+        "signing_key_password_file" in compose.conf
+        and "signing_command" in compose.conf
+        and "%(signing_key_password)s" in compose.conf["signing_command"]
+        and not liveimages_phase.skip()
+    ):
+        # TODO: Don't require key if signing is turned off
+        # Obtain signing key password
+        signing_key_password = None
+
+        # Use appropriate method
+        if compose.conf["signing_key_password_file"] == "-":
+            # Use stdin (by getpass module)
+            try:
+                signing_key_password = getpass.getpass("Signing key password: ")
+            except EOFError:
+                compose.log_debug("Ignoring signing key password")
+                pass
+        else:
+            # Use text file with password
+            try:
+                signing_key_password = (
+                    open(compose.conf["signing_key_password_file"], "r")
+                    .readline()
+                    .rstrip("\n")
+                )
+            except IOError:
+                # Filename is not print intentionally in case someone puts
+                # password directly into the option
+                err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"  # noqa: E501
+                compose.log_error(err_msg)
+                print(err_msg)
+                raise RuntimeError(err_msg)
+
+        if signing_key_password:
+            # Store the password
+            compose.conf["signing_key_password"] = signing_key_password
+
     init_phase.start()
     init_phase.stop()
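Note: the 4.3.7 prep block reads the signing password either from stdin via getpass (when the option is "-") or from the first line of a file. A self-contained sketch of the same two branches; the helper name is illustrative, not from pungi:

    import getpass
    import tempfile

    def load_signing_password(password_file: str) -> str:
        # "-" means prompt on stdin, mirroring the prep block above.
        if password_file == "-":
            return getpass.getpass("Signing key password: ")
        with open(password_file, "r") as f:
            return f.readline().rstrip("\n")

    with tempfile.NamedTemporaryFile("w", delete=False) as tmp:
        tmp.write("hunter2\n")
    print(load_signing_password(tmp.name))  # hunter2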
@@ -483,7 +505,6 @@ def run_compose(
         (gather_phase, createrepo_phase),
         extrafiles_phase,
         (ostree_phase, ostree_installer_phase),
-        ostree_container_phase,
     )
     essentials_phase = pungi.phases.WeaverPhase(compose, essentials_schema)
     essentials_phase.start()
@@ -508,10 +529,10 @@ def run_compose(
     compose_images_schema = (
         createiso_phase,
         extra_isos_phase,
+        liveimages_phase,
         image_build_phase,
         livemedia_phase,
         osbuild_phase,
-        kiwibuild_phase,
     )
     post_image_phase = pungi.phases.WeaverPhase(
         compose, (image_checksum_phase, image_container_phase)
@@ -533,11 +554,10 @@ def run_compose(
         and ostree_installer_phase.skip()
         and createiso_phase.skip()
         and extra_isos_phase.skip()
+        and liveimages_phase.skip()
         and livemedia_phase.skip()
         and image_build_phase.skip()
-        and kiwibuild_phase.skip()
         and osbuild_phase.skip()
-        and ostree_container_phase.skip()
     ):
         compose.im.dump(compose.paths.compose.metadata("images.json"))
         compose.dump_containers_metadata()
@@ -651,7 +671,7 @@ def cli_main():
     except (Exception, KeyboardInterrupt) as ex:
         if COMPOSE:
             COMPOSE.log_error("Compose run failed: %s" % ex)
-            COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
+            COMPOSE.traceback()
             COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
             COMPOSE.write_status("DOOMED")
         else:
@@ -660,8 +680,3 @@ def cli_main():
         sys.stdout.flush()
         sys.stderr.flush()
         sys.exit(1)
-    finally:
-        # Remove repositories cloned during ExtraFiles phase
-        process_id = os.getpid()
-        directoy_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
-        rmtree(directoy_to_remove)
|
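
The prep block restored above reads the signing key password either from stdin or from a one-line password file. A minimal standalone sketch of that flow, assuming a plain `conf` dict (the helper name is illustrative, not from the patch):

    import getpass

    def read_signing_key_password(conf):
        # "-" means prompt on stdin; anything else is treated as a path
        # to a file whose first line is the password.
        source = conf["signing_key_password_file"]
        if source == "-":
            try:
                return getpass.getpass("Signing key password: ")
            except EOFError:
                return None  # no usable stdin, same fallback as the patch
        try:
            with open(source, "r") as f:
                return f.readline().rstrip("\n")
        except IOError:
            # Do not echo the file name; it may contain the password itself.
            raise RuntimeError(
                "Cannot load password from 'signing_key_password_file'"
            )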
@@ -279,7 +279,7 @@ class GitUrlResolveError(RuntimeError):
     pass
 
 
-def resolve_git_ref(repourl, ref, credential_helper=None):
+def resolve_git_ref(repourl, ref):
     """Resolve a reference in a Git repo to a commit.
 
     Raises RuntimeError if there was an error. Most likely cause is failure to
@@ -289,7 +289,7 @@ def resolve_git_ref(repourl, ref, credential_helper=None):
         # This looks like a commit ID already.
         return ref
     try:
-        _, output = git_ls_remote(repourl, ref, credential_helper)
+        _, output = git_ls_remote(repourl, ref)
     except RuntimeError as e:
         raise GitUrlResolveError(
             "ref does not exist in remote repo %s with the error %s %s"
@@ -316,7 +316,7 @@ def resolve_git_ref(repourl, ref, credential_helper=None):
     return lines[0].split()[0]
 
 
-def resolve_git_url(url, credential_helper=None):
+def resolve_git_url(url):
     """Given a url to a Git repo specifying HEAD or origin/<branch> as a ref,
     replace that specifier with actual SHA1 of the commit.
 
@@ -335,7 +335,7 @@ def resolve_git_url(url, credential_helper=None):
     scheme = r.scheme.replace("git+", "")
 
     baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, "", ""))
-    fragment = resolve_git_ref(baseurl, ref, credential_helper)
+    fragment = resolve_git_ref(baseurl, ref)
 
     result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
     if "?#" in url:
@@ -354,18 +354,13 @@ class GitUrlResolver(object):
         self.offline = offline
         self.cache = {}
 
-    def __call__(self, url, branch=None, options=None):
-        credential_helper = options.get("credential_helper") if options else None
+    def __call__(self, url, branch=None):
         if self.offline:
             return branch or url
         key = (url, branch)
         if key not in self.cache:
             try:
-                res = (
-                    resolve_git_ref(url, branch, credential_helper)
-                    if branch
-                    else resolve_git_url(url, credential_helper)
-                )
+                res = resolve_git_ref(url, branch) if branch else resolve_git_url(url)
                 self.cache[key] = res
             except GitUrlResolveError as exc:
                 self.cache[key] = exc
@@ -461,9 +456,6 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
         if not variant_uid and "%(variant)s" in i:
             continue
         try:
-            # fmt: off
-            # Black wants to add a comma after kwargs, but that's not valid in
-            # Python 2.7
             args = get_format_substs(
                 compose,
                 variant=variant_uid,
@@ -475,7 +467,6 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
                 base_product_version=base_product_version,
                 **kwargs
             )
-            # fmt: on
             volid = (i % args).format(**args)
         except KeyError as err:
             raise RuntimeError(
@@ -487,7 +478,10 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
         tried.add(volid)
 
     if volid and len(volid) > 32:
-        volid = volid[:32]
+        raise ValueError(
+            "Could not create volume ID longer than 32 bytes, options are %r",
+            sorted(tried, key=len),
+        )
 
     if compose.conf["restricted_volid"]:
         # Replace all non-alphanumeric characters and non-underscores) with
@@ -997,12 +991,8 @@ def retry(timeout=120, interval=30, wait_on=Exception):
 
 
 @retry(wait_on=RuntimeError)
-def git_ls_remote(baseurl, ref, credential_helper=None):
-    cmd = ["git"]
-    if credential_helper:
-        cmd.extend(["-c", "credential.useHttpPath=true"])
-        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
-    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)
+def git_ls_remote(baseurl, ref):
+    return run(["git", "ls-remote", baseurl, ref], universal_newlines=True)
 
 
 def get_tz_offset():
@@ -1147,16 +1137,3 @@ def read_json_file(file_path):
     """A helper function to read a JSON file."""
     with open(file_path) as f:
         return json.load(f)
-
-
-UNITS = ["", "Ki", "Mi", "Gi", "Ti"]
-
-
-def format_size(sz):
-    sz = float(sz)
-    unit = 0
-    while sz > 1024:
-        sz /= 1024
-        unit += 1
-
-    return "%.3g %sB" % (sz, UNITS[unit])
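
The `get_volid` hunk is a behaviour change, not a cleanup: the master side silently truncates an over-long volume ID to the ISO 9660 limit of 32 bytes, while the 4.3.7 side refuses to produce one at all. The two behaviours side by side, under hypothetical helper names:

    def clamp_volid(volid):
        # master behaviour: silently truncate to the 32-byte limit
        return volid[:32] if volid and len(volid) > 32 else volid

    def check_volid(volid, tried):
        # 4.3.7 behaviour: refuse, so a mislabelled image cannot be produced
        if volid and len(volid) > 32:
            raise ValueError(
                "Could not create volume ID longer than 32 bytes, options are %r"
                % sorted(tried, key=len)
            )
        return volid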
@@ -183,12 +183,11 @@ class CompsFilter(object):
         """
         all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
         for environment in self.tree.xpath("/comps/environment"):
-            for parent_tag in ("grouplist", "optionlist"):
-                for group in environment.xpath("%s/groupid" % parent_tag):
+            for group in environment.xpath("grouplist/groupid"):
                 if group.text not in all_groups:
                     group.getparent().remove(group)
 
-                for group in environment.xpath("%s/groupid[@arch]" % parent_tag):
+            for group in environment.xpath("grouplist/groupid[@arch]"):
                 value = group.attrib.get("arch")
                 values = [v for v in re.split(r"[, ]+", value) if v]
                 if arch not in values:
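
This hunk narrows the comps cleanup: master prunes unknown group IDs from both the `grouplist` and the `optionlist` of each environment, 4.3.7 only from `grouplist`. A self-contained lxml sketch of the master behaviour, on a toy document rather than a real comps file:

    from lxml import etree

    comps = etree.fromstring(
        b"<comps><group><id>core</id></group>"
        b"<environment><grouplist><groupid>core</groupid>"
        b"<groupid>gone</groupid></grouplist>"
        b"<optionlist><groupid>gone</groupid></optionlist></environment></comps>"
    )
    known = set(comps.xpath("/comps/group/id/text()"))
    for parent_tag in ("grouplist", "optionlist"):  # master walks both lists
        for group in comps.xpath("/comps/environment/%s/groupid" % parent_tag):
            if group.text not in known:
                group.getparent().remove(group)
    print(etree.tostring(comps).decode())  # both "gone" entries are pruned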
@@ -260,34 +260,24 @@ def get_isohybrid_cmd(iso_path, arch):
     return cmd
 
 
-def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
-    if not output_file:
-        output_file = "%s.manifest" % iso_name
-
+def get_manifest_cmd(iso_name, xorriso=False):
     if xorriso:
         return """xorriso -dev %s --find |
         tail -n+2 |
         tr -d "'" |
         cut -c2- |
-        sort >> %s""" % (
+        sort >> %s.manifest""" % (
+            shlex_quote(iso_name),
             shlex_quote(iso_name),
-            shlex_quote(output_file),
         )
     else:
-        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
+        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
+            shlex_quote(iso_name),
             shlex_quote(iso_name),
-            shlex_quote(output_file),
         )
 
 
-def get_volume_id(path, xorriso=False):
-    if xorriso:
-        cmd = ["xorriso", "-indev", path]
-        retcode, output = run(cmd, universal_newlines=True)
-        for line in output.splitlines():
-            if line.startswith("Volume id"):
-                return line.split("'")[1]
-    else:
+def get_volume_id(path):
     cmd = ["isoinfo", "-d", "-i", path]
     retcode, output = run(cmd, universal_newlines=True)
 
@@ -516,21 +506,3 @@ def mount(image, logger=None, use_guestmount=True):
                 util.run_unmount_cmd(["fusermount", "-u", mount_dir], path=mount_dir)
             else:
                 util.run_unmount_cmd(["umount", mount_dir], path=mount_dir)
-
-
-def xorriso_commands(arch, input, output):
-    """List of xorriso commands to modify a bootable image."""
-    commands = [
-        ("-indev", input),
-        ("-outdev", output),
-        # isoinfo -J uses the Joliet tree, and it's used by virt-install
-        ("-joliet", "on"),
-        # Support long filenames in the Joliet trees. Repodata is particularly
-        # likely to run into this limit.
-        ("-compliance", "joliet_long_names"),
-        ("-boot_image", "any", "replay"),
-    ]
-    if arch == "ppc64le":
-        # This is needed for the image to be bootable.
-        commands.append(("-as", "mkisofs", "-U", "--"))
-    return commands
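
On the master side `get_volume_id` can query xorriso as well as isoinfo. A rough standalone equivalent using `subprocess` instead of kobo's `run`; the output-parsing assumptions (xorriso's `Volume id : '...'` line, isoinfo's `Volume id:` line) are noted in comments:

    import subprocess

    def get_volume_id(path, xorriso=False):
        if xorriso:
            out = subprocess.run(
                ["xorriso", "-indev", path],
                capture_output=True, text=True, check=True,
            ).stdout
            for line in out.splitlines():
                if line.startswith("Volume id"):
                    return line.split("'")[1]  # Volume id : 'MY-VOLID'
            raise RuntimeError("Could not read volume ID")
        out = subprocess.run(
            ["isoinfo", "-d", "-i", path],
            capture_output=True, text=True, check=True,
        ).stdout
        for line in out.splitlines():
            if line.startswith("Volume id:"):
                return line[len("Volume id:"):].strip()
        raise RuntimeError("Could not read volume ID")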
@@ -203,12 +203,31 @@ class KojiMock:
         packages = []
 
         # get all rpms in folder
-        rpms = search_rpms(Path(self._packages_dir))
-        for rpm in rpms:
-            info = parse_nvra(rpm.path.stem)
-            if 'module' in info['release']:
-                continue
+        rpms = search_rpms(self._packages_dir)
+        all_rpms = [package.path for package in rpms]
+
+        # get nvras for modular packages
+        nvras = set()
+        for module in self._modules.values():
+            path = os.path.join(
+                self._modules_dir,
+                module.arch,
+                module.nvr,
+            )
+            info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
+
+            for package in info.get_rpm_artifacts():
+                data = parse_nvra(package)
+                nvras.add((data['name'], data['version'], data['release'], data['arch']))
+
+        # and remove modular packages from global list
+        for rpm in all_rpms[:]:
+            data = parse_nvra(os.path.basename(rpm[:-4]))
+            if (data['name'], data['version'], data['release'], data['arch']) in nvras:
+                all_rpms.remove(rpm)
+
+        for rpm in all_rpms:
+            info = parse_nvra(os.path.basename(rpm))
             packages.append({
                 "build_id": RELEASE_BUILD_ID,
                 "name": info['name'],
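
The KojiMock hunk swaps a release-string heuristic (`'module' in info['release']`) for an exact NVRA match against the artifact lists of the modulemd documents. The filtering reduced to a sketch with illustrative names; `parse_nvra` is assumed to come from kobo.rpmlib, which is presumably where the module imports it:

    import os
    from kobo.rpmlib import parse_nvra

    def drop_modular_rpms(all_rpms, modular_artifacts):
        """Drop RPM paths whose (name, version, release, arch) matches any
        module artifact, keeping only non-modular packages."""
        nvras = set()
        for artifact in modular_artifacts:
            d = parse_nvra(artifact)
            nvras.add((d["name"], d["version"], d["release"], d["arch"]))
        kept = []
        for rpm in all_rpms:
            d = parse_nvra(os.path.basename(rpm)[:-4])  # strip ".rpm"
            if (d["name"], d["version"], d["release"], d["arch"]) not in nvras:
                kept.append(rpm)
        return kept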
@@ -14,23 +14,17 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.
 
 
-import contextlib
 import os
 import re
-import socket
-import shutil
 import time
 import threading
-import requests
+import contextlib
 
 import koji
 from kobo.shortcuts import run, force_list
 import six
 from six.moves import configparser, shlex_quote
 import six.moves.xmlrpc_client as xmlrpclib
-from flufl.lock import Lock
-from datetime import timedelta
 
 from .kojimock import KojiMock
 from .. import util
@@ -414,6 +408,92 @@ class KojiWrapper(object):
 
         return cmd
 
+    def get_create_image_cmd(
+        self,
+        name,
+        version,
+        target,
+        arch,
+        ks_file,
+        repos,
+        image_type="live",
+        image_format=None,
+        release=None,
+        wait=True,
+        archive=False,
+        specfile=None,
+        ksurl=None,
+    ):
+        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
+        # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
+        # Examples:
+        #  * name: RHEL-7.0
+        #  * name: Satellite-6.0.1-RHEL-6
+        #  ** -<type>.<arch>
+        #  * version: YYYYMMDD[.n|.t].X
+        #  * release: 1
+
+        cmd = self._get_cmd()
+
+        if image_type == "live":
+            cmd.append("spin-livecd")
+        elif image_type == "appliance":
+            cmd.append("spin-appliance")
+        else:
+            raise ValueError("Invalid image type: %s" % image_type)
+
+        if not archive:
+            cmd.append("--scratch")
+
+        cmd.append("--noprogress")
+
+        if wait:
+            cmd.append("--wait")
+        else:
+            cmd.append("--nowait")
+
+        if specfile:
+            cmd.append("--specfile=%s" % specfile)
+
+        if ksurl:
+            cmd.append("--ksurl=%s" % ksurl)
+
+        if isinstance(repos, list):
+            for repo in repos:
+                cmd.append("--repo=%s" % repo)
+        else:
+            cmd.append("--repo=%s" % repos)
+
+        if image_format:
+            if image_type != "appliance":
+                raise ValueError("Format can be specified only for appliance images'")
+            supported_formats = ["raw", "qcow", "qcow2", "vmx"]
+            if image_format not in supported_formats:
+                raise ValueError(
+                    "Format is not supported: %s. Supported formats: %s"
+                    % (image_format, " ".join(sorted(supported_formats)))
+                )
+            cmd.append("--format=%s" % image_format)
+
+        if release is not None:
+            cmd.append("--release=%s" % release)
+
+        # IMPORTANT: all --opts have to be provided *before* args
+        # Usage:
+        # koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
+
+        cmd.append(name)
+        cmd.append(version)
+        cmd.append(target)
+
+        # i686 -> i386 etc.
+        arch = getBaseArch(arch)
+        cmd.append(arch)
+
+        cmd.append(ks_file)
+
+        return cmd
+
     def _has_connection_error(self, output):
         """Checks if output indicates connection error."""
         return re.search("error: failed to connect\n$", output)
@@ -527,7 +607,6 @@ class KojiWrapper(object):
                 "createImage",
                 "createLiveMedia",
                 "createAppliance",
-                "createKiwiImage",
             ]:
                 continue
 
@@ -707,10 +786,11 @@ class KojiWrapper(object):
         if list_of_args is None and list_of_kwargs is None:
             raise ValueError("One of list_of_args or list_of_kwargs must be set.")
 
-        if list_of_args is not None and not isinstance(list_of_args, list):
-            raise ValueError("list_of_args must be list or None.")
-        if list_of_kwargs is not None and not isinstance(list_of_kwargs, list):
-            raise ValueError("list_of_kwargs must be list or None.")
+        if type(list_of_args) not in [type(None), list] or type(list_of_kwargs) not in [
+            type(None),
+            list,
+        ]:
+            raise ValueError("list_of_args and list_of_kwargs must be list or None.")
 
         if list_of_kwargs is None:
             list_of_kwargs = [{}] * len(list_of_args)
@@ -724,9 +804,9 @@ class KojiWrapper(object):
 
         koji_session.multicall = True
         for args, kwargs in zip(list_of_args, list_of_kwargs):
-            if not isinstance(args, list):
+            if type(args) != list:
                 args = [args]
-            if not isinstance(kwargs, dict):
+            if type(kwargs) != dict:
                 raise ValueError("Every item in list_of_kwargs must be a dict")
             koji_session_fnc(*args, **kwargs)
 
@@ -734,7 +814,7 @@ class KojiWrapper(object):
 
         if not responses:
             return None
-        if not isinstance(responses, list):
+        if type(responses) != list:
             raise ValueError(
                 "Fault element was returned for multicall of method %r: %r"
                 % (koji_session_fnc, responses)
@@ -750,7 +830,7 @@ class KojiWrapper(object):
         # a one-item array containing the result value,
         # or a struct of the form found inside the standard <fault> element.
         for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
-            if isinstance(response, list):
+            if type(response) == list:
                 if not response:
                     raise ValueError(
                         "Empty list returned for multicall of method %r with args %r, %r"  # noqa: E501
@@ -855,177 +935,3 @@ def get_buildroot_rpms(compose, task_id):
             continue
         result.append(i)
     return sorted(result)
-
-
-class KojiDownloadProxy:
-    def __init__(self, topdir, topurl, cache_dir, logger):
-        if not topdir:
-            # This will only happen if there is either no koji_profile
-            # configured, or the profile doesn't have a topdir. In the first
-            # case there will be no koji interaction, and the second indicates
-            # broken koji configuration.
-            # We can pretend to have local access in both cases to avoid any
-            # external requests.
-            self.has_local_access = True
-            return
-
-        self.cache_dir = cache_dir
-        self.logger = logger
-
-        self.topdir = topdir
-        self.topurl = topurl
-
-        # If cache directory is configured, we want to use it (even if we
-        # actually have local access to the storage).
-        self.has_local_access = not bool(cache_dir)
-        # This is used for temporary downloaded files. The suffix is unique
-        # per-process. To prevent threads in the same process from colliding, a
-        # thread id is added later.
-        self.unique_suffix = "%s.%s" % (socket.gethostname(), os.getpid())
-        self.session = None
-        if not self.has_local_access:
-            self.session = requests.Session()
-
-    @property
-    def path_prefix(self):
-        dir = self.topdir if self.has_local_access else self.cache_dir
-        return dir.rstrip("/") + "/"
-
-    @classmethod
-    def from_config(klass, conf, logger):
-        topdir = None
-        topurl = None
-        cache_dir = None
-        if "koji_profile" in conf:
-            koji_module = koji.get_profile_module(conf["koji_profile"])
-            topdir = koji_module.config.topdir
-            topurl = koji_module.config.topurl
-
-            cache_dir = conf.get("koji_cache")
-            if cache_dir:
-                cache_dir = cache_dir.rstrip("/") + "/"
-        return klass(topdir, topurl, cache_dir, logger)
-
-    @util.retry(wait_on=requests.exceptions.RequestException)
-    def _download(self, url, dest):
-        """Download file into given location
-
-        :param str url: URL of the file to download
-        :param str dest: file path to store the result in
-        :returns: path to the downloaded file (same as dest) or None if the URL
-        """
-        # contextlib.closing is only needed in requests<2.18
-        with contextlib.closing(self.session.get(url, stream=True)) as r:
-            if r.status_code == 404:
-                self.logger.warning("GET %s NOT FOUND", url)
-                return None
-            if r.status_code != 200:
-                self.logger.error("GET %s %s", url, r.status_code)
-                r.raise_for_status()
-            # The exception from here will be retried by the decorator.
-
-            file_size = int(r.headers.get("Content-Length", 0))
-            self.logger.info("GET %s OK %s", url, util.format_size(file_size))
-            with open(dest, "wb") as f:
-                shutil.copyfileobj(r.raw, f)
-            return dest
-
-    def _delete(self, path):
-        """Try to delete file at given path and ignore errors."""
-        try:
-            os.remove(path)
-        except Exception:
-            self.logger.warning("Failed to delete %s", path)
-
-    def _atomic_download(self, url, dest, validator):
-        """Atomically download a file
-
-        :param str url: URL of the file to download
-        :param str dest: file path to store the result in
-        :returns: path to the downloaded file (same as dest) or None if the URL
-            return 404.
-        """
-        temp_file = "%s.%s.%s" % (dest, self.unique_suffix, threading.get_ident())
-
-        # First download to the temporary location.
-        try:
-            if self._download(url, temp_file) is None:
-                # The file was not found.
-                return None
-        except Exception:
-            # Download failed, let's make sure to clean up potentially partial
-            # temporary file.
-            self._delete(temp_file)
-            raise
-
-        # Check if the temporary file is correct (assuming we were provided a
-        # validator function).
-        try:
-            if validator:
-                validator(temp_file)
-        except Exception:
-            # Validation failed. Let's delete the problematic file and re-raise
-            # the exception.
-            self._delete(temp_file)
-            raise
-
-        # Atomically move the temporary file into final location
-        os.rename(temp_file, dest)
-        return dest
-
-    def _download_file(self, path, validator):
-        """Ensure file on Koji volume in ``path`` is present in the local
-        cache.
-
-        :returns: path to the local file or None if file is not found
-        """
-        url = path.replace(self.topdir, self.topurl)
-        destination_file = path.replace(self.topdir, self.cache_dir)
-        util.makedirs(os.path.dirname(destination_file))
-
-        lock = Lock(destination_file + ".lock")
-        # Hold the lock for this file for 5 minutes. If another compose needs
-        # the same file but it's not downloaded yet, the process will wait.
-        #
-        # If the download finishes in time, the downloaded file will be used
-        # here.
-        #
-        # If the download takes longer, this process will steal the lock and
-        # start its own download.
-        #
-        # That should not be a problem: the same file will be downloaded and
-        # then replaced atomically on the filesystem. If the original process
-        # managed to hardlink the first file already, that hardlink will be
-        # broken, but that will only result in the same file stored twice.
-        lock.lifetime = timedelta(minutes=5)
-
-        with lock:
-            # Check if the file already exists. If yes, return the path.
-            if os.path.exists(destination_file):
-                # Update mtime of the file. This covers the case of packages in the
-                # tag that are not included in the compose. Updating mtime will
-                # exempt them from cleanup for extra time.
-                os.utime(destination_file)
-                return destination_file
-
-            return self._atomic_download(url, destination_file, validator)
-
-    def get_file(self, path, validator=None):
-        """
-        If path refers to an existing file in Koji, return a valid local path
-        to it. If no such file exists, return None.
-
-        :param validator: A callable that will be called with the path to the
-            downloaded file if and only if the file was actually downloaded.
-            Any exception raised from there will be abort the download and be
-            propagated.
-        """
-        if self.has_local_access:
-            # We have koji volume mounted locally. No transformation needed for
-            # the path, just check it exists.
-            if os.path.exists(path):
-                return path
-            return None
-        else:
-            # We need to download the file.
-            return self._download_file(path, validator)
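
The removed `KojiDownloadProxy` downloads into a uniquely named temporary file, validates it, and only then renames it into place; `os.rename()` on the same filesystem is atomic, so readers never observe a partial file. The pattern in isolation, with our own names, assuming any `download(url, path)` callable that returns the path or None for a 404:

    import os
    import threading

    def atomic_fetch(download, url, dest, validator=None):
        # Per-pid, per-thread temp name avoids collisions between composes.
        tmp = "%s.%d.%d" % (dest, os.getpid(), threading.get_ident())
        try:
            if download(url, tmp) is None:
                return None  # not found upstream
            if validator:
                validator(tmp)  # raises to reject a corrupt download
        except Exception:
            try:
                os.remove(tmp)  # never leave partial files behind
            except OSError:
                pass
            raise
        os.rename(tmp, dest)  # atomic publish
        return dest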
@@ -109,3 +109,55 @@ class LoraxWrapper(object):
         # TODO: workdir
 
         return cmd
+
+    def get_buildinstall_cmd(
+        self,
+        product,
+        version,
+        release,
+        repo_baseurl,
+        output_dir,
+        variant=None,
+        bugurl=None,
+        nomacboot=False,
+        noupgrade=False,
+        is_final=False,
+        buildarch=None,
+        volid=None,
+        brand=None,
+    ):
+        # RHEL 6 compatibility
+        # Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>  # noqa: E501
+
+        brand = brand or "redhat"
+        # HACK: ignore provided release
+        release = "%s %s" % (brand, version)
+        bugurl = bugurl or "https://bugzilla.redhat.com"
+
+        cmd = ["/usr/lib/anaconda-runtime/buildinstall"]
+
+        cmd.append("--debug")
+
+        cmd.extend(["--version", version])
+        cmd.extend(["--brand", brand])
+        cmd.extend(["--product", product])
+        cmd.extend(["--release", release])
+
+        if is_final:
+            cmd.append("--final")
+
+        if buildarch:
+            cmd.extend(["--buildarch", buildarch])
+
+        if bugurl:
+            cmd.extend(["--bugurl", bugurl])
+
+        output_dir = os.path.abspath(output_dir)
+        cmd.extend(["--output", output_dir])
+
+        for i in force_list(repo_baseurl):
+            if "://" not in i:
+                i = "file://%s" % os.path.abspath(i)
+            cmd.append(i)
+
+        return cmd
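
The restored `get_buildinstall_cmd` only builds an argv list; nothing is executed. A hypothetical call, assuming `LoraxWrapper` can be instantiated with no arguments as elsewhere in this module; note the method deliberately ignores the passed `release` and rebuilds it as "<brand> <version>", and `variant`, `volid`, `nomacboot` and `noupgrade` are accepted but unused in the body above:

    lorax = LoraxWrapper()
    cmd = lorax.get_buildinstall_cmd(
        "Fedora",                       # product
        "21",                           # version
        "21",                           # release (overwritten internally)
        "http://example.com/repo/",     # repo_baseurl
        "/mnt/output",                  # output_dir
        buildarch="x86_64",
        is_final=True,
    )
    # cmd is a plain list suitable for kobo.shortcuts.run()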
@@ -20,7 +20,6 @@ import os
 import shutil
 import glob
 import six
-import threading
 from six.moves import shlex_quote
 from six.moves.urllib.request import urlretrieve
 from fnmatch import fnmatch
@@ -30,15 +29,12 @@ from kobo.shortcuts import run, force_list
 from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
 from .kojiwrapper import KojiWrapper
 
-lock = threading.Lock()
-
-
 class ScmBase(kobo.log.LoggingBase):
-    def __init__(self, logger=None, command=None, compose=None, options=None):
+    def __init__(self, logger=None, command=None, compose=None):
         kobo.log.LoggingBase.__init__(self, logger=logger)
         self.command = command
         self.compose = compose
-        self.options = options or {}
 
     @retry(interval=60, timeout=300, wait_on=RuntimeError)
     def retry_run(self, cmd, **kwargs):
@@ -160,31 +156,22 @@ class GitWrapper(ScmBase):
         if "://" not in repo:
             repo = "file://%s" % repo
 
-        git_cmd = ["git"]
-        if "credential_helper" in self.options:
-            git_cmd.extend(["-c", "credential.useHttpPath=true"])
-            git_cmd.extend(
-                ["-c", "credential.helper=%s" % self.options["credential_helper"]]
-            )
-
         run(["git", "init"], workdir=destdir)
         try:
-            run(git_cmd + ["fetch", "--depth=1", repo, branch], workdir=destdir)
+            run(["git", "fetch", "--depth=1", repo, branch], workdir=destdir)
             run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
         except RuntimeError as e:
             # Fetch failed, to do a full clone we add a remote to our empty
             # repo, get its content and check out the reference we want.
             self.log_debug(
                 "Trying to do a full clone because shallow clone failed: %s %s"
-                % (e, getattr(e, "output", ""))
+                % (e, e.output)
             )
             try:
                 # Re-run git init in case of previous failure breaking .git dir
                 run(["git", "init"], workdir=destdir)
                 run(["git", "remote", "add", "origin", repo], workdir=destdir)
-                self.retry_run(
-                    git_cmd + ["remote", "update", "origin"], workdir=destdir
-                )
+                self.retry_run(["git", "remote", "update", "origin"], workdir=destdir)
                 run(["git", "checkout", branch], workdir=destdir)
             except RuntimeError:
                 if self.compose:
@@ -198,38 +185,19 @@ class GitWrapper(ScmBase):
                     copy_all(destdir, debugdir)
                 raise
 
-    def get_temp_repo_path(self, scm_root, scm_branch):
-        scm_repo = scm_root.split("/")[-1]
-        process_id = os.getpid()
-        tmp_dir = (
-            "/tmp/pungi-temp-git-repos-"
-            + str(process_id)
-            + "/"
-            + scm_repo
-            + "-"
-            + scm_branch
-        )
-        return tmp_dir
-
-    def setup_repo(self, scm_root, scm_branch):
-        tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
-        if not os.path.isdir(tmp_dir):
-            makedirs(tmp_dir)
-            self._clone(scm_root, scm_branch, tmp_dir)
-            self.run_process_command(tmp_dir)
-        return tmp_dir
+        self.run_process_command(destdir)
 
     def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
         scm_dir = scm_dir.lstrip("/")
         scm_branch = scm_branch or "master"
 
+        with temp_dir() as tmp_dir:
             self.log_debug(
                 "Exporting directory %s from git %s (branch %s)..."
                 % (scm_dir, scm_root, scm_branch)
             )
 
-        with lock:
-            tmp_dir = self.setup_repo(scm_root, scm_branch)
+            self._clone(scm_root, scm_branch, tmp_dir)
 
             copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
 
@@ -237,6 +205,7 @@ class GitWrapper(ScmBase):
         scm_file = scm_file.lstrip("/")
         scm_branch = scm_branch or "master"
 
+        with temp_dir() as tmp_dir:
             target_path = os.path.join(target_dir, os.path.basename(scm_file))
 
             self.log_debug(
@@ -244,8 +213,7 @@ class GitWrapper(ScmBase):
                 % (scm_file, scm_root, scm_branch)
            )
 
-        with lock:
-            tmp_dir = self.setup_repo(scm_root, scm_branch)
+            self._clone(scm_root, scm_branch, tmp_dir)
 
             makedirs(target_dir)
             shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
@@ -393,19 +361,15 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
         scm_file = os.path.abspath(scm_dict)
         scm_branch = None
         command = None
-        options = {}
     else:
         scm_type = scm_dict["scm"]
         scm_repo = scm_dict["repo"]
         scm_file = scm_dict["file"]
         scm_branch = scm_dict.get("branch", None)
         command = scm_dict.get("command")
-        options = scm_dict.get("options", {})
 
     logger = compose._logger if compose else None
-    scm = _get_wrapper(
-        scm_type, logger=logger, command=command, compose=compose, options=options
-    )
+    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
 
     files_copied = []
     for i in force_list(scm_file):
@@ -486,19 +450,15 @@ def get_dir_from_scm(scm_dict, target_path, compose=None):
         scm_dir = os.path.abspath(scm_dict)
         scm_branch = None
         command = None
-        options = {}
    else:
         scm_type = scm_dict["scm"]
         scm_repo = scm_dict.get("repo", None)
         scm_dir = scm_dict["dir"]
         scm_branch = scm_dict.get("branch", None)
         command = scm_dict.get("command")
-        options = scm_dict.get("options", {})
 
     logger = compose._logger if compose else None
-    scm = _get_wrapper(
-        scm_type, logger=logger, command=command, compose=compose, options=options
-    )
+    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
 
     with temp_dir(prefix="scm_checkout_") as tmp_dir:
         scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch, target_dir=tmp_dir)
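
The scm.py hunks change the checkout strategy: 4.3.7 clones into a fresh `temp_dir()` on every export, while master keeps one clone per (repo, branch) per process under /tmp and serializes access with a module-level lock. The caching idea reduced to a sketch; `cache_root` is our name, the real code derives the path from the pid:

    import os
    import threading

    _lock = threading.Lock()

    def cached_checkout(clone, repo, branch, cache_root):
        """clone(repo, branch, dest) runs only on the first request; later
        callers for the same (repo, branch) reuse the existing directory."""
        dest = os.path.join(cache_root, "%s-%s" % (repo.split("/")[-1], branch))
        with _lock:  # one clone at a time, and no racing on the cache check
            if not os.path.isdir(dest):
                os.makedirs(dest)
                clone(repo, branch, dest)
        return dest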
@@ -276,6 +276,7 @@ class Variant(object):
         modules=None,
         modular_koji_tags=None,
     ):
 
         environments = environments or []
         buildinstallpackages = buildinstallpackages or []
 
pungi_utils/orchestrator.py (new file, 705 lines)
@@ -0,0 +1,705 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function
+
+import argparse
+import atexit
+import errno
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+import threading
+from collections import namedtuple
+
+import kobo.conf
+import kobo.log
+import productmd
+from kobo import shortcuts
+from six.moves import configparser, shlex_quote
+
+import pungi.util
+from pungi.compose import get_compose_dir
+from pungi.linker import linker_pool
+from pungi.phases.pkgset.sources.source_koji import get_koji_event_raw
+from pungi.util import find_old_compose, parse_koji_event, temp_dir
+from pungi.wrappers.kojiwrapper import KojiWrapper
+
+
+Config = namedtuple(
+    "Config",
+    [
+        # Path to directory with the compose
+        "target",
+        "compose_type",
+        "label",
+        # Path to the selected old compose that will be reused
+        "old_compose",
+        # Path to directory with config file copies
+        "config_dir",
+        # Which koji event to use (if any)
+        "event",
+        # Additional arguments to pungi-koji executable
+        "extra_args",
+    ],
+)
+
+log = logging.getLogger(__name__)
+
+
+class Status(object):
+    # Ready to start
+    READY = "READY"
+    # Waiting for dependencies to finish.
+    WAITING = "WAITING"
+    # Part is currently running
+    STARTED = "STARTED"
+    # A dependency failed, this one will never start.
+    BLOCKED = "BLOCKED"
+
+
+class ComposePart(object):
+    def __init__(self, name, config, just_phase=[], skip_phase=[], dependencies=[]):
+        self.name = name
+        self.config = config
+        self.status = Status.WAITING if dependencies else Status.READY
+        self.just_phase = just_phase
+        self.skip_phase = skip_phase
+        self.blocked_on = set(dependencies)
+        self.depends_on = set(dependencies)
+        self.path = None
+        self.log_file = None
+        self.failable = False
+
+    def __str__(self):
+        return self.name
+
+    def __repr__(self):
+        return (
+            "ComposePart({0.name!r},"
+            " {0.config!r},"
+            " {0.status!r},"
+            " just_phase={0.just_phase!r},"
+            " skip_phase={0.skip_phase!r},"
+            " dependencies={0.depends_on!r})"
+        ).format(self)
+
+    def refresh_status(self):
+        """Refresh status of this part with the result of the compose. This
+        should only be called once the compose finished.
+        """
+        try:
+            with open(os.path.join(self.path, "STATUS")) as fh:
+                self.status = fh.read().strip()
+        except IOError as exc:
+            log.error("Failed to update status of %s: %s", self.name, exc)
+            log.error("Assuming %s is DOOMED", self.name)
+            self.status = "DOOMED"
+
+    def is_finished(self):
+        return "FINISHED" in self.status
+
+    def unblock_on(self, finished_part):
+        """Update set of blockers for this part. If it's empty, mark us as ready."""
+        self.blocked_on.discard(finished_part)
+        if self.status == Status.WAITING and not self.blocked_on:
+            log.debug("%s is ready to start", self)
+            self.status = Status.READY
+
+    def setup_start(self, global_config, parts):
+        substitutions = dict(
+            ("part-%s" % name, p.path) for name, p in parts.items() if p.is_finished()
+        )
+        substitutions["configdir"] = global_config.config_dir
+
+        config = pungi.util.load_config(self.config)
+
+        for f in config.opened_files:
+            # apply substitutions
+            fill_in_config_file(f, substitutions)
+
+        self.status = Status.STARTED
+        self.path = get_compose_dir(
+            os.path.join(global_config.target, "parts"),
+            config,
+            compose_type=global_config.compose_type,
+            compose_label=global_config.label,
+        )
+        self.log_file = os.path.join(global_config.target, "logs", "%s.log" % self.name)
+        log.info("Starting %s in %s", self.name, self.path)
+
+    def get_cmd(self, global_config):
+        cmd = ["pungi-koji", "--config", self.config, "--compose-dir", self.path]
+        cmd.append("--%s" % global_config.compose_type)
+        if global_config.label:
+            cmd.extend(["--label", global_config.label])
+        for phase in self.just_phase:
+            cmd.extend(["--just-phase", phase])
+        for phase in self.skip_phase:
+            cmd.extend(["--skip-phase", phase])
+        if global_config.old_compose:
+            cmd.extend(
+                ["--old-compose", os.path.join(global_config.old_compose, "parts")]
+            )
+        if global_config.event:
+            cmd.extend(["--koji-event", str(global_config.event)])
+        if global_config.extra_args:
+            cmd.extend(global_config.extra_args)
+        cmd.extend(["--no-latest-link"])
+        return cmd
+
+    @classmethod
+    def from_config(cls, config, section, config_dir):
+        part = cls(
+            name=section,
+            config=os.path.join(config_dir, config.get(section, "config")),
+            just_phase=_safe_get_list(config, section, "just_phase", []),
+            skip_phase=_safe_get_list(config, section, "skip_phase", []),
+            dependencies=_safe_get_list(config, section, "depends_on", []),
+        )
+        if config.has_option(section, "failable"):
+            part.failable = config.getboolean(section, "failable")
+        return part
+
+
+def _safe_get_list(config, section, option, default=None):
+    """Get a value from config parser. The result is split into a list on
+    commas or spaces, and `default` is returned if the key does not exist.
+    """
+    if config.has_option(section, option):
+        value = config.get(section, option)
+        return [x.strip() for x in re.split(r"[, ]+", value) if x]
+    return default
+
+
+def fill_in_config_file(fp, substs):
+    """Templating function. It works with Jinja2 style placeholders such as
+    {{foo}}. Whitespace around the key name is fine. The file is modified in place.
+
+    :param fp string: path to the file to process
+    :param substs dict: a mapping for values to put into the file
+    """
+
+    def repl(match):
+        try:
+            return substs[match.group(1)]
+        except KeyError as exc:
+            raise RuntimeError(
+                "Unknown placeholder %s in %s" % (exc, os.path.basename(fp))
+            )
+
+    with open(fp, "r") as f:
+        contents = re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, f.read())
+    with open(fp, "w") as f:
+        f.write(contents)
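
`fill_in_config_file` implements a tiny `{{key}}` substitution language. The same regex applied to a string instead of a file, to show exactly what it accepts (keys made of letters, hyphens and underscores, with optional spaces inside the braces):

    import re

    substs = {"configdir": "/etc/pungi", "part-base": "/composes/base"}
    text = "tree_dir = {{ configdir }}/trees\nold = {{part-base}}/compose"
    resolved = re.sub(r"{{ *([a-zA-Z-_]+) *}}", lambda m: substs[m.group(1)], text)
    print(resolved)
    # tree_dir = /etc/pungi/trees
    # old = /composes/base/compose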
+
+
+def start_part(global_config, parts, part):
+    part.setup_start(global_config, parts)
+    fh = open(part.log_file, "w")
+    cmd = part.get_cmd(global_config)
+    log.debug("Running command %r", " ".join(shlex_quote(x) for x in cmd))
+    return subprocess.Popen(cmd, stdout=fh, stderr=subprocess.STDOUT)
+
+
+def handle_finished(global_config, linker, parts, proc, finished_part):
+    finished_part.refresh_status()
+    log.info("%s finished with status %s", finished_part, finished_part.status)
+    if proc.returncode == 0:
+        # Success, unblock other parts...
+        for part in parts.values():
+            part.unblock_on(finished_part.name)
+        # ...and link the results into final destination.
+        copy_part(global_config, linker, finished_part)
+        update_metadata(global_config, finished_part)
+    else:
+        # Failure, other stuff may be blocked.
+        log.info("See details in %s", finished_part.log_file)
+        block_on(parts, finished_part.name)
+
+
+def copy_part(global_config, linker, part):
+    c = productmd.Compose(part.path)
+    for variant in c.info.variants:
+        data_path = os.path.join(part.path, "compose", variant)
+        link = os.path.join(global_config.target, "compose", variant)
+        log.info("Hardlinking content %s -> %s", data_path, link)
+        hardlink_dir(linker, data_path, link)
+
+
+def hardlink_dir(linker, srcdir, dstdir):
+    for root, dirs, files in os.walk(srcdir):
+        root = os.path.relpath(root, srcdir)
+        for f in files:
+            src = os.path.normpath(os.path.join(srcdir, root, f))
+            dst = os.path.normpath(os.path.join(dstdir, root, f))
+            linker.queue_put((src, dst))
+
+
+def update_metadata(global_config, part):
+    part_metadata_dir = os.path.join(part.path, "compose", "metadata")
+    final_metadata_dir = os.path.join(global_config.target, "compose", "metadata")
+    for f in os.listdir(part_metadata_dir):
+        # Load the metadata
+        with open(os.path.join(part_metadata_dir, f)) as fh:
+            part_metadata = json.load(fh)
+        final_metadata = os.path.join(final_metadata_dir, f)
+        if os.path.exists(final_metadata):
+            # We already have this file, will need to merge.
+            merge_metadata(final_metadata, part_metadata)
+        else:
+            # A new file, just copy it.
+            copy_metadata(global_config, final_metadata, part_metadata)
+
+
+def copy_metadata(global_config, final_metadata, source):
+    """Copy file to final location, but update compose information."""
+    with open(
+        os.path.join(global_config.target, "compose/metadata/composeinfo.json")
+    ) as f:
+        composeinfo = json.load(f)
+    try:
+        source["payload"]["compose"].update(composeinfo["payload"]["compose"])
+    except KeyError:
+        # No [payload][compose], probably OSBS metadata
+        pass
+    with open(final_metadata, "w") as f:
+        json.dump(source, f, indent=2, sort_keys=True)
+
+
+def merge_metadata(final_metadata, source):
+    with open(final_metadata) as f:
+        metadata = json.load(f)
+
+    try:
+        key = {
+            "productmd.composeinfo": "variants",
+            "productmd.modules": "modules",
+            "productmd.images": "images",
+            "productmd.rpms": "rpms",
+        }[source["header"]["type"]]
+        # TODO what if multiple parts create images for the same variant
+        metadata["payload"][key].update(source["payload"][key])
+    except KeyError:
+        # OSBS metadata, merge whole file
+        metadata.update(source)
+    with open(final_metadata, "w") as f:
+        json.dump(metadata, f, indent=2, sort_keys=True)
+
+
+def block_on(parts, name):
+    """Part ``name`` failed, mark everything depending on it as blocked."""
+    for part in parts.values():
+        if name in part.blocked_on:
+            log.warning("%s is blocked now and will not run", part)
+            part.status = Status.BLOCKED
+            block_on(parts, part.name)
+
+
+def check_finished_processes(processes):
+    """Walk through all active processes and check if something finished."""
+    for proc in processes.keys():
+        proc.poll()
+        if proc.returncode is not None:
+            yield proc, processes[proc]
+
+
+def run_all(global_config, parts):
+    # Mapping subprocess.Popen -> ComposePart
+    processes = dict()
+    remaining = set(p.name for p in parts.values() if not p.is_finished())
+
+    with linker_pool("hardlink") as linker:
+        while remaining or processes:
+            update_status(global_config, parts)
+
+            for proc, part in check_finished_processes(processes):
+                del processes[proc]
+                handle_finished(global_config, linker, parts, proc, part)
+
+            # Start new available processes.
+            for name in list(remaining):
+                part = parts[name]
+                # Start all ready parts
+                if part.status == Status.READY:
+                    remaining.remove(name)
+                    processes[start_part(global_config, parts, part)] = part
+                # Remove blocked parts from todo list
+                elif part.status == Status.BLOCKED:
+                    remaining.remove(part.name)
+
+            # Wait for any child process to finish if there is any.
+            if processes:
+                pid, reason = os.wait()
+                for proc in processes.keys():
+                    # Set the return code for process that we caught by os.wait().
+                    # Calling poll() on it would not set the return code properly
+                    # since the value was already consumed by os.wait().
+                    if proc.pid == pid:
+                        proc.returncode = (reason >> 8) & 0xFF
+
+    log.info("Waiting for linking to finish...")
+    return update_status(global_config, parts)
+
+
+def get_target_dir(config, compose_info, label, reldir=""):
+    """Find directory where this compose will be.
+
+    @param reldir: if target path in config is relative, it will be resolved
+                   against this directory
+    """
+    dir = os.path.realpath(os.path.join(reldir, config.get("general", "target")))
+    target_dir = get_compose_dir(
+        dir,
+        compose_info,
+        compose_type=config.get("general", "compose_type"),
+        compose_label=label,
+    )
+    return target_dir
+
+
+def setup_logging(debug=False):
+    FORMAT = "%(asctime)s: %(levelname)s: %(message)s"
+    level = logging.DEBUG if debug else logging.INFO
+    kobo.log.add_stderr_logger(log, log_level=level, format=FORMAT)
+    log.setLevel(level)
+
+
+def compute_status(statuses):
+    if any(map(lambda x: x[0] in ("STARTED", "WAITING"), statuses)):
+        # If there is anything still running or waiting to start, the whole is
+        # still running.
+        return "STARTED"
+    elif any(map(lambda x: x[0] in ("DOOMED", "BLOCKED") and not x[1], statuses)):
+        # If any required part is doomed or blocked, the whole is doomed
+        return "DOOMED"
+    elif all(map(lambda x: x[0] == "FINISHED", statuses)):
+        # If all parts are complete, the whole is complete
+        return "FINISHED"
+    else:
+        return "FINISHED_INCOMPLETE"
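
`compute_status` rolls up `(status, failable)` pairs, and a failable part that dies does not doom the whole compose. A quick worked example against the function defined above:

    statuses = {("FINISHED", False), ("DOOMED", True)}
    # Nothing is running, the only DOOMED part is failable, and not
    # everything is FINISHED, so the overall result is degraded but usable.
    assert compute_status(statuses) == "FINISHED_INCOMPLETE"
    assert compute_status({("FINISHED", False), ("DOOMED", False)}) == "DOOMED"
    assert compute_status({("STARTED", False)}) == "STARTED"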

def update_status(global_config, parts):
    log.debug("Updating status metadata")
    metadata = {}
    statuses = set()
    for part in parts.values():
        metadata[part.name] = {"status": part.status, "path": part.path}
        statuses.add((part.status, part.failable))
    metadata_path = os.path.join(
        global_config.target, "compose", "metadata", "parts.json"
    )
    with open(metadata_path, "w") as fh:
        json.dump(metadata, fh, indent=2, sort_keys=True, separators=(",", ": "))

    status = compute_status(statuses)
    log.info("Overall status is %s", status)
    with open(os.path.join(global_config.target, "STATUS"), "w") as fh:
        fh.write(status)

    return status != "DOOMED"


def prepare_compose_dir(config, args, main_config_file, compose_info):
    if not hasattr(args, "compose_path"):
        # Creating a brand new compose
        target_dir = get_target_dir(
            config, compose_info, args.label, reldir=os.path.dirname(main_config_file)
        )
        for dir in ("logs", "parts", "compose/metadata", "work/global"):
            try:
                os.makedirs(os.path.join(target_dir, dir))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
        with open(os.path.join(target_dir, "STATUS"), "w") as fh:
            fh.write("STARTED")
        # Copy initial composeinfo for new compose
        shutil.copy(
            os.path.join(target_dir, "work/global/composeinfo-base.json"),
            os.path.join(target_dir, "compose/metadata/composeinfo.json"),
        )
    else:
        # Restarting a particular compose
        target_dir = args.compose_path

    return target_dir


def load_parts_metadata(global_config):
    parts_metadata = os.path.join(global_config.target, "compose/metadata/parts.json")
    with open(parts_metadata) as f:
        return json.load(f)


def setup_for_restart(global_config, parts, to_restart):
    has_stuff_to_do = False
    metadata = load_parts_metadata(global_config)
    for key in metadata:
        # Update state to match what is on disk
        log.debug(
            "Reusing %s (%s) from %s",
            key,
            metadata[key]["status"],
            metadata[key]["path"],
        )
        parts[key].status = metadata[key]["status"]
        parts[key].path = metadata[key]["path"]
    for key in to_restart:
        # Set restarted parts to run again
        parts[key].status = Status.WAITING
        parts[key].path = None

    for key in to_restart:
        # Remove blockers that are already finished
        for blocker in list(parts[key].blocked_on):
            if parts[blocker].is_finished():
                parts[key].blocked_on.discard(blocker)
        if not parts[key].blocked_on:
            log.debug("Part %s in not blocked", key)
            # Nothing blocks it; let's go
            parts[key].status = Status.READY
            has_stuff_to_do = True

    if not has_stuff_to_do:
        raise RuntimeError("All restarted parts are blocked. Nothing to do.")


def run_kinit(config):
    if not config.getboolean("general", "kerberos"):
        return

    keytab = config.get("general", "kerberos_keytab")
    principal = config.get("general", "kerberos_principal")

    fd, fname = tempfile.mkstemp(prefix="krb5cc_pungi-orchestrate_")
    os.close(fd)
    os.environ["KRB5CCNAME"] = fname
    shortcuts.run(["kinit", "-k", "-t", keytab, principal])
    log.debug("Created a kerberos ticket for %s", principal)

    atexit.register(os.remove, fname)


def get_compose_data(compose_path):
    try:
        compose = productmd.compose.Compose(compose_path)
        data = {
            "compose_id": compose.info.compose.id,
            "compose_date": compose.info.compose.date,
            "compose_type": compose.info.compose.type,
            "compose_respin": str(compose.info.compose.respin),
            "compose_label": compose.info.compose.label,
            "release_id": compose.info.release_id,
            "release_name": compose.info.release.name,
            "release_short": compose.info.release.short,
            "release_version": compose.info.release.version,
            "release_type": compose.info.release.type,
            "release_is_layered": compose.info.release.is_layered,
        }
        if compose.info.release.is_layered:
            data.update(
                {
                    "base_product_name": compose.info.base_product.name,
                    "base_product_short": compose.info.base_product.short,
                    "base_product_version": compose.info.base_product.version,
                    "base_product_type": compose.info.base_product.type,
                }
            )
        return data
    except Exception:
        return {}


def get_script_env(compose_path):
    env = os.environ.copy()
    env["COMPOSE_PATH"] = compose_path
    for key, value in get_compose_data(compose_path).items():
        if isinstance(value, bool):
            env[key.upper()] = "YES" if value else ""
        else:
            env[key.upper()] = str(value) if value else ""
    return env
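
# Illustrative note, not part of the original module: a hook executed by
# run_scripts() below inherits the variables built here. For a hypothetical
# compose the environment could contain, e.g.:
#
#     COMPOSE_PATH=/mnt/compose/Test-1.0-20200101.0
#     COMPOSE_ID=Test-1.0-20200101.0
#     RELEASE_IS_LAYERED=YES      # booleans are mapped to "YES" or ""
#     COMPOSE_LABEL=              # falsy values become an empty string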

def run_scripts(prefix, compose_dir, scripts):
    env = get_script_env(compose_dir)
    for idx, script in enumerate(scripts.strip().splitlines()):
        command = script.strip()
        logfile = os.path.join(compose_dir, "logs", "%s%s.log" % (prefix, idx))
        log.debug("Running command: %r", command)
        log.debug("See output in %s", logfile)
        shortcuts.run(command, env=env, logfile=logfile)


def try_translate_path(parts, path):
    translation = []
    for part in parts.values():
        conf = pungi.util.load_config(part.config)
        translation.extend(conf.get("translate_paths", []))
    return pungi.util.translate_path_raw(translation, path)


def send_notification(compose_dir, command, parts):
    if not command:
        return
    from pungi.notifier import PungiNotifier

    data = get_compose_data(compose_dir)
    data["location"] = try_translate_path(parts, compose_dir)
    notifier = PungiNotifier([command])
    with open(os.path.join(compose_dir, "STATUS")) as f:
        status = f.read().strip()
    notifier.send("status-change", workdir=compose_dir, status=status, **data)


def setup_progress_monitor(global_config, parts):
    """Update configuration so that each part send notifications about its
    progress to the orchestrator.

    There is a file to which the notification is written. The orchestrator is
    reading it and mapping the entries to particular parts. The path to this
    file is stored in an environment variable.
    """
    tmp_file = tempfile.NamedTemporaryFile(prefix="pungi-progress-monitor_")
    os.environ["_PUNGI_ORCHESTRATOR_PROGRESS_MONITOR"] = tmp_file.name
    atexit.register(os.remove, tmp_file.name)

    global_config.extra_args.append(
        "--notification-script=pungi-notification-report-progress"
    )

    def reader():
        while True:
            line = tmp_file.readline()
            if not line:
                time.sleep(0.1)
                continue
            path, msg = line.split(":", 1)
            for part in parts:
                if parts[part].path == os.path.dirname(path):
                    log.debug("%s: %s", part, msg.strip())
                    break

    monitor = threading.Thread(target=reader)
    monitor.daemon = True
    monitor.start()
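
# Illustrative note, not part of the original module: judging from reader()
# above, each notification line has the form "<path>:<message>", where <path>
# is a file directly inside a part's compose directory, and the part is found
# by comparing os.path.dirname(<path>) with part.path. A made-up example:
#
#     /composes/parts/Server-20200101.0/STATUS:phase init started
#
# would be attributed to the part whose path is /composes/parts/Server-20200101.0.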

def run(work_dir, main_config_file, args):
    config_dir = os.path.join(work_dir, "config")
    shutil.copytree(os.path.dirname(main_config_file), config_dir)

    # Read main config
    parser = configparser.RawConfigParser(
        defaults={
            "kerberos": "false",
            "pre_compose_script": "",
            "post_compose_script": "",
            "notification_script": "",
        }
    )
    parser.read(main_config_file)

    # Create kerberos ticket
    run_kinit(parser)

    compose_info = dict(parser.items("general"))
    compose_type = parser.get("general", "compose_type")

    target_dir = prepare_compose_dir(parser, args, main_config_file, compose_info)
    kobo.log.add_file_logger(log, os.path.join(target_dir, "logs", "orchestrator.log"))
    log.info("Composing %s", target_dir)

    run_scripts("pre_compose_", target_dir, parser.get("general", "pre_compose_script"))

    old_compose = find_old_compose(
        os.path.dirname(target_dir),
        compose_info["release_short"],
        compose_info["release_version"],
        "",
    )
    if old_compose:
        log.info("Reusing old compose %s", old_compose)

    global_config = Config(
        target=target_dir,
        compose_type=compose_type,
        label=args.label,
        old_compose=old_compose,
        config_dir=os.path.dirname(main_config_file),
        event=args.koji_event,
        extra_args=_safe_get_list(parser, "general", "extra_args"),
    )

    if not global_config.event and parser.has_option("general", "koji_profile"):
        koji_wrapper = KojiWrapper(parser.get("general", "koji_profile"))
        event_file = os.path.join(global_config.target, "work/global/koji-event")
        result = get_koji_event_raw(koji_wrapper, None, event_file)
        global_config = global_config._replace(event=result["id"])

    parts = {}
    for section in parser.sections():
        if section == "general":
            continue
        parts[section] = ComposePart.from_config(parser, section, config_dir)

    if hasattr(args, "part"):
        setup_for_restart(global_config, parts, args.part)

    setup_progress_monitor(global_config, parts)

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    retcode = run_all(global_config, parts)

    if retcode:
        # Only run the script if we are not doomed.
        run_scripts(
            "post_compose_", target_dir, parser.get("general", "post_compose_script")
        )

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    return retcode


def parse_args(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--koji-event", metavar="ID", type=parse_koji_event)
    subparsers = parser.add_subparsers()
    start = subparsers.add_parser("start")
    start.add_argument("config", metavar="CONFIG")
    start.add_argument("--label")

    restart = subparsers.add_parser("restart")
    restart.add_argument("config", metavar="CONFIG")
    restart.add_argument("compose_path", metavar="COMPOSE_PATH")
    restart.add_argument(
        "part", metavar="PART", nargs="*", help="which parts to restart"
    )
    restart.add_argument("--label")

    return parser.parse_args(argv)
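
# Illustrative usage, not part of the original module; the sub-commands match
# parse_args() above (config file name and label are made up, the entry point
# name comes from setup.py below):
#
#     pungi-orchestrate --debug start multi-compose.conf --label Beta-1.0
#     pungi-orchestrate restart multi-compose.conf /path/to/compose part1 part2
#
# "start" creates a new compose from CONFIG; "restart" reuses an existing
# compose directory and re-runs only the listed parts, reloading the state of
# everything else from compose/metadata/parts.json.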

def main(argv=None):
    args = parse_args(argv)
    setup_logging(args.debug)

    main_config_file = os.path.abspath(args.config)

    with temp_dir() as work_dir:
        try:
            if not run(work_dir, main_config_file, args):
                sys.exit(1)
        except Exception:
            log.exception("Unhandled exception!")
            sys.exit(1)
@@ -148,15 +148,6 @@ class UnifiedISO(object):
                 new_path = os.path.join(self.temp_dir, "trees", arch, old_relpath)

                 makedirs(os.path.dirname(new_path))
-                # Resolve symlinks to external files. Symlinks within the
-                # provided `dir` are kept.
-                if os.path.islink(old_path):
-                    real_path = os.readlink(old_path)
-                    abspath = os.path.normpath(
-                        os.path.join(os.path.dirname(old_path), real_path)
-                    )
-                    if not abspath.startswith(dir):
-                        old_path = real_path
                 try:
                     self.linker.link(old_path, new_path)
                 except OSError as exc:
@@ -1,7 +1,7 @@
 # Some packages must be installed via dnf/yum first, see doc/contributing.rst
+dict.sorted
 dogpile.cache
-flufl.lock ; python_version >= '3.0'
-flufl.lock < 3.0 ; python_version <= '2.7'
+funcsigs
 jsonschema
 kobo
 koji
@@ -12,4 +12,4 @@ ordered_set
 productmd
 pykickstart
 python-multilib
-urlgrabber ; python_version < '3.0'
+urlgrabber
setup.py (13 lines changed)
@@ -5,9 +5,14 @@
 import os
 import glob

+import distutils.command.sdist
 from setuptools import setup


+# override default tarball format with bzip2
+distutils.command.sdist.sdist.default_format = {"posix": "bztar"}
+
+
 # recursively scan for python modules to be included
 package_root_dirs = ["pungi", "pungi_utils"]
 packages = set()
@@ -20,7 +25,7 @@ packages = sorted(packages)

 setup(
     name="pungi",
-    version="4.7.0",
+    version="4.3.7",
     description="Distribution compose tool",
     url="https://pagure.io/pungi",
     author="Dennis Gilmore",
@@ -32,16 +37,15 @@ setup(
             "comps_filter = pungi.scripts.comps_filter:main",
             "pungi = pungi.scripts.pungi:main",
             "pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
-            "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
             "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
             "pungi-make-ostree = pungi.ostree:main",
             "pungi-notification-report-progress = pungi.scripts.report_progress:main",
+            "pungi-orchestrate = pungi_utils.orchestrator:main",
             "pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",  # noqa: E501
             "pungi-koji = pungi.scripts.pungi_koji:cli_main",
             "pungi-gather = pungi.scripts.pungi_gather:cli_main",
             "pungi-config-dump = pungi.scripts.config_dump:cli_main",
             "pungi-config-validate = pungi.scripts.config_validate:cli_main",
-            "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
             "pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
             "pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
             "pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main",  # noqa: E501
@@ -50,7 +54,6 @@ setup(
     },
     scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
     data_files=[
-        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
         ("/usr/share/pungi", glob.glob("share/*.xsl")),
         ("/usr/share/pungi", glob.glob("share/*.ks")),
         ("/usr/share/pungi", glob.glob("share/*.dtd")),
@@ -66,5 +69,5 @@ setup(
         "dogpile.cache",
     ],
     extras_require={':python_version=="2.7"': ["enum34", "lockfile"]},
-    tests_require=["pytest", "pytest-cov", "pyfakefs"],
+    tests_require=["mock", "pytest", "pytest-cov", "pyfakefs"],
 )
sources (1 line changed)
@@ -1 +0,0 @@
-SHA512 (pungi-4.7.0.tar.bz2) = 55c7527a0dff6efa8ed13b1ccdfd3628686fadb55b78fb456e552f4972b831aa96f3ff37ac54837462d91df834157f38426e6b66b52216e1e5861628df724eca
@@ -1,5 +1,5 @@
-mock; python_version < '3.3'
+mock
 parameterized
 pytest
 pytest-cov
-unittest2; python_version < '3.0'
+unittest2
@@ -1,4 +1,4 @@
-FROM registry.fedoraproject.org/fedora:latest
+FROM fedora:33
 LABEL \
     name="Pungi test" \
     description="Run tests using tox with Python 3" \
@@ -108,7 +108,6 @@
       <groupid>core</groupid>
     </grouplist>
     <optionlist>
-      <groupid arch="x86_64">standard</groupid>
     </optionlist>
   </environment>

@@ -2,7 +2,6 @@

 import difflib
 import errno
-import hashlib
 import os
 import shutil
 import tempfile
@@ -22,15 +21,6 @@ from pungi import paths, checks
 from pungi.module_util import Modulemd


-GIT_WITH_CREDS = [
-    "git",
-    "-c",
-    "credential.useHttpPath=true",
-    "-c",
-    "credential.helper=!ch",
-]
-
-
 class BaseTestCase(unittest.TestCase):
     def assertFilesEqual(self, fn1, fn2):
         with open(fn1, "rb") as f1:
@@ -168,20 +158,6 @@ class IterableMock(mock.Mock):
         return iter([])


-class FSKojiDownloader(object):
-    """Mock for KojiDownloadProxy that checks provided path."""
-
-    def get_file(self, path, validator=None):
-        return path if os.path.isfile(path) else None
-
-
-class DummyKojiDownloader(object):
-    """Mock for KojiDownloadProxy that always finds the file in original location."""
-
-    def get_file(self, path, validator=None):
-        return path
-
-
 class DummyCompose(object):
     def __init__(self, topdir, config):
         self.supported = True
@@ -256,8 +232,6 @@ class DummyCompose(object):
         self.cache_region = None
         self.containers_metadata = {}
         self.load_old_compose_config = mock.Mock(return_value=None)
-        self.koji_downloader = DummyKojiDownloader()
-        self.koji_downloader.path_prefix = "/prefix"

     def setup_optional(self):
         self.all_variants["Server-optional"] = MockVariant(
@@ -298,7 +272,7 @@ class DummyCompose(object):
         return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=self.topdir)


-def touch(path, content=None, mode=None):
+def touch(path, content=None):
     """Helper utility that creates an dummy file in given location. Directories
     will be created."""
     content = content or (path + "\n")
@@ -310,8 +284,6 @@ def touch(path, content=None, mode=None):
         content = content.encode()
     with open(path, "wb") as f:
         f.write(content)
-    if mode:
-        os.chmod(path, mode)
     return path


@@ -362,9 +334,3 @@ def fake_run_in_threads(func, params, threads=None):
     """Like run_in_threads from Kobo, but actually runs tasks serially."""
     for num, param in enumerate(params):
         func(None, param, num)
-
-
-def hash_string(alg, s):
-    m = hashlib.new(alg)
-    m.update(s.encode("utf-8"))
-    return m.hexdigest()
@@ -254,7 +254,6 @@ class TestBuildinstallPhase(PungiTestCase):
     def test_starts_threads_for_each_cmd_with_lorax_koji_plugin(
         self, get_volid, poolCls
     ):
-        topurl = "https://example.com/composes/"
         compose = BuildInstallCompose(
             self.topdir,
             {
@@ -265,7 +264,6 @@ class TestBuildinstallPhase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "lorax_use_koji_plugin": True,
                 "disc_types": {"dvd": "DVD"},
-                "translate_paths": [(self.topdir, topurl)],
             },
         )

@@ -282,9 +280,9 @@ class TestBuildinstallPhase(PungiTestCase):
                 "version": "1",
                 "release": "1",
                 "sources": [
-                    topurl + "work/amd64/repo/p1",
-                    topurl + "work/amd64/repo/p2",
-                    topurl + "work/amd64/comps_repo_Server",
+                    self.topdir + "/work/amd64/repo/p1",
+                    self.topdir + "/work/amd64/repo/p2",
+                    self.topdir + "/work/amd64/comps_repo_Server",
                 ],
                 "variant": "Server",
                 "installpkgs": ["bash", "vim"],
@@ -301,6 +299,7 @@ class TestBuildinstallPhase(PungiTestCase):
                 "rootfs-size": None,
                 "dracut-args": [],
                 "skip_branding": False,
+                "outputdir": self.topdir + "/work/amd64/buildinstall/Server",
                 "squashfs_only": False,
                 "configuration_file": None,
             },
@@ -309,9 +308,9 @@ class TestBuildinstallPhase(PungiTestCase):
                 "version": "1",
                 "release": "1",
                 "sources": [
-                    topurl + "work/amd64/repo/p1",
-                    topurl + "work/amd64/repo/p2",
-                    topurl + "work/amd64/comps_repo_Client",
+                    self.topdir + "/work/amd64/repo/p1",
+                    self.topdir + "/work/amd64/repo/p2",
+                    self.topdir + "/work/amd64/comps_repo_Client",
                 ],
                 "variant": "Client",
                 "installpkgs": [],
@@ -328,6 +327,7 @@ class TestBuildinstallPhase(PungiTestCase):
                 "rootfs-size": None,
                 "dracut-args": [],
                 "skip_branding": False,
+                "outputdir": self.topdir + "/work/amd64/buildinstall/Client",
                 "squashfs_only": False,
                 "configuration_file": None,
             },
@@ -336,9 +336,9 @@ class TestBuildinstallPhase(PungiTestCase):
                 "version": "1",
                 "release": "1",
                 "sources": [
-                    topurl + "work/x86_64/repo/p1",
-                    topurl + "work/x86_64/repo/p2",
-                    topurl + "work/x86_64/comps_repo_Server",
+                    self.topdir + "/work/x86_64/repo/p1",
+                    self.topdir + "/work/x86_64/repo/p2",
+                    self.topdir + "/work/x86_64/comps_repo_Server",
                 ],
                 "variant": "Server",
                 "installpkgs": ["bash", "vim"],
@@ -355,6 +355,7 @@ class TestBuildinstallPhase(PungiTestCase):
                 "rootfs-size": None,
                 "dracut-args": [],
                 "skip_branding": False,
+                "outputdir": self.topdir + "/work/x86_64/buildinstall/Server",
                 "squashfs_only": False,
                 "configuration_file": None,
             },
@@ -472,6 +473,70 @@ class TestBuildinstallPhase(PungiTestCase):
             ],
         )

+    @mock.patch("pungi.phases.buildinstall.ThreadPool")
+    @mock.patch("pungi.phases.buildinstall.LoraxWrapper")
+    @mock.patch("pungi.phases.buildinstall.get_volid")
+    def test_starts_threads_for_each_cmd_with_buildinstall(
+        self, get_volid, loraxCls, poolCls
+    ):
+        compose = BuildInstallCompose(
+            self.topdir,
+            {
+                "bootable": True,
+                "release_name": "Test",
+                "release_short": "t",
+                "release_version": "1",
+                "buildinstall_method": "buildinstall",
+                "disc_types": {"dvd": "DVD"},
+            },
+        )
+
+        get_volid.return_value = "vol_id"
+
+        phase = BuildinstallPhase(compose, self._make_pkgset_phase(["p1"]))
+
+        phase.run()
+
+        # Two items added for processing in total.
+        pool = poolCls.return_value
+        self.assertEqual(2, len(pool.queue_put.mock_calls))
+
+        # Obtained correct lorax commands.
+        six.assertCountEqual(
+            self,
+            loraxCls.return_value.get_buildinstall_cmd.mock_calls,
+            [
+                mock.call(
+                    "Test",
+                    "1",
+                    "1",
+                    [self.topdir + "/work/x86_64/repo/p1"],
+                    self.topdir + "/work/x86_64/buildinstall",
+                    buildarch="x86_64",
+                    is_final=True,
+                    volid="vol_id",
+                ),
+                mock.call(
+                    "Test",
+                    "1",
+                    "1",
+                    [self.topdir + "/work/amd64/repo/p1"],
+                    self.topdir + "/work/amd64/buildinstall",
+                    buildarch="amd64",
+                    is_final=True,
+                    volid="vol_id",
+                ),
+            ],
+        )
+        six.assertCountEqual(
+            self,
+            get_volid.mock_calls,
+            [
+                mock.call(compose, "x86_64", disc_type="DVD"),
+                mock.call(compose, "amd64", disc_type="DVD"),
+            ],
+        )
+
     @mock.patch("pungi.phases.buildinstall.get_file")
     @mock.patch("pungi.phases.buildinstall.ThreadPool")
     @mock.patch("pungi.phases.buildinstall.LoraxWrapper")
@@ -1144,7 +1209,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )
@@ -1233,9 +1297,9 @@ class BuildinstallThreadTestCase(PungiTestCase):
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
     @mock.patch("pungi.phases.buildinstall.run")
-    @mock.patch("pungi.phases.buildinstall.download_and_extract_archive")
+    @mock.patch("pungi.phases.buildinstall.move_all")
     def test_buildinstall_thread_with_lorax_using_koji_plugin(
-        self, download, run, get_buildroot_rpms, KojiWrapperMock, mock_tweak, mock_link
+        self, move_all, run, get_buildroot_rpms, KojiWrapperMock, mock_tweak, mock_link
     ):
         compose = BuildInstallCompose(
             self.topdir,
@@ -1244,7 +1308,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "lorax_use_koji_plugin": True,
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )
@@ -1281,8 +1344,9 @@ class BuildinstallThreadTestCase(PungiTestCase):
                     self.cmd,
                     channel=None,
                    packages=["lorax"],
+                    mounts=[self.topdir],
                     weight=123,
-                    chown_uid=None,
+                    chown_uid=os.getuid(),
                 )
             ],
         )
@@ -1323,18 +1387,151 @@ class BuildinstallThreadTestCase(PungiTestCase):
             [mock.call(compose, "x86_64", compose.variants["Server"], False)],
         )
         self.assertEqual(
-            download.call_args_list,
+            move_all.call_args_list,
             [
-                mock.call(compose, 1234, "results.tar.gz", destdir),
+                mock.call(os.path.join(destdir, "results"), destdir, rm_src_dir=True),
                 mock.call(
-                    compose,
-                    1234,
-                    "logs.tar.gz",
+                    os.path.join(destdir, "logs"),
                     os.path.join(self.topdir, "logs/x86_64/buildinstall-Server-logs"),
+                    rm_src_dir=True,
                 ),
             ],
         )

+    @mock.patch("pungi.phases.buildinstall.link_boot_iso")
+    @mock.patch("pungi.phases.buildinstall.tweak_buildinstall")
+    @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
+    @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
+    @mock.patch("pungi.phases.buildinstall.run")
+    def test_buildinstall_thread_with_buildinstall_in_runroot(
+        self, run, get_buildroot_rpms, KojiWrapperMock, mock_tweak, mock_link
+    ):
+        compose = BuildInstallCompose(
+            self.topdir,
+            {
+                "buildinstall_method": "buildinstall",
+                "runroot_tag": "rrt",
+                "koji_profile": "koji",
+            },
+        )
+
+        get_buildroot_rpms.return_value = ["bash", "zsh"]
+
+        get_runroot_cmd = KojiWrapperMock.return_value.get_runroot_cmd
+
+        run_runroot_cmd = KojiWrapperMock.return_value.run_runroot_cmd
+        run_runroot_cmd.return_value = {
+            "output": "Foo bar baz",
+            "retcode": 0,
+            "task_id": 1234,
+        }
+
+        t = BuildinstallThread(self.pool)
+
+        with mock.patch("time.sleep"):
+            pkgset_phase = self._make_pkgset_phase(["p1"])
+            t.process((compose, "amd64", None, self.cmd, pkgset_phase), 0)
+
+        destdir = os.path.join(self.topdir, "work/amd64/buildinstall")
+        self.assertEqual(
+            get_runroot_cmd.mock_calls,
+            [
+                mock.call(
+                    "rrt",
+                    "amd64",
+                    self.cmd,
+                    channel=None,
+                    use_shell=True,
+                    packages=["anaconda"],
+                    mounts=[self.topdir],
+                    weight=None,
+                    chown_paths=[destdir],
+                )
+            ],
+        )
+        self.assertEqual(
+            run_runroot_cmd.mock_calls,
+            [
+                mock.call(
+                    get_runroot_cmd.return_value,
+                    log_file=self.topdir + "/logs/amd64/buildinstall.amd64.log",
+                )
+            ],
+        )
+        with open(self.topdir + "/logs/amd64/buildinstall-RPMs.amd64.log") as f:
+            rpms = f.read().strip().split("\n")
+        six.assertCountEqual(self, rpms, ["bash", "zsh"])
+        six.assertCountEqual(self, self.pool.finished_tasks, [(None, "amd64")])
+        six.assertCountEqual(
+            self,
+            mock_tweak.call_args_list,
+            [
+                mock.call(
+                    compose,
+                    destdir,
+                    os.path.join(self.topdir, "compose", var, "amd64/os"),
+                    "amd64",
+                    var,
+                    "",
+                    "dummy-volid",
+                    self.pool.kickstart_file,
+                )
+                for var in ["Client", "Server"]
+            ],
+        )
+        six.assertCountEqual(
+            self,
+            mock_link.call_args_list,
+            [
+                mock.call(compose, "amd64", compose.variants["Client"], False),
+                mock.call(compose, "amd64", compose.variants["Server"], False),
+            ],
+        )
+
+    @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
+    @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
+    @mock.patch("pungi.phases.buildinstall.run")
+    def test_buildinstall_fail_exit_code(
+        self, run, get_buildroot_rpms, KojiWrapperMock
+    ):
+        compose = BuildInstallCompose(
+            self.topdir,
+            {
+                "buildinstall_method": "buildinstall",
+                "runroot_tag": "rrt",
+                "koji_profile": "koji",
+                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
+            },
+        )
+
+        get_buildroot_rpms.return_value = ["bash", "zsh"]
+
+        run_runroot_cmd = KojiWrapperMock.return_value.run_runroot_cmd
+        run_runroot_cmd.return_value = {
+            "output": "Foo bar baz",
+            "retcode": 1,
+            "task_id": 1234,
+        }
+
+        t = BuildinstallThread(self.pool)
+
+        with mock.patch("time.sleep"):
+            pkgset_phase = self._make_pkgset_phase(["p1"])
+            t.process((compose, "x86_64", None, self.cmd, pkgset_phase), 0)
+
+        compose._logger.error.assert_has_calls(
+            [
+                mock.call(
+                    "[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway."  # noqa: E501
+                ),
+                mock.call(
+                    "Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details."  # noqa: E501
+                    % self.topdir
+                ),
+            ]
+        )
+        self.assertEqual(self.pool.finished_tasks, set())
+
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
     @mock.patch("pungi.phases.buildinstall.run")
@@ -1345,7 +1542,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1395,7 +1591,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1468,7 +1663,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1507,7 +1701,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
                 "buildinstall_topdir": "/buildinstall_topdir",
             },
@@ -1617,7 +1810,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -1862,7 +2054,7 @@ class TestSymlinkIso(PungiTestCase):
         )
         self.assertEqual(iso.get_implanted_md5.mock_calls, [mock.call(tgt)])
         self.assertEqual(iso.get_manifest_cmd.mock_calls, [mock.call("image-name")])
-        self.assertEqual(iso.get_volume_id.mock_calls, [mock.call(tgt, None)])
+        self.assertEqual(iso.get_volume_id.mock_calls, [mock.call(tgt)])
         self.assertEqual(
             run.mock_calls,
             [
@@ -1933,7 +2125,7 @@ class TestSymlinkIso(PungiTestCase):
         )
         self.assertEqual(iso.get_implanted_md5.mock_calls, [mock.call(tgt)])
         self.assertEqual(iso.get_manifest_cmd.mock_calls, [mock.call("image-name")])
-        self.assertEqual(iso.get_volume_id.mock_calls, [mock.call(tgt, None)])
+        self.assertEqual(iso.get_volume_id.mock_calls, [mock.call(tgt)])
         self.assertEqual(
             run.mock_calls,
             [
@@ -656,7 +656,6 @@ class ComposeTestCase(unittest.TestCase):
         mocked_requests.post.assert_called_once_with(
             "https://cts.localhost.tld/api/1/composes/",
             auth=mock.ANY,
-            data=None,
             json=expected_json,
         )

@@ -795,16 +794,12 @@ class TracebackTest(unittest.TestCase):
         shutil.rmtree(self.tmp_dir)
         self.patcher.stop()

-    def assertTraceback(self, filename, show_locals=True):
+    def assertTraceback(self, filename):
         self.assertTrue(
             os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
         )
         self.assertEqual(
-            self.Traceback.mock_calls,
-            [
-                mock.call(show_locals=show_locals),
-                mock.call(show_locals=show_locals).get_traceback(),
-            ],
+            self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
         )

     def test_traceback_default(self):
@@ -829,8 +824,8 @@ class RetryRequestTest(unittest.TestCase):
         self.assertEqual(
             mocked_requests.mock_calls,
             [
-                mock.call.post(url, data=None, json=None, auth=None),
-                mock.call.post(url, data=None, json=None, auth=None),
+                mock.call.post(url, json=None, auth=None),
+                mock.call.post(url, json=None, auth=None),
             ],
         )
         self.assertEqual(rv.status_code, 200)
@@ -846,5 +841,5 @@ class RetryRequestTest(unittest.TestCase):

         self.assertEqual(
             mocked_requests.mock_calls,
-            [mock.call.post(url, data=None, json=None, auth=None)],
+            [mock.call.post(url, json=None, auth=None)],
         )
@@ -223,6 +223,22 @@ class BuildinstallConfigTestCase(ConfigTestCase):

         self.assertValidation(cfg, [])

+    def test_buildinstall_with_lorax_options(self):
+        cfg = load_config(
+            PKGSET_REPOS,
+            buildinstall_method="buildinstall",
+            lorax_options=[("^Server$", {})],
+        )
+
+        self.assertValidation(
+            cfg,
+            [
+                checks.CONFLICTS.format(
+                    "buildinstall_method", "buildinstall", "lorax_options"
+                )
+            ],
+        )
+
     def test_lorax_with_lorax_options(self):
         cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax", lorax_options=[])

@@ -424,7 +440,7 @@ class LiveMediaConfigTestCase(ConfigTestCase):
             live_media_version="Rawhide",
         )

-        resolve_git_url.side_effect = lambda x, _helper: x.replace("HEAD", "CAFE")
+        resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")

         self.assertValidation(cfg)
         self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
@@ -5,7 +5,7 @@ from unittest import TestCase, mock, main

 import yaml

-from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo
+from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraRepoInfo

 FOLDER_WITH_TEST_DATA = os.path.join(
     os.path.dirname(
@@ -114,17 +114,14 @@ data:
 ...
 """, Loader=yaml.BaseLoader)

-TEST_REPO_INFO = RepoInfo(
+TEST_REPO_INFO = ExtraRepoInfo(
     path=FOLDER_WITH_TEST_DATA,
     folder='test_repo',
-    is_remote=False,
-)
-TEST_VARIANT_INFO = ExtraVariantInfo(
     name='TestRepo',
     arch='x86_64',
+    is_remote=False,
     packages=[],
     modules=[],
-    repos=[TEST_REPO_INFO]
 )

 BS_BUILD_INFO = {
@@ -164,19 +161,15 @@ class TestCreteExtraRepo(TestCase):
         )
         self.assertEqual(
             [
-                ExtraVariantInfo(
-                    name=f'{build_id}-fake_platform-{arch}',
-                    arch=arch,
-                    packages=packages,
-                    modules=modules,
-                    repos=[
-                        RepoInfo(
-                            path='https://build.cloudlinux.com/'
-                                 f'build_repos/{build_id}/fake_platform',
-                            folder=arch,
-                            is_remote=True,
-                        )
-                    ]
+                ExtraRepoInfo(
+                    path='https://build.cloudlinux.com/'
+                         f'build_repos/{build_id}/fake_platform',
+                    folder=arch,
+                    name=f'{build_id}-fake_platform-{arch}',
+                    arch=arch,
+                    is_remote=True,
+                    packages=packages,
+                    modules=modules,
                 )
             ],
             repos_info,
@@ -204,7 +197,7 @@ class TestCreteExtraRepo(TestCase):
             'CreateExtraRepo._create_local_extra_repo'
         ) as mock__create_local_extra_repo:
             cer = CreateExtraRepo(
-                variants=[TEST_VARIANT_INFO],
+                repos=[TEST_REPO_INFO],
                 bs_auth_token='fake_auth_token',
                 local_repository_path='/path/to/local/repo',
                 clear_target_repo=False,
@@ -4,11 +4,7 @@ import os
 from collections import defaultdict
 from unittest import TestCase, mock, main

-from pungi.scripts.create_packages_json import (
-    PackagesGenerator,
-    RepoInfo,
-    VariantInfo,
-)
+from pungi.scripts.create_packages_json import PackagesGenerator, RepoInfo

 FOLDER_WITH_TEST_DATA = os.path.join(
     os.path.dirname(
@@ -20,6 +16,8 @@ FOLDER_WITH_TEST_DATA = os.path.join(
 test_repo_info = RepoInfo(
     path=FOLDER_WITH_TEST_DATA,
     folder='test_repo',
+    name='TestRepo',
+    arch='x86_64',
     is_remote=False,
     is_reference=True,
 )
@@ -27,18 +25,10 @@ test_repo_info = RepoInfo(
 test_repo_info_2 = RepoInfo(
     path=FOLDER_WITH_TEST_DATA,
     folder='test_repo_2',
-    is_remote=False,
-    is_reference=True,
-)
-variant_info_1 = VariantInfo(
-    name='TestRepo',
-    arch='x86_64',
-    repos=[test_repo_info]
-)
-variant_info_2 = VariantInfo(
     name='TestRepo2',
     arch='x86_64',
-    repos=[test_repo_info_2]
+    is_remote=False,
+    is_reference=True,
 )


@@ -57,12 +47,7 @@ class TestPackagesJson(TestCase):
             'pungi.scripts.create_packages_json.tempfile.NamedTemporaryFile',
         ) as mock_tempfile:
             mock_tempfile.return_value.__enter__.return_value.name = 'tmpfile'
-            packages_generator = PackagesGenerator(
-                variants=[],
-                excluded_packages=[],
-                included_packages=[],
-            )
-            file_name = packages_generator.get_remote_file_content(
+            file_name = PackagesGenerator.get_remote_file_content(
                 file_url='fakeurl')
             mock_requests_get.assert_called_once_with(url='fakeurl')
             mock_tempfile.assert_called_once_with(delete=False)
@@ -75,9 +60,9 @@ class TestPackagesJson(TestCase):

     def test_02_generate_additional_packages(self):
         pg = PackagesGenerator(
-            variants=[
-                variant_info_1,
-                variant_info_2,
+            repos=[
+                test_repo_info,
+                test_repo_info_2,
             ],
             excluded_packages=['zziplib-utils'],
             included_packages=['vim-file*'],
@@ -3,10 +3,8 @@

 import logging
 from unittest import mock
-import contextlib
 import six

-import productmd
 import os

 from tests import helpers
@@ -554,7 +552,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -611,9 +608,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
             iso.get_implanted_md5.call_args_list,
             [mock.call(cmd["iso_path"], logger=compose._logger)],
         )
-        self.assertEqual(
-            iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"], False)]
-        )
+        self.assertEqual(iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"])])

         self.assertEqual(len(compose.im.add.call_args_list), 1)
         args, _ = compose.im.add.call_args_list[0]
@@ -638,7 +633,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "create_jigdo": False,
                 "runroot_weights": {"createiso": 123},
             },
@@ -696,9 +690,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
             iso.get_implanted_md5.call_args_list,
             [mock.call(cmd["iso_path"], logger=compose._logger)],
         )
-        self.assertEqual(
-            iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"], False)]
-        )
+        self.assertEqual(iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"])])

         self.assertEqual(len(compose.im.add.call_args_list), 2)
         for args, _ in compose.im.add.call_args_list:
@@ -725,7 +717,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -789,9 +780,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
             iso.get_implanted_md5.call_args_list,
             [mock.call(cmd["iso_path"], logger=compose._logger)],
         )
-        self.assertEqual(
-            iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"], False)]
-        )
+        self.assertEqual(iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"])])

         self.assertEqual(len(compose.im.add.call_args_list), 1)
         args, _ = compose.im.add.call_args_list[0]
@@ -818,7 +807,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -851,7 +839,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )
@@ -894,7 +881,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )
@@ -972,9 +958,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
             iso.get_implanted_md5.call_args_list,
             [mock.call(cmd["iso_path"], logger=compose._logger)],
         )
-        self.assertEqual(
-            iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"], False)]
-        )
+        self.assertEqual(iso.get_volume_id.call_args_list, [mock.call(cmd["iso_path"])])

         self.assertEqual(len(compose.im.add.call_args_list), 1)
         args, _ = compose.im.add.call_args_list[0]
@@ -1386,9 +1370,7 @@ class CreateisoTryReusePhaseTest(helpers.PungiTestCase):
         )

     def test_old_config_changed(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"createiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
         old_config = compose.conf.copy()
         old_config["release_version"] = "2"
         compose.load_old_compose_config.return_value = old_config
@@ -1401,26 +1383,8 @@ class CreateisoTryReusePhaseTest(helpers.PungiTestCase):
             phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
         )

-    @mock.patch("pungi.phases.createiso.read_json_file")
-    def test_unsigned_packages_allowed(self, read_json_file):
-        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
-        compose.load_old_compose_config.return_value = compose.conf.copy()
-        phase = createiso.CreateisoPhase(compose, mock.Mock())
-        phase.logger = self.logger
-        cmd = {"disc_num": 1, "disc_count": 1}
-
-        opts = CreateIsoOpts(volid="new-volid")
-
-        read_json_file.return_value = {"opts": {"volid": "old-volid"}}
-
-        self.assertFalse(
-            phase.try_reuse(cmd, compose.variants["Server"], "x86_64", opts)
-        )
-
     def test_no_old_metadata(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"createiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         phase = createiso.CreateisoPhase(compose, mock.Mock())
         phase.logger = self.logger
@@ -1433,9 +1397,7 @@ class CreateisoTryReusePhaseTest(helpers.PungiTestCase):

     @mock.patch("pungi.phases.createiso.read_json_file")
     def test_volume_id_differs(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"createiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         phase = createiso.CreateisoPhase(compose, mock.Mock())
         phase.logger = self.logger
@@ -1451,9 +1413,7 @@ class CreateisoTryReusePhaseTest(helpers.PungiTestCase):

     @mock.patch("pungi.phases.createiso.read_json_file")
     def test_packages_differ(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"createiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         phase = createiso.CreateisoPhase(compose, mock.Mock())
         phase.logger = self.logger
@@ -1475,9 +1435,7 @@ class CreateisoTryReusePhaseTest(helpers.PungiTestCase):

     @mock.patch("pungi.phases.createiso.read_json_file")
     def test_runs_perform_reuse(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"createiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"createiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
|
compose.load_old_compose_config.return_value = compose.conf.copy()
|
||||||
phase = createiso.CreateisoPhase(compose, mock.Mock())
|
phase = createiso.CreateisoPhase(compose, mock.Mock())
|
||||||
phase.logger = self.logger
|
phase.logger = self.logger
|
||||||
@ -1631,103 +1589,3 @@ class ComposeConfGetIsoLevelTest(helpers.PungiTestCase):
|
|||||||
compose, compose.variants["Client"], "x86_64"
|
compose, compose.variants["Client"], "x86_64"
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def mk_mount(topdir, images):
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def dummy_mount(path, logger):
|
|
||||||
treeinfo = [
|
|
||||||
"[general]",
|
|
||||||
"family = Test",
|
|
||||||
"version = 1.0",
|
|
||||||
"arch = x86_64",
|
|
||||||
"variant = Server",
|
|
||||||
"[checksums]",
|
|
||||||
]
|
|
||||||
for image in images:
|
|
||||||
helpers.touch(os.path.join(topdir, image.path), image.content)
|
|
||||||
treeinfo.append("%s = sha256:%s" % (image.path, image.checksum))
|
|
||||||
helpers.touch(os.path.join(topdir, ".treeinfo"), "\n".join(treeinfo))
|
|
||||||
yield topdir
|
|
||||||
|
|
||||||
return dummy_mount
|
|
||||||
|
|
||||||
|
|
||||||
class _MockRun:
|
|
||||||
"""This class replaces kobo.shortcuts.run and validates that the correct
|
|
||||||
two commands are called. The assertions can not be done after the tested
|
|
||||||
function finishes because it will clean up the .treeinfo file that needs to
|
|
||||||
be checked.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.num_calls = 0
|
|
||||||
self.asserts = [self._assert_xorriso, self._assert_implantisomd5]
|
|
||||||
|
|
||||||
def __call__(self, cmd, logfile):
|
|
||||||
self.num_calls += 1
|
|
||||||
self.asserts.pop(0)(cmd)
|
|
||||||
|
|
||||||
def _assert_xorriso(self, cmd):
|
|
||||||
assert cmd[0] == "xorriso"
|
|
||||||
ti = productmd.TreeInfo()
|
|
||||||
input_iso = None
|
|
||||||
for i, arg in enumerate(cmd):
|
|
||||||
if arg == "-map":
|
|
||||||
ti.load(cmd[i + 1])
|
|
||||||
if arg == "-outdev":
|
|
||||||
self.temp_iso = cmd[i + 1]
|
|
||||||
if arg == "-indev":
|
|
||||||
input_iso = cmd[i + 1]
|
|
||||||
assert self.input_iso == input_iso
|
|
||||||
assert ti.checksums.checksums[self.image_relative_path] == self.image_checksum
|
|
||||||
|
|
||||||
def _assert_implantisomd5(self, cmd):
|
|
||||||
assert cmd[0] == "/usr/bin/implantisomd5"
|
|
||||||
assert cmd[-1] == self.temp_iso
|
|
||||||
|
|
||||||
|
|
||||||
class DummyImage:
|
|
||||||
def __init__(self, path, content, checksum=None):
|
|
||||||
self.path = path
|
|
||||||
self.content = content
|
|
||||||
self.checksum = checksum or helpers.hash_string("sha256", content)
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch("os.rename")
|
|
||||||
@mock.patch("pungi.phases.createiso.run", new_callable=_MockRun)
|
|
||||||
class FixChecksumsTest(helpers.PungiTestCase):
|
|
||||||
def test_checksum_matches(self, mock_run, mock_rename):
|
|
||||||
compose = helpers.DummyCompose(self.topdir, {})
|
|
||||||
arch = "x86_64"
|
|
||||||
iso_path = "DUMMY_ISO"
|
|
||||||
|
|
||||||
with mock.patch(
|
|
||||||
"pungi.wrappers.iso.mount",
|
|
||||||
new=mk_mount(self.topdir, [DummyImage("images/eltorito.img", "eltorito")]),
|
|
||||||
):
|
|
||||||
createiso.fix_treeinfo_checksums(compose, iso_path, arch)
|
|
||||||
|
|
||||||
self.assertEqual(mock_run.num_calls, 0)
|
|
||||||
self.assertEqual(mock_rename.call_args_list, [])
|
|
||||||
|
|
||||||
def test_checksum_fix(self, mock_run, mock_rename):
|
|
||||||
compose = helpers.DummyCompose(self.topdir, {})
|
|
||||||
arch = "x86_64"
|
|
||||||
img = "images/eltorito.img"
|
|
||||||
content = "eltorito"
|
|
||||||
iso_path = "DUMMY_ISO"
|
|
||||||
mock_run.input_iso = iso_path
|
|
||||||
mock_run.image_relative_path = "images/eltorito.img"
|
|
||||||
mock_run.image_checksum = ("sha256", helpers.hash_string("sha256", content))
|
|
||||||
|
|
||||||
with mock.patch(
|
|
||||||
"pungi.wrappers.iso.mount",
|
|
||||||
new=mk_mount(self.topdir, [DummyImage(img, content, "abc")]),
|
|
||||||
):
|
|
||||||
createiso.fix_treeinfo_checksums(compose, iso_path, arch)
|
|
||||||
|
|
||||||
# The new image was copied over the old one
|
|
||||||
self.assertEqual(
|
|
||||||
mock_rename.call_args_list, [mock.call(mock_run.temp_iso, iso_path)]
|
|
||||||
)
|
|
||||||
|
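Note: the FixChecksumsTest block removed in the hunk above exists only on the master side. It pins down createiso.fix_treeinfo_checksums, which compares the sha256 sums recorded in an ISO's .treeinfo against the actual boot images and, when they differ, remasters the ISO with xorriso and re-runs /usr/bin/implantisomd5 (exactly what _MockRun asserts). A minimal standalone sketch of the detection step, not the Pungi implementation (compute_sha256 and needs_checksum_fix are hypothetical names):

    import hashlib
    import os


    def compute_sha256(path):
        # Hash in chunks so large boot images are not read into memory at once.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                h.update(chunk)
        return h.hexdigest()


    def needs_checksum_fix(mounted_iso_dir, recorded_checksums):
        # recorded_checksums maps a relative path to the sha256 digest that
        # .treeinfo recorded for it; any mismatch means the ISO needs fixing.
        return any(
            compute_sha256(os.path.join(mounted_iso_dir, relpath)) != digest
            for relpath, digest in recorded_checksums.items()
        )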
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-

 from unittest import mock
-from parameterized import parameterized

 import os
 from six.moves import StringIO
@@ -267,6 +266,58 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
             ]
         )

+    def test_bootable_run_buildinstall(self):
+        createiso.write_script(
+            createiso.CreateIsoOpts(
+                output_dir=self.outdir,
+                iso_name="DP-1.0-20160405.t.3-ppc64.iso",
+                volid="DP-1.0-20160405.t.3",
+                graft_points="graft-list",
+                arch="ppc64",
+                buildinstall_method="buildinstall",
+            ),
+            self.out,
+        )
+
+        self.assertScript(
+            [
+                " ".join(
+                    [
+                        "/usr/bin/genisoimage",
+                        "-untranslated-filenames",
+                        "-volid",
+                        "DP-1.0-20160405.t.3",
+                        "-J",
+                        "-joliet-long",
+                        "-rational-rock",
+                        "-translation-table",
+                        "-x",
+                        "./lost+found",
+                        "-part",
+                        "-hfs",
+                        "-r",
+                        "-l",
+                        "-sysid",
+                        "PPC",
+                        "-no-desktop",
+                        "-allow-multidot",
+                        "-chrp-boot",
+                        "-map",
+                        "/usr/lib/anaconda-runtime/boot/mapping",
+                        "-hfs-bless",
+                        "/ppc/mac",
+                        "-o",
+                        "DP-1.0-20160405.t.3-ppc64.iso",
+                        "-graft-points",
+                        "-path-list",
+                        "graft-list",
+                    ]
+                ),
+                " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
+                "isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",  # noqa: E501
+            ]
+        )
+
 @mock.patch("sys.stderr")
 @mock.patch("kobo.shortcuts.run")
 def test_run_with_jigdo_bad_args(self, run, stderr):
@@ -340,27 +391,3 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
                 ),
             ]
         )
-
-    @parameterized.expand(
-        [("644", 0o644), ("664", 0o664), ("666", 0o666), ("2644", 0o2644)]
-    )
-    def test_get_perms_non_executable(self, test_name, mode):
-        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
-        self.assertEqual(createiso._get_perms(path), 0o444)
-
-    @parameterized.expand(
-        [
-            ("544", 0o544),
-            ("554", 0o554),
-            ("555", 0o555),
-            ("744", 0o744),
-            ("755", 0o755),
-            ("774", 0o774),
-            ("775", 0o775),
-            ("777", 0o777),
-            ("2775", 0o2775),
-        ]
-    )
-    def test_get_perms_executable(self, test_name, mode):
-        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
-        self.assertEqual(createiso._get_perms(path), 0o555)
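Note: the parametrized tests removed above (master only) document the permission normalization done by createiso._get_perms: every file placed on the ISO becomes world-readable, keeping at most the executable bit, so any mode maps to either 0o444 or 0o555 (setgid bits included). A sketch of that rule under the same assumptions, not the Pungi source:

    import os
    import stat


    def get_perms(path):
        # Files with any executable bit become 0o555, everything else 0o444,
        # mirroring the behaviour the removed tests assert.
        mode = os.stat(path).st_mode
        if mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
            return 0o555
        return 0o444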
@@ -130,7 +130,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 ),
             ],
         )
-        iso_path = os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso")
         self.assertEqual(
             rcc.call_args_list,
             [
@@ -149,7 +148,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    iso_path=iso_path,
                 )
             ],
         )
@@ -160,7 +158,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 compose,
                 server,
                 "x86_64",
-                iso_path,
+                os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso"),
                 True,
                 additional_variants=["Client"],
             )
@@ -207,7 +205,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 ),
             ],
         )
-        iso_path = os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso")
         self.assertEqual(
             rcc.call_args_list,
             [
@@ -226,7 +223,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    iso_path=iso_path,
                 )
             ],
         )
@@ -237,7 +233,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 compose,
                 server,
                 "x86_64",
-                iso_path,
+                os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso"),
                 True,
                 additional_variants=["Client"],
             )
@@ -282,7 +278,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 ),
             ],
         )
-        iso_path = os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso")
         self.assertEqual(
             rcc.call_args_list,
             [
@@ -301,7 +296,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    iso_path=iso_path,
                 )
             ],
         )
@@ -312,7 +306,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 compose,
                 server,
                 "x86_64",
-                iso_path,
+                os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso"),
                 True,
                 additional_variants=["Client"],
             )
@@ -359,7 +353,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 ),
             ],
         )
-        iso_path = os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso")
         self.assertEqual(
             rcc.call_args_list,
             [
@@ -378,7 +371,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/x86_64/extraiso-my.iso.x86_64.log"
                     ),
-                    iso_path=iso_path,
                 )
             ],
         )
@@ -389,7 +381,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 compose,
                 server,
                 "x86_64",
-                iso_path,
+                os.path.join(self.topdir, "compose/Server/x86_64/iso/my.iso"),
                 False,
                 additional_variants=["Client"],
             )
@@ -431,7 +423,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 ),
             ],
         )
-        iso_path = os.path.join(self.topdir, "compose/Server/source/iso/my.iso")
         self.assertEqual(
             rcc.call_args_list,
             [
@@ -450,7 +441,6 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                     log_file=os.path.join(
                         self.topdir, "logs/src/extraiso-my.iso.src.log"
                     ),
-                    iso_path=iso_path,
                 )
             ],
         )
@@ -461,7 +451,7 @@ class ExtraIsosThreadTest(helpers.PungiTestCase):
                 compose,
                 server,
                 "src",
-                iso_path,
+                os.path.join(self.topdir, "compose/Server/source/iso/my.iso"),
                 False,
                 additional_variants=["Client"],
             )
@@ -898,8 +888,10 @@ class GetIsoContentsTest(helpers.PungiTestCase):
             "images/efiboot.img": os.path.join(iso_dir, "images/efiboot.img"),
         }

-        ggp.side_effect = lambda compose, x: (
-            gp[x[0][len(self.topdir) + 1 :]] if len(x) == 1 else bi_gp
+        ggp.side_effect = (
+            lambda compose, x: gp[x[0][len(self.topdir) + 1 :]]
+            if len(x) == 1
+            else bi_gp
         )
         gp_file = os.path.join(self.topdir, "work/x86_64/iso/my.iso-graft-points")

@@ -1156,9 +1148,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
         )

     def test_buildinstall_changed(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
         thread.bi = mock.Mock()
@@ -1172,9 +1162,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
         )

     def test_no_old_config(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
         opts = CreateIsoOpts()
@@ -1186,9 +1174,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
         )

     def test_old_config_changed(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         old_config = compose.conf.copy()
         old_config["release_version"] = "2"
         compose.load_old_compose_config.return_value = old_config
@@ -1203,9 +1189,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
         )

     def test_no_old_metadata(self):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
@@ -1219,9 +1203,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):

     @mock.patch("pungi.phases.extra_isos.read_json_file")
     def test_volume_id_differs(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
@@ -1238,9 +1220,7 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):

     @mock.patch("pungi.phases.extra_isos.read_json_file")
     def test_packages_differ(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
@@ -1261,41 +1241,9 @@ class ExtraisoTryReusePhaseTest(helpers.PungiTestCase):
             )
         )

-    @mock.patch("pungi.phases.extra_isos.read_json_file")
-    def test_unsigned_packages(self, read_json_file):
-        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
-        compose.load_old_compose_config.return_value = compose.conf.copy()
-        thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
-        thread.logger = self.logger
-        thread.perform_reuse = mock.Mock()
-
-        new_graft_points = os.path.join(self.topdir, "new_graft_points")
-        helpers.touch(new_graft_points)
-        opts = CreateIsoOpts(graft_points=new_graft_points, volid="volid")
-
-        old_graft_points = os.path.join(self.topdir, "old_graft_points")
-        helpers.touch(old_graft_points)
-        dummy_iso_path = "dummy-iso-path/dummy.iso"
-        read_json_file.return_value = {
-            "opts": {
-                "graft_points": old_graft_points,
-                "volid": "volid",
-                "output_dir": os.path.dirname(dummy_iso_path),
-                "iso_name": os.path.basename(dummy_iso_path),
-            },
-        }
-
-        self.assertFalse(
-            thread.try_reuse(
-                compose, compose.variants["Server"], "x86_64", "abcdef", opts
-            )
-        )
-
     @mock.patch("pungi.phases.extra_isos.read_json_file")
     def test_runs_perform_reuse(self, read_json_file):
-        compose = helpers.DummyCompose(
-            self.topdir, {"extraiso_allow_reuse": True, "sigkeys": ["abcdef"]}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"extraiso_allow_reuse": True})
         compose.load_old_compose_config.return_value = compose.conf.copy()
         thread = extra_isos.ExtraIsosThread(compose, mock.Mock())
         thread.logger = self.logger
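Note: throughout this file the master side configures "sigkeys": ["abcdef"], passes the key into try_reuse, and carries an extra test_unsigned_packages case: reusing an ISO from an older compose must be refused when a package on it was not signed with a configured key. A minimal sketch of such a guard (can_reuse_iso and the metadata layout are illustrative, not the actual Pungi API):

    def can_reuse_iso(old_metadata, volid, sigkeys):
        # Reject reuse on any option mismatch or any package whose signing
        # key is not among the compose's configured sigkeys.
        if old_metadata["opts"]["volid"] != volid:
            return False
        return all(
            pkg.get("sigkey") in sigkeys
            for pkg in old_metadata.get("packages", [])
        )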
@@ -2286,7 +2286,6 @@ class DNFDepsolvingTestCase(DepsolvingBase, unittest.TestCase):
         conf = Conf(base_arch)
         conf.persistdir = persistdir
         conf.cachedir = self.cachedir
-        conf.optional_metadata_types = ["filelists"]
         if exclude:
             conf.exclude = exclude
         dnf = DnfWrapper(conf)
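Note: the line removed above is master-only; it asks dnf to fetch filelists metadata, which recent dnf releases treat as optional. Without filelists a depsolver cannot resolve file-path requirements such as /usr/bin/sh. A minimal sketch of the same setting via dnf's Python API (assuming a dnf recent enough to know the option):

    import dnf

    base = dnf.Base()
    # Request the otherwise-optional filelists metadata so file-path
    # dependencies can be resolved during depsolving.
    base.conf.optional_metadata_types = ["filelists"]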
@@ -1057,8 +1057,10 @@ class TestGatherPackages(helpers.PungiTestCase):
     @mock.patch("pungi.phases.gather.get_gather_method")
     def test_hybrid_method(self, get_gather_method, get_variant_packages):
         packages, groups, filters = mock.Mock(), mock.Mock(), mock.Mock()
-        get_variant_packages.side_effect = lambda c, v, a, s, p: (
-            (packages, groups, filters) if s == "comps" else (None, None, None)
+        get_variant_packages.side_effect = (
+            lambda c, v, a, s, p: (packages, groups, filters)
+            if s == "comps"
+            else (None, None, None)
         )
         get_gather_method.return_value.return_value.return_value = {
             "rpm": [],
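Note: this hunk changes layout only. A conditional expression written after a lambda's colon is the lambda body whether or not extra parentheses surround it, so both spellings build the same function; the difference is just where the formatter breaks the lines. A quick check of that equivalence:

    f = lambda s: ("comps" if s == "comps" else None)
    g = (
        lambda s: "comps"
        if s == "comps"
        else None
    )
    assert f("comps") == g("comps") == "comps"
    assert f("module") is None and g("module") is None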
@@ -6,7 +6,6 @@ from pathlib import Path
 from pyfakefs.fake_filesystem_unittest import TestCase

 from pungi.scripts.gather_rpms import search_rpms, copy_rpms, Package
-from productmd.common import parse_nvra

 PATH_TO_REPOS = '/path/to/repos'
 MODULES_YAML_GZ = 'modules.yaml.gz'
@@ -16,13 +15,10 @@ class TestGatherRpms(TestCase):
     maxDiff = None

     FILES_TO_CREATE = [
-        'powertools/Packages/libvirt-6.0.0-28.module_el'
-        '8.3.0+555+a55c8938.i686.rpm',
+        'powertools/Packages/libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm',
         'powertools/Packages/libgit2-devel-0.26.8-2.el8.x86_64.rpm',
-        'powertools/Packages/xalan-j2-2.7.1-38.module_el'
-        '8.0.0+30+832da3a1.noarch.rpm',
-        'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el'
-        '8.0.0+30+832da3a1.noarch.rpm',
+        'powertools/Packages/xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm',
+        'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm',
         'appstream/Packages/OpenEXR-devel-2.2.0-11.el8.i686.rpm',
         'appstream/Packages/mingw-binutils-generic-2.30-1.el8.x86_64.rpm',
         'appstream/Packages/somenonrpm',
@@ -34,98 +30,56 @@ class TestGatherRpms(TestCase):
         os.makedirs(PATH_TO_REPOS)

         for filepath in self.FILES_TO_CREATE:
-            os.makedirs(
-                os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)),
-                exist_ok=True,
-            )
+            os.makedirs(os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)), exist_ok=True)
             open(os.path.join(PATH_TO_REPOS, filepath), 'w').close()

     def test_gather_rpms(self):
         self.assertEqual(
-            [Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
-                                     'el8.3.0+555+a55c8938.i686'),
-                     path=Path(
-                         f'{PATH_TO_REPOS}/powertools/Packages/'
-                         f'libvirt-6.0.0-28.module_el'
-                         f'8.3.0+555+a55c8938.i686.rpm'
-                     )),
-             Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
-                     path=Path(
-                         f'{PATH_TO_REPOS}/powertools/Packages/'
-                         f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
-                     )),
-             Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_el'
-                                     '8.0.0+30+832da3a1.noarch'),
-                     path=Path(
-                         f'{PATH_TO_REPOS}/powertools/Packages/'
-                         f'xalan-j2-2.7.1-38.module_el'
-                         f'8.0.0+30+832da3a1.noarch.rpm'
-                     )),
-             Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
-                                     '8.0.0+30+832da3a1.noarch'),
-                     path=Path(
-                         '/path/to/repos/appstream/Packages/'
-                         'bnd-maven-plugin-3.5.0-4.module_el'
-                         '8.0.0+30+832da3a1.noarch.rpm'
-                     )),
-             Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
-                     path=Path(
-                         f'{PATH_TO_REPOS}/appstream/Packages/'
-                         f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
-                     )),
-             Package(nvra=parse_nvra('mingw-binutils-generic-'
-                                     '2.30-1.el8.x86_64'),
-                     path=Path(
-                         f'{PATH_TO_REPOS}/appstream/Packages/'
-                         f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
-                     ))
-             ],
-            search_rpms(Path(PATH_TO_REPOS))
+            [Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
+                     path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                          f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'),
+             Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
+                     path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                          f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'),
+             Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch',
+                     path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                          f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'),
+             Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch',
+                     path='/path/to/repos/appstream/Packages/'
+                          'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'),
+             Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
+                     path=f'{PATH_TO_REPOS}/appstream/Packages/'
+                          f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'),
+             Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
+                     path=f'{PATH_TO_REPOS}/appstream/Packages/'
+                          f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm')],
+            search_rpms(PATH_TO_REPOS)
         )

     def test_copy_rpms(self):
         target_path = Path('/mnt/koji')
         packages = [

-            Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
-                                    'el8.3.0+555+a55c8938.i686'),
-                    path=Path(
-                        f'{PATH_TO_REPOS}/powertools/Packages/'
-                        f'libvirt-6.0.0-28.module_el'
-                        f'8.3.0+555+a55c8938.i686.rpm'
-                    )),
-            Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
-                    path=Path(
-                        f'{PATH_TO_REPOS}/powertools/Packages/'
-                        f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
-                    )),
-            Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_'
-                                    'el8.0.0+30+832da3a1.noarch'),
-                    path=Path(
-                        f'{PATH_TO_REPOS}/powertools/Packages/'
-                        f'xalan-j2-2.7.1-38.module_el'
-                        f'8.0.0+30+832da3a1.noarch.rpm'
-                    )),
-            Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
-                                    '8.0.0+30+832da3a1.noarch'),
-                    path=Path(
-                        '/path/to/repos/appstream/Packages/'
-                        'bnd-maven-plugin-3.5.0-4.module_el'
-                        '8.0.0+30+832da3a1.noarch.rpm'
-                    )),
-            Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
-                    path=Path(
-                        f'{PATH_TO_REPOS}/appstream/Packages/'
-                        f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
-                    )),
-            Package(nvra=parse_nvra('mingw-binutils-generic-'
-                                    '2.30-1.el8.x86_64'),
-                    path=Path(
-                        f'{PATH_TO_REPOS}/appstream/Packages/'
-                        f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
-                    ))
+            Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
+                    path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm'),
+            Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
+                    path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'),
+            Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch',
+                    path=f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm'),
+            Package(nvra='bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch',
+                    path='/path/to/repos/appstream/Packages/'
+                         'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'),
+            Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
+                    path=f'{PATH_TO_REPOS}/appstream/Packages/'
+                         f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'),
+            Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
+                    path=f'{PATH_TO_REPOS}/appstream/Packages/'
+                         f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm')
         ]
-        copy_rpms(packages, target_path, [])
+        copy_rpms(packages, target_path)

         self.assertCountEqual([
             'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm',
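Note: where the alma side stores the NVRA and the path as plain strings, the master side parses the filename into components and wraps paths in pathlib.Path. productmd's parser splits a name-version-release.arch string into a dict, roughly like this (expected values shown, worth double-checking against your productmd version):

    from productmd.common import parse_nvra

    nvra = parse_nvra("libgit2-devel-0.26.8-2.el8.x86_64")
    assert nvra["name"] == "libgit2-devel"
    assert nvra["version"] == "0.26.8"
    assert nvra["release"] == "2.el8"
    assert nvra["arch"] == "x86_64"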
@@ -122,7 +122,6 @@ class ImageContainerThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -227,7 +226,7 @@ class ImageContainerThreadTest(helpers.PungiTestCase):
                 (self.compose, self.compose.variants["Server"], self.cfg.copy()), 1
             )

-        self.assertRegex(str(ctx.exception), r"task failed: 12345. See .+ for details")
+        self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")
         self.assertRepoFile()
         self.assertKojiCalls(self.cfg)
         self.assertEqual(add_metadata.call_args_list, [])
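Note: the translate_paths pairs seen in these configs map a local compose directory onto the URL where the same tree is served; Pungi's translate_path helper applies them so Koji tasks receive repo URLs instead of filesystem paths. A minimal sketch of that mapping (standalone, not the pungi.util implementation):

    def translate_path(path, mappings):
        # mappings is a list of (local_prefix, remote_prefix) pairs, like the
        # translate_paths compose option; first matching prefix wins here.
        for local, remote in mappings:
            if path.startswith(local):
                return remote + path[len(local):]
        return path


    assert (
        translate_path("/mnt/compose/Server", [("/mnt/compose", "http://root")])
        == "http://root/Server"
    )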
@@ -35,7 +35,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Client|Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -46,7 +45,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         client_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -128,7 +127,6 @@ class TestImageBuildPhase(PungiTestCase):
                 "image_build_version": "Rawhide",
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -139,7 +137,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -190,7 +188,6 @@ class TestImageBuildPhase(PungiTestCase):
                 "image_build_target": "f24",
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -199,7 +196,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -254,7 +251,6 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -265,8 +261,8 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_not_called()
-        phase.pool.queue_put.assert_not_called()
+        self.assertFalse(phase.pool.add.called)
+        self.assertFalse(phase.pool.queue_put.called)

     @mock.patch("pungi.phases.image_build.ThreadPool")
     def test_image_build_set_install_tree(self, ThreadPool):
@@ -290,7 +286,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -302,9 +297,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -358,7 +353,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "translate_paths": [("/my", "http://example.com")],
             },
         )
@@ -370,9 +364,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -425,7 +419,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -437,9 +430,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -498,7 +491,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -509,9 +501,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -567,7 +559,6 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -578,9 +569,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(
             args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),
@@ -611,7 +602,6 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -622,9 +612,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(
             args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),
@@ -655,7 +645,6 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -666,9 +655,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

-        phase.pool.queue_put.assert_called_once()
+        self.assertTrue(phase.pool.queue_put.called_once)
         args, kwargs = phase.pool.queue_put.call_args
         self.assertTrue(args[0][1].get("scratch"))

@@ -692,7 +681,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server-optional$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -704,7 +692,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -756,7 +744,6 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -768,7 +755,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -956,9 +943,7 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_fail(self, Linker, KojiWrapper):
-        compose = DummyCompose(
-            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
-        )
+        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
         pool = mock.Mock()
         cmd = {
             "image_conf": {
@@ -1015,9 +1000,7 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_exception(self, Linker, KojiWrapper):
-        compose = DummyCompose(
-            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
-        )
+        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
        pool = mock.Mock()
         cmd = {
             "image_conf": {
@@ -1063,9 +1046,7 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_fail_only_one_optional(self, Linker, KojiWrapper):
-        compose = DummyCompose(
-            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
-        )
+        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
         pool = mock.Mock()
         cmd = {
             "image_conf": {
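Note: two recurring changes in this file are worth calling out. First, master drops the "koji_cache" option from every test config; on master that option appears to back a local cache for data fetched from Koji, which the alma branch does not carry. Second, master replaces self.assertTrue(mock.called) with Mock.assert_called(), and self.assertTrue(mock.called_once) with Mock.assert_called_once(). The called_once spelling is a classic unittest.mock trap: a Mock auto-creates any attribute you access, and the auto-created child is truthy, so that assertion passes even when the mock was never called. A short demonstration:

    from unittest import mock

    m = mock.Mock()
    assert m.called is False          # never called
    assert bool(m.called_once)        # auto-created attribute, always truthy

    try:
        m.assert_called_once()        # the real API fails, as it should
    except AssertionError:
        pass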
@@ -20,19 +20,6 @@ Supported ISO: no

 INCORRECT_OUTPUT = """This should never happen: File not found"""

-XORRISO_LOAD_OUTPUT = """\
-xorriso 1.5.4 : RockRidge filesystem manipulator, libburnia project.
-
-xorriso : NOTE : Loading ISO image tree from LBA 0
-xorriso : UPDATE : 7074 nodes read in 1 seconds
-Drive current: -indev 'dummy.iso'
-Media current: stdio file, overwriteable
-Media status : is written , is appendable
-Boot record : El Torito , MBR isohybrid cyl-align-off GPT
-Media summary: 1 session, 5415454 data blocks, 10.3g data, 4086g free
-Volume id : 'My volume id'
-"""
-
 # Cached to use in tests that mock os.listdir
 orig_listdir = os.listdir

@@ -41,7 +28,6 @@ def fake_listdir(pattern, result=None, exc=None):
     """Create a function that mocks os.listdir. If the path contains pattern,
     result will be returned or exc raised. Otherwise it's normal os.listdir
     """
-
     # The point of this is to avoid issues on Python 2, where apparently
     # isdir() is using listdir(), so the mocking is breaking it.
     def worker(path):
@@ -196,11 +182,6 @@ class TestIsoUtils(unittest.TestCase):
         self.assertEqual(len(mock_unmount.call_args_list), 0)
         self.assertEqual(len(log.mock_calls), 1)

-    @mock.patch("pungi.wrappers.iso.run")
-    def test_get_volume_id_xorriso(self, mock_run):
-        mock_run.return_value = (0, XORRISO_LOAD_OUTPUT)
-        self.assertEqual(iso.get_volume_id("/dummy.iso", True), "My volume id")
-

 class TestCmpGraftPoints(unittest.TestCase):
     def assertSorted(self, *args):
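Note: master's iso.get_volume_id takes a second argument selecting the xorriso backend, and the removed XORRISO_LOAD_OUTPUT fixture shows where the value comes from: the "Volume id : '...'" line printed when xorriso loads an image via -indev. A minimal sketch of that parsing step (standalone, not the wrapper itself):

    import re


    def parse_volume_id(xorriso_output):
        # xorriso prints a line such as: Volume id : 'My volume id'
        for line in xorriso_output.splitlines():
            m = re.match(r"Volume id\s*:\s*'(.*)'", line)
            if m:
                return m.group(1)
        raise RuntimeError("volume id not found in xorriso output")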
@@ -1,459 +0,0 @@
-import os
-
-try:
-    from unittest import mock
-except ImportError:
-    import mock
-
-from pungi.phases.kiwibuild import KiwiBuildPhase, RunKiwiBuildThread
-from tests.helpers import DummyCompose, PungiTestCase
-
-
-MINIMAL_CONF = {
-    "description_scm": "https://example.com/kiwi.git",
-    "description_path": "Fedora.kiwi",
-    "kiwi_profile": "Cloud-Base-Generic",
-}
-
-
-def _merge(a, b):
-    """This would be a | b on 3.9 and later, or {**a, **b} or 3.5 and later."""
-    c = a.copy()
-    c.update(b)
-    return c
-
-
-@mock.patch("pungi.phases.kiwibuild.ThreadPool")
-class TestKiwiBuildPhase(PungiTestCase):
-    def test_minimal(self, ThreadPool):
-        cfg = _merge({"target": "f40"}, MINIMAL_CONF)
-        compose = DummyCompose(self.topdir, {"kiwibuild": {"^Server$": [cfg]}})
-
-        self.assertValidConfig(compose.conf)
-
-        phase = KiwiBuildPhase(compose)
-
-        phase.run()
-        phase.pool.add.assert_called()
-        assert phase.pool.queue_put.call_args_list == [
-            mock.call(
-                (
-                    compose,
-                    compose.variants["Server"],
-                    cfg,
-                    ["amd64", "x86_64"],
-                    {
-                        "release": None,
-                        "target": "f40",
-                        "descscm": MINIMAL_CONF["description_scm"],
-                        "descpath": MINIMAL_CONF["description_path"],
-                        "type": None,
-                        "type_attr": None,
-                        "bundle_name_format": None,
-                    },
-                    [self.topdir + "/compose/Server/$arch/os"],
-                    [],  # failable arches
-                )
-            )
-        ]
-
-    def test_full(self, ThreadPool):
-        cfg = _merge(
-            {
-                "target": "f40",
-                "release": "1234",
-                "arches": ["x86_64"],
-                "repos": ["https://example.com/repo/", "Client"],
-                "failable": ["*"],
-                "subvariant": "Test",
-                "type": "custom",
-                "type_attr": ["foo", "bar"],
-                "bundle_name_format": "fmt",
-            },
-            MINIMAL_CONF,
-        )
-        compose = DummyCompose(self.topdir, {"kiwibuild": {"^Server$": [cfg]}})
-
-        self.assertValidConfig(compose.conf)
-
-        phase = KiwiBuildPhase(compose)
-
-        phase.run()
-        phase.pool.add.assert_called()
-        assert phase.pool.queue_put.call_args_list == [
-            mock.call(
-                (
-                    compose,
-                    compose.variants["Server"],
-                    cfg,
-                    ["x86_64"],
-                    {
-                        "release": "1234",
-                        "target": "f40",
-                        "descscm": MINIMAL_CONF["description_scm"],
-                        "descpath": MINIMAL_CONF["description_path"],
-                        "type": "custom",
-                        "type_attr": ["foo", "bar"],
-                        "bundle_name_format": "fmt",
-                    },
-                    [
-                        "https://example.com/repo/",
-                        self.topdir + "/compose/Client/$arch/os",
-                        self.topdir + "/compose/Server/$arch/os",
-                    ],
-                    ["x86_64"],
-                )
-            )
-        ]
-
-    def test_failable(self, ThreadPool):
-        cfg = _merge({"target": "f40", "failable": ["x86_64"]}, MINIMAL_CONF)
-        compose = DummyCompose(self.topdir, {"kiwibuild": {"^Server$": [cfg]}})
-
-        self.assertValidConfig(compose.conf)
-
-        phase = KiwiBuildPhase(compose)
-
-        phase.run()
-        phase.pool.add.assert_called()
-        assert phase.pool.queue_put.call_args_list == [
-            mock.call(
-                (
-                    compose,
-                    compose.variants["Server"],
-                    cfg,
-                    ["amd64", "x86_64"],
-                    {
-                        "release": None,
-                        "target": "f40",
-                        "descscm": MINIMAL_CONF["description_scm"],
-                        "descpath": MINIMAL_CONF["description_path"],
-                        "type": None,
-                        "type_attr": None,
-                        "bundle_name_format": None,
-                    },
-                    [self.topdir + "/compose/Server/$arch/os"],
-                    ["x86_64"],  # failable arches
-                )
-            )
-        ]
-
-    def test_with_phase_opts(self, ThreadPool):
-        cfg = {"kiwi_profile": "Generic"}
-        compose = DummyCompose(
-            self.topdir,
-            {
-                "kiwibuild": {"^Server$": [cfg]},
-                "kiwibuild_target": "f40",
-                "kiwibuild_release": "1234",
-                "kiwibuild_description_scm": "foo",
-                "kiwibuild_description_path": "bar",
-                "kiwibuild_type": "custom",
-                "kiwibuild_type_attr": ["foo", "bar"],
-                "kiwibuild_bundle_name_format": "fmt",
-            },
-        )
-
-        self.assertValidConfig(compose.conf)
-
-        phase = KiwiBuildPhase(compose)
-
-        phase.run()
-        phase.pool.add.assert_called()
-        assert phase.pool.queue_put.call_args_list == [
-            mock.call(
-                (
-                    compose,
-                    compose.variants["Server"],
-                    cfg,
-                    ["amd64", "x86_64"],
-                    {
-                        "release": "1234",
-                        "target": "f40",
-                        "descscm": "foo",
-                        "descpath": "bar",
-                        "type": "custom",
-                        "type_attr": ["foo", "bar"],
-                        "bundle_name_format": "fmt",
-                    },
-                    [self.topdir + "/compose/Server/$arch/os"],
-                    [],  # failable arches
-                )
-            )
-        ]
-
-    def test_with_global_opts(self, ThreadPool):
-        cfg = MINIMAL_CONF
-        compose = DummyCompose(
-            self.topdir,
-            {
-                "kiwibuild": {"^Server$": [cfg]},
-                "global_target": "f40",
-                "global_release": "1234",
-            },
-        )
-
-        self.assertValidConfig(compose.conf)
-
-        phase = KiwiBuildPhase(compose)
-
-        phase.run()
-        phase.pool.add.assert_called()
-        assert phase.pool.queue_put.call_args_list == [
-            mock.call(
-                (
-                    compose,
-                    compose.variants["Server"],
-                    cfg,
-                    ["amd64", "x86_64"],
-                    {
-                        "release": "1234",
-                        "target": "f40",
-                        "descscm": MINIMAL_CONF["description_scm"],
-                        "descpath": MINIMAL_CONF["description_path"],
-                        "type": None,
-                        "type_attr": None,
-                        "bundle_name_format": None,
-                    },
-                    [self.topdir + "/compose/Server/$arch/os"],
-                    [],  # failable arches
-                )
-            )
-        ]
-
-
-@mock.patch("pungi.phases.kiwibuild.Linker")
-@mock.patch("pungi.util.get_mtime")
-@mock.patch("pungi.util.get_file_size")
-@mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
-class TestKiwiBuildThread(PungiTestCase):
-    def _img_path(self, arch, filename=None):
-        path = self.topdir + "/compose/Server/%s/images" % arch
-        if filename:
-            path += "/" + filename
-        return path
-
-    def test_process(self, KojiWrapper, get_file_size, get_mtime, Linker):
-        img_name = "FCBG.{arch}-Rawhide-1.6.vagrant.libvirt.box"
-        self.repo = self.topdir + "/compose/Server/$arch/os"
-        compose = DummyCompose(
-            self.topdir,
-            {
-                "koji_profile": "koji",
-                "kiwibuild_bundle_format": "%N-%P-40_Beta-%I.%A.%T",
-            },
-        )
-        config = _merge({"subvariant": "Test"}, MINIMAL_CONF)
-        pool = mock.Mock()
-
-        get_image_paths = KojiWrapper.return_value.get_image_paths
-        get_image_paths.return_value = {
-            "x86_64": [
-                "/koji/task/1234/FCBG.x86_64-Rawhide-1.6.packages",
-                "/koji/task/1234/%s" % img_name.format(arch="x86_64"),
-            ],
-            "amd64": [
-                "/koji/task/1234/FCBG.amd64-Rawhide-1.6.packages",
-                "/koji/task/1234/%s" % img_name.format(arch="amd64"),
-            ],
-        }
-
-        KojiWrapper.return_value.koji_proxy.kiwiBuild.return_value = 1234
-        KojiWrapper.return_value.watch_task.return_value = 0
-
-        t = RunKiwiBuildThread(pool)
-        get_file_size.return_value = 1024
-        get_mtime.return_value = 13579
-        t.process(
-            (
-                compose,
-                compose.variants["Server"],
-                config,
-                ["amd64", "x86_64"],
-                {
-                    "release": "1.6",
-                    "target": "f40",
-                    "descscm": MINIMAL_CONF["description_scm"],
-                    "descpath": MINIMAL_CONF["description_path"],
-                    "type": "t",
-                    "type_attr": ["ta"],
-                    "bundle_name_format": "fmt",
-                },
-                [self.repo],
-                [],
-            ),
-            1,
-        )
-
-        assert KojiWrapper.return_value.koji_proxy.kiwiBuild.mock_calls == [
-            mock.call(
-                "f40",
-                ["amd64", "x86_64"],
-                MINIMAL_CONF["description_scm"],
-                MINIMAL_CONF["description_path"],
-                profile=MINIMAL_CONF["kiwi_profile"],
-                release="1.6",
-                repos=[self.repo],
-                type="t",
-                type_attr=["ta"],
|
|
||||||
result_bundle_name_format="fmt",
|
|
||||||
optional_arches=[],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
assert get_image_paths.mock_calls == [mock.call(1234)]
|
|
||||||
assert os.path.isdir(self._img_path("x86_64"))
|
|
||||||
assert os.path.isdir(self._img_path("amd64"))
|
|
||||||
Linker.return_value.link.assert_has_calls(
|
|
||||||
[
|
|
||||||
mock.call(
|
|
||||||
"/koji/task/1234/FCBG.amd64-Rawhide-1.6.vagrant.libvirt.box",
|
|
||||||
self._img_path("amd64", img_name.format(arch="amd64")),
|
|
||||||
link_type="hardlink-or-copy",
|
|
||||||
),
|
|
||||||
mock.call(
|
|
||||||
"/koji/task/1234/FCBG.x86_64-Rawhide-1.6.vagrant.libvirt.box",
|
|
||||||
self._img_path("x86_64", img_name.format(arch="x86_64")),
|
|
||||||
link_type="hardlink-or-copy",
|
|
||||||
),
|
|
||||||
],
|
|
||||||
any_order=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
assert len(compose.im.add.call_args_list) == 2
|
|
||||||
for call in compose.im.add.call_args_list:
|
|
||||||
_, kwargs = call
|
|
||||||
image = kwargs["image"]
|
|
||||||
expected_path = "Server/{0.arch}/images/{1}".format(
|
|
||||||
image, img_name.format(arch=image.arch)
|
|
||||||
)
|
|
||||||
assert kwargs["variant"] == "Server"
|
|
||||||
assert kwargs["arch"] in ("amd64", "x86_64")
|
|
||||||
assert kwargs["arch"] == image.arch
|
|
||||||
assert image.path == expected_path
|
|
||||||
assert "vagrant-libvirt.box" == image.format
|
|
||||||
assert "vagrant-libvirt" == image.type
|
|
||||||
assert "Test" == image.subvariant
|
|
||||||
|
|
||||||
def test_handle_koji_fail(self, KojiWrapper, get_file_size, get_mtime, Linker):
|
|
||||||
self.repo = self.topdir + "/compose/Server/$arch/os"
|
|
||||||
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
|
|
||||||
config = MINIMAL_CONF
|
|
||||||
pool = mock.Mock()
|
|
||||||
|
|
||||||
get_image_paths = KojiWrapper.return_value.get_image_paths
|
|
||||||
|
|
||||||
KojiWrapper.return_value.koji_proxy.kiwiBuild.return_value = 1234
|
|
||||||
KojiWrapper.return_value.watch_task.return_value = 1
|
|
||||||
|
|
||||||
t = RunKiwiBuildThread(pool)
|
|
||||||
try:
|
|
||||||
t.process(
|
|
||||||
(
|
|
||||||
compose,
|
|
||||||
compose.variants["Server"],
|
|
||||||
config,
|
|
||||||
["amd64", "x86_64"],
|
|
||||||
{
|
|
||||||
"release": "1.6",
|
|
||||||
"target": "f40",
|
|
||||||
"descscm": MINIMAL_CONF["description_scm"],
|
|
||||||
"descpath": MINIMAL_CONF["description_path"],
|
|
||||||
"type": None,
|
|
||||||
"type_attr": None,
|
|
||||||
"bundle_name_format": None,
|
|
||||||
},
|
|
||||||
[self.repo],
|
|
||||||
[],
|
|
||||||
),
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
assert False, "Exception should have been raised"
|
|
||||||
except RuntimeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
assert len(KojiWrapper.return_value.koji_proxy.kiwiBuild.mock_calls) == 1
|
|
||||||
assert get_image_paths.mock_calls == []
|
|
||||||
assert Linker.return_value.link.mock_calls == []
|
|
||||||
assert len(compose.im.add.call_args_list) == 0
|
|
||||||
|
|
||||||
def test_handle_fail_on_optional_arch(
|
|
||||||
self, KojiWrapper, get_file_size, get_mtime, Linker
|
|
||||||
):
|
|
||||||
self.repo = self.topdir + "/compose/Server/$arch/os"
|
|
||||||
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
|
|
||||||
config = MINIMAL_CONF
|
|
||||||
pool = mock.Mock()
|
|
||||||
|
|
||||||
get_image_paths = KojiWrapper.return_value.get_image_paths
|
|
||||||
get_image_paths.return_value = {
|
|
||||||
"x86_64": [
|
|
||||||
"/koji/task/1234/FCBG.x86_64-Rawhide-1.6.packages",
|
|
||||||
"/koji/task/1234/FCBG.x86_64-Rawhide-1.6.qcow2",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
KojiWrapper.return_value.koji_proxy.kiwiBuild.return_value = 1234
|
|
||||||
KojiWrapper.return_value.watch_task.return_value = 0
|
|
||||||
|
|
||||||
t = RunKiwiBuildThread(pool)
|
|
||||||
get_file_size.return_value = 1024
|
|
||||||
get_mtime.return_value = 13579
|
|
||||||
t.process(
|
|
||||||
(
|
|
||||||
compose,
|
|
||||||
compose.variants["Server"],
|
|
||||||
config,
|
|
||||||
["amd64", "x86_64"],
|
|
||||||
{
|
|
||||||
"release": "1.6",
|
|
||||||
"target": "f40",
|
|
||||||
"descscm": MINIMAL_CONF["description_scm"],
|
|
||||||
"descpath": MINIMAL_CONF["description_path"],
|
|
||||||
"type": None,
|
|
||||||
"type_attr": None,
|
|
||||||
"bundle_name_format": None,
|
|
||||||
},
|
|
||||||
[self.repo],
|
|
||||||
["amd64"],
|
|
||||||
),
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
assert KojiWrapper.return_value.koji_proxy.kiwiBuild.mock_calls == [
|
|
||||||
mock.call(
|
|
||||||
"f40",
|
|
||||||
["amd64", "x86_64"],
|
|
||||||
MINIMAL_CONF["description_scm"],
|
|
||||||
MINIMAL_CONF["description_path"],
|
|
||||||
profile=MINIMAL_CONF["kiwi_profile"],
|
|
||||||
release="1.6",
|
|
||||||
repos=[self.repo],
|
|
||||||
type=None,
|
|
||||||
type_attr=None,
|
|
||||||
result_bundle_name_format=None,
|
|
||||||
optional_arches=["amd64"],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
assert get_image_paths.mock_calls == [mock.call(1234)]
|
|
||||||
assert os.path.isdir(self._img_path("x86_64"))
|
|
||||||
assert not os.path.isdir(self._img_path("amd64"))
|
|
||||||
assert Linker.return_value.link.mock_calls == [
|
|
||||||
mock.call(
|
|
||||||
"/koji/task/1234/FCBG.x86_64-Rawhide-1.6.qcow2",
|
|
||||||
self._img_path("x86_64", "FCBG.x86_64-Rawhide-1.6.qcow2"),
|
|
||||||
link_type="hardlink-or-copy",
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
assert len(compose.im.add.call_args_list) == 1
|
|
||||||
_, kwargs = compose.im.add.call_args_list[0]
|
|
||||||
image = kwargs["image"]
|
|
||||||
expected_path = "Server/x86_64/images/FCBG.x86_64-Rawhide-1.6.qcow2"
|
|
||||||
assert kwargs["variant"] == "Server"
|
|
||||||
assert kwargs["arch"] == "x86_64"
|
|
||||||
assert kwargs["arch"] == image.arch
|
|
||||||
assert image.path == expected_path
|
|
||||||
assert "qcow2" == image.format
|
|
||||||
assert "qcow2" == image.type
|
|
||||||
assert "Server" == image.subvariant
|
|
@@ -121,6 +121,7 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
         )

     def test_get_image_paths(self):
+
         # The data for this tests is obtained from the actual Koji build. It
         # includes lots of fields that are not used, but for the sake of
         # completeness is fully preserved.
@@ -320,6 +321,7 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
         )

     def test_get_image_paths_failed_subtask(self):
+
         failed = set()

         def failed_callback(arch):
@@ -437,6 +439,96 @@ class LiveMediaTestCase(KojiWrapperBaseTestCase):
         )


+class LiveImageKojiWrapperTest(KojiWrapperBaseTestCase):
+    def test_get_create_image_cmd_minimal(self):
+        cmd = self.koji.get_create_image_cmd(
+            "my_name", "1.0", "f24-candidate", "x86_64", "/path/to/ks", ["/repo/1"]
+        )
+        self.assertEqual(cmd[0:3], ["koji", "--profile=custom-koji", "spin-livecd"])
+        six.assertCountEqual(
+            self, cmd[3:7], ["--noprogress", "--scratch", "--wait", "--repo=/repo/1"]
+        )
+        self.assertEqual(
+            cmd[7:], ["my_name", "1.0", "f24-candidate", "x86_64", "/path/to/ks"]
+        )
+
+    def test_get_create_image_cmd_full(self):
+        cmd = self.koji.get_create_image_cmd(
+            "my_name",
+            "1.0",
+            "f24-candidate",
+            "x86_64",
+            "/path/to/ks",
+            ["/repo/1", "/repo/2"],
+            release="1",
+            wait=False,
+            archive=True,
+            specfile="foo.spec",
+            ksurl="https://git.example.com/",
+        )
+        self.assertEqual(cmd[0:3], ["koji", "--profile=custom-koji", "spin-livecd"])
+        self.assertEqual(
+            cmd[-5:], ["my_name", "1.0", "f24-candidate", "x86_64", "/path/to/ks"]
+        )
+        six.assertCountEqual(
+            self,
+            cmd[3:-5],
+            [
+                "--noprogress",
+                "--nowait",
+                "--repo=/repo/1",
+                "--repo=/repo/2",
+                "--release=1",
+                "--specfile=foo.spec",
+                "--ksurl=https://git.example.com/",
+            ],
+        )
+
+    def test_spin_livecd_with_format(self):
+        with self.assertRaises(ValueError):
+            self.koji.get_create_image_cmd(
+                "my_name",
+                "1.0",
+                "f24-candidate",
+                "x86_64",
+                "/path/to/ks",
+                [],
+                image_format="qcow",
+            )
+
+    def test_spin_appliance_with_format(self):
+        cmd = self.koji.get_create_image_cmd(
+            "my_name",
+            "1.0",
+            "f24-candidate",
+            "x86_64",
+            "/path/to/ks",
+            [],
+            image_type="appliance",
+            image_format="qcow",
+        )
+        self.assertEqual(cmd[0:3], ["koji", "--profile=custom-koji", "spin-appliance"])
+        six.assertCountEqual(
+            self, cmd[3:7], ["--noprogress", "--scratch", "--wait", "--format=qcow"]
+        )
+        self.assertEqual(
+            cmd[7:], ["my_name", "1.0", "f24-candidate", "x86_64", "/path/to/ks"]
+        )
+
+    def test_spin_appliance_with_wrong_format(self):
+        with self.assertRaises(ValueError):
+            self.koji.get_create_image_cmd(
+                "my_name",
+                "1.0",
+                "f24-candidate",
+                "x86_64",
+                "/path/to/ks",
+                [],
+                image_type="appliance",
+                image_format="pretty",
+            )
+
+
 @mock.patch.dict("os.environ", {"FOO": "BAR"}, clear=True)
 class RunrootKojiWrapperTest(KojiWrapperBaseTestCase):
     def test_get_cmd_minimal(self):
File diff suppressed because it is too large
@@ -28,7 +28,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -37,7 +36,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -86,7 +85,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -95,7 +93,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -150,7 +148,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -159,7 +156,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -262,7 +259,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -271,7 +267,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -368,7 +364,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -399,7 +394,6 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
             },
         )

@@ -450,7 +444,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        phase.pool.add.assert_called()
+        self.assertTrue(phase.pool.add.called)

         self.assertEqual(
             phase.pool.queue_put.call_args_list,
@@ -617,9 +611,7 @@ class TestLiveMediaThread(PungiTestCase):
     @mock.patch("pungi.phases.livemedia_phase.get_file_size")
     @mock.patch("pungi.phases.livemedia_phase.KojiWrapper")
     def test_handle_koji_fail(self, KojiWrapper, get_file_size, get_mtime):
-        compose = DummyCompose(
-            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
-        )
+        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
         config = {
             "arches": ["amd64", "x86_64"],
             "ksfile": "file.ks",
@@ -696,7 +688,6 @@ class TestLiveMediaThread(PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["live-media"]})],
             },
         )
@@ -766,7 +757,6 @@ class TestLiveMediaThread(PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["live-media"]})],
             },
         )
@@ -133,7 +133,7 @@ class TestNotifier(unittest.TestCase):
     def test_does_not_run_without_config(self, run, makedirs):
         n = PungiNotifier(None)
         n.send("cmd", foo="bar", baz="quux")
-        run.assert_not_called()
+        self.assertFalse(run.called)

     @mock.patch("pungi.util.translate_path")
     @mock.patch("kobo.shortcuts.run")
@@ -146,4 +146,4 @@ class TestNotifier(unittest.TestCase):
         n.send("cmd", **self.data)

         self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
-        self.compose.log_warning.assert_called()
+        self.assertTrue(self.compose.log_warning.called)
934 tests/test_orchestrator.py Normal file
@@ -0,0 +1,934 @@
+# -*- coding: utf-8 -*-
+
+import itertools
+import json
+from functools import wraps
+import operator
+import os
+import shutil
+import subprocess
+from textwrap import dedent
+
+from unittest import mock
+import six
+from six.moves import configparser
+
+from parameterized import parameterized
+
+from tests.helpers import BaseTestCase, PungiTestCase, touch, FIXTURE_DIR
+from pungi_utils import orchestrator as o
+
+
+class TestConfigSubstitute(PungiTestCase):
+    def setUp(self):
+        super(TestConfigSubstitute, self).setUp()
+        self.fp = os.path.join(self.topdir, "config.conf")
+
+    @parameterized.expand(
+        [
+            ("hello = 'world'", "hello = 'world'"),
+            ("hello = '{{foo}}'", "hello = 'bar'"),
+            ("hello = '{{ foo}}'", "hello = 'bar'"),
+            ("hello = '{{foo }}'", "hello = 'bar'"),
+        ]
+    )
+    def test_substitutions(self, initial, expected):
+        touch(self.fp, initial)
+        o.fill_in_config_file(self.fp, {"foo": "bar"})
+        with open(self.fp) as f:
+            self.assertEqual(expected, f.read())
+
+    def test_missing_key(self):
+        touch(self.fp, "hello = '{{unknown}}'")
+        with self.assertRaises(RuntimeError) as ctx:
+            o.fill_in_config_file(self.fp, {})
+        self.assertEqual(
+            "Unknown placeholder 'unknown' in config.conf", str(ctx.exception)
+        )
+
+
+class TestSafeGetList(BaseTestCase):
+    @parameterized.expand(
+        [
+            ("", []),
+            ("foo", ["foo"]),
+            ("foo,bar", ["foo", "bar"]),
+            ("foo bar", ["foo", "bar"]),
+        ]
+    )
+    def test_success(self, value, expected):
+        cf = configparser.RawConfigParser()
+        cf.add_section("general")
+        cf.set("general", "key", value)
+        self.assertEqual(o._safe_get_list(cf, "general", "key"), expected)
+
+    def test_default(self):
+        cf = configparser.RawConfigParser()
+        cf.add_section("general")
+        self.assertEqual(o._safe_get_list(cf, "general", "missing", "hello"), "hello")
+
+
+class TestComposePart(PungiTestCase):
+    def test_from_minimal_config(self):
+        cf = configparser.RawConfigParser()
+        cf.add_section("test")
+        cf.set("test", "config", "my.conf")
+
+        part = o.ComposePart.from_config(cf, "test", "/tmp/config")
+        deps = "set()" if six.PY3 else "set([])"
+        self.assertEqual(str(part), "test")
+        self.assertEqual(
+            repr(part),
+            "ComposePart('test', '/tmp/config/my.conf', 'READY', "
+            "just_phase=[], skip_phase=[], dependencies=%s)" % deps,
+        )
+        self.assertFalse(part.failable)
+
+    def test_from_full_config(self):
+        cf = configparser.RawConfigParser()
+        cf.add_section("test")
+        cf.set("test", "config", "my.conf")
+        cf.set("test", "depends_on", "base")
+        cf.set("test", "skip_phase", "skip")
+        cf.set("test", "just_phase", "just")
+        cf.set("test", "failable", "yes")
+
+        part = o.ComposePart.from_config(cf, "test", "/tmp/config")
+        deps = "{'base'}" if six.PY3 else "set(['base'])"
+        self.assertEqual(
+            repr(part),
+            "ComposePart('test', '/tmp/config/my.conf', 'WAITING', "
+            "just_phase=['just'], skip_phase=['skip'], dependencies=%s)" % deps,
+        )
+        self.assertTrue(part.failable)
+
+    def test_get_cmd(self):
+        conf = o.Config(
+            "/tgt/", "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
+        )
+        part = o.ComposePart(
+            "test", "/tmp/my.conf", just_phase=["just"], skip_phase=["skip"]
+        )
+        part.path = "/compose"
+
+        self.assertEqual(
+            part.get_cmd(conf),
+            [
+                "pungi-koji",
+                "--config",
+                "/tmp/my.conf",
+                "--compose-dir",
+                "/compose",
+                "--production",
+                "--label",
+                "RC-1.0",
+                "--just-phase",
+                "just",
+                "--skip-phase",
+                "skip",
+                "--old-compose",
+                "/old/parts",
+                "--koji-event",
+                "1234",
+                "--quiet",
+                "--no-latest-link",
+            ],
+        )
+
+    def test_refresh_status(self):
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.path = os.path.join(self.topdir)
+        touch(os.path.join(self.topdir, "STATUS"), "FINISHED")
+        part.refresh_status()
+        self.assertEqual(part.status, "FINISHED")
+
+    def test_refresh_status_missing_file(self):
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.path = os.path.join(self.topdir)
+        part.refresh_status()
+        self.assertEqual(part.status, "DOOMED")
+
+    @parameterized.expand(["FINISHED", "FINISHED_INCOMPLETE"])
+    def test_is_finished(self, status):
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.status = status
+        self.assertTrue(part.is_finished())
+
+    @parameterized.expand(["STARTED", "WAITING"])
+    def test_is_not_finished(self, status):
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.status = status
+        self.assertFalse(part.is_finished())
+
+    @mock.patch("pungi_utils.orchestrator.fill_in_config_file")
+    @mock.patch("pungi_utils.orchestrator.get_compose_dir")
+    @mock.patch("kobo.conf.PyConfigParser")
+    def test_setup_start(self, Conf, gcd, ficf):
+        def pth(*path):
+            return os.path.join(self.topdir, *path)
+
+        conf = o.Config(
+            pth("tgt"), "production", "RC-1.0", "/old", pth("cfg"), None, None
+        )
+        part = o.ComposePart("test", "/tmp/my.conf")
+        parts = {"base": mock.Mock(path="/base", is_finished=lambda: True)}
+        Conf.return_value.opened_files = ["foo.conf"]
+
+        part.setup_start(conf, parts)
+
+        self.assertEqual(part.status, "STARTED")
+        self.assertEqual(part.path, gcd.return_value)
+        self.assertEqual(part.log_file, pth("tgt", "logs", "test.log"))
+        self.assertEqual(
+            ficf.call_args_list,
+            [mock.call("foo.conf", {"part-base": "/base", "configdir": pth("cfg")})],
+        )
+        self.assertEqual(
+            gcd.call_args_list,
+            [
+                mock.call(
+                    pth("tgt/parts"),
+                    Conf.return_value,
+                    compose_type="production",
+                    compose_label="RC-1.0",
+                )
+            ],
+        )
+
+    @parameterized.expand(
+        [
+            # Nothing blocking, no change
+            ([], [], o.Status.READY),
+            # Remove last blocker and switch to READY
+            (["finished"], [], o.Status.READY),
+            # Blocker remaining, stay in WAITING
+            (["finished", "block"], ["block"], o.Status.WAITING),
+        ]
+    )
+    def test_unblock_on(self, deps, blockers, status):
+        part = o.ComposePart("test", "/tmp/my.conf", dependencies=deps)
+        part.unblock_on("finished")
+        six.assertCountEqual(self, part.blocked_on, blockers)
+        self.assertEqual(part.status, status)
+
+
+class TestStartPart(PungiTestCase):
+    @mock.patch("subprocess.Popen")
+    def test_start(self, Popen):
+        part = mock.Mock(log_file=os.path.join(self.topdir, "log"))
+        config = mock.Mock()
+        parts = mock.Mock()
+        cmd = ["pungi-koji", "..."]
+
+        part.get_cmd.return_value = cmd
+
+        proc = o.start_part(config, parts, part)
+
+        self.assertEqual(
+            part.mock_calls,
+            [mock.call.setup_start(config, parts), mock.call.get_cmd(config)],
+        )
+        self.assertEqual(proc, Popen.return_value)
+        self.assertEqual(
+            Popen.call_args_list,
+            [mock.call(cmd, stdout=mock.ANY, stderr=subprocess.STDOUT)],
+        )
+
+
+class TestHandleFinished(BaseTestCase):
+    def setUp(self):
+        self.config = mock.Mock()
+        self.linker = mock.Mock()
+        self.parts = {"a": mock.Mock(), "b": mock.Mock()}
+
+    @mock.patch("pungi_utils.orchestrator.update_metadata")
+    @mock.patch("pungi_utils.orchestrator.copy_part")
+    def test_handle_success(self, cp, um):
+        proc = mock.Mock(returncode=0)
+        o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])
+
+        self.assertEqual(
+            self.parts["a"].mock_calls,
+            [mock.call.refresh_status(), mock.call.unblock_on(self.parts["a"].name)],
+        )
+        self.assertEqual(
+            self.parts["b"].mock_calls, [mock.call.unblock_on(self.parts["a"].name)]
+        )
+        self.assertEqual(
+            cp.call_args_list, [mock.call(self.config, self.linker, self.parts["a"])]
+        )
+        self.assertEqual(um.call_args_list, [mock.call(self.config, self.parts["a"])])
+
+    @mock.patch("pungi_utils.orchestrator.block_on")
+    def test_handle_failure(self, bo):
+        proc = mock.Mock(returncode=1)
+        o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])
+
+        self.assertEqual(self.parts["a"].mock_calls, [mock.call.refresh_status()])
+
+        self.assertEqual(
+            bo.call_args_list, [mock.call(self.parts, self.parts["a"].name)]
+        )
+
+
+class TestBlockOn(BaseTestCase):
+    def test_single(self):
+        parts = {"b": o.ComposePart("b", "b.conf", dependencies=["a"])}
+
+        o.block_on(parts, "a")
+
+        self.assertEqual(parts["b"].status, o.Status.BLOCKED)
+
+    def test_chain(self):
+        parts = {
+            "b": o.ComposePart("b", "b.conf", dependencies=["a"]),
+            "c": o.ComposePart("c", "c.conf", dependencies=["b"]),
+            "d": o.ComposePart("d", "d.conf", dependencies=["c"]),
+        }
+
+        o.block_on(parts, "a")
+
+        self.assertEqual(parts["b"].status, o.Status.BLOCKED)
+        self.assertEqual(parts["c"].status, o.Status.BLOCKED)
+        self.assertEqual(parts["d"].status, o.Status.BLOCKED)
+
+
+class TestUpdateMetadata(PungiTestCase):
+    def assertEqualJSON(self, f1, f2):
+        with open(f1) as f:
+            actual = json.load(f)
+        with open(f2) as f:
+            expected = json.load(f)
+        self.assertEqual(actual, expected)
+
+    def assertEqualMetadata(self, expected):
+        expected_dir = os.path.join(FIXTURE_DIR, expected, "compose/metadata")
+        for f in os.listdir(expected_dir):
+            self.assertEqualJSON(
+                os.path.join(self.tgt, "compose/metadata", f),
+                os.path.join(expected_dir, f),
+            )
+
+    @parameterized.expand(["empty-metadata", "basic-metadata"])
+    def test_merge_into_empty(self, fixture):
+        self.tgt = os.path.join(self.topdir, "target")
+
+        conf = o.Config(self.tgt, "production", None, None, None, None, [])
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20181001.n.0")
+
+        shutil.copytree(os.path.join(FIXTURE_DIR, fixture), self.tgt)
+
+        o.update_metadata(conf, part)
+
+        self.assertEqualMetadata(fixture + "-merged")
+
+
+class TestCopyPart(PungiTestCase):
+    @mock.patch("pungi_utils.orchestrator.hardlink_dir")
+    def test_copy(self, hd):
+        self.tgt = os.path.join(self.topdir, "target")
+        conf = o.Config(self.tgt, "production", None, None, None, None, [])
+        linker = mock.Mock()
+        part = o.ComposePart("test", "/tmp/my.conf")
+        part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
+
+        o.copy_part(conf, linker, part)
+
+        six.assertCountEqual(
+            self,
+            hd.call_args_list,
+            [
+                mock.call(
+                    linker,
+                    os.path.join(part.path, "compose", variant),
+                    os.path.join(self.tgt, "compose", variant),
+                )
+                for variant in ["Client", "Server"]
+            ],
+        )
+
+
+class TestHardlinkDir(PungiTestCase):
+    def test_hardlinking(self):
+        linker = mock.Mock()
+        src = os.path.join(self.topdir, "src")
+        dst = os.path.join(self.topdir, "dst")
+        files = ["file.txt", "nested/deep/another.txt"]
+
+        for f in files:
+            touch(os.path.join(src, f))
+
+        o.hardlink_dir(linker, src, dst)
+
+        six.assertCountEqual(
+            self,
+            linker.queue_put.call_args_list,
+            [mock.call((os.path.join(src, f), os.path.join(dst, f))) for f in files],
+        )
+
+
+class TestCheckFinishedProcesses(BaseTestCase):
+    def test_nothing_finished(self):
+        k1 = mock.Mock(returncode=None)
+        v1 = mock.Mock()
+        processes = {k1: v1}
+
+        six.assertCountEqual(self, o.check_finished_processes(processes), [])
+
+    def test_yields_finished(self):
+        k1 = mock.Mock(returncode=None)
+        v1 = mock.Mock()
+        k2 = mock.Mock(returncode=0)
+        v2 = mock.Mock()
+        processes = {k1: v1, k2: v2}
+
+        six.assertCountEqual(self, o.check_finished_processes(processes), [(k2, v2)])
+
+    def test_yields_failed(self):
+        k1 = mock.Mock(returncode=1)
+        v1 = mock.Mock()
+        processes = {k1: v1}
+
+        six.assertCountEqual(self, o.check_finished_processes(processes), [(k1, v1)])
+
+
+class _Part(object):
+    def __init__(self, name, parent=None, fails=False, status=None):
+        self.name = name
+        self.finished = False
+        self.status = o.Status.WAITING if parent else o.Status.READY
+        if status:
+            self.status = status
+        self.proc = mock.Mock(name="proc_%s" % name, pid=hash(self))
+        self.parent = parent
+        self.fails = fails
+        self.failable = False
+        self.path = "/path/to/%s" % name
+        self.blocked_on = set([parent]) if parent else set()
+
+    def is_finished(self):
+        return self.finished or self.status == "FINISHED"
+
+    def __repr__(self):
+        return "<_Part(%r, parent=%r)>" % (self.name, self.parent)
+
+
+def with_mocks(parts, finish_order, wait_results):
+    """Setup all mocks and create dict with the parts.
+
+    :param finish_order: nested list: first element contains parts that finish
+        in first iteration, etc.
+    :param wait_results: list of names of processes that are returned by wait in each
+        iteration
+    """
+
+    def decorator(func):
+        @wraps(func)
+        def worker(self, lp, update_status, cfp, hf, sp, wait):
+            self.parts = dict((p.name, p) for p in parts)
+            self.linker = lp.return_value.__enter__.return_value
+
+            update_status.side_effect = self.mock_update
+            hf.side_effect = self.mock_finish
+            sp.side_effect = self.mock_start
+
+            finish = [[]]
+            for grp in finish_order:
+                finish.append([(self.parts[p].proc, self.parts[p]) for p in grp])
+
+            cfp.side_effect = finish
+            wait.side_effect = [(self.parts[p].proc.pid, 0) for p in wait_results]
+
+            func(self)
+
+            self.assertEqual(lp.call_args_list, [mock.call("hardlink")])
+
+        return worker
+
+    return decorator
+
+
+@mock.patch("os.wait")
+@mock.patch("pungi_utils.orchestrator.start_part")
+@mock.patch("pungi_utils.orchestrator.handle_finished")
+@mock.patch("pungi_utils.orchestrator.check_finished_processes")
+@mock.patch("pungi_utils.orchestrator.update_status")
+@mock.patch("pungi_utils.orchestrator.linker_pool")
+class TestRunAll(BaseTestCase):
+    def setUp(self):
+        self.maxDiff = None
+        self.conf = mock.Mock(name="global_config")
+        self.calls = []
+
+    def mock_update(self, global_config, parts):
+        self.assertEqual(global_config, self.conf)
+        self.assertEqual(parts, self.parts)
+        self.calls.append("update_status")
+
+    def mock_start(self, global_config, parts, part):
+        self.assertEqual(global_config, self.conf)
+        self.assertEqual(parts, self.parts)
+        self.calls.append(("start_part", part.name))
+        part.status = o.Status.STARTED
+        return part.proc
+
+    @property
+    def sorted_calls(self):
+        """Sort the consecutive calls of the same function based on the argument."""
+
+        def key(val):
+            return val[0] if isinstance(val, tuple) else val
+
+        return list(
+            itertools.chain.from_iterable(
+                sorted(grp, key=operator.itemgetter(1))
+                for _, grp in itertools.groupby(self.calls, key)
+            )
+        )
+
+    def mock_finish(self, global_config, linker, parts, proc, part):
+        self.assertEqual(global_config, self.conf)
+        self.assertEqual(linker, self.linker)
+        self.assertEqual(parts, self.parts)
+        self.calls.append(("handle_finished", part.name))
+        for child in parts.values():
+            if child.parent == part.name:
+                child.status = o.Status.BLOCKED if part.fails else o.Status.READY
+        part.status = "DOOMED" if part.fails else "FINISHED"
+
+    @with_mocks(
+        [_Part("fst"), _Part("snd", parent="fst")], [["fst"], ["snd"]], ["fst", "snd"]
+    )
+    def test_sequential(self):
+        o.run_all(self.conf, self.parts)
+
+        self.assertEqual(
+            self.sorted_calls,
+            [
+                # First iteration starts fst
+                "update_status",
+                ("start_part", "fst"),
+                # Second iteration handles finish of fst and starts snd
+                "update_status",
+                ("handle_finished", "fst"),
+                ("start_part", "snd"),
+                # Third iteration handles finish of snd
+                "update_status",
+                ("handle_finished", "snd"),
+                # Final update of status
+                "update_status",
+            ],
+        )
+
+    @with_mocks([_Part("fst"), _Part("snd")], [["fst", "snd"]], ["fst"])
+    def test_parallel(self):
+        o.run_all(self.conf, self.parts)
+
+        self.assertEqual(
+            self.sorted_calls,
+            [
+                # First iteration starts both fst and snd
+                "update_status",
+                ("start_part", "fst"),
+                ("start_part", "snd"),
+                # Second iteration handles finish of both of them
+                "update_status",
+                ("handle_finished", "fst"),
+                ("handle_finished", "snd"),
+                # Final update of status
+                "update_status",
+            ],
+        )
+
+    @with_mocks(
+        [_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
+        [["1"], ["2", "3"]],
+        ["1", "2"],
+    )
+    def test_waits_for_dep_then_parallel_with_simultaneous_end(self):
+        o.run_all(self.conf, self.parts)
+
+        self.assertEqual(
+            self.sorted_calls,
+            [
+                # First iteration starts first part
+                "update_status",
+                ("start_part", "1"),
+                # Second iteration starts 2 and 3
+                "update_status",
+                ("handle_finished", "1"),
+                ("start_part", "2"),
+                ("start_part", "3"),
+                # Both 2 and 3 end in third iteration
+                "update_status",
+                ("handle_finished", "2"),
+                ("handle_finished", "3"),
+                # Final update of status
+                "update_status",
+            ],
+        )
+
+    @with_mocks(
+        [_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
+        [["1"], ["3"], ["2"]],
+        ["1", "3", "2"],
+    )
+    def test_waits_for_dep_then_parallel_with_different_end_times(self):
+        o.run_all(self.conf, self.parts)
+
+        self.assertEqual(
+            self.sorted_calls,
+            [
+                # First iteration starts first part
+                "update_status",
+                ("start_part", "1"),
+                # Second iteration starts 2 and 3
+                "update_status",
+                ("handle_finished", "1"),
+                ("start_part", "2"),
+                ("start_part", "3"),
+                # Third iteration sees 3 finish
+                "update_status",
+                ("handle_finished", "3"),
+                # Fourth iteration, 2 finishes
+                "update_status",
+                ("handle_finished", "2"),
+                # Final update of status
+                "update_status",
+            ],
+        )
+
+    @with_mocks(
+        [_Part("fst", fails=True), _Part("snd", parent="fst")], [["fst"]], ["fst"]
+    )
+    def test_blocked(self):
+        o.run_all(self.conf, self.parts)
+
+        self.assertEqual(
+            self.sorted_calls,
+            [
+                # First iteration starts first part
+                "update_status",
+                ("start_part", "fst"),
+                # Second iteration handles fail of first part
+                "update_status",
+                ("handle_finished", "fst"),
+                # Final update of status
+                "update_status",
+            ],
+        )
+
+
+@mock.patch("pungi_utils.orchestrator.get_compose_dir")
+class TestGetTargetDir(BaseTestCase):
+    def test_with_absolute_path(self, gcd):
+        config = {"target": "/tgt", "compose_type": "nightly"}
+        cfg = mock.Mock()
+        cfg.get.side_effect = lambda _, k: config[k]
+        ci = mock.Mock()
+        res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
+        self.assertEqual(res, gcd.return_value)
+        self.assertEqual(
+            gcd.call_args_list,
+            [mock.call("/tgt", ci, compose_type="nightly", compose_label=None)],
+        )
+
+    def test_with_relative_path(self, gcd):
+        config = {"target": "tgt", "compose_type": "nightly"}
+        cfg = mock.Mock()
+        cfg.get.side_effect = lambda _, k: config[k]
+        ci = mock.Mock()
+        res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
+        self.assertEqual(res, gcd.return_value)
+        self.assertEqual(
+            gcd.call_args_list,
+            [
+                mock.call(
+                    "/checkout/tgt", ci, compose_type="nightly", compose_label=None
+                )
+            ],
+        )
+
+
+class TestComputeStatus(BaseTestCase):
+    @parameterized.expand(
+        [
+            ([("FINISHED", False)], "FINISHED"),
+            ([("FINISHED", False), ("STARTED", False)], "STARTED"),
+            ([("FINISHED", False), ("STARTED", False), ("WAITING", False)], "STARTED"),
+            ([("FINISHED", False), ("DOOMED", False)], "DOOMED"),
+            (
+                [("FINISHED", False), ("BLOCKED", True), ("DOOMED", True)],
+                "FINISHED_INCOMPLETE",
+            ),
+            ([("FINISHED", False), ("BLOCKED", False), ("DOOMED", True)], "DOOMED"),
+            ([("FINISHED", False), ("DOOMED", True)], "FINISHED_INCOMPLETE"),
+            ([("FINISHED", False), ("STARTED", False), ("DOOMED", False)], "STARTED"),
+        ]
+    )
+    def test_cases(self, statuses, expected):
+        self.assertEqual(o.compute_status(statuses), expected)
+
+
+class TestUpdateStatus(PungiTestCase):
+    def test_updating(self):
+        os.makedirs(os.path.join(self.topdir, "compose/metadata"))
+        conf = o.Config(
+            self.topdir, "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
+        )
+        o.update_status(
+            conf,
+            {"1": _Part("1", status="FINISHED"), "2": _Part("2", status="STARTED")},
+        )
+        self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
+        self.assertFileContent(
+            os.path.join(self.topdir, "compose/metadata/parts.json"),
+            dedent(
+                """\
+                {
+                  "1": {
+                    "path": "/path/to/1",
+                    "status": "FINISHED"
+                  },
+                  "2": {
+                    "path": "/path/to/2",
+                    "status": "STARTED"
+                  }
+                }
+                """
+            ),
+        )
+
+
+@mock.patch("pungi_utils.orchestrator.get_target_dir")
+class TestPrepareComposeDir(PungiTestCase):
+    def setUp(self):
+        super(TestPrepareComposeDir, self).setUp()
+        self.conf = mock.Mock(name="config")
+        self.main_config = "/some/config"
+        self.compose_info = mock.Mock(name="compose_info")
+
+    def test_new_compose(self, gtd):
+        def mock_get_target(conf, compose_info, label, reldir):
+            self.assertEqual(conf, self.conf)
+            self.assertEqual(compose_info, self.compose_info)
+            self.assertEqual(label, args.label)
+            self.assertEqual(reldir, "/some")
+            touch(os.path.join(self.topdir, "work/global/composeinfo-base.json"), "WOO")
+            return self.topdir
+
+        gtd.side_effect = mock_get_target
+        args = mock.Mock(name="args", spec=["label"])
+        retval = o.prepare_compose_dir(
+            self.conf, args, self.main_config, self.compose_info
+        )
+        self.assertEqual(retval, self.topdir)
+        self.assertFileContent(
+            os.path.join(self.topdir, "compose/metadata/composeinfo.json"), "WOO"
+        )
+        self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs")))
+        self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts")))
+        self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global")))
+        self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
+
+    def test_restarting_compose(self, gtd):
+        args = mock.Mock(name="args", spec=["label", "compose_path"])
+        retval = o.prepare_compose_dir(
+            self.conf, args, self.main_config, self.compose_info
+        )
+        self.assertEqual(gtd.call_args_list, [])
+        self.assertEqual(retval, args.compose_path)
+
+
+class TestLoadPartsMetadata(PungiTestCase):
+    def test_loading(self):
+        touch(
+            os.path.join(self.topdir, "compose/metadata/parts.json"), '{"foo": "bar"}'
+        )
+        conf = mock.Mock(target=self.topdir)
+
+        self.assertEqual(o.load_parts_metadata(conf), {"foo": "bar"})
+
+
+@mock.patch("pungi_utils.orchestrator.load_parts_metadata")
+class TestSetupForRestart(BaseTestCase):
+    def setUp(self):
+        self.conf = mock.Mock(name="global_config")
+
+    def test_restart_ok(self, lpm):
+        lpm.return_value = {
+            "p1": {"status": "FINISHED", "path": "/p1"},
+            "p2": {"status": "DOOMED", "path": "/p2"},
+        }
+        parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}
+
+        o.setup_for_restart(self.conf, parts, ["p2"])
+
+        self.assertEqual(parts["p1"].status, "FINISHED")
+        self.assertEqual(parts["p1"].path, "/p1")
+        self.assertEqual(parts["p2"].status, "READY")
+        self.assertEqual(parts["p2"].path, None)
+
+    def test_restart_one_blocked_one_ok(self, lpm):
+        lpm.return_value = {
+            "p1": {"status": "DOOMED", "path": "/p1"},
+            "p2": {"status": "DOOMED", "path": "/p2"},
+            "p3": {"status": "WAITING", "path": None},
+        }
+        parts = {
+            "p1": _Part("p1"),
+            "p2": _Part("p2", parent="p1"),
+            "p3": _Part("p3", parent="p2"),
+        }
+
+        o.setup_for_restart(self.conf, parts, ["p1", "p3"])
+
+        self.assertEqual(parts["p1"].status, "READY")
+        self.assertEqual(parts["p1"].path, None)
+        self.assertEqual(parts["p2"].status, "DOOMED")
+        self.assertEqual(parts["p2"].path, "/p2")
+        self.assertEqual(parts["p3"].status, "WAITING")
+        self.assertEqual(parts["p3"].path, None)
+
+    def test_restart_all_blocked(self, lpm):
+        lpm.return_value = {
+            "p1": {"status": "DOOMED", "path": "/p1"},
+            "p2": {"status": "STARTED", "path": "/p2"},
+        }
+        parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}
+
+        with self.assertRaises(RuntimeError):
+            o.setup_for_restart(self.conf, parts, ["p2"])
+
+        self.assertEqual(parts["p1"].status, "DOOMED")
+        self.assertEqual(parts["p1"].path, "/p1")
+        self.assertEqual(parts["p2"].status, "WAITING")
+        self.assertEqual(parts["p2"].path, None)
+
+
+@mock.patch("atexit.register")
+@mock.patch("kobo.shortcuts.run")
+class TestRunKinit(BaseTestCase):
+    def test_without_config(self, run, register):
+        conf = mock.Mock()
+        conf.getboolean.return_value = False
+
+        o.run_kinit(conf)
+
+        self.assertEqual(run.call_args_list, [])
+        self.assertEqual(register.call_args_list, [])
+
+    @mock.patch.dict("os.environ")
+    def test_with_config(self, run, register):
+        conf = mock.Mock()
+        conf.getboolean.return_value = True
+        conf.get.side_effect = lambda section, option: option
+
+        o.run_kinit(conf)
+
+        self.assertEqual(
+            run.call_args_list,
+            [mock.call(["kinit", "-k", "-t", "kerberos_keytab", "kerberos_principal"])],
+        )
+        self.assertEqual(
+            register.call_args_list, [mock.call(os.remove, os.environ["KRB5CCNAME"])]
+        )
+
+
+@mock.patch.dict("os.environ", {}, clear=True)
+class TestGetScriptEnv(BaseTestCase):
+    def test_without_metadata(self):
+        env = o.get_script_env("/foobar")
+        self.assertEqual(env, {"COMPOSE_PATH": "/foobar"})
+
+    def test_with_metadata(self):
+        compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
+        env = o.get_script_env(compose_dir)
+        self.maxDiff = None
+        self.assertEqual(
+            env,
+            {
+                "COMPOSE_PATH": compose_dir,
+                "COMPOSE_ID": "DP-1.0-20161013.t.4",
+                "COMPOSE_DATE": "20161013",
+                "COMPOSE_TYPE": "test",
+                "COMPOSE_RESPIN": "4",
+                "COMPOSE_LABEL": "",
+                "RELEASE_ID": "DP-1.0",
+                "RELEASE_NAME": "Dummy Product",
+                "RELEASE_SHORT": "DP",
+                "RELEASE_VERSION": "1.0",
+                "RELEASE_TYPE": "ga",
+                "RELEASE_IS_LAYERED": "",
+            },
+        )
+
+
+class TestRunScripts(BaseTestCase):
+    @mock.patch("pungi_utils.orchestrator.get_script_env")
+    @mock.patch("kobo.shortcuts.run")
+    def test_run_scripts(self, run, get_env):
+        commands = """
+        date
+        env
+        """
+
+        o.run_scripts("pref_", "/tmp/compose", commands)
+
+        self.assertEqual(
+            run.call_args_list,
+            [
+                mock.call(
+                    "date",
+                    logfile="/tmp/compose/logs/pref_0.log",
+                    env=get_env.return_value,
+                ),
+                mock.call(
+                    "env",
+                    logfile="/tmp/compose/logs/pref_1.log",
+                    env=get_env.return_value,
+                ),
+            ],
+        )
+
+
+@mock.patch("pungi.notifier.PungiNotifier")
+class TestSendNotification(BaseTestCase):
+    def test_no_command(self, notif):
+        o.send_notification("/foobar", None, None)
+        self.assertEqual(notif.mock_calls, [])
+
+    @mock.patch("pungi.util.load_config")
+    def test_with_command_and_translate(self, load_config, notif):
+        compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
+        load_config.return_value = {
+            "translate_paths": [(os.path.dirname(compose_dir), "http://example.com")],
+        }
+        parts = {"foo": mock.Mock()}
+
+        o.send_notification(compose_dir, "handler", parts)
+
+        self.assertEqual(len(notif.mock_calls), 2)
+        self.assertEqual(notif.mock_calls[0], mock.call(["handler"]))
+        _, args, kwargs = notif.mock_calls[1]
+        self.assertEqual(args, ("status-change",))
+        self.assertEqual(
+            kwargs,
+            {
+                "status": "FINISHED",
+                "workdir": compose_dir,
+                "location": "http://example.com/DP-1.0-20161013.t.4",
+                "compose_id": "DP-1.0-20161013.t.4",
+                "compose_date": "20161013",
+                "compose_type": "test",
+                "compose_respin": "4",
+                "compose_label": None,
+                "release_id": "DP-1.0",
+                "release_name": "Dummy Product",
+                "release_short": "DP",
+                "release_version": "1.0",
+                "release_type": "ga",
+                "release_is_layered": False,
+            },
+        )
+        self.assertEqual(load_config.call_args_list, [mock.call(parts["foo"].config)])
@@ -171,7 +171,6 @@ class OSBSThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -576,7 +575,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
         with self.assertRaises(RuntimeError) as ctx:
             self.t.process((self.compose, self.compose.variants["Server"], cfg), 1)

-        self.assertRegex(str(ctx.exception), r"task failed: 12345. See .+ for details")
+        self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")

     @mock.patch("pungi.phases.osbs.get_file_from_scm")
     @mock.patch("pungi.phases.osbs.kojiwrapper.KojiWrapper")
@@ -186,51 +186,6 @@ class OSBuildPhaseTest(helpers.PungiTestCase):
         )
         self.assertNotEqual(validate(compose.conf), ([], []))

-    @mock.patch("pungi.phases.osbuild.ThreadPool")
-    def test_run_with_customizations(self, ThreadPool):
-        cfg = {
-            "name": "test-image",
-            "distro": "rhel-8",
-            "image_types": ["qcow2"],
-            "customizations": {"installation_device": "/dev/sda"},
-        }
-        compose = helpers.DummyCompose(
-            self.topdir,
-            {
-                "osbuild": {"^Everything$": [cfg]},
-                "osbuild_target": "image-target",
-                "osbuild_version": "1",
-                "osbuild_release": "2",
-            },
-        )
-
-        self.assertValidConfig(compose.conf)
-
-        pool = ThreadPool.return_value
-
-        phase = osbuild.OSBuildPhase(compose)
-        phase.run()
-
-        self.assertEqual(len(pool.add.call_args_list), 1)
-        self.assertEqual(
-            pool.queue_put.call_args_list,
-            [
-                mock.call(
-                    (
-                        compose,
-                        compose.variants["Everything"],
-                        cfg,
-                        sorted(compose.variants["Everything"].arches),
-                        "1",
-                        "2",
-                        "image-target",
-                        [self.topdir + "/compose/Everything/$arch/os"],
-                        [],
-                    ),
-                ),
-            ],
-        )
-
     @mock.patch("pungi.phases.osbuild.ThreadPool")
     def test_rich_repos(self, ThreadPool):
         repo = {"baseurl": "http://example.com/repo", "package_sets": ["build"]}
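The removed test relies on the pool idiom used throughout these phase tests: the ThreadPool class is patched, and the instance the phase constructs internally is reached through ThreadPool.return_value. A self-contained sketch of the idiom (Phase is a hypothetical class, not Pungi's):

    from unittest import mock

    ThreadPool = None  # placeholder attribute for mock.patch to replace

    class Phase(object):
        def run(self):
            pool = ThreadPool()  # global lookup at call time, so the patch applies
            pool.queue_put(("work",))

    with mock.patch(__name__ + ".ThreadPool") as TP:
        Phase().run()
        # TP.return_value is the instance Phase constructed inside run().
        TP.return_value.queue_put.assert_called_once_with(("work",))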
@@ -284,7 +239,6 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -103,7 +103,6 @@ class OstreeThreadTest(helpers.PungiTestCase):
                 "release_name": "Fedora",
                 "release_version": "Rawhide",
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "runroot_tag": "rrt",
                 "image_volid_formats": ["{release_short}-{variant}-{arch}"],
                 "translate_paths": [(self.topdir + "/work", "http://example.com/work")],
@@ -123,7 +123,6 @@ class OSTreeThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
-                "koji_cache": "/tmp",
                 "runroot_tag": "rrt",
                 "translate_paths": [(self.topdir, "http://example.com")],
             },
@@ -315,6 +315,7 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):

     @mock.patch("kobo.shortcuts.run")
     def test_extra_config_with_keep_original_sources(self, run):
+
         configdir = os.path.join(self.topdir, "config")
         self._make_dummy_config_dir(configdir)
         treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
@@ -47,7 +47,7 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
         pkgset.name = name
         pkgset.reuse = None

-        def mock_subset(primary, arch_list, **kwargs):
+        def mock_subset(primary, arch_list, exclusive_noarch):
             self.subsets[primary] = mock.Mock()
             return self.subsets[primary]

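Widening the stub to **kwargs keeps it compatible as subset() grows keyword arguments (inherit_to_noarch shows up in the next hunk). The general pattern:

    from unittest import mock

    subsets = {}

    def mock_subset(primary, arch_list, **kwargs):
        # Accept any keyword so a new option on subset() cannot break the stub.
        subsets[primary] = mock.Mock()
        return subsets[primary]

    pkgset = mock.Mock()
    pkgset.subset.side_effect = mock_subset
    pkgset.subset("x86_64", ["x86_64", "noarch", "src"],
                  exclusive_noarch=True, inherit_to_noarch=True)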
@@ -73,16 +73,10 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
         self.assertEqual(result["amd64"], self.subsets["amd64"])

         self.pkgset.subset.assert_any_call(
-            "x86_64",
-            ["x86_64", "noarch", "src"],
-            exclusive_noarch=True,
-            inherit_to_noarch=True,
+            "x86_64", ["x86_64", "noarch", "src"], exclusive_noarch=True
         )
         self.pkgset.subset.assert_any_call(
-            "amd64",
-            ["amd64", "x86_64", "noarch", "src"],
-            exclusive_noarch=True,
-            inherit_to_noarch=True,
+            "amd64", ["amd64", "x86_64", "noarch", "src"], exclusive_noarch=True
         )

         for arch, pkgset in result.package_sets.items():
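assert_any_call compares positional and keyword arguments exactly, so the assertions must change in lockstep with the call site. A quick illustration:

    from unittest import mock

    m = mock.Mock()
    m("x86_64", ["x86_64", "noarch", "src"], exclusive_noarch=True)

    # Passes: matches an actual call exactly.
    m.assert_any_call("x86_64", ["x86_64", "noarch", "src"], exclusive_noarch=True)

    # Would raise AssertionError: no call ever used inherit_to_noarch.
    # m.assert_any_call("x86_64", ["x86_64", "noarch", "src"],
    #                   exclusive_noarch=True, inherit_to_noarch=True)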
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 import ddt
 from unittest import mock
 import os
@@ -134,14 +133,6 @@ class PkgsetCompareMixin(object):
         self.assertEqual({}, actual)


-class DummySystem(object):
-    def __init__(self):
-        self.methods = ["_listapi", "Dummy", "getRPM", "getRPMChecksums"]
-
-    def listMethods(self):
-        return self.methods
-
-
 @ddt.ddt
 @mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
 @mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
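The removed DummySystem fakes Koji's XML-RPC introspection endpoint; its method list (note getRPMChecksums) suggests the newer branch probes the hub for optional capabilities before using them. A sketch of that style of probe (hub_supports() is a hypothetical helper, not Pungi's API):

    def hub_supports(koji_proxy, method):
        # system.listMethods() is standard XML-RPC introspection.
        try:
            return method in koji_proxy.system.listMethods()
        except Exception:
            return False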
@@ -167,10 +158,9 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             self.tagged_rpms = json.load(f)

         self.path_info = MockPathInfo(self.topdir)
-        self.koji_downloader = helpers.FSKojiDownloader()
         self.koji_wrapper = mock.Mock()
         self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = self.tagged_rpms
-        self.koji_wrapper.koji_proxy.system = DummySystem()
         self.koji_wrapper.koji_module.pathinfo = self.path_info

     def _touch_files(self, filenames):
@@ -201,9 +191,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             ]
         )

-        pkgset = package_set(
-            "pkgset", self.koji_wrapper, [None], downloader=self.koji_downloader
-        )
+        pkgset = package_set("pkgset", self.koji_wrapper, [None])

         result = pkgset.populate("f25")

@@ -241,11 +229,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = package_set(
-            "pkgset",
-            self.koji_wrapper,
-            [None],
-            arches=["x86_64"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, [None], arches=["x86_64"]
         )

         result = pkgset.populate("f25")
@@ -275,11 +259,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset",
-            self.koji_wrapper,
-            ["cafebabe", "deadbeef"],
-            arches=["x86_64"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, ["cafebabe", "deadbeef"], arches=["x86_64"]
         )

         result = pkgset.populate("f25")
@@ -308,11 +288,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset",
-            self.koji_wrapper,
-            ["cafebabe", None],
-            arches=["x86_64"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
         )

         result = pkgset.populate("f25")
@@ -334,11 +310,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):

     def test_can_not_find_signed_package(self):
         pkgset = pkgsets.KojiPackageSet(
-            "pkgset",
-            self.koji_wrapper,
-            ["cafebabe"],
-            arches=["x86_64"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, ["cafebabe"], arches=["x86_64"]
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -350,7 +322,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         figure = re.compile(
-            r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$",  # noqa: E501
+            r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",  # noqa: E501
             re.DOTALL,
         )
         self.assertRegex(str(ctx.exception), figure)
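The re.DOTALL flag is what lets .+ run across the newlines of the multi-line error report, so a single anchored pattern can check both package names; the master side additionally matches the arch-qualified names, apparently because the newer code reports full file names. For example:

    import re

    msg = ("RPM(s) not found for sigs: cafebabe. Check log for details.\n"
           "bash-4.3.42-4.fc24.x86_64\n"
           "bash-debuginfo-4.3.42-4.fc24.x86_64")
    pat = re.compile(r"^RPM\(s\) not found for sigs: .+Check log for details"
                     r".+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24",
                     re.DOTALL)
    assert pat.search(msg)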
@@ -377,7 +349,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             arches=["x86_64"],
             signed_packages_retries=2,
             signed_packages_wait=5,
-            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -415,7 +386,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             ["cafebabe"],
             arches=["x86_64"],
             allow_invalid_sigkeys=True,
-            downloader=self.koji_downloader,
         )

         pkgset.populate("f25")
@@ -429,18 +399,14 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             pkgset.raise_invalid_sigkeys_exception(pkgset.invalid_sigkey_rpms)

         figure = re.compile(
-            r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24\.x86_64.+bash-debuginfo-4\.3\.42-4\.fc24\.x86_64$",  # noqa: E501
+            r"^RPM\(s\) not found for sigs: .+Check log for details.+bash-4\.3\.42-4\.fc24.+bash-debuginfo-4\.3\.42-4\.fc24$",  # noqa: E501
             re.DOTALL,
         )
         self.assertRegex(str(ctx.exception), figure)

     def test_can_not_find_any_package(self):
         pkgset = pkgsets.KojiPackageSet(
-            "pkgset",
-            self.koji_wrapper,
-            ["cafebabe", None],
-            arches=["x86_64"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -465,7 +431,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             arches=["x86_64"],
             signed_packages_retries=2,
             signed_packages_wait=5,
-            downloader=self.koji_downloader,
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -506,7 +471,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             packages=["bash"],
             populate_only_packages=True,
-            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -615,7 +579,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             arches=["x86_64"],
             cache_region=cache_region,
-            downloader=self.koji_downloader,
         )

         # Try calling the populate twice, but expect just single listTaggedRPMs
@@ -659,7 +622,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             arches=["x86_64"],
             cache_region=cache_region,
-            downloader=self.koji_downloader,
         )

         # Try calling the populate twice with different event id. It must not
@@ -717,11 +679,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         ]

         pkgset = package_set(
-            "pkgset",
-            self.koji_wrapper,
-            [None],
-            extra_builds=["pungi-4.1.3-3.fc25"],
-            downloader=self.koji_downloader,
+            "pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"]
         )

         result = pkgset.populate("f25")
@@ -933,8 +891,6 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
                 "include_packages": None,
                 "rpms_by_arch": mock.Mock(),
                 "srpms_by_name": mock.Mock(),
-                "inherit_to_noarch": True,
-                "exclusive_noarch": True,
             }
         )
         self.pkgset.old_file_cache = mock.Mock()
@@ -1144,8 +1100,6 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
                 "include_packages": None,
                 "rpms_by_arch": mock.Mock(),
                 "srpms_by_name": mock.Mock(),
-                "exclusive_noarch": True,
-                "inherit_to_noarch": True,
             }
         )
         self.pkgset.old_file_cache = mock.Mock()
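Both reuse hunks follow the same rule: an old package set can be reused only if every compared option is unchanged, so adding "inherit_to_noarch" and "exclusive_noarch" to the compared dict means flipping either setting forces a fresh package set. Schematically (can_reuse() is illustrative, not the actual method):

    def can_reuse(old_opts, new_opts):
        # Any differing or missing option invalidates the cached package set.
        return all(old_opts.get(k) == new_opts.get(k) for k in new_opts)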
@@ -1227,28 +1181,6 @@ class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):
             first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
         )

-    def test_merge_doesnt_exclude_noarch_exclude_arch_when_configured(self):
-        first = pkgsets.PackageSetBase("first", [None])
-        second = pkgsets.PackageSetBase("second", [None])
-
-        pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
-        first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
-
-        pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
-        pkg.excludearch = ["i686"]
-        second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
-
-        first.merge(second, "i386", ["i686", "noarch"], inherit_to_noarch=False)
-
-        print(first.rpms_by_arch)
-        self.assertPkgsetEqual(
-            first.rpms_by_arch,
-            {
-                "i686": ["rpms/bash@4.3.42@4.fc24@i686"],
-                "noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
-            },
-        )
-
     def test_merge_excludes_noarch_exclusive_arch(self):
         first = pkgsets.PackageSetBase("first", [None])
         second = pkgsets.PackageSetBase("second", [None])
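The removed test pinned down the inherit_to_noarch=False behaviour: a noarch package whose ExcludeArch lists the merged arch is still kept, because exclude tags are only inherited to noarch when the option is enabled. A rough sketch of that rule (is_excluded() is illustrative, not Pungi's implementation):

    def is_excluded(pkg, arches, inherit_to_noarch=True):
        # ExcludeArch on a noarch package applies only when inheritance is on.
        if pkg.arch == "noarch" and not inherit_to_noarch:
            return False
        return bool(set(getattr(pkg, "excludearch", [])) & set(arches))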
Some files were not shown because too many files have changed in this diff.