Compare commits
121 Commits: master ... pungi-4.6.
Commits (SHA1):

0cb18bfa24 f72adc03b1 8ced384540 4a5106375e 627b72597e bc0334cc09
5c9e79f535 29c166ab99 51d58322f2 0ef1c102b8 b6cfd8c5d4 9f8377abab
949add0dac 8fb694f000 8a3b64e5b8 c80ebb029b e2ceb48450 242d7d951f
04d4e1d585 e90ffdfd93 0d310fb3b3 5172d7e5eb 0d306d4964 1494f203ce
93b4b4ae0f b8e26bfb64 e738f65458 209d308e1c be410d9fd5 1f819ee08a
b9d94970b5 b032425f30 bcd937d16d f0137fd9b9 3d630d3e8e 8412890640
42befba0b1 52c2cea0ef d2e9ccefde 2c61416423 986099f8b5 947ddf0a1a
e46393263e ff5a7e6377 dd7ecbd5fd ba613563f6 c8d16e6978 860360629d
f25489d060 432b0bce04 7e779aa90f f4bf0739aa 119b212241 081c31238b
95497d2676 aa7fcc1c20 b32c8f3e5e 935da7c246 b513c8cd00 8cf1d98312
2534ddee99 f30a8b4d15 3ffb991bac dbc0e531b2 4c7611291d 0d3cd150bd
aa0aae3d3e 77f8fa25ad e6d9f31ef4 bf3e9bc53a 631bb01d8f b6296bdfcd
1c4275bbfa fe2dad3b3c 7128021654 bd64894a03 14e025a5a1 ada8f4e346
e4c525ecbf 091d228219 bcc440491e fa50eedfad b7adbf8a91 82ae9e86d5
2ad341a01c e888e76992 6e72de7efe c8263fcd39 82ca4f4e65 b8b6b46ce7
e9d836c115 d3f0701e01 8f6f0f463f 467c7a7f6a e1d7544c2b a71c8e23be
ab508c1511 f960b4d155 602b698080 b30f7e0d83 0c3b6e22f9 3175ede38a
8920eef339 58036eab84 a4476f2570 8c06b7a3f1 64ae81b416 826169af7c
d97b8bdd33 8768b23cbe 51628a974d 88327d5784 6e0a9385f2 8be0d84f8a
8f0906be53 e3072c3d5f ef6d40dce4 df6664098d 147df93f75 dd8c1002d4
12e3a46390
1860.patch (deleted file, 25 lines)
@@ -1,25 +0,0 @@
-From 3bd28f97b2991cf4e3b4ce9ce34c80cba2bf21ab Mon Sep 17 00:00:00 2001
-From: Lubomír Sedlář <lsedlar@redhat.com>
-Date: Aug 08 2025 11:54:39 +0000
-Subject: repoclosure: Don't fail if cache doesn't exist
-
-
-Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
-
----
-
-diff --git a/pungi/phases/repoclosure.py b/pungi/phases/repoclosure.py
-index 1d3fad0..398802f 100644
---- a/pungi/phases/repoclosure.py
-+++ b/pungi/phases/repoclosure.py
-@@ -136,6 +136,9 @@ def _delete_repoclosure_cache_dirs(compose):
-             pass
- 
-     for top_cache_dir in cache_dirs:
-+        if not os.path.isdir(top_cache_dir):
-+            # Skip if the cache doesn't exist.
-+            continue
-         for name in os.listdir(top_cache_dir):
-             if name.startswith(compose.compose_id):
-                 cache_path = os.path.join(top_cache_dir, name)
-
doc/_static/phases.svg (vendored, 488 lines changed; size 22 KiB before, 23 KiB after)

[Inline SVG diff omitted: the phases diagram is regenerated. The recoverable
changes: the canvas shrinks from 698x367 to 610x327, the Inkscape version
annotation changes from 1.4 to 1.3.2, the phase boxes (Pkgset, Buildinstall,
Gather, ExtraFiles, Createrepo, OSTree, OSTreeInstaller, ImageChecksum,
Createiso, LiveMedia, OSBS, ExtraIsos, Repoclosure, KiwiBuild, ImageContainer)
are repositioned, a LiveImages box is added, and the OSTreeContainer box is
removed.]
@@ -51,9 +51,9 @@ copyright = "2016, Red Hat, Inc."
 # built documents.
 #
 # The short X.Y version.
-version = "4.10"
+version = "4.6"
 # The full version, including alpha/beta/rc tags.
-release = "4.10.1"
+release = "4.6.3"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -292,8 +292,8 @@ There are a couple of common format specifiers available for both the options:
     format string. The pattern should not overlap, otherwise it is undefined
     which one will be used.
 
-    This format will be used for some phases generating images. Currently that
-    means ``createiso``, ``buildinstall`` and ``ostree_installer``.
+    This format will be used for all phases generating images. Currently that
+    means ``createiso``, ``live_images`` and ``buildinstall``.
 
     Available extra keys are:
     * ``disc_num``
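(Illustration, not part of this changeset: a minimal ``image_name_format``
using these keys; the ``str.format``-style placeholder names are assumed from
Pungi's documented patterns.) ::

    image_name_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"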
@@ -323,6 +323,7 @@ There are a couple of common format specifiers available for both the options:
 
     Available keys are:
     * ``boot`` -- for ``boot.iso`` images created in *buildinstall* phase
+    * ``live`` -- for images created by *live_images* phase
     * ``dvd`` -- for images created by *createiso* phase
     * ``ostree`` -- for ostree installer images
 
@@ -350,10 +351,48 @@ Example
 
     disc_types = {
         'boot': 'netinst',
+        'live': 'Live',
         'dvd': 'DVD',
     }
 
 
+Signing
+=======
+
+If you want to sign deliverables generated during a pungi run, such as
+RPM-wrapped images, you must provide a few configuration options:
+
+**signing_command** [optional]
+    (*str*) -- Command that will be run with a koji build as a single
+    argument. This command must not require any user interaction.
+    If you need to pass a password for a signing key to the command,
+    do this via command line option of the command and use string
+    formatting syntax ``%(signing_key_password)s``.
+    (See **signing_key_password_file**).
+
+**signing_key_id** [optional]
+    (*str*) -- ID of the key that will be used for the signing.
+    This ID will be used when crafting koji paths to signed files
+    (``kojipkgs.fedoraproject.org/packages/NAME/VER/REL/data/signed/KEYID/..``).
+
+**signing_key_password_file** [optional]
+    (*str*) -- Path to a file with the password that will be formatted
+    into the **signing_command** string via ``%(signing_key_password)s``
+    string format syntax (if used).
+    Because the pungi config is usually stored in git and is part of compose
+    logs, we don't want the password to be included directly in the config.
+    Note: If ``-`` is used instead of a filename, then you will be asked
+    for the password interactively right after pungi starts.
+
+Example
+-------
+::
+
+    signing_command = '~/git/releng/scripts/sigulsign_unsigned.py -vv --password=%(signing_key_password)s fedora-24'
+    signing_key_id = '81b46521'
+    signing_key_password_file = '~/password_for_fedora-24_key'
+
+
 .. _git-urls:
 
 Git URLs
@@ -629,10 +668,6 @@ Options
 * ``squashfs_only`` -- *bool* (default ``False``) pass the --squashfs_only to Lorax.
 * ``configuration_file`` -- (:ref:`scm_dict <scm_support>`) (default empty) pass the
   specified configuration file to Lorax using the -c option.
-* ``rootfs_type`` -- *string* (default empty) pass the ``--rootfs-type``
-  option to Lorax with the provided value. If not specified, no type is
-  specified to Lorax, which will choose whatever default it is configured
-  with.
 
 **lorax_extra_sources**
     (*list*) -- a variant/arch mapping with urls for extra source repositories
     added to Lorax command line. Either one repo or a list can be specified.
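(Illustration, not part of this changeset: how the removed ``rootfs_type``
sub-option would be set. The enclosing ``lorax_options`` name and its
variant/arch mapping shape are assumed from the conventions used by the other
options in this document.) ::

    lorax_options = [
        ("^Server$", {
            "*": {"rootfs_type": "squashfs"},
        }),
    ]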
@@ -1007,8 +1042,6 @@ Example
     to track decisions.
 
 
-.. _koji-settings:
-
 Koji Settings
 =============
 
@@ -1023,11 +1056,6 @@ Options
     to set up your Koji client profile. In the examples, the profile name is
     "koji", which points to Fedora's koji.fedoraproject.org.
 
-**koji_cache**
-    (*str*) -- koji cache directory. Setting this causes Pungi to download
-    packages over HTTP into a cache, which is used in lieu of the Koji profile's
-    ``topdir`` setting. See :doc:`koji` for details on this behavior.
-
 **global_runroot_method**
     (*str*) -- global runroot method to use. If ``runroot_method`` is set
     per Pungi phase using a dictionary, this option defines the default
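(Illustration, not part of this changeset: the URL-to-path mapping that the
removed ``koji_cache`` option implies, mirroring the example in the koji
document further below; ``cache_path_for`` is a hypothetical helper.) ::

    import os.path
    from urllib.parse import urlparse

    def cache_path_for(koji_cache, package_url):
        # Re-root the /packages/... part of the Koji URL under the local
        # cache directory, keeping the directory layout identical.
        relative = urlparse(package_url).path.lstrip("/")
        return os.path.join(koji_cache, relative)

    # cache_path_for("/mnt/compose/cache",
    #                "https://kojipkgs.fedoraproject.org/packages/foo/...")
    # -> "/mnt/compose/cache/packages/foo/..."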
@@ -1291,7 +1319,7 @@ Options
     (*int|str*) -- how much free space should be left on each disk. The format
     is the same as for ``iso_size`` option.
 
-**iso_hfs_ppc64le_compatible** = False
+**iso_hfs_ppc64le_compatible** = True
     (*bool*) -- when set to False, the Apple/HFS compatibility is turned off
     for ppc64le ISOs. This option only makes sense for bootable products, and
     affects images produced in *createiso* and *extra_isos* phases.
@@ -1340,8 +1368,8 @@ All non-``RC`` milestones from label get appended to the version. For release
 either label is used or date, type and respin.
 
 
-Common options for Live Media and Image Build
-=============================================
+Common options for Live Images, Live Media and Image Build
+==========================================================
 
 All images can have ``ksurl``, ``version``, ``release`` and ``target``
 specified. Since this can create a lot of duplication, there are global options
@@ -1357,12 +1385,14 @@ The kickstart URL is configured by these options.
 * ``global_ksurl`` -- global fallback setting
 * ``live_media_ksurl``
 * ``image_build_ksurl``
+* ``live_images_ksurl``
 
 Target is specified by these settings.
 
 * ``global_target`` -- global fallback setting
 * ``live_media_target``
 * ``image_build_target``
+* ``live_images_target``
 * ``osbuild_target``
 
 Version is specified by these options. If no version is set, a default value
@@ -1371,6 +1401,7 @@ will be provided according to :ref:`automatic versioning <auto-version>`.
 * ``global_version`` -- global fallback setting
 * ``live_media_version``
 * ``image_build_version``
+* ``live_images_version``
 * ``osbuild_version``
 
 Release is specified by these options. If set to a magic value to
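(Illustration, not part of this changeset: how the per-phase options fall back
to the global setting; ``resolve`` is a hypothetical helper, not Pungi API.) ::

    def resolve(conf, phase, key):
        # The phase-specific option wins; otherwise use the global fallback.
        return conf.get("%s_%s" % (phase, key), conf.get("global_%s" % key))

    # resolve(conf, "live_media", "version") checks live_media_version first,
    # then global_version.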
@@ -1380,14 +1411,44 @@ to :ref:`automatic versioning <auto-version>`.
 * ``global_release`` -- global fallback setting
 * ``live_media_release``
 * ``image_build_release``
+* ``live_images_release``
 * ``osbuild_release``
 
-Each configuration block can also optionally specify a ``failable`` key. It
+Each configuration block can also optionally specify a ``failable`` key. For
+live images it should have a boolean value. For live media and image build it
 should be a list of strings containing architectures that are optional. If any
 deliverable fails on an optional architecture, it will not abort the whole
 compose. If the list contains only ``"*"``, all arches will be substituted.
 
 
+Live Images Settings
+====================
+
+**live_images**
+    (*list*) -- Configuration for the particular image. The elements of the
+    list should be tuples ``(variant_uid_regex, {arch|*: config})``. The config
+    should be a dict with these keys:
+
+    * ``kickstart`` (*str*)
+    * ``ksurl`` (*str*) [optional] -- where to get the kickstart from
+    * ``name`` (*str*)
+    * ``version`` (*str*)
+    * ``target`` (*str*)
+    * ``repo`` (*str|[str]*) -- repos specified by URL or variant UID
+    * ``specfile`` (*str*) -- for images wrapped in RPM
+    * ``scratch`` (*bool*) -- only RPM-wrapped images can use scratch builds,
+      but by default this is turned off
+    * ``type`` (*str*) -- what kind of task to start in Koji. Defaults to
+      ``live`` meaning ``koji spin-livecd`` will be used. Alternative option
+      is ``appliance`` corresponding to ``koji spin-appliance``.
+    * ``sign`` (*bool*) -- only RPM-wrapped images can be signed
+
+**live_images_no_rename**
+    (*bool*) -- When set to ``True``, filenames generated by Koji will be used.
+    When ``False``, filenames will be generated based on ``image_name_format``
+    configuration option.
+
+
 Live Media Settings
 ===================
 
@@ -1577,23 +1638,6 @@ KiwiBuild Settings
       provided, all variant architectures will be built.
     * ``failable`` -- (*[str]*) List of architectures for which this
       deliverable is not release blocking.
-    * ``type`` -- (*str*) override default type from the bundle with this value.
-    * ``type_attr`` -- (*[str]*) override default attributes for the build type
-      from description.
-    * ``bundle_name_format`` -- (*str*) override default bundle format name.
-    * ``version`` -- (*str*) override version. Follows the same rules as
-      described in :ref:`automatic versioning <auto-version>`.
-    * ``repo_releasever`` -- (*str*) Override default releasever of the output
-      image.
-    * ``manifest_type`` -- the image type that is put into the manifest by
-      pungi. If not supplied, an autodetected value will be provided. It may or
-      may not make sense.
-    * ``use_buildroot_repo = False`` -- (*bool*) whether the task should
-      automatically enable buildroot repository corresponding to the used
-      target.
-
-    The options can be set either for the specific image, or at the phase level
-    (see below). Version also falls back to ``global_version``.
 
 **kiwibuild_description_scm**
     (*str*) -- URL for scm containing the description files
@@ -1601,24 +1645,6 @@ KiwiBuild Settings
 **kiwibuild_description_path**
     (*str*) -- path to a description file within the description scm
 
-**kiwibuild_type**
-    (*str*) -- override default type from the bundle with this value.
-
-**kiwibuild_type_attr**
-    (*[str]*) -- override default attributes for the build type from description.
-
-**kiwibuild_bundle_name_format**
-    (*str*) -- override default bundle format name.
-
-**kiwibuild_version**
-    (*str*) -- override version for all kiwibuild tasks.
-
-**kiwibuild_repo_releasever**
-    (*str*) -- override releasever for all kiwibuild tasks.
-
-**kiwibuild_use_buildroot_repo**
-    (*bool*) -- set enablement of a buildroot repo for all kiwibuild tasks.
-
 
 OSBuild Composer for building images
 ====================================
@@ -1726,102 +1752,6 @@ OSBuild Composer for building images
         arch.
 
 
-Image Builder Settings
-======================
-
-**imagebuilder**
-    (*dict*) -- configuration for building images with the ``koji-image-builder``
-    Koji plugin. Pungi will trigger a Koji task which will build the image with
-    the given configuration using the ``image-builder`` executable in the build
-    root.
-
-    Format: ``{variant_uid_regex: [{...}]}``.
-
-    Required keys in the configuration dict:
-
-    * ``name`` -- name of the Koji package
-    * ``types`` -- a list with a single image type string representing
-      the image type to build (e.g. ``qcow2``). Only a single image type
-      can be provided as an argument.
-
-    Optional keys:
-
-    * ``target`` -- which build target to use for the task. Either this option,
-      the global ``imagebuilder_target``, or ``global_target`` is required.
-    * ``version`` -- version for the final build (as a string). This option is
-      required if the global ``imagebuilder_version`` or its ``global_version``
-      equivalent are not specified.
-    * ``release`` -- release part of the final NVR. If neither this option nor
-      the global ``imagebuilder_release`` nor its ``global_release`` equivalent
-      are set, Koji will automatically generate a value.
-    * ``repos`` -- a list of repositories from which to consume packages for
-      building the image. By default only the variant repository is used.
-      The list items use the following formats:
-
-      * String with just the repository URL.
-      * Variant ID in the current compose.
-
-    * ``arches`` -- list of architectures for which to build the image. By
-      default, the variant arches are used. This option can only restrict it,
-      not add a new one.
-
-    * ``seed`` -- An integer that can be used to make builds more reproducible.
-      When ``image-builder`` builds images various bits and bobs are generated
-      with a PRNG (partition uuids, etc). Pinning the seed with this argument
-      or ``imagebuilder_seed`` to do so globally will make builds use the same
-      random values each time. Note that using ``seed`` requires the Koji side
-      to have at least ``koji-image-builder >= 7`` deployed.
-
-    * ``scratch`` -- A boolean to instruct ``koji-image-builder`` to perform scratch
-      builds. This might have implications on garbage collection within the ``koji``
-      instance you're targeting. Can also be set globally through
-      ``imagebuilder_scratch``.
-
-    * ``ostree`` -- A dictionary describing where to get ``ostree`` content when
-      applicable. The dictionary contains the following keys:
-
-      * ``url`` -- URL of the repository that's used to fetch the parent
-        commit from.
-      * ``ref`` -- Name of an ostree branch or tag
-
-    * ``blueprint`` -- A dictionary with a blueprint to use for the
-      image build. Blueprints can customize images beyond their initial definition.
-      For the list of supported customizations, see external
-      `Documentation <https://osbuild.org/docs/user-guide/blueprint-reference/>`__
-
-    .. note::
-        There is initial support for having this task as failable without aborting
-        the whole compose. This can be enabled by setting ``"failable": ["*"]`` in
-        the config for the image. It is an on/off switch without granularity per
-        arch.
-
-
-Example Config
---------------
-::
-
-    imagebuilder_target = 'f43-image-builder'
-    imagebuilder_seed = 43
-    imagebuilder_scratch = True
-
-    imagebuilder = {
-        "^IoT$": [
-            {
-                "name": "%s-raw" % release_name,
-                "types": ["iot-raw-xz"],
-                "arches": ["x86_64"], #, "aarch64"],
-                "repos": ["https://kojipkgs.fedoraproject.org/compose/rawhide/latest-Fedora-Rawhide/compose/Everything/$arch/os/"],
-                "ostree": {
-                    "url": "https://kojipkgs.fedoraproject.org/compose/iot/repo/",
-                    "ref": "fedora/rawhide/$arch/iot",
-                },
-                "subvariant": "IoT",
-                "failable": ["*"],
-            },
-        ]
-    }
 
 
 Image container
 ===============
 
@@ -1999,15 +1929,6 @@ will thus create a new OCI archive image *from scratch*.
       reference will not be created.
     * ``runroot_packages`` -- (*list*) A list of additional package names to be
       installed in the runroot environment in Koji.
-    * ``subvariant`` -- (*str*) The subvariant value to be used in the metadata
-      for the image. Also used in the image's filename, unless overridden by
-      ``name``. Defaults to being the same as the variant. If building more
-      than one ostree container in a variant, each must have a unique
-      subvariant.
-    * ``name`` -- (*str*) The base for the image's filename. To produce the
-      complete filename, the image's architecture, the version string, and the
-      format suffix are appended to this. Defaults to the value of
-      ``release_short`` and the subvariant, joined by a dash.
 
 Example config
 --------------
@@ -2385,9 +2306,9 @@ Miscellaneous Settings
     format string accepting ``%(variant_name)s`` and ``%(arch)s`` placeholders.
 
 **symlink_isos_to**
-    (*str*) -- If set, the ISO files from ``buildinstall`` and ``createiso``
-    phases will be put into this destination, and a symlink pointing to this
-    location will be created in actual compose directory.
+    (*str*) -- If set, the ISO files from ``buildinstall``, ``createiso`` and
+    ``live_images`` phases will be put into this destination, and a symlink
+    pointing to this location will be created in actual compose directory.
 
 **dogpile_cache_backend**
     (*str*) -- If set, Pungi will use the configured Dogpile cache backend to
@@ -294,6 +294,30 @@ This is a shortened configuration for Fedora Rawhide compose as of 2019-10-14.
         })
     ]
 
+    live_target = 'f32'
+    live_images_no_rename = True
+    live_images = [
+        ('^Workstation$', {
+            'armhfp': {
+                'kickstart': 'fedora-arm-workstation.ks',
+                'name': 'Fedora-Workstation-armhfp',
+                # Again workstation takes packages from Everything.
+                'repo': 'Everything',
+                'type': 'appliance',
+                'failable': True,
+            }
+        }),
+        ('^Server$', {
+            # But Server has its own repo.
+            'armhfp': {
+                'kickstart': 'fedora-arm-server.ks',
+                'name': 'Fedora-Server-armhfp',
+                'type': 'appliance',
+                'failable': True,
+            }
+        }),
+    ]
+
     ostree = {
         "^Silverblue$": {
             "version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
@@ -28,8 +28,7 @@ It is possible now to run a compose from a Koji tag without direct access to
 Koji storage.
 
 Pungi can download the packages over HTTP protocol, store them in a local
-cache, and consume them from there. To enable this behavior, set the
-:ref:`koji_cache <koji-settings>` option in the compose configuration.
+cache, and consume them from there.
 
 The local cache has similar structure to what is on the Koji volume.
 
@@ -44,8 +43,7 @@ If it doesn't exist, it will be downloaded from Koji (by replacing the
 Koji URL   https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
 Local path /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
 
-The packages can be hard- or softlinked from this cache directory
-(``/mnt/compose/cache`` in the example).
+The packages can be hardlinked from this cache directory.
 
 Cleanup

@@ -124,12 +124,6 @@ OSBuild
 Similarly to image build, this phase creates a koji `osbuild` task. In the
 background it uses OSBuild Composer to create images.
 
-ImageBuilder
-------------
-
-Similarly to image build, this phase creates a koji `imageBuilderBuild`
-task. In the background it uses `image-builder` to create images.
-
 OSBS
 ----

@@ -18,7 +18,6 @@ which can contain following keys.
 * ``cvs`` -- copies files from a CVS repository
 * ``rpm`` -- copies files from a package in the compose
 * ``koji`` -- downloads archives from a given build in Koji build system
-* ``container-image`` -- downloads an artifact from a container registry
 
 * ``repo``

@@ -86,24 +85,6 @@ For ``extra_files`` phase either key is valid and should be chosen depending on
 what the actual use case is.
 
 
-``container-image`` example
----------------------------
-
-Example of pulling a container image into the compose. ::
-
-    {
-        # Pull a container into an oci-archive tar file
-        "scm": "container-image",
-        # This is the pull spec including tag. It is passed directly to skopeo
-        # copy with no modification.
-        "repo": "docker://registry.access.redhat.com/ubi9/ubi-minimal:latest",
-        # Key `file` is required, but the value is ignored.
-        "file": "",
-        # Optional subdirectory under Server/<arch>/os
-        "target": "containers",
-    }
-
-
 Caveats
 -------

1108  pungi.spec
File diff suppressed because it is too large.
@@ -16,8 +16,7 @@ def get_full_version():
         proc = subprocess.Popen(
             ["git", "--git-dir=%s/.git" % location, "describe", "--tags"],
             stdout=subprocess.PIPE,
-            text=True,
-            errors="replace",
+            universal_newlines=True,
         )
         output, _ = proc.communicate()
         return re.sub(r"-1.fc\d\d?", "", output.strip().replace("pungi-", ""))
@@ -25,7 +24,7 @@ def get_full_version():
         import subprocess
 
         proc = subprocess.Popen(
-            ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, text=True, errors="replace"
+            ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, universal_newlines=True
         )
         (output, err) = proc.communicate()
         if not err:
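(Context for the two hunks above, not part of this changeset: ``text=True``
was introduced in Python 3.7 as an alias for ``universal_newlines=True``, so
switching back widens compatibility with older interpreters, while
``errors="replace"`` has no pre-3.6 equivalent and is simply dropped. A
minimal demonstration:) ::

    import subprocess

    # Both calls decode stdout to str; only the first requires Python >= 3.7.
    subprocess.run(["echo", "hi"], stdout=subprocess.PIPE, text=True)
    subprocess.run(["echo", "hi"], stdout=subprocess.PIPE, universal_newlines=True)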
@@ -93,11 +93,6 @@ def split_name_arch(name_arch):
 
 def is_excluded(package, arches, logger=None):
     """Check if package is excluded from given architectures."""
-    if any(
-        getBaseArch(exc_arch) == 'x86_64' for exc_arch in package.exclusivearch
-    ) and 'x86_64_v2' not in package.exclusivearch:
-        package.exclusivearch.append('x86_64_v2')
-
     if package.excludearch and set(package.excludearch) & set(arches):
         if logger:
             logger.debug(
@@ -34,8 +34,6 @@ arches = {
     "x86_64": "athlon",
     "amd64": "x86_64",
     "ia32e": "x86_64",
-    # x86-64-v2
-    "x86_64_v2": "noarch",
     # ppc64le
     "ppc64le": "noarch",
     # ppc
@@ -84,8 +82,6 @@ arches = {
     "sh3": "noarch",
     # itanium
     "ia64": "noarch",
-    # riscv64
-    "riscv64": "noarch",
 }
 
 # Will contain information parsed from /proc/self/auxv via _parse_auxv().
205  pungi/checks.py
@@ -42,6 +42,7 @@ import platform
 import re
 
 import jsonschema
+import six
 from kobo.shortcuts import force_list
 from pungi.phases import PHASES_NAMES
 from pungi.runroot import RUNROOT_TYPES
@@ -235,8 +236,8 @@ def validate(config, offline=False, schema=None):
         schema,
         {
             "array": (tuple, list),
-            "regex": str,
-            "url": str,
+            "regex": six.string_types,
+            "url": six.string_types,
         },
     )
     errors = []
@@ -265,28 +266,6 @@ def validate(config, offline=False, schema=None):
         if error.validator in ("anyOf", "oneOf"):
             for suberror in error.context:
                 errors.append("    Possible reason: %s" % suberror.message)
 
-    # Resolve container tags in extra_files
-    tag_resolver = util.ContainerTagResolver(offline=offline)
-    if config.get("extra_files"):
-        for _, arch_dict in config["extra_files"]:
-            for value in arch_dict.values():
-                if isinstance(value, dict):
-                    _resolve_container_tag(value, tag_resolver)
-                elif isinstance(value, list):
-                    for subinstance in value:
-                        _resolve_container_tag(subinstance, tag_resolver)
-    if config.get("extra_isos"):
-        for cfgs in config["extra_isos"].values():
-            if not isinstance(cfgs, list):
-                cfgs = [cfgs]
-            for cfg in cfgs:
-                if isinstance(cfg.get("extra_files"), dict):
-                    _resolve_container_tag(cfg["extra_files"], tag_resolver)
-                elif isinstance(cfg.get("extra_files"), list):
-                    for c in cfg["extra_files"]:
-                        _resolve_container_tag(c, tag_resolver)
-
     return (errors + _validate_requires(schema, config, CONFIG_DEPS), warnings)
@@ -483,7 +462,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
         return isinstance(instance, (tuple, list))
 
     def is_string_type(checker, instance):
-        return isinstance(instance, str)
+        return isinstance(instance, six.string_types)
 
     kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
         {"array": is_array, "regex": is_string_type, "url": is_string_type}
@@ -555,18 +534,6 @@ def make_schema():
         "str_or_scm_dict": {
             "anyOf": [{"type": "string"}, {"$ref": "#/definitions/scm_dict"}]
         },
-        "extra_file": {
-            "type": "object",
-            "properties": {
-                "scm": {"type": "string"},
-                "repo": {"type": "string"},
-                "branch": {"$ref": "#/definitions/optional_string"},
-                "file": {"$ref": "#/definitions/strings"},
-                "dir": {"$ref": "#/definitions/strings"},
-                "target": {"type": "string"},
-            },
-            "additionalProperties": False,
-        },
         "repo_dict": {
             "type": "object",
             "properties": {
@@ -586,6 +553,26 @@ def make_schema():
         "list_of_strings": {"type": "array", "items": {"type": "string"}},
         "strings": _one_or_list({"type": "string"}),
         "optional_string": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+        "live_image_config": {
+            "type": "object",
+            "properties": {
+                "kickstart": {"type": "string"},
+                "ksurl": {"type": "url"},
+                "name": {"type": "string"},
+                "subvariant": {"type": "string"},
+                "target": {"type": "string"},
+                "version": {"type": "string"},
+                "repo": {"$ref": "#/definitions/repos"},
+                "specfile": {"type": "string"},
+                "scratch": {"type": "boolean"},
+                "type": {"type": "string"},
+                "sign": {"type": "boolean"},
+                "failable": {"type": "boolean"},
+                "release": {"$ref": "#/definitions/optional_string"},
+            },
+            "required": ["kickstart"],
+            "additionalProperties": False,
+        },
         "osbs_config": {
             "type": "object",
             "properties": {
@@ -621,7 +608,6 @@ def make_schema():
         "release_discinfo_description": {"type": "string"},
         "treeinfo_version": {"type": "string"},
         "compose_type": {"type": "string", "enum": COMPOSE_TYPES},
-        "label": {"type": "string"},
         "base_product_name": {"type": "string"},
         "base_product_short": {"type": "string"},
         "base_product_version": {"type": "string"},
@@ -699,11 +685,7 @@ def make_schema():
         "pkgset_allow_reuse": {"type": "boolean", "default": True},
         "createiso_allow_reuse": {"type": "boolean", "default": True},
         "extraiso_allow_reuse": {"type": "boolean", "default": True},
-        "pkgset_source": {"type": "string", "enum": [
-            "koji",
-            "repos",
-            "kojimock",
-        ]},
+        "pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
         "createrepo_c": {"type": "boolean", "default": True},
         "createrepo_checksum": {
             "type": "string",
@@ -736,6 +718,7 @@ def make_schema():
         ),
         "repoclosure_backend": {
             "type": "string",
+            # Gather and repoclosure both have the same backends: yum + dnf
             "default": _get_default_gather_backend(),
             "enum": _get_gather_backends(),
         },
@@ -802,7 +785,7 @@ def make_schema():
                 _variant_arch_mapping({"type": "number", "enum": [1, 2, 3, 4]}),
             ],
         },
-        "iso_hfs_ppc64le_compatible": {"type": "boolean", "default": False},
+        "iso_hfs_ppc64le_compatible": {"type": "boolean", "default": True},
         "multilib": _variant_arch_mapping(
             {"$ref": "#/definitions/list_of_strings"}
         ),
@@ -831,14 +814,6 @@ def make_schema():
             "type": "string",
             "enum": ["lorax"],
         },
-        # In phase `buildinstall` we should add to compose only the
-        # images that will be used only as netinstall
-        "netinstall_variants": {
-            "$ref": "#/definitions/list_of_strings",
-            "default": [
-                "BaseOS",
-            ],
-        },
         "buildinstall_topdir": {"type": "string"},
         "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
         "buildinstall_use_guestmount": {"type": "boolean", "default": True},
@@ -894,10 +869,7 @@ def make_schema():
         "paths_module": {"type": "string"},
         "skip_phases": {
             "type": "array",
-            "items": {
-                "type": "string",
-                "enum": PHASES_NAMES + ["productimg", "live_images"],
-            },
+            "items": {"type": "string", "enum": PHASES_NAMES + ["productimg"]},
             "default": [],
         },
         "image_name_format": {
@ -931,6 +903,11 @@ def make_schema():
|
||||
},
|
||||
"restricted_volid": {"type": "boolean", "default": False},
|
||||
"volume_id_substitutions": {"type": "object", "default": {}},
|
||||
"live_images_no_rename": {"type": "boolean", "default": False},
|
||||
"live_images_ksurl": {"type": "url"},
|
||||
"live_images_target": {"type": "string"},
|
||||
"live_images_release": {"$ref": "#/definitions/optional_string"},
|
||||
"live_images_version": {"type": "string"},
|
||||
"image_build_ksurl": {"type": "url"},
|
||||
"image_build_target": {"type": "string"},
|
||||
"image_build_release": {"$ref": "#/definitions/optional_string"},
|
||||
@ -963,6 +940,8 @@ def make_schema():
|
||||
"product_id": {"$ref": "#/definitions/str_or_scm_dict"},
|
||||
"product_id_allow_missing": {"type": "boolean", "default": False},
|
||||
"product_id_allow_name_prefix": {"type": "boolean", "default": True},
|
||||
# Deprecated in favour of regular local/phase/global setting.
|
||||
"live_target": {"type": "string"},
|
||||
"tree_arches": {"$ref": "#/definitions/list_of_strings", "default": []},
|
||||
"tree_variants": {"$ref": "#/definitions/list_of_strings", "default": []},
|
||||
"translate_paths": {"$ref": "#/definitions/string_pairs", "default": []},
|
||||
@ -982,7 +961,20 @@ def make_schema():
|
||||
"properties": {
|
||||
"include_variants": {"$ref": "#/definitions/strings"},
|
||||
"extra_files": _one_or_list(
|
||||
{"$ref": "#/definitions/extra_file"}
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"scm": {"type": "string"},
|
||||
"repo": {"type": "string"},
|
||||
"branch": {
|
||||
"$ref": "#/definitions/optional_string"
|
||||
},
|
||||
"file": {"$ref": "#/definitions/strings"},
|
||||
"dir": {"$ref": "#/definitions/strings"},
|
||||
"target": {"type": "string"},
|
||||
},
|
||||
"additionalProperties": False,
|
||||
}
|
||||
),
|
||||
"filename": {"type": "string"},
|
||||
"volid": {"$ref": "#/definitions/strings"},
|
||||
@ -1133,8 +1125,6 @@ def make_schema():
|
||||
"runroot_packages": {
|
||||
"$ref": "#/definitions/list_of_strings",
|
||||
},
|
||||
"subvariant": {"type": "string"},
|
||||
"name": {"type": "string"},
|
||||
},
|
||||
"required": [
|
||||
"treefile",
|
||||
@ -1173,6 +1163,9 @@ def make_schema():
|
||||
"ostree_container_use_koji_plugin": {"type": "boolean", "default": False},
|
||||
"ostree_installer_use_koji_plugin": {"type": "boolean", "default": False},
|
||||
"ostree_installer_overwrite": {"type": "boolean", "default": False},
|
||||
"live_images": _variant_arch_mapping(
|
||||
_one_or_list({"$ref": "#/definitions/live_image_config"})
|
||||
),
|
||||
"image_build_allow_reuse": {"type": "boolean", "default": False},
|
||||
"image_build": {
|
||||
"type": "object",
|
||||
@ -1243,13 +1236,6 @@ def make_schema():
|
||||
"repos": {"$ref": "#/definitions/list_of_strings"},
|
||||
"failable": {"$ref": "#/definitions/list_of_strings"},
|
||||
"subvariant": {"type": "string"},
|
||||
"type": {"type": "string"},
|
||||
"type_attr": {"$ref": "#/definitions/list_of_strings"},
|
||||
"bundle_name_format": {"type": "string"},
|
||||
"version": {"type": "string"},
|
||||
"repo_releasever": {"type": "string"},
|
||||
"manifest_type": {"type": "string"},
|
||||
"use_buildroot_repo": {"type": "boolean"},
|
||||
},
|
||||
"required": [
|
||||
# description_scm and description_path
|
||||
@ -1268,12 +1254,6 @@ def make_schema():
|
||||
"kiwibuild_description_path": {"type": "string"},
|
||||
"kiwibuild_target": {"type": "string"},
|
||||
"kiwibuild_release": {"$ref": "#/definitions/optional_string"},
|
||||
"kiwibuild_type": {"type": "string"},
|
||||
"kiwibuild_type_attr": {"$ref": "#/definitions/list_of_strings"},
|
||||
"kiwibuild_bundle_name_format": {"type": "string"},
|
||||
"kiwibuild_version": {"type": "string"},
|
||||
"kiwibuild_repo_releasever": {"type": "string"},
|
||||
"kiwibuild_use_buildroot_repo": {"type": "boolean", "default": False},
|
||||
"osbuild_target": {"type": "string"},
|
||||
"osbuild_release": {"$ref": "#/definitions/optional_string"},
|
||||
"osbuild_version": {"type": "string"},
|
||||
@ -1428,58 +1408,6 @@ def make_schema():
|
||||
},
|
||||
},
|
||||
},
|
||||
"imagebuilder": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
# Warning: this pattern is a variant uid regex, but the
|
||||
# format does not let us validate it as there is no regular
|
||||
# expression to describe all regular expressions.
|
||||
".+": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"arches": {"$ref": "#/definitions/list_of_strings"},
|
||||
"types": {"$ref": "#/definitions/list_of_strings"},
|
||||
"version": {"type": "string"},
|
||||
"repos": {"$ref": "#/definitions/list_of_strings"},
|
||||
"release": {"type": "string"},
|
||||
"distro": {"type": "string"},
|
||||
"scratch": {"type": "boolean"},
|
||||
"ostree": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parent": {"type": "string"},
|
||||
"ref": {"type": "string"},
|
||||
"url": {"type": "string"},
|
||||
},
|
||||
},
|
||||
"failable": {"$ref": "#/definitions/list_of_strings"},
|
||||
"subvariant": {"type": "string"},
|
||||
"blueprint": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
},
|
||||
"seed": {"type": "integer"},
|
||||
"manifest_type": {"type": "string"},
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"types",
|
||||
],
|
||||
"additionalProperties": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
"additionalProperties": False,
|
||||
},
|
||||
"imagebuilder_target": {"type": "string"},
|
||||
"imagebuilder_release": {"$ref": "#/definitions/optional_string"},
|
||||
"imagebuilder_version": {"type": "string"},
|
||||
"imagebuilder_seed": {"type": "integer"},
|
||||
"imagebuilder_scratch": {"type": "boolean"},
|
||||
"lorax_options": _variant_arch_mapping(
|
||||
{
|
||||
"type": "object",
|
||||
@ -1499,7 +1427,6 @@ def make_schema():
|
||||
"skip_branding": {"type": "boolean"},
|
||||
"squashfs_only": {"type": "boolean"},
|
||||
"configuration_file": {"$ref": "#/definitions/str_or_scm_dict"},
|
||||
"rootfs_type": {"type": "string"},
|
||||
},
|
||||
"additionalProperties": False,
|
||||
}
|
||||
@ -1508,6 +1435,9 @@ def make_schema():
|
||||
{"$ref": "#/definitions/strings"}
|
||||
),
|
||||
"lorax_use_koji_plugin": {"type": "boolean", "default": False},
|
||||
"signing_key_id": {"type": "string"},
|
||||
"signing_key_password_file": {"type": "string"},
|
||||
"signing_command": {"type": "string"},
|
||||
"productimg": {
|
||||
"deprecated": "remove it. Productimg phase has been removed"
|
||||
},
|
||||
@ -1559,7 +1489,21 @@ def make_schema():
|
||||
"additionalProperties": False,
|
||||
},
|
||||
"extra_files": _variant_arch_mapping(
|
||||
{"type": "array", "items": {"$ref": "#/definitions/extra_file"}}
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"scm": {"type": "string"},
|
||||
"repo": {"type": "string"},
|
||||
"branch": {"$ref": "#/definitions/optional_string"},
|
||||
"file": {"$ref": "#/definitions/strings"},
|
||||
"dir": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
},
|
||||
"additionalProperties": False,
|
||||
},
|
||||
}
|
||||
),
|
||||
"gather_lookaside_repos": _variant_arch_mapping(
|
||||
{"$ref": "#/definitions/strings"}
|
||||
@ -1680,13 +1624,10 @@ def update_schema(schema, update_dict):
|
||||
|
||||
|
||||
def _get_gather_backends():
|
||||
if six.PY2:
|
||||
return ["yum", "dnf"]
|
||||
return ["dnf"]
|
||||
|
||||
|
||||
def _get_default_gather_backend():
|
||||
return "dnf"
|
||||
|
||||
|
||||
def _resolve_container_tag(instance, tag_resolver):
|
||||
if instance.get("scm") == "container-image":
|
||||
instance["repo"] = tag_resolver(instance["repo"])
|
||||
return "yum" if six.PY2 else "dnf"
|
||||
|
||||
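The `_one_or_list` helper used throughout the schema above wraps a definition so that a config value may be either a single item or a list of items. A minimal sketch of the idea with the `jsonschema` library (the helper name and exact wrapper shape are assumptions inferred from how it is used above):

# Minimal sketch of a one-or-list schema wrapper (assumed shape).
import jsonschema

def one_or_list(schema):
    # Accept either a single instance of `schema` or an array of them.
    return {"anyOf": [schema, {"type": "array", "items": schema}]}

extra_file = {
    "type": "object",
    "properties": {
        "scm": {"type": "string"},
        "repo": {"type": "string"},
        "file": {"type": "string"},
        "target": {"type": "string"},
    },
    "additionalProperties": False,
}

# Both a single dict and a list of dicts validate.
jsonschema.validate({"scm": "git", "repo": "https://example.com/r.git"},
                    one_or_list(extra_file))
jsonschema.validate([{"scm": "file", "file": "GPL"}], one_or_list(extra_file))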
@@ -50,7 +50,6 @@ from pungi.util import (
     translate_path_raw,
 )
 from pungi.metadata import compose_to_composeinfo
-from pungi.otel import tracing

 try:
     # This is available since productmd >= 1.18

@@ -131,16 +130,15 @@ def cts_auth(pungi_conf):
         cts_oidc_client_id = os.environ.get(
             "CTS_OIDC_CLIENT_ID", ""
         ) or pungi_conf.get("cts_oidc_client_id", "")
-        with tracing.span("obtain-oidc-token"):
-            token = retry_request(
-                "post",
-                cts_oidc_token_url,
-                data={
-                    "grant_type": "client_credentials",
-                    "client_id": cts_oidc_client_id,
-                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
-                },
-            ).json()["access_token"]
+        token = retry_request(
+            "post",
+            cts_oidc_token_url,
+            data={
+                "grant_type": "client_credentials",
+                "client_id": cts_oidc_client_id,
+                "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
+            },
+        ).json()["access_token"]
         auth = BearerAuth(token)
         del token

@@ -196,9 +194,8 @@ def get_compose_info(
         "parent_compose_ids": parent_compose_ids,
         "respin_of": respin_of,
     }
-    with tracing.span("create-compose-in-cts"):
-        with cts_auth(conf) as authentication:
-            rv = retry_request("post", url, json_data=data, auth=authentication)
+    with cts_auth(conf) as authentication:
+        rv = retry_request("post", url, json_data=data, auth=authentication)

     # Update local ComposeInfo with received ComposeInfo.
     cts_ci = ComposeInfo()

@@ -234,9 +231,8 @@ def update_compose_url(compose_id, compose_dir, conf):
         "action": "set_url",
         "compose_url": compose_url,
     }
-    with tracing.span("update-compose-url"):
-        with cts_auth(conf) as authentication:
-            return retry_request("patch", url, json_data=data, auth=authentication)
+    with cts_auth(conf) as authentication:
+        return retry_request("patch", url, json_data=data, auth=authentication)


 def get_compose_dir(

@@ -377,7 +373,6 @@ class Compose(kobo.log.LoggingBase):
         self.ci_base.load(
             os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
         )
-        tracing.set_attribute("compose_id", self.compose_id)

         self.supported = supported
         if (

@@ -471,10 +466,13 @@ class Compose(kobo.log.LoggingBase):

     @property
     def should_create_yum_database(self):
-        """Explicit configuration trumps all. Yum is no longer supported, so
-        default to False.
+        """Explicit configuration trumps all. Otherwise check gather backend
+        and only create it for Yum.
         """
-        return self.conf.get("createrepo_database", False)
+        config = self.conf.get("createrepo_database")
+        if config is not None:
+            return config
+        return self.conf["gather_backend"] == "yum"

     def read_variants(self):
         # TODO: move to phases/init ?

@@ -562,7 +560,6 @@ class Compose(kobo.log.LoggingBase):
         old_status = self.get_status()
         if stat_msg == old_status:
             return
-        tracing.set_attribute("compose_status", stat_msg)
         if old_status == "FINISHED":
             msg = "Could not modify a FINISHED compose: %s" % self.topdir
             self.log_error(msg)
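For context, the OIDC client-credentials exchange that `cts_auth` performs boils down to a single POST to the token endpoint. A standalone sketch with `requests` (the endpoint URL is illustrative; the environment variable names match the code above):

# Standalone sketch of the client-credentials token request (illustrative).
import os
import requests

token_url = "https://id.example.com/openid-connect/token"  # assumed endpoint
resp = requests.post(
    token_url,
    data={
        "grant_type": "client_credentials",
        "client_id": os.environ.get("CTS_OIDC_CLIENT_ID", ""),
        "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
    },
)
resp.raise_for_status()
access_token = resp.json()["access_token"]
# The token is then sent as "Authorization: Bearer <token>" on CTS requests.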
79 pungi/config.py Normal file
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+
+import os
+import sys
+import time
+
+from ConfigParser import SafeConfigParser
+
+from .arch_utils import getBaseArch
+
+# In development, `here` will point to the bin/ directory with scripts.
+here = sys.path[0]
+MULTILIBCONF = (
+    os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
+    if here != "/usr/bin"
+    else "/usr/share/pungi/multilib"
+)
+
+
+class Config(SafeConfigParser):
+    def __init__(self, pungirc=None):
+        SafeConfigParser.__init__(self)
+
+        self.add_section("pungi")
+        self.add_section("lorax")
+
+        self.set("pungi", "osdir", "os")
+        self.set("pungi", "sourcedir", "source")
+        self.set("pungi", "debugdir", "debug")
+        self.set("pungi", "isodir", "iso")
+        self.set("pungi", "multilibconf", MULTILIBCONF)
+        self.set(
+            "pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
+        )
+        self.set("pungi", "relnotedirre", "")
+        self.set(
+            "pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
+        )
+        self.set("pungi", "product_path", "Packages")
+        self.set("pungi", "cachedir", "/var/cache/pungi")
+        self.set("pungi", "compress_type", "xz")
+        self.set("pungi", "arch", getBaseArch())
+        self.set("pungi", "family", "Fedora")
+        self.set("pungi", "iso_basename", "Fedora")
+        self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
+        self.set("pungi", "variant", "")
+        self.set("pungi", "destdir", os.getcwd())
+        self.set("pungi", "workdirbase", "/work")
+        self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
+        self.set("pungi", "cdsize", "695.0")
+        self.set("pungi", "debuginfo", "True")
+        self.set("pungi", "alldeps", "True")
+        self.set("pungi", "isfinal", "False")
+        self.set("pungi", "nohash", "False")
+        self.set("pungi", "full_archlist", "False")
+        self.set("pungi", "multilib", "")
+        self.set("pungi", "lookaside_repos", "")
+        self.set("pungi", "resolve_deps", "True")
+        self.set("pungi", "no_dvd", "False")
+        self.set("pungi", "nomacboot", "False")
+        self.set("pungi", "rootfs_size", "False")
+
+        # if missing, self.read() is a noop, else change 'defaults'
+        if pungirc:
+            self.read(os.path.expanduser(pungirc))
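Since this file restores the Python 2 `ConfigParser` API, usage is plain ConfigParser fare. A quick sketch (the rc file path is illustrative):

# Quick usage sketch, matching the ConfigParser import above.
from pungi.config import Config

config = Config()
print(config.get("pungi", "cachedir"))   # /var/cache/pungi
config.set("pungi", "family", "MyOS")    # override a built-in default

# An optional rc file can override any default at construction time:
config = Config(pungirc="~/.pungirc")    # illustrative path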
@@ -3,9 +3,10 @@
 from __future__ import print_function

 import os
-import shlex
+import six
 from collections import namedtuple
 from kobo.shortcuts import run
+from six.moves import shlex_quote

 from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper

@@ -40,13 +41,13 @@ def quote(str):
     expanded.
     """
     if str.startswith("$TEMPLATE"):
-        return "$TEMPLATE%s" % shlex.quote(str.replace("$TEMPLATE", "", 1))
-    return shlex.quote(str)
+        return "$TEMPLATE%s" % shlex_quote(str.replace("$TEMPLATE", "", 1))
+    return shlex_quote(str)


 def emit(f, cmd):
     """Print line of shell code into the stream."""
-    if isinstance(cmd, str):
+    if isinstance(cmd, six.string_types):
         print(cmd, file=f)
     else:
         print(" ".join([quote(x) for x in cmd]), file=f)

@@ -158,11 +159,15 @@ def write_xorriso_commands(opts):

     script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
     with open(script, "w") as f:
-        for cmd in iso.xorriso_commands(
-            opts.arch, opts.boot_iso, os.path.join(opts.output_dir, opts.iso_name)
-        ):
-            emit(f, " ".join(cmd))
+        emit(f, "-indev %s" % opts.boot_iso)
+        emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
+        emit(f, "-boot_image any replay")
+        emit(f, "-volid %s" % opts.volid)
+        # isoinfo -J uses the Joliet tree, and it's used by virt-install
+        emit(f, "-joliet on")
+        # Support long filenames in the Joliet trees. Repodata is particularly
+        # likely to run into this limit.
+        emit(f, "-compliance joliet_long_names")

         with open(opts.graft_points) as gp:
             for line in gp:

@@ -173,6 +178,10 @@ def write_xorriso_commands(opts):
                 emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
                 emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))

+        if opts.arch == "ppc64le":
+            # This is needed for the image to be bootable.
+            emit(f, "-as mkisofs -U --")

         emit(f, "-chown_r 0 /")
         emit(f, "-chgrp_r 0 /")
         emit(f, "-end")
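Run through `xorriso`, the generated command file would contain directives along these lines. Every directive is taken from the code above; the paths and volume id are illustrative:

-indev /compose/work/x86_64/boot.iso
-outdev /compose/iso/Fedora-dvd1.iso
-boot_image any replay
-volid Fedora-40-x86_64
-joliet on
-compliance joliet_long_names
-map /compose/tree/repodata repodata
-chmod 0755 repodata
-chown_r 0 /
-chgrp_r 0 /
-end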
2297 pungi/gather.py Normal file
File diff suppressed because it is too large.

@@ -35,6 +35,11 @@ from pungi.profiler import Profiler
 from pungi.util import DEBUG_PATTERNS


+def get_source_name(pkg):
+    # Workaround for rhbz#1418298
+    return pkg.sourcerpm.rsplit("-", 2)[0]
+
+
 def filter_dotarch(queue, pattern, **kwargs):
     """Filter queue for packages matching the pattern. If pattern matches the
     dotarch format of <name>.<arch>, it is processed as such. Otherwise it is

@@ -384,7 +389,7 @@ class Gather(GatherBase):
             # lookaside
             if self.is_from_lookaside(i):
                 self._set_flag(i, PkgFlag.lookaside)
-            if i.source_name in self.opts.fulltree_excludes:
+            if i.sourcerpm.rsplit("-", 2)[0] in self.opts.fulltree_excludes:
                 self._set_flag(i, PkgFlag.fulltree_exclude)

     def _get_package_deps(self, pkg, debuginfo=False):

@@ -834,7 +839,7 @@ class Gather(GatherBase):
                 continue
             if self.is_from_lookaside(i):
                 self._set_flag(i, PkgFlag.lookaside)
-            srpm_name = i.source_name
+            srpm_name = i.sourcerpm.rsplit("-", 2)[0]
             if srpm_name in self.opts.fulltree_excludes:
                 self._set_flag(i, PkgFlag.fulltree_exclude)
             if PkgFlag.input in self.result_package_flags.get(srpm_name, set()):

@@ -866,7 +871,7 @@ class Gather(GatherBase):
         for pkg in sorted(self.result_binary_packages):
             assert pkg is not None

-            if pkg.source_name in self.opts.fulltree_excludes:
+            if get_source_name(pkg) in self.opts.fulltree_excludes:
                 self.logger.debug("No fulltree for %s due to exclude list", pkg)
                 continue

@@ -1080,7 +1085,7 @@ class Gather(GatherBase):
             if ex.errno == errno.EEXIST:
                 self.logger.warning("Downloaded package exists in %s", target)
             else:
-                self.logger.error("Unable to link %s from the dnf cache.", pkg.name)
+                self.logger.error("Unable to link %s from the yum cache.", pkg.name)
                 raise

     def log_count(self, msg, method, *args):
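The `rsplit("-", 2)[0]` idiom that both sides of these hunks rely on strips the version and release from a source RPM filename, which is exactly what the rhbz#1418298 workaround needs. A worked example:

# Extracting the source package name from an RPM's sourcerpm field.
sourcerpm = "python-requests-2.25.1-2.fc35.src.rpm"
# rsplit from the right twice: [name, version, release+suffix]
name = sourcerpm.rsplit("-", 2)[0]
print(name)  # python-requests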
@@ -228,7 +228,20 @@ class Linker(kobo.log.LoggingBase):
             raise ValueError("Unknown link_type: %s" % link_type)

     def link(self, src, dst, link_type="hardlink-or-copy"):
-        if os.path.isdir(src):
-            raise RuntimeError("Linking directories recursively is not supported")
-        self._link_file(src, dst, link_type)
+        """Link directories recursively."""
+        if os.path.isfile(src) or os.path.islink(src):
+            self._link_file(src, dst, link_type)
+            return
+
+        if os.path.isfile(dst):
+            raise OSError(errno.EEXIST, "File exists")
+
+        if not self.test:
+            if not os.path.exists(dst):
+                makedirs(dst)
+            shutil.copystat(src, dst)
+
+        for i in os.listdir(src):
+            src_path = os.path.join(src, i)
+            dst_path = os.path.join(dst, i)
+            self.link(src_path, dst_path, link_type)
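The rewritten `link` walks directories itself instead of refusing them: files and symlinks are linked directly, while for a directory it creates the destination, copies stat metadata, and then recurses per entry. A hedged usage sketch (constructor arguments are omitted; they come from `kobo.log.LoggingBase` and are not shown in this hunk):

# Usage sketch: recursively hardlink a tree, falling back to copy.
linker = Linker()  # constructor kwargs omitted; see the class definition
linker.link(
    "/compose/work/x86_64/os",   # illustrative source tree
    "/compose/final/x86_64/os",  # illustrative destination
    link_type="hardlink-or-copy",
)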
295 pungi/multilib_yum.py Executable file
@@ -0,0 +1,295 @@
+# -*- coding: utf-8 -*-
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+
+import re
+import fnmatch
+
+import pungi.pathmatch
+import pungi.gather
+import pungi.util
+
+
+LINE_PATTERN_RE = re.compile(r"^\s*(?P<line>[^#]+)(:?\s+(?P<comment>#.*))?$")
+RUNTIME_PATTERN_SPLIT_RE = re.compile(
+    r"^\s*(?P<path>[^\s]+)\s+(?P<pattern>[^\s]+)(:?\s+(?P<comment>#.*))?$"
+)
+SONAME_PATTERN_RE = re.compile(r"^(.+\.so\.[a-zA-Z0-9_\.]+).*$")
+
+
+def read_lines(lines):
+    result = []
+    for i in lines:
+        i = i.strip()
+
+        if not i:
+            continue
+
+        # skip comments
+        if i.startswith("#"):
+            continue
+
+        match = LINE_PATTERN_RE.match(i)
+        if match is None:
+            raise ValueError("Couldn't parse line: %s" % i)
+        gd = match.groupdict()
+        result.append(gd["line"])
+    return result
+
+
+def read_lines_from_file(path):
+    lines = open(path, "r").readlines()
+    lines = read_lines(lines)
+    return lines
+
+
+def read_runtime_patterns(lines):
+    result = []
+    for i in read_lines(lines):
+        match = RUNTIME_PATTERN_SPLIT_RE.match(i)
+        if match is None:
+            raise ValueError("Couldn't parse pattern: %s" % i)
+        gd = match.groupdict()
+        result.append((gd["path"], gd["pattern"]))
+    return result
+
+
+def read_runtime_patterns_from_file(path):
+    lines = open(path, "r").readlines()
+    return read_runtime_patterns(lines)
+
+
+def expand_runtime_patterns(patterns):
+    pm = pungi.pathmatch.PathMatch()
+    for path, pattern in patterns:
+        for root in ("", "/opt/*/*/root"):
+            # include Software Collections: /opt/<vendor>/<scl_name>/root/...
+            if "$LIBDIR" in path:
+                for lib_dir in ("/lib", "/lib64", "/usr/lib", "/usr/lib64"):
+                    path_pattern = path.replace("$LIBDIR", lib_dir)
+                    path_pattern = "%s/%s" % (root, path_pattern.lstrip("/"))
+                    pm[path_pattern] = (path_pattern, pattern)
+            else:
+                path_pattern = "%s/%s" % (root, path.lstrip("/"))
+                pm[path_pattern] = (path_pattern, pattern)
+    return pm
+
+
+class MultilibMethodBase(object):
+    """a base class for multilib methods"""
+
+    name = "base"
+
+    def __init__(self, config_path):
+        self.config_path = config_path
+
+    def select(self, po):
+        raise NotImplementedError
+
+    def skip(self, po):
+        if (
+            pungi.gather.is_noarch(po)
+            or pungi.gather.is_source(po)
+            or pungi.util.pkg_is_debug(po)
+        ):
+            return True
+        return False
+
+    def is_kernel(self, po):
+        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
+            if p_name == "kernel":
+                return True
+        return False
+
+    def is_kernel_devel(self, po):
+        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
+            if p_name == "kernel-devel":
+                return True
+        return False
+
+    def is_kernel_or_kernel_devel(self, po):
+        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
+            if p_name in ("kernel", "kernel-devel"):
+                return True
+        return False
+
+
+class NoneMultilibMethod(MultilibMethodBase):
+    """multilib disabled"""
+
+    name = "none"
+
+    def select(self, po):
+        return False
+
+
+class AllMultilibMethod(MultilibMethodBase):
+    """all packages are multilib"""
+
+    name = "all"
+
+    def select(self, po):
+        if self.skip(po):
+            return False
+        return True
+
+
+class RuntimeMultilibMethod(MultilibMethodBase):
+    """pre-defined paths to libs"""
+
+    name = "runtime"
+
+    def __init__(self, *args, **kwargs):
+        super(RuntimeMultilibMethod, self).__init__(*args, **kwargs)
+        self.blacklist = read_lines_from_file(
+            self.config_path + "runtime-blacklist.conf"
+        )
+        self.whitelist = read_lines_from_file(
+            self.config_path + "runtime-whitelist.conf"
+        )
+        self.patterns = expand_runtime_patterns(
+            read_runtime_patterns_from_file(self.config_path + "runtime-patterns.conf")
+        )
+
+    def select(self, po):
+        if self.skip(po):
+            return False
+        if po.name in self.blacklist:
+            return False
+        if po.name in self.whitelist:
+            return True
+        if self.is_kernel(po):
+            return False
+
+        # gather all *.so.* provides from the RPM header
+        provides = set()
+        for i in po.provides:
+            match = SONAME_PATTERN_RE.match(i[0])
+            if match is not None:
+                provides.add(match.group(1))
+
+        for path in po.returnFileEntries() + po.returnFileEntries("ghost"):
+            dirname, filename = path.rsplit("/", 1)
+            dirname = dirname.rstrip("/")
+
+            patterns = self.patterns[dirname]
+            if not patterns:
+                continue
+            for dir_pattern, file_pattern in patterns:
+                if file_pattern == "-":
+                    return True
+                if fnmatch.fnmatch(filename, file_pattern):
+                    if ".so.*" in file_pattern:
+                        if filename in provides:
+                            # return only if the lib is provided in RPM header
+                            # (some libs may be private, hence not exposed in Provides)
+                            return True
+                    else:
+                        return True
+        return False
+
+
+class KernelMultilibMethod(MultilibMethodBase):
+    """kernel and kernel-devel"""
+
+    name = "kernel"
+
+    def __init__(self, *args, **kwargs):
+        super(KernelMultilibMethod, self).__init__(*args, **kwargs)
+
+    def select(self, po):
+        if self.is_kernel_or_kernel_devel(po):
+            return True
+        return False
+
+
+class YabootMultilibMethod(MultilibMethodBase):
+    """yaboot on ppc"""
+
+    name = "yaboot"
+
+    def __init__(self, *args, **kwargs):
+        super(YabootMultilibMethod, self).__init__(*args, **kwargs)
+
+    def select(self, po):
+        if po.arch in ["ppc"]:
+            if po.name.startswith("yaboot"):
+                return True
+        return False
+
+
+class DevelMultilibMethod(MultilibMethodBase):
+    """all -devel and -static packages"""
+
+    name = "devel"
+
+    def __init__(self, *args, **kwargs):
+        super(DevelMultilibMethod, self).__init__(*args, **kwargs)
+        self.blacklist = read_lines_from_file(self.config_path + "devel-blacklist.conf")
+        self.whitelist = read_lines_from_file(self.config_path + "devel-whitelist.conf")
+
+    def select(self, po):
+        if self.skip(po):
+            return False
+        if po.name in self.blacklist:
+            return False
+        if po.name in self.whitelist:
+            return True
+        if self.is_kernel_devel(po):
+            return False
+        # HACK: exclude ghc*
+        if po.name.startswith("ghc-"):
+            return False
+        if po.name.endswith("-devel"):
+            return True
+        if po.name.endswith("-static"):
+            return True
+        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
+            if p_name.endswith("-devel"):
+                return True
+            if p_name.endswith("-static"):
+                return True
+        return False
+
+
+DEFAULT_METHODS = ["devel", "runtime"]
+METHOD_MAP = {}
+
+
+def init(config_path="/usr/share/pungi/multilib/"):
+    global METHOD_MAP
+
+    if not config_path.endswith("/"):
+        config_path += "/"
+
+    for cls in (
+        AllMultilibMethod,
+        DevelMultilibMethod,
+        KernelMultilibMethod,
+        NoneMultilibMethod,
+        RuntimeMultilibMethod,
+        YabootMultilibMethod,
+    ):
+        method = cls(config_path)
+        METHOD_MAP[method.name] = method
+
+
+def po_is_multilib(po, methods):
+    for method_name in methods:
+        if not method_name:
+            continue
+        method = METHOD_MAP[method_name]
+        if method.select(po):
+            return method_name
+    return None
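Putting the module together: `init()` populates METHOD_MAP once, then `po_is_multilib` asks each configured method in turn and returns the name of the first one that selects the package. A sketch (the `po` object is a yum package object providing `name`, `arch`, `provides`, and `returnFileEntries`, as used above):

# Sketch: decide whether a yum package object should be pulled in as multilib.
import pungi.multilib_yum as multilib

multilib.init("/usr/share/pungi/multilib/")
method = multilib.po_is_multilib(po, multilib.DEFAULT_METHODS)  # po: yum pkg object
if method:
    print("%s selected by multilib method %r" % (po.name, method))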
@@ -104,8 +104,7 @@ class PungiNotifier(object):
             workdir=workdir,
             return_stdout=False,
             show_cmd=True,
-            text=True,
-            errors="replace",
+            universal_newlines=True,
             logfile=logfile,
         )
         if ret != 0:

@@ -16,7 +16,8 @@

 import os
 import json
-import shlex
+import six
+from six.moves import shlex_quote


 from .base import OSTree

@@ -25,10 +26,10 @@ from .utils import tweak_treeconf

 def emit(cmd):
     """Print line of shell code into the stream."""
-    if isinstance(cmd, str):
+    if isinstance(cmd, six.string_types):
         print(cmd)
     else:
-        print(" ".join([shlex.quote(x) for x in cmd]))
+        print(" ".join([shlex_quote(x) for x in cmd]))


 class Container(OSTree):

@@ -64,8 +64,7 @@ class Tree(OSTree):
                 show_cmd=True,
                 stdout=True,
                 logfile=log_file,
-                text=True,
-                errors="replace",
+                universal_newlines=True,
             )
         finally:
             os.umask(oldumask)

@@ -78,8 +77,7 @@ class Tree(OSTree):
                 show_cmd=True,
                 stdout=True,
                 logfile=log_file,
-                text=True,
-                errors="replace",
+                universal_newlines=True,
             )

     def _update_ref(self):
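The kwargs swapped in these hunks are the Python 3 and Python 2 spellings of the same subprocess behavior: `universal_newlines=True` is the older name for text mode, while `text=True` only exists on Python 3.7+ and `errors="replace"` additionally controls how undecodable bytes are handled. A direct subprocess illustration:

# Python 3.7+: text mode with decoding error handling.
import subprocess
out = subprocess.run(["echo", "hi"], capture_output=True,
                     text=True, errors="replace").stdout

# Python 2 / older Python 3: same text-mode effect, no `errors` control.
out = subprocess.check_output(["echo", "hi"], universal_newlines=True)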
229 pungi/otel.py
@@ -1,229 +0,0 @@
-import itertools
-import os
-from contextlib import contextmanager
-
-"""
-This module contains two classes with the same interface. An instance of one of
-them is available as `tracing`. Which class is instantiated is selected
-depending on whether environment variables configuring OTel are configured.
-"""
-
-
-class DummyTracing:
-    """A dummy tracing module that doesn't actually do anything."""
-
-    def setup(self):
-        pass
-
-    @contextmanager
-    def span(self, *args, **kwargs):
-        yield
-
-    def set_attribute(self, name, value):
-        pass
-
-    def force_flush(self):
-        pass
-
-    def instrument_xmlrpc_proxy(self, proxy):
-        return proxy
-
-    def get_traceparent(self):
-        return None
-
-    def set_context(self, traceparent):
-        pass
-
-    def record_exception(self, exc, set_error_status=True):
-        pass
-
-
-class OtelTracing:
-    """This class implements the actual integration with opentelemetry."""
-
-    def setup(self):
-        """Configure opentelemetry tracing based on environment variables. This
-        setup is optional as it may not be desirable when pungi is used as a
-        library.
-        """
-        from opentelemetry import trace
-        from opentelemetry.sdk.resources import Resource
-        from opentelemetry.sdk.trace import TracerProvider
-        from opentelemetry.sdk.trace.export import (
-            BatchSpanProcessor,
-            ConsoleSpanExporter,
-        )
-        from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
-            OTLPSpanExporter,
-        )
-
-        otel_endpoint = os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"]
-        provider = TracerProvider(
-            resource=Resource(attributes={"service.name": "pungi"})
-        )
-        if "console" == otel_endpoint:
-            # This is for debugging the tracing locally.
-            self.processor = BatchSpanProcessor(ConsoleSpanExporter())
-        else:
-            self.processor = BatchSpanProcessor(OTLPSpanExporter())
-        provider.add_span_processor(self.processor)
-        trace.set_tracer_provider(provider)
-
-        traceparent = os.environ.get("TRACEPARENT")
-        if traceparent:
-            self.set_context(traceparent)
-
-        try:
-            from opentelemetry.instrumentation.requests import RequestsInstrumentor
-
-            RequestsInstrumentor().instrument()
-        except ImportError:
-            pass
-
-    @property
-    def tracer(self):
-        from opentelemetry import trace
-
-        return trace.get_tracer(__name__)
-
-    @contextmanager
-    def span(self, name, **attributes):
-        """Create a new span as a child of the current one. Attributes can be
-        passed via kwargs."""
-        with self.tracer.start_as_current_span(name, attributes=attributes) as span:
-            yield span
-
-    def get_traceparent(self):
-        from opentelemetry.trace.propagation.tracecontext import (
-            TraceContextTextMapPropagator,
-        )
-
-        carrier = {}
-        TraceContextTextMapPropagator().inject(carrier)
-        return carrier["traceparent"]
-
-    def set_attribute(self, name, value):
-        """Set an attribute on the current span."""
-        from opentelemetry import trace
-
-        span = trace.get_current_span()
-        span.set_attribute(name, value)
-
-    def force_flush(self):
-        """Ensure all spans and traces are sent out. Call this before the
-        process exits."""
-        self.processor.force_flush()
-
-    def instrument_xmlrpc_proxy(self, proxy):
-        return InstrumentedClientSession(proxy)
-
-    def set_context(self, traceparent):
-        """Configure current context to match the given traceparent."""
-        from opentelemetry import context
-        from opentelemetry.trace.propagation.tracecontext import (
-            TraceContextTextMapPropagator,
-        )
-
-        ctx = TraceContextTextMapPropagator().extract(
-            carrier={"traceparent": traceparent}
-        )
-        context.attach(ctx)
-
-    def record_exception(self, exc, set_error_status=True):
-        """Records an exception for the current span and optionally marks the
-        span as failed."""
-        from opentelemetry import trace
-
-        span = trace.get_current_span()
-        span.record_exception(exc)
-
-        if set_error_status:
-            span.set_status(trace.status.StatusCode.ERROR)
-
-
-class InstrumentedClientSession:
-    """Wrapper around koji.ClientSession that creates spans for each API call.
-    RequestsInstrumentor can create spans at the HTTP requests level, but since
-    those all go to the same XML-RPC endpoint, they are not very informative.
-
-    Multicall is not handled very well here. The spans will only have a
-    `multicall` boolean attribute, but they don't carry any additional data
-    that could group them.
-
-    Koji ClientSession supports three ways of making multicalls, but Pungi only
-    uses one, and that one is supported here.
-
-    Supported:
-
-        c.multicall = True
-        c.getBuild(1)
-        c.getBuild(2)
-        results = c.multiCall()
-
-    Not supported:
-
-        with c.multicall() as m:
-            r1 = m.getBuild(1)
-            r2 = m.getBuild(2)
-
-    Also not supported:
-
-        m = c.multicall()
-        r1 = m.getBuild(1)
-        r2 = m.getBuild(2)
-        m.call_all()
-    """
-
-    def __init__(self, session):
-        self.session = session
-
-    def _name(self, name):
-        """Helper for generating span names."""
-        return "%s.%s" % (self.session.__class__.__name__, name)
-
-    @property
-    def system(self):
-        """This is only ever used to get a list of available API calls. It is
-        rather awkward though. Ideally we wouldn't really trace this at all,
-        but there's the underlying POST request to the hub, which is quite
-        confusing in the trace if there is no additional context."""
-        return self.session.system
-
-    @property
-    def multicall(self):
-        return self.session.multicall
-
-    @multicall.setter
-    def multicall(self, value):
-        self.session.multicall = value
-
-    def __getattr__(self, name):
-        return self._instrument_method(name, getattr(self.session, name))
-
-    def _instrument_method(self, name, callable):
-        def wrapper(*args, **kwargs):
-            with tracing.span(self._name(name)) as span:
-                span.set_attribute("arguments", _format_args(args, kwargs))
-                if self.session.multicall:
-                    tracing.set_attribute("multicall", True)
-                return callable(*args, **kwargs)
-
-        return wrapper
-
-
-def _format_args(args, kwargs):
-    """Turn args+kwargs into a single string. OTel could choke on more
-    complicated data."""
-    return ", ".join(
-        itertools.chain(
-            (repr(arg) for arg in args),
-            (f"{key}={value!r}" for key, value in kwargs.items()),
-        )
-    )
-
-
-if "OTEL_EXPORTER_OTLP_ENDPOINT" in os.environ:
-    tracing = OtelTracing()
-else:
-    tracing = DummyTracing()
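Either implementation exposes the same surface, so callers never check which one they got. Typical use, matching the call sites removed elsewhere in this diff (the compose id is illustrative):

# The interface is identical whether OTel is configured or not.
from pungi.otel import tracing

tracing.setup()  # no-op for DummyTracing
with tracing.span("create-compose-in-cts"):
    tracing.set_attribute("compose_id", "Fedora-40-20240101.0")  # illustrative
    ...  # do the traced work here
tracing.force_flush()  # flush spans before the process exits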
73 pungi/pathmatch.py Normal file
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; version 2 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://gnu.org/licenses/>.
+
+
+import fnmatch
+
+
+def head_tail_split(name):
+    name_split = name.strip("/").split("/", 1)
+    if len(name_split) == 2:
+        head = name_split[0]
+        tail = name_split[1].strip("/")
+    else:
+        head, tail = name_split[0], None
+    return head, tail
+
+
+class PathMatch(object):
+    def __init__(self, parent=None, desc=None):
+        self._patterns = {}
+        self._final_patterns = {}
+        self._values = []
+
+    def __setitem__(self, name, value):
+        head, tail = head_tail_split(name)
+
+        if tail is not None:
+            # recursion
+            if head not in self._patterns:
+                self._patterns[head] = PathMatch(parent=self, desc=head)
+            self._patterns[head][tail] = value
+        else:
+            if head not in self._final_patterns:
+                self._final_patterns[head] = PathMatch(parent=self, desc=head)
+            if value not in self._final_patterns[head]._values:
+                self._final_patterns[head]._values.append(value)
+
+    def __getitem__(self, name):
+        result = []
+        head, tail = head_tail_split(name)
+        for pattern in self._patterns:
+            if fnmatch.fnmatch(head, pattern):
+                if tail is None:
+                    values = self._patterns[pattern]._values
+                else:
+                    values = self._patterns[pattern][tail]
+                for value in values:
+                    if value not in result:
+                        result.append(value)
+
+        for pattern in self._final_patterns:
+            if tail is None:
+                x = head
+            else:
+                x = "%s/%s" % (head, tail)
+            if fnmatch.fnmatch(x, pattern):
+                values = self._final_patterns[pattern]._values
+                for value in values:
+                    if value not in result:
+                        result.append(value)
+        return result
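PathMatch is effectively a trie of fnmatch patterns keyed by path segment: storing a value under a glob and then looking up a concrete path returns every value whose pattern matches. A small worked example:

# Worked example of PathMatch pattern storage and lookup.
from pungi.pathmatch import PathMatch

pm = PathMatch()
pm["/usr/lib*"] = "libdirs"
pm["/opt/*/*/root/usr/lib*"] = "scl-libdirs"

print(pm["/usr/lib64"])                       # ['libdirs']
print(pm["/opt/rh/devtoolset/root/usr/lib"])  # ['scl-libdirs']
print(pm["/etc"])                             # []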
@@ -25,11 +25,11 @@ from .buildinstall import BuildinstallPhase  # noqa
 from .extra_files import ExtraFilesPhase  # noqa
 from .createiso import CreateisoPhase  # noqa
 from .extra_isos import ExtraIsosPhase  # noqa
+from .live_images import LiveImagesPhase  # noqa
 from .image_build import ImageBuildPhase  # noqa
 from .image_container import ImageContainerPhase  # noqa
 from .kiwibuild import KiwiBuildPhase  # noqa
 from .osbuild import OSBuildPhase  # noqa
-from .imagebuilder import ImageBuilderPhase  # noqa
 from .repoclosure import RepoclosurePhase  # noqa
 from .test import TestPhase  # noqa
 from .image_checksum import ImageChecksumPhase  # noqa

@@ -16,17 +16,17 @@

 import errno
 import os
-import pickle
 import time
-import shlex
 import shutil
 import re
+from six.moves import cPickle as pickle
 from copy import copy

-from kobo.threads import ThreadPool
+from kobo.threads import ThreadPool, WorkerThread
 from kobo.shortcuts import run, force_list
 import kobo.rpmlib
 from productmd.images import Image
+from six.moves import shlex_quote

 from pungi.arch import get_valid_arches
 from pungi.util import get_volid, get_arch_variant_data

@@ -39,7 +39,6 @@ from pungi.wrappers.scm import get_file_from_scm
 from pungi.wrappers import kojiwrapper
 from pungi.phases.base import PhaseBase
 from pungi.runroot import Runroot, download_and_extract_archive
-from pungi.threading import TelemetryWorkerThread as WorkerThread


 class BuildinstallPhase(PhaseBase):

@@ -95,7 +94,6 @@ class BuildinstallPhase(PhaseBase):
         squashfs_only = False
         configuration_file = None
         configuration_file_source = None
-        rootfs_type = None
         version = self.compose.conf.get(
             "treeinfo_version", self.compose.conf["release_version"]
         )

@@ -118,7 +116,6 @@ class BuildinstallPhase(PhaseBase):
             skip_branding = data.get("skip_branding", False)
             configuration_file_source = data.get("configuration_file")
             squashfs_only = data.get("squashfs_only", False)
-            rootfs_type = data.get("rootfs_type", None)
             if "version" in data:
                 version = data["version"]
         output_dir = os.path.join(output_dir, variant.uid)

@@ -174,7 +171,6 @@ class BuildinstallPhase(PhaseBase):
                 "skip_branding": skip_branding,
                 "squashfs_only": squashfs_only,
                 "configuration_file": configuration_file,
-                "rootfs-type": rootfs_type,
             }
         else:
             # If the buildinstall_topdir is set, it means Koji is used for

@@ -209,11 +205,10 @@ class BuildinstallPhase(PhaseBase):
                 skip_branding=skip_branding,
                 squashfs_only=squashfs_only,
                 configuration_file=configuration_file,
-                rootfs_type=rootfs_type,
             )
             return "rm -rf %s && %s" % (
-                shlex.quote(output_topdir),
-                " ".join([shlex.quote(x) for x in lorax_cmd]),
+                shlex_quote(output_topdir),
+                " ".join([shlex_quote(x) for x in lorax_cmd]),
             )

     def get_repos(self, arch):

@@ -418,8 +413,8 @@ def tweak_buildinstall(
     # copy src to temp
     # TODO: place temp on the same device as buildinstall dir so we can hardlink
     cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
-        shlex.quote(src),
-        shlex.quote(tmp_dir),
+        shlex_quote(src),
+        shlex_quote(tmp_dir),
     )
     run(cmd)

@@ -457,12 +452,12 @@ def tweak_buildinstall(
     run(cmd)

     # HACK: make buildinstall files world readable
-    run("chmod -R a+rX %s" % shlex.quote(tmp_dir))
+    run("chmod -R a+rX %s" % shlex_quote(tmp_dir))

     # copy temp to dst
     cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
-        shlex.quote(tmp_dir),
-        shlex.quote(dst),
+        shlex_quote(tmp_dir),
+        shlex_quote(dst),
     )
     run(cmd)

@@ -532,14 +527,7 @@ def link_boot_iso(compose, arch, variant, can_fail):
         )
     except RuntimeError:
         pass
-    # In this phase we should add to the compose only the images that
-    # will be used as netinstall.
-    # At this step lorax generates the environment for creating ISOs
-    # and creates them.
-    # In step `extra_isos` we overwrite the unneeded `boot Minimal` ISO with a
-    # new ISO that already contains the necessary packages from included variants.
-    if variant.uid in compose.conf['netinstall_variants']:
-        compose.im.add(variant.uid, arch, img)
+    compose.im.add(variant.uid, arch, img)
     compose.log_info("[DONE ] %s" % msg)
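The `netinstall_variants` option that this branch drops (schema entry above, default `["BaseOS"]`) is what gated this `compose.im.add` call: only variants listed there got the plain lorax boot ISO recorded in the image manifest, since for other variants it is replaced during `extra_isos`. In a compose configuration file that looked like:

# Pungi configuration snippet (configuration files use Python-like syntax).
netinstall_variants = ["BaseOS"]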
@@ -14,18 +14,17 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-import itertools
 import os
 import random
-import shlex
 import shutil
 import stat
 import json

 import productmd.treeinfo
 from productmd.images import Image
-from kobo.threads import ThreadPool
-from kobo.shortcuts import run, relative_path, compute_file_checksums
+from kobo.threads import ThreadPool, WorkerThread
+from kobo.shortcuts import run, relative_path
+from six.moves import shlex_quote

 from pungi.wrappers import iso
 from pungi.wrappers.createrepo import CreaterepoWrapper

@@ -43,7 +42,6 @@ from pungi.util import (
 from pungi.media_split import MediaSplitter, convert_media_size
 from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
 from pungi.runroot import Runroot
-from pungi.threading import TelemetryWorkerThread as WorkerThread

 from .. import createiso

@@ -190,14 +188,6 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
         if not old_config:
             self.logger.info("%s - no config for old compose", log_msg)
             return False
-
-        # Disable reuse if unsigned packages are allowed. The older compose
-        # could have unsigned packages, and those may have been signed since
-        # then. We want to regenerate the ISO to have signatures.
-        if None in self.compose.conf["sigkeys"]:
-            self.logger.info("%s - unsigned packages are allowed", log_msg)
-            return False

         # Convert current configuration to JSON and back to encode it similarly
         # to the old one
         config = json.loads(json.dumps(self.compose.conf))

@@ -467,14 +457,7 @@ class CreateIsoThread(WorkerThread):

         try:
             run_createiso_command(
-                num,
-                compose,
-                bootable,
-                arch,
-                cmd["cmd"],
-                mounts,
-                log_file,
-                cmd["iso_path"],
+                num, compose, bootable, arch, cmd["cmd"], mounts, log_file
             )
         except Exception:
             self.fail(compose, cmd, variant, arch)

@@ -555,9 +538,7 @@ def add_iso_to_metadata(
     return img


-def run_createiso_command(
-    num, compose, bootable, arch, cmd, mounts, log_file, iso_path
-):
+def run_createiso_command(num, compose, bootable, arch, cmd, mounts, log_file):
     packages = [
         "coreutils",
         "xorriso" if compose.conf.get("createiso_use_xorrisofs") else "genisoimage",

@@ -599,76 +580,6 @@ def run_createiso_command(
         weight=compose.conf["runroot_weights"].get("createiso"),
     )

-    if bootable and compose.conf.get("createiso_use_xorrisofs"):
-        fix_treeinfo_checksums(compose, iso_path, arch)
-
-
-def fix_treeinfo_checksums(compose, iso_path, arch):
-    """It is possible for the ISO to contain a .treeinfo file with incorrect
-    checksums. By modifying the ISO (adding files) some of the images may
-    change.
-
-    This function fixes that after the fact by looking for incorrect checksums,
-    recalculating them and updating the .treeinfo file. Since the size of the
-    file doesn't change, this seems to not change any images.
-    """
-    modified = False
-    with iso.mount(iso_path, compose._logger) as mountpoint:
-        ti = productmd.TreeInfo()
-        ti.load(os.path.join(mountpoint, ".treeinfo"))
-        for image, (type_, expected) in ti.checksums.checksums.items():
-            checksums = compute_file_checksums(os.path.join(mountpoint, image), [type_])
-            actual = checksums[type_]
-            if actual == expected:
-                # Everything fine here, skip to next image.
-                continue
-
-            compose.log_debug("%s: %s: checksum mismatch", iso_path, image)
-            # Update treeinfo with correct checksum
-            ti.checksums.checksums[image] = (type_, actual)
-            modified = True
-
-    if not modified:
-        compose.log_debug("%s: All checksums match, nothing to do.", iso_path)
-        return
-
-    try:
-        tmpdir = compose.mkdtemp(arch, prefix="fix-checksum-")
-        # Write modified .treeinfo
-        ti_path = os.path.join(tmpdir, ".treeinfo")
-        compose.log_debug("Storing modified .treeinfo in %s", ti_path)
-        ti.dump(ti_path)
-        # Write a modified DVD into a temporary path, that is atomically moved
-        # over the original file.
-        fixed_path = os.path.join(tmpdir, "fixed-checksum-dvd.iso")
-        cmd = ["xorriso"]
-        cmd.extend(
-            itertools.chain.from_iterable(
-                iso.xorriso_commands(arch, iso_path, fixed_path)
-            )
-        )
-        cmd.extend(["-map", ti_path, ".treeinfo"])
-        run(
-            cmd,
-            logfile=compose.paths.log.log_file(
-                arch, "checksum-fix_generate_%s" % os.path.basename(iso_path)
-            ),
-        )
-        # The modified ISO no longer has implanted MD5, so that needs to be
-        # fixed again.
-        compose.log_debug("Implanting new MD5 to %s", fixed_path)
-        run(
-            iso.get_implantisomd5_cmd(fixed_path, compose.supported),
-            logfile=compose.paths.log.log_file(
-                arch, "checksum-fix_implantisomd5_%s" % os.path.basename(iso_path)
-            ),
-        )
-        # All done, move the updated image to the final location.
-        compose.log_debug("Updating %s", iso_path)
-        os.rename(fixed_path, iso_path)
-    finally:
-        shutil.rmtree(tmpdir)


 def split_iso(compose, arch, variant, no_split=False, logger=None):
     """

@@ -783,7 +694,7 @@ def prepare_iso(

     if file_list_content:
         # write modified repodata only if there are packages available
-        run("cp -a %s/repodata %s/" % (shlex.quote(tree_dir), shlex.quote(iso_dir)))
+        run("cp -a %s/repodata %s/" % (shlex_quote(tree_dir), shlex_quote(iso_dir)))
         with open(file_list, "w") as f:
             f.write("\n".join(file_list_content))
         cmd = repo.get_createrepo_cmd(
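The core of the removed `fix_treeinfo_checksums` is comparing recorded against recomputed digests. Stripped of the compose plumbing, the check looks like this (a sketch using `hashlib` instead of `kobo.shortcuts.compute_file_checksums`; the mountpoint path is illustrative):

# Sketch: verify .treeinfo image checksums against files on a mounted ISO.
import hashlib
import os
import productmd.treeinfo

def checksum(path, algo):
    h = hashlib.new(algo)
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(1024 * 1024), b""):
            h.update(block)
    return h.hexdigest()

mountpoint = "/mnt/iso"  # illustrative
ti = productmd.treeinfo.TreeInfo()
ti.load(os.path.join(mountpoint, ".treeinfo"))
for image, (algo, expected) in ti.checksums.checksums.items():
    actual = checksum(os.path.join(mountpoint, image), algo)
    if actual != expected:
        print("checksum mismatch: %s" % image)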
@@ -27,7 +27,7 @@ import xml.dom.minidom
 import productmd.modules
 import productmd.rpms
 from kobo.shortcuts import relative_path, run
-from kobo.threads import ThreadPool
+from kobo.threads import ThreadPool, WorkerThread

 from ..module_util import Modulemd, collect_module_defaults, collect_module_obsoletes
 from ..util import (

@@ -38,7 +38,6 @@ from ..util import (
 from ..wrappers.createrepo import CreaterepoWrapper
 from ..wrappers.scm import get_dir_from_scm
 from .base import PhaseBase
-from ..threading import TelemetryWorkerThread as WorkerThread

 CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"
 createrepo_lock = threading.Lock()

@@ -112,7 +112,7 @@ def copy_extra_files(
         target_path = os.path.join(
             extra_files_dir, scm_dict.get("target", "").lstrip("/")
         )
-        getter(scm_dict, target_path, compose=compose, arch=arch)
+        getter(scm_dict, target_path, compose=compose)

     if os.listdir(extra_files_dir):
         metadata.populate_extra_files_metadata(

@@ -18,8 +18,7 @@ import hashlib
 import json

 from kobo.shortcuts import force_list
-from kobo.threads import ThreadPool
-from pungi.threading import TelemetryWorkerThread as WorkerThread
+from kobo.threads import ThreadPool, WorkerThread
 import productmd.treeinfo
 from productmd.extra_files import ExtraFiles

@@ -167,7 +166,6 @@ class ExtraIsosThread(WorkerThread):
             log_file=compose.paths.log.log_file(
                 arch, "extraiso-%s" % os.path.basename(iso_path)
             ),
-            iso_path=iso_path,
         )

         img = add_iso_to_metadata(

@@ -206,14 +204,6 @@ class ExtraIsosThread(WorkerThread):
         if not old_config:
             self.pool.log_info("%s - no config for old compose", log_msg)
             return False
-
-        # Disable reuse if unsigned packages are allowed. The older compose
-        # could have unsigned packages, and those may have been signed since
-        # then. We want to regenerate the ISO to have signatures.
-        if None in compose.conf["sigkeys"]:
-            self.pool.log_info("%s - unsigned packages are allowed", log_msg)
-            return False

         # Convert current configuration to JSON and back to encode it similarly
         # to the old one
         config = json.loads(json.dumps(compose.conf))

@@ -343,24 +333,23 @@ def get_extra_files(compose, variant, arch, extra_files):
     included in the ISO.
     """
     extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
+    filelist = []
     for scm_dict in extra_files:
         getter = get_file_from_scm if "file" in scm_dict else get_dir_from_scm
         target = scm_dict.get("target", "").lstrip("/")
         target_path = os.path.join(extra_files_dir, target).rstrip("/")
-        getter(scm_dict, target_path, compose=compose, arch=arch)
+        filelist.extend(
+            os.path.join(target, f)
+            for f in getter(scm_dict, target_path, compose=compose)
+        )

-    filelist = [
-        os.path.relpath(os.path.join(root, f), extra_files_dir)
-        for root, _, files in os.walk(extra_files_dir)
-        for f in files
-    ]
     if filelist:
         metadata.populate_extra_files_metadata(
             ExtraFiles(),
             variant,
             arch,
             extra_files_dir,
-            sorted(filelist),
+            filelist,
             compose.conf["media_checksums"],
         )

@@ -431,12 +420,6 @@ def get_iso_contents(
         original_treeinfo,
         os.path.join(extra_files_dir, ".treeinfo"),
     )
-    tweak_repo_treeinfo(
-        compose,
-        include_variants,
-        original_treeinfo,
-        original_treeinfo,
-    )

     # Add extra files specific for the ISO
     files.update(

@@ -448,45 +431,6 @@ def get_iso_contents(
     return gp


-def tweak_repo_treeinfo(compose, include_variants, source_file, dest_file):
-    """
-    Add the variants listed in the `extra_isos -> include_variants` option
-    to a variant's .treeinfo file.
-    """
-    ti = productmd.treeinfo.TreeInfo()
-    ti.load(source_file)
-    main_variant = next(iter(ti.variants))
-    for variant_uid in include_variants:
-        variant = compose.all_variants[variant_uid]
-        var = productmd.treeinfo.Variant(ti)
-        var.id = variant.id
-        var.uid = variant.uid
-        var.name = variant.name
-        var.type = variant.type
-        ti.variants.add(var)
-
-    for variant_id in ti.variants:
-        var = ti.variants[variant_id]
-        if variant_id == main_variant:
-            var.paths.packages = 'Packages'
-            var.paths.repository = '.'
-        else:
-            var.paths.packages = os.path.join(
-                '../../..',
-                var.uid,
-                var.arch,
-                'os/Packages',
-            )
-            var.paths.repository = os.path.join(
-                '../../..',
-                var.uid,
-                var.arch,
-                'os',
-            )
-    ti.dump(dest_file, main_variant=main_variant)


 def tweak_treeinfo(compose, include_variants, source_file, dest_file):
     ti = load_and_tweak_treeinfo(source_file)
     for variant_uid in include_variants:

@@ -502,6 +446,7 @@ def tweak_treeinfo(compose, include_variants, source_file, dest_file):
         var = ti.variants[variant_id]
         var.paths.packages = os.path.join(var.uid, "Packages")
         var.paths.repository = var.uid
+
     ti.dump(dest_file)
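What the removed `tweak_repo_treeinfo` encoded is the relative layout of a multi-variant compose: from a tree such as `Server/x86_64/os/`, a sibling variant like `BaseOS` lives at `../../../BaseOS/x86_64/os`. A compact sketch of adding one such variant entry, using only the productmd calls that appear above (all values illustrative):

# Sketch: add a sibling variant to a loaded .treeinfo (values illustrative).
import productmd.treeinfo

ti = productmd.treeinfo.TreeInfo()
ti.load(".treeinfo")
var = productmd.treeinfo.Variant(ti)
var.id = "BaseOS"
var.uid = "BaseOS"
var.name = "BaseOS"
var.type = "variant"
var.paths.packages = "../../../BaseOS/x86_64/os/Packages"
var.paths.repository = "../../../BaseOS/x86_64/os"
ti.variants.add(var)
ti.dump(".treeinfo")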
@@ -17,14 +17,13 @@
 import glob
 import json
 import os
-import pickle
 import shutil
 import threading

 from kobo.rpmlib import parse_nvra
 from kobo.shortcuts import run
 from productmd.rpms import Rpms
-from pungi.phases.pkgset.common import get_all_arches
+from six.moves import cPickle as pickle

 try:
     from queue import Queue

@@ -650,11 +649,6 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
             pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
         ).rstrip("/")
         + "/",
-        "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
-            compose,
-            get_all_arches(compose),
-        ).koji_module.config.topdir.rstrip("/")
-        + "/",
     }
     path_prefix = prefixes[compose.conf["pkgset_source"]]()
     package_list = set()

@@ -87,7 +87,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         dst_relpath = os.path.join(packages_dir_relpath, package_path)

         # link file
-        pool.queue_put((os.path.realpath(pkg["path"]), dst))
+        pool.queue_put((pkg["path"], dst))

         # update rpm manifest
         pkg_obj = pkg_by_path[pkg["path"]]

@@ -116,7 +116,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         dst_relpath = os.path.join(packages_dir_relpath, package_path)

         # link file
-        pool.queue_put((os.path.realpath(pkg["path"]), dst))
+        pool.queue_put((pkg["path"], dst))

         # update rpm manifest
         pkg_obj = pkg_by_path[pkg["path"]]

@@ -146,7 +146,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
         dst_relpath = os.path.join(packages_dir_relpath, package_path)

         # link file
-        pool.queue_put((os.path.realpath(pkg["path"]), dst))
+        pool.queue_put((pkg["path"], dst))

         # update rpm manifest
         pkg_obj = pkg_by_path[pkg["path"]]
@ -15,6 +15,7 @@
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from kobo.shortcuts import run
|
||||
from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
|
||||
@ -219,7 +220,9 @@ def resolve_deps(compose, arch, variant, source_name=None):
|
||||
yum_arch = tree_arch_to_yum_arch(arch)
|
||||
tmp_dir = compose.paths.work.tmp_dir(arch, variant)
|
||||
cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
|
||||
# TODO: remove YUM code, fully migrate to DNF
|
||||
backends = {
|
||||
"yum": pungi_wrapper.get_pungi_cmd,
|
||||
"dnf": pungi_wrapper.get_pungi_cmd_dnf,
|
||||
}
|
||||
get_cmd = backends[compose.conf["gather_backend"]]
|
||||
@ -242,6 +245,17 @@ def resolve_deps(compose, arch, variant, source_name=None):
|
||||
with temp_dir(prefix="pungi_") as work_dir:
|
||||
run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)
|
||||
|
||||
# Clean up tmp dir
|
||||
# Workaround for rpm not honoring sgid bit which only appears when yum is used.
|
||||
yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
|
||||
if os.path.isdir(yumroot_dir):
|
||||
try:
|
||||
shutil.rmtree(yumroot_dir)
|
||||
except Exception as e:
|
||||
compose.log_warning(
|
||||
"Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
|
||||
)
|
||||
|
||||
with open(pungi_log, "r") as f:
|
||||
packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)
|
||||
|
||||
|
||||
@ -16,6 +16,7 @@
|
||||
import os
|
||||
from pprint import pformat
|
||||
import re
|
||||
import six
|
||||
|
||||
import pungi.arch
|
||||
from pungi.util import pkg_is_rpm, pkg_is_srpm, pkg_is_debug
|
||||
@ -73,7 +74,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
|
||||
if not pkg_is_rpm(pkg):
|
||||
continue
|
||||
for gathered_pkg, pkg_arch in packages:
|
||||
if isinstance(gathered_pkg, str) and not re.match(
|
||||
if isinstance(gathered_pkg, six.string_types) and not re.match(
|
||||
gathered_pkg.replace(".", "\\.")
|
||||
.replace("+", "\\+")
|
||||
.replace("*", ".*")
|
||||
|
||||
@ -13,8 +13,7 @@ from pungi.util import as_local_file, translate_path, get_repo_urls, version_gen
|
||||
from pungi.phases import base
|
||||
from pungi.linker import Linker
|
||||
from pungi.wrappers.kojiwrapper import KojiWrapper
|
||||
from kobo.threads import ThreadPool
|
||||
from pungi.threading import TelemetryWorkerThread as WorkerThread
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
from kobo.shortcuts import force_list
|
||||
from productmd.images import Image
|
||||
from productmd.rpms import Rpms
|
||||
@ -23,13 +22,10 @@ from productmd.rpms import Rpms
|
||||
# This is a mapping from formats to file extensions. The format is what koji
|
||||
# image-build command expects as argument, and the extension is what the file
|
||||
# name will be ending with. The extensions are used to filter out which task
|
||||
# results will be pulled into the compose. This dict is also used later in
|
||||
# the process to set the image 'type' in productmd metadata terms - the type
|
||||
# is set as the first key in this dict which has the file's extension in its
|
||||
# values. This dict is imported and extended for similar purposes by other
|
||||
# phases (at least osbuild and kiwibuild).
|
||||
# results will be pulled into the compose.
|
||||
EXTENSIONS = {
|
||||
"docker": ["tar.xz"],
|
||||
"docker": ["tar.gz", "tar.xz"],
|
||||
"iso": ["iso"],
|
||||
"liveimg-squashfs": ["liveimg.squashfs"],
|
||||
"qcow": ["qcow"],
|
||||
"qcow2": ["qcow2"],
|
||||
@ -44,6 +40,7 @@ EXTENSIONS = {
|
||||
"vdi": ["vdi"],
|
||||
"vmdk": ["vmdk"],
|
||||
"vpc": ["vhd"],
|
||||
"vhd-compressed": ["vhd.gz", "vhd.xz"],
|
||||
"vsphere-ova": ["vsphere.ova"],
|
||||
}
|
||||
|
||||
|
||||
@ -2,13 +2,12 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
from kobo.threads import ThreadPool
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
|
||||
from .base import ConfigGuardedPhase, PhaseLoggerMixin
|
||||
from .. import util
|
||||
from ..wrappers import kojiwrapper
|
||||
from ..phases.osbs import add_metadata
|
||||
from ..threading import TelemetryWorkerThread as WorkerThread
|
||||
|
||||
|
||||
class ImageContainerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
|
||||
|
||||
@@ -1,263 +0,0 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool
from kobo import shortcuts
from productmd.images import Image

from . import base
from .. import util
from ..linker import Linker
from ..wrappers import kojiwrapper
from .image_build import EXTENSIONS
from ..threading import TelemetryWorkerThread as WorkerThread


IMAGEBUILDEREXTENSIONS = [
    ("vagrant-libvirt", ["vagrant.libvirt.box"], "vagrant-libvirt.box"),
    (
        "vagrant-virtualbox",
        ["vagrant.virtualbox.box"],
        "vagrant-virtualbox.box",
    ),
    ("container", ["oci.tar.xz"], "tar.xz"),
    ("wsl2", ["wsl"], "wsl"),
    # .iso images can be of many types - boot, cd, dvd, live... -
    # so 'boot' is just a default guess. 'iso' is not a valid
    # productmd image type
    ("boot", [".iso"], "iso"),
]


class ImageBuilderPhase(
    base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
):
    name = "imagebuilder"

    def __init__(self, compose):
        super(ImageBuilderPhase, self).__init__(compose)
        self.pool = ThreadPool(logger=self.logger)

    def _get_arches(self, image_conf, arches):
        """Get an intersection of arches in the config dict and the given ones."""
        if "arches" in image_conf:
            arches = set(image_conf["arches"]) & arches
        return sorted(arches)

    @staticmethod
    def _get_repo_urls(compose, repos, arch="$basearch"):
        """
        Get list of repos with resolved repo URLs. Preserve repos defined
        as dicts.
        """
        resolved_repos = []

        for repo in repos:
            repo = util.get_repo_url(compose, repo, arch=arch)
            if repo is None:
                raise RuntimeError("Failed to resolve repo URL for %s" % repo)
            resolved_repos.append(repo)

        return resolved_repos

    def _get_repo(self, image_conf, variant):
        """
        Get a list of repos. First included are those explicitly listed in
        config, followed by repo for current variant if it's not included in
        the list already.
        """
        repos = shortcuts.force_list(image_conf.get("repos", []))

        if not variant.is_empty and variant.uid not in repos:
            repos.append(variant.uid)

        return ImageBuilderPhase._get_repo_urls(self.compose, repos, arch="$arch")

    def run(self):
        for variant in self.compose.get_variants():
            arches = set([x for x in variant.arches if x != "src"])

            for image_conf in self.get_config_block(variant):
                build_arches = self._get_arches(image_conf, arches)
                if not build_arches:
                    self.log_debug("skip: no arches")
                    continue

                # these properties can be set per-image *or* as e.g.
                # imagebuilder_release or global_release in the config
                generics = {
                    "release": self.get_release(image_conf),
                    "target": self.get_config(image_conf, "target"),
                    "types": self.get_config(image_conf, "types"),
                    "seed": self.get_config(image_conf, "seed"),
                    "scratch": self.get_config(image_conf, "scratch"),
                    "version": self.get_version(image_conf),
                }

                repo = self._get_repo(image_conf, variant)

                failable_arches = image_conf.pop("failable", [])
                if failable_arches == ["*"]:
                    failable_arches = image_conf["arches"]

                self.pool.add(RunImageBuilderThread(self.pool))
                self.pool.queue_put(
                    (
                        self.compose,
                        variant,
                        image_conf,
                        build_arches,
                        generics,
                        repo,
                        failable_arches,
                    )
                )

        self.pool.start()


class RunImageBuilderThread(WorkerThread):
    def process(self, item, num):
        (compose, variant, config, arches, generics, repo, failable_arches) = item
        self.failable_arches = []
        # the Koji task as a whole can only fail if *all* arches are failable
        can_task_fail = set(self.failable_arches).issuperset(set(arches))
        self.num = num
        with util.failable(
            compose,
            can_task_fail,
            variant,
            "*",
            "imageBuilderBuild",
            logger=self.pool._logger,
        ):
            self.worker(compose, variant, config, arches, generics, repo)

    def worker(self, compose, variant, config, arches, generics, repo):
        msg = "imageBuilderBuild task for variant %s" % variant.uid
        self.pool.log_info("[BEGIN] %s" % msg)
        koji = kojiwrapper.KojiWrapper(compose)
        koji.login()

        opts = {}
        opts["repos"] = repo

        if generics.get("release"):
            opts["release"] = generics["release"]

        if generics.get("seed"):
            opts["seed"] = generics["seed"]

        if generics.get("scratch"):
            opts["scratch"] = generics["scratch"]

        if config.get("ostree"):
            opts["ostree"] = config["ostree"]

        if config.get("blueprint"):
            opts["blueprint"] = config["blueprint"]

        task_id = koji.koji_proxy.imageBuilderBuild(
            generics["target"],
            arches,
            types=generics["types"],
            name=config["name"],
            version=generics["version"],
            opts=opts,
        )

        koji.save_task_id(task_id)

        # Wait for it to finish and capture the output into log file.
        log_dir = os.path.join(compose.paths.log.topdir(), "imageBuilderBuild")
        util.makedirs(log_dir)
        log_file = os.path.join(
            log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
        )
        if koji.watch_task(task_id, log_file) != 0:
            raise RuntimeError(
                "imageBuilderBuild task failed: %s. See %s for details"
                % (task_id, log_file)
            )

        # Refresh koji session which may have timed out while the task was
        # running. Watching is done via a subprocess, so the session is
        # inactive.
        koji = kojiwrapper.KojiWrapper(compose)

        linker = Linker(logger=self.pool._logger)

        # Process all images in the build. There should be one for each
        # architecture, but we don't verify that.
        paths = koji.get_image_paths(task_id)

        for arch, paths in paths.items():
            for path in paths:
                type_, format_ = _find_type_and_format(path)
                if not format_:
                    # Path doesn't match any known type.
                    continue

                # image_dir is absolute path to which the image should be copied.
                # We also need the same path as relative to compose directory for
                # including in the metadata.
                if format_ == "iso":
                    # If the produced image is actually an ISO, it should go to
                    # iso/ subdirectory.
                    image_dir = compose.paths.compose.iso_dir(arch, variant)
                    rel_image_dir = compose.paths.compose.iso_dir(
                        arch, variant, relative=True
                    )
                else:
                    image_dir = compose.paths.compose.image_dir(variant) % {
                        "arch": arch
                    }
                    rel_image_dir = compose.paths.compose.image_dir(
                        variant, relative=True
                    ) % {"arch": arch}
                util.makedirs(image_dir)

                filename = os.path.basename(path)

                image_dest = os.path.join(image_dir, filename)

                src_file = compose.koji_downloader.get_file(path)

                linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

                # Update image manifest
                img = Image(compose.im)

                # If user configured exact type, use it, otherwise try to
                # figure it out based on the koji output.
                img.type = config.get("manifest_type", type_)
                img.format = format_
                img.path = os.path.join(rel_image_dir, filename)
                img.mtime = util.get_mtime(image_dest)
                img.size = util.get_file_size(image_dest)
                img.arch = arch
                img.disc_number = 1  # We don't expect multiple disks
                img.disc_count = 1

                img.bootable = format_ == "iso"
                img.subvariant = config.get("subvariant", variant.uid)
                setattr(img, "can_fail", arch in self.failable_arches)
                setattr(img, "deliverable", "imageBuilderBuild")
                compose.im.add(variant=variant.uid, arch=arch, image=img)

        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, task_id))


def _find_type_and_format(path):
    # these are our image-builder-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory. they come first as we
    # want our oci.tar.xz mapping to win over the tar.xz one in
    # EXTENSIONS
    for type_, suffixes, format_ in IMAGEBUILDEREXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    return None, None
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool
from kobo.threads import ThreadPool, WorkerThread
from kobo import shortcuts
from productmd.images import Image

@@ -10,24 +10,11 @@ from .. import util
from ..linker import Linker
from ..wrappers import kojiwrapper
from .image_build import EXTENSIONS
from ..threading import TelemetryWorkerThread as WorkerThread

KIWIEXTENSIONS = [
    ("vhd-compressed", ["vhdfixed.xz"], "vhd.xz"),
    ("vagrant-libvirt", ["vagrant.libvirt.box"], "vagrant-libvirt.box"),
    ("vagrant-virtualbox", ["vagrant.virtualbox.box"], "vagrant-virtualbox.box"),
    # .iso images can be of many types - boot, cd, dvd, live... -
    # so 'boot' is just a default guess. 'iso' is not a valid
    # productmd image type
    ("boot", [".iso"], "iso"),
    ("fex", ["erofs.xz"], "erofs.xz"),
    ("fex", ["erofs.gz"], "erofs.gz"),
    ("fex", ["erofs"], "erofs"),
    ("fex", ["squashfs.xz"], "squashfs.xz"),
    ("fex", ["squashfs.gz"], "squashfs.gz"),
    ("fex", ["squashfs"], "squashfs"),
    ("container", ["oci.tar.xz"], "tar.xz"),
    ("wsl2", ["wsl"], "wsl"),
]


@@ -92,16 +79,6 @@ class KiwiBuildPhase(
                    "target": self.get_config(image_conf, "target"),
                    "descscm": self.get_config(image_conf, "description_scm"),
                    "descpath": self.get_config(image_conf, "description_path"),
                    "type": self.get_config(image_conf, "type"),
                    "type_attr": self.get_config(image_conf, "type_attr"),
                    "bundle_name_format": self.get_config(
                        image_conf, "bundle_name_format"
                    ),
                    "version": self.get_version(image_conf),
                    "repo_releasever": self.get_config(image_conf, "repo_releasever"),
                    "use_buildroot_repo": self.get_config(
                        image_conf, "use_buildroot_repo"
                    ),
                }

                repo = self._get_repo(image_conf, variant)
@@ -157,14 +134,8 @@ class RunKiwiBuildThread(WorkerThread):
            profile=config["kiwi_profile"],
            release=generics["release"],
            repos=repo,
            type=generics["type"],
            type_attr=generics["type_attr"],
            result_bundle_name_format=generics["bundle_name_format"],
            # this ensures the task won't fail if only failable arches fail
            optional_arches=self.failable_arches,
            version=generics["version"],
            repo_releasever=generics["repo_releasever"],
            use_buildroot_repo=generics["use_buildroot_repo"],
        )

        koji.save_task_id(task_id)
@@ -201,20 +172,10 @@ class RunKiwiBuildThread(WorkerThread):
                # image_dir is absolute path to which the image should be copied.
                # We also need the same path as relative to compose directory for
                # including in the metadata.
                if format_ == "iso":
                    # If the produced image is actually an ISO, it should go to
                    # iso/ subdirectory.
                    image_dir = compose.paths.compose.iso_dir(arch, variant)
                    rel_image_dir = compose.paths.compose.iso_dir(
                        arch, variant, relative=True
                    )
                else:
                    image_dir = compose.paths.compose.image_dir(variant) % {
                        "arch": arch
                    }
                    rel_image_dir = compose.paths.compose.image_dir(
                        variant, relative=True
                    ) % {"arch": arch}
                image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
                rel_image_dir = compose.paths.compose.image_dir(
                    variant, relative=True
                ) % {"arch": arch}
                util.makedirs(image_dir)

                filename = os.path.basename(path)
@@ -228,9 +189,9 @@ class RunKiwiBuildThread(WorkerThread):
                # Update image manifest
                img = Image(compose.im)

                # If user configured exact type, use it, otherwise try to
                # figure it out based on the koji output.
                img.type = config.get("manifest_type", type_)
                # Get the manifest type from the config if supplied, otherwise we
                # determine the manifest type based on the koji output
                img.type = type_
                img.format = format_
                img.path = os.path.join(rel_image_dir, filename)
                img.mtime = util.get_mtime(image_dest)
@@ -238,8 +199,7 @@ class RunKiwiBuildThread(WorkerThread):
                img.arch = arch
                img.disc_number = 1  # We don't expect multiple disks
                img.disc_count = 1
                # Kiwi produces only bootable ISOs. Other kinds of images are
                img.bootable = format_ == "iso"
                img.bootable = False
                img.subvariant = config.get("subvariant", variant.uid)
                setattr(img, "can_fail", arch in self.failable_arches)
                setattr(img, "deliverable", "kiwibuild")
@@ -249,15 +209,13 @@ class RunKiwiBuildThread(WorkerThread):


def _find_type_and_format(path):
    # these are our kiwi-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory. they come first as we
    # want our oci.tar.xz mapping to win over the tar.xz one in
    # EXTENSIONS
    for type_, suffixes, format_ in KIWIEXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    # these are our kiwi-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory
    for type_, suffixes, format_ in KIWIEXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    return None, None

406 pungi/phases/live_images.py Normal file
@@ -0,0 +1,406 @@
# -*- coding: utf-8 -*-


# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import os
import sys
import time
import shutil

from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import run, save_to_file, force_list
from productmd.images import Image
from six.moves import shlex_quote

from pungi.wrappers.kojiwrapper import KojiWrapper
from pungi.wrappers import iso
from pungi.phases import base
from pungi.util import makedirs, get_mtime, get_file_size, failable
from pungi.util import get_repo_urls


# HACK: define cmp in python3
if sys.version_info[0] == 3:

    def cmp(a, b):
        return (a > b) - (a < b)


class LiveImagesPhase(
    base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
):
    name = "live_images"

    def __init__(self, compose):
        super(LiveImagesPhase, self).__init__(compose)
        self.pool = ThreadPool(logger=self.logger)

    def _get_repos(self, arch, variant, data):
        repos = []
        if not variant.is_empty:
            repos.append(variant.uid)
        repos.extend(force_list(data.get("repo", [])))
        return get_repo_urls(self.compose, repos, arch=arch)

    def run(self):
        symlink_isos_to = self.compose.conf.get("symlink_isos_to")
        commands = []

        for variant in self.compose.all_variants.values():
            for arch in variant.arches + ["src"]:
                for data in self.get_config_block(variant, arch):
                    subvariant = data.get("subvariant", variant.uid)
                    type = data.get("type", "live")

                    if type == "live":
                        dest_dir = self.compose.paths.compose.iso_dir(
                            arch, variant, symlink_to=symlink_isos_to
                        )
                    elif type == "appliance":
                        dest_dir = self.compose.paths.compose.image_dir(
                            variant, symlink_to=symlink_isos_to
                        )
                        dest_dir = dest_dir % {"arch": arch}
                        makedirs(dest_dir)
                    else:
                        raise RuntimeError("Unknown live image type %s" % type)
                    if not dest_dir:
                        continue

                    cmd = {
                        "name": data.get("name"),
                        "version": self.get_version(data),
                        "release": self.get_release(data),
                        "dest_dir": dest_dir,
                        "build_arch": arch,
                        "ks_file": data["kickstart"],
                        "ksurl": self.get_ksurl(data),
                        # Used for images wrapped in RPM
                        "specfile": data.get("specfile", None),
                        # Scratch (only taken into consideration if specfile
                        # is specified). For images wrapped in rpm, scratch is
                        # disabled by default; for other images scratch is
                        # always on.
                        "scratch": data.get("scratch", False),
                        "sign": False,
                        "type": type,
                        "label": "",  # currently not used
                        "subvariant": subvariant,
                        "failable_arches": data.get("failable", []),
                        # First see if live_target is specified, then fall back
                        # to regular setup of local, phase and global setting.
                        "target": self.compose.conf.get("live_target")
                        or self.get_config(data, "target"),
                    }

                    cmd["repos"] = self._get_repos(arch, variant, data)

                    # Signing of the rpm wrapped image
                    if not cmd["scratch"] and data.get("sign"):
                        cmd["sign"] = True

                    cmd["filename"] = self._get_file_name(
                        arch, variant, cmd["name"], cmd["version"]
                    )

                    commands.append((cmd, variant, arch))

        for cmd, variant, arch in commands:
            self.pool.add(CreateLiveImageThread(self.pool))
            self.pool.queue_put((self.compose, cmd, variant, arch))

        self.pool.start()

    def _get_file_name(self, arch, variant, name=None, version=None):
        if self.compose.conf["live_images_no_rename"]:
            return None

        disc_type = self.compose.conf["disc_types"].get("live", "live")

        format = (
            "%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
        )
        # Custom name (prefix)
        if name:
            custom_iso_name = name
            if version:
                custom_iso_name += "-%s" % version
            format = (
                custom_iso_name
                + "-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
            )

        # XXX: hardcoded disc_num
        return self.compose.get_image_name(
            arch, variant, disc_type=disc_type, disc_num=None, format=format
        )


class CreateLiveImageThread(WorkerThread):
    EXTS = (".iso", ".raw.xz")

    def process(self, item, num):
        compose, cmd, variant, arch = item
        self.failable_arches = cmd.get("failable_arches", [])
        self.can_fail = bool(self.failable_arches)
        with failable(
            compose,
            self.can_fail,
            variant,
            arch,
            "live",
            cmd.get("subvariant"),
            logger=self.pool._logger,
        ):
            self.worker(compose, cmd, variant, arch, num)

    def worker(self, compose, cmd, variant, arch, num):
        self.basename = "%(name)s-%(version)s-%(release)s" % cmd
        log_file = compose.paths.log.log_file(arch, "liveimage-%s" % self.basename)

        subvariant = cmd.pop("subvariant")

        imgname = "%s-%s-%s-%s" % (
            compose.ci_base.release.short,
            subvariant,
            "Live" if cmd["type"] == "live" else "Disk",
            arch,
        )

        msg = "Creating ISO (arch: %s, variant: %s): %s" % (
            arch,
            variant,
            self.basename,
        )
        self.pool.log_info("[BEGIN] %s" % msg)

        koji_wrapper = KojiWrapper(compose)
        _, version = compose.compose_id.rsplit("-", 1)
        name = cmd["name"] or imgname
        version = cmd["version"] or version
        archive = False
        if cmd["specfile"] and not cmd["scratch"]:
            # Non-scratch builds are allowed only for rpm wrapped images
            archive = True
        koji_cmd = koji_wrapper.get_create_image_cmd(
            name,
            version,
            cmd["target"],
            cmd["build_arch"],
            cmd["ks_file"],
            cmd["repos"],
            image_type=cmd["type"],
            wait=True,
            archive=archive,
            specfile=cmd["specfile"],
            release=cmd["release"],
            ksurl=cmd["ksurl"],
        )

        # avoid race conditions?
        # Kerberos authentication failed:
        # Permission denied in replay cache code (-1765328215)
        time.sleep(num * 3)

        output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
        if output["retcode"] != 0:
            raise RuntimeError(
                "LiveImage task failed: %s. See %s for more details."
                % (output["task_id"], log_file)
            )

        # copy finished image to isos/
        image_path = [
            path
            for path in koji_wrapper.get_image_path(output["task_id"])
            if self._is_image(path)
        ]
        if len(image_path) != 1:
            raise RuntimeError(
                "Got %d images from task %d, expected 1."
                % (len(image_path), output["task_id"])
            )
        image_path = compose.koji_downloader.get_file(image_path[0])
        filename = cmd.get("filename") or os.path.basename(image_path)
        destination = os.path.join(cmd["dest_dir"], filename)
        shutil.copy2(image_path, destination)

        # copy finished rpm to isos/ (if rpm wrapped ISO was built)
        if cmd["specfile"]:
            rpm_paths = koji_wrapper.get_wrapped_rpm_path(output["task_id"])

            if cmd["sign"]:
                # Sign the rpm wrapped images and get their paths
                self.pool.log_info(
                    "Signing rpm wrapped images in task_id: %s (expected key ID: %s)"
                    % (output["task_id"], compose.conf.get("signing_key_id"))
                )
                signed_rpm_paths = self._sign_image(
                    koji_wrapper, compose, cmd, output["task_id"]
                )
                if signed_rpm_paths:
                    rpm_paths = signed_rpm_paths

            for rpm_path in rpm_paths:
                shutil.copy2(rpm_path, cmd["dest_dir"])

        if cmd["type"] == "live":
            # ISO manifest only makes sense for live images
            self._write_manifest(destination)

        self._add_to_images(
            compose,
            variant,
            subvariant,
            arch,
            cmd["type"],
            self._get_format(image_path),
            destination,
        )

        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))

    def _add_to_images(self, compose, variant, subvariant, arch, type, format, path):
        """Adds the image to images.json"""
        img = Image(compose.im)
        img.type = "raw-xz" if type == "appliance" else type
        img.format = format
        img.path = os.path.relpath(path, compose.paths.compose.topdir())
        img.mtime = get_mtime(path)
        img.size = get_file_size(path)
        img.arch = arch
        img.disc_number = 1  # We don't expect multiple disks
        img.disc_count = 1
        img.bootable = True
        img.subvariant = subvariant
        setattr(img, "can_fail", self.can_fail)
        setattr(img, "deliverable", "live")
        compose.im.add(variant=variant.uid, arch=arch, image=img)

    def _is_image(self, path):
        for ext in self.EXTS:
            if path.endswith(ext):
                return True
        return False

    def _get_format(self, path):
        """Get format based on extension."""
        for ext in self.EXTS:
            if path.endswith(ext):
                return ext[1:]
        raise RuntimeError("Getting format for unknown image %s" % path)

    def _write_manifest(self, iso_path):
        """Generate manifest for ISO at given path.

        :param iso_path: (str) absolute path to the ISO
        """
        dir, filename = os.path.split(iso_path)
        run("cd %s && %s" % (shlex_quote(dir), iso.get_manifest_cmd(filename)))

    def _sign_image(self, koji_wrapper, compose, cmd, koji_task_id):
        signing_key_id = compose.conf.get("signing_key_id")
        signing_command = compose.conf.get("signing_command")

        if not signing_key_id:
            self.pool.log_warning(
                "Signing is enabled but signing_key_id is not specified"
            )
            self.pool.log_warning("Signing skipped")
            return None
        if not signing_command:
            self.pool.log_warning(
                "Signing is enabled but signing_command is not specified"
            )
            self.pool.log_warning("Signing skipped")
            return None

        # Prepare signing log file
        signing_log_file = compose.paths.log.log_file(
            cmd["build_arch"], "live_images-signing-%s" % self.basename
        )

        # Sign the rpm wrapped images
        try:
            sign_builds_in_task(
                koji_wrapper,
                koji_task_id,
                signing_command,
                log_file=signing_log_file,
                signing_key_password=compose.conf.get("signing_key_password"),
            )
        except RuntimeError:
            self.pool.log_error(
                "Error while signing rpm wrapped images. See log: %s" % signing_log_file
            )
            raise

        # Get paths to the signed rpms
        signing_key_id = signing_key_id.lower()  # Koji uses lowercase in paths
        rpm_paths = koji_wrapper.get_signed_wrapped_rpms_paths(
            koji_task_id, signing_key_id
        )

        # Wait until files are available
        if wait_paths(rpm_paths, 60 * 15):
            # Files are ready
            return rpm_paths

        # Signed RPMs are not available
        self.pool.log_warning("Signed files are not available: %s" % rpm_paths)
        self.pool.log_warning("Unsigned files will be used")
        return None


def wait_paths(paths, timeout=60):
    started = time.time()
    remaining = paths[:]
    while True:
        for path in remaining[:]:
            if os.path.exists(path):
                remaining.remove(path)
        if not remaining:
            break
        time.sleep(1)
        if timeout >= 0 and (time.time() - started) > timeout:
            return False
    return True


def sign_builds_in_task(
    koji_wrapper, task_id, signing_command, log_file=None, signing_key_password=None
):
    # Get list of nvrs that should be signed
    nvrs = koji_wrapper.get_build_nvrs(task_id)
    if not nvrs:
        # No builds are available (scratch build, etc.?)
        return

    # Append builds to sign_cmd
    for nvr in nvrs:
        signing_command += " '%s'" % nvr

    # Log signing command before password is filled in it
    if log_file:
        save_to_file(log_file, signing_command, append=True)

    # Fill password into the signing command
    if signing_key_password:
        signing_command = signing_command % {
            "signing_key_password": signing_key_password
        }

    # Sign the builds
    run(signing_command, can_fail=False, show_cmd=False, logfile=log_file)
@@ -9,9 +9,8 @@ from pungi.util import translate_path, get_repo_urls
from pungi.phases.base import ConfigGuardedPhase, ImageConfigMixin, PhaseLoggerMixin
from pungi.linker import Linker
from pungi.wrappers.kojiwrapper import KojiWrapper
from kobo.threads import ThreadPool
from kobo.threads import ThreadPool, WorkerThread
from productmd.images import Image
from pungi.threading import TelemetryWorkerThread as WorkerThread


class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):

@@ -1,19 +1,18 @@
# -*- coding: utf-8 -*-

import configparser
import copy
import fnmatch
import json
import os
from kobo.threads import ThreadPool
from kobo.threads import ThreadPool, WorkerThread
from kobo import shortcuts
from productmd.rpms import Rpms
from six.moves import configparser

from .base import ConfigGuardedPhase, PhaseLoggerMixin
from .. import util
from ..wrappers import kojiwrapper
from ..wrappers.scm import get_file_from_scm
from ..threading import TelemetryWorkerThread as WorkerThread


class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool
from kobo.threads import ThreadPool, WorkerThread
from kobo import shortcuts
from productmd.images import Image

@@ -10,22 +10,6 @@ from .. import util
from ..linker import Linker
from ..wrappers import kojiwrapper
from .image_build import EXTENSIONS
from ..threading import TelemetryWorkerThread as WorkerThread

# copy and modify EXTENSIONS with some that osbuild produces but which
# do not exist as `koji image-build` formats
OSBUILDEXTENSIONS = EXTENSIONS.copy()
OSBUILDEXTENSIONS.update(
    # The key is the type_name as used in Koji archive, the second is a list of
    # expected file extensions.
    {
        "iso": ["iso"],
        "vhd-compressed": ["vhd.gz", "vhd.xz"],
        # The image is technically wsl2, but the type_name in Koji is set to
        # wsl.
        "wsl": ["wsl"],
    }
)


class OSBuildPhase(
@@ -219,7 +203,7 @@ class RunOSBuildThread(WorkerThread):
        # architecture, but we don't verify that.
        build_info = koji.koji_proxy.getBuild(build_id)
        for archive in koji.koji_proxy.listArchives(buildID=build_id):
            if archive["type_name"] not in OSBUILDEXTENSIONS:
            if archive["type_name"] not in EXTENSIONS:
                # Ignore values that are not of required types.
                continue

@@ -257,7 +241,7 @@ class RunOSBuildThread(WorkerThread):

            linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

            for suffix in OSBUILDEXTENSIONS[archive["type_name"]]:
            for suffix in EXTENSIONS[archive["type_name"]]:
                if archive["filename"].endswith(suffix):
                    break
            else:
@@ -274,13 +258,7 @@ class RunOSBuildThread(WorkerThread):
            # determine the manifest type based on the koji output
            img.type = config.get("manifest_type")
            if not img.type:
                if archive["type_name"] == "wsl":
                    # productmd only knows wsl2 as type, so let's translate
                    # from the koji type so that users don't need to set the
                    # type explicitly. There really is no other possible type
                    # here anyway.
                    img.type = "wsl2"
                elif archive["type_name"] != "iso":
                if archive["type_name"] != "iso":
                    img.type = archive["type_name"]
                else:
                    fn = archive["filename"].lower()

@@ -4,7 +4,7 @@ import copy
import json
import os
from kobo import shortcuts
from kobo.threads import ThreadPool
from kobo.threads import ThreadPool, WorkerThread
from collections import OrderedDict

from pungi.arch_utils import getBaseArch
@@ -14,7 +14,6 @@ from .. import util
from ..ostree.utils import get_ref_from_treefile, get_commitid_from_commitid_file
from ..util import get_repo_dicts, translate_path
from ..wrappers import scm
from ..threading import TelemetryWorkerThread as WorkerThread


class OSTreePhase(ConfigGuardedPhase):

@@ -119,12 +119,14 @@ class OSTreeContainerThread(WorkerThread):
    def _run_ostree_container_cmd(
        self, compose, variant, arch, config, config_repo, extra_config_file=None
    ):
        subvariant = config.get("subvariant", variant.uid)
        target_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
        util.makedirs(target_dir)
        version = util.version_generator(compose, config.get("version"))
        anb = config.get("name", "%s-%s" % (compose.conf["release_short"], subvariant))
        archive_name = "%s-%s-%s" % (anb, arch, version)
        archive_name = "%s-%s-%s" % (
            compose.conf["release_short"],
            variant.uid,
            version,
        )

        # Run the pungi-make-ostree command locally to create a script to
        # execute in runroot environment.
@@ -139,7 +141,7 @@ class OSTreeContainerThread(WorkerThread):
            "--version=%s" % version,
        ]

        _, runroot_script = shortcuts.run(cmd, text=True, errors="replace")
        _, runroot_script = shortcuts.run(cmd, universal_newlines=True)

        default_packages = ["ostree", "rpm-ostree", "selinux-policy-targeted"]
        additional_packages = config.get("runroot_packages", [])
@@ -164,9 +166,9 @@ class OSTreeContainerThread(WorkerThread):
        # Update image manifest
        img = Image(compose.im)

        # these are hardcoded as they should always be correct, we
        # could potentially allow overriding them via config though
        img.type = "bootable-container"
        # Get the manifest type from the config if supplied, otherwise we
        # determine the manifest type based on the koji output
        img.type = "ociarchive"
        img.format = "ociarchive"
        img.path = os.path.relpath(fullpath, compose.paths.compose.topdir())
        img.mtime = util.get_mtime(fullpath)
@@ -175,7 +177,7 @@ class OSTreeContainerThread(WorkerThread):
        img.disc_number = 1
        img.disc_count = 1
        img.bootable = False
        img.subvariant = subvariant
        img.subvariant = config.get("subvariant", variant.uid)
        setattr(img, "can_fail", self.can_fail)
        setattr(img, "deliverable", "ostree-container")
        compose.im.add(variant=variant.uid, arch=arch, image=img)

@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool
import shlex
from kobo.threads import ThreadPool, WorkerThread
import shutil
from productmd import images
from six.moves import shlex_quote
from kobo import shortcuts

from .base import ConfigGuardedPhase, PhaseLoggerMixin
@@ -20,7 +20,6 @@ from ..util import (
)
from ..wrappers import iso, lorax, scm
from ..runroot import Runroot
from ..threading import TelemetryWorkerThread as WorkerThread


class OstreeInstallerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
@@ -276,8 +275,8 @@ class OstreeInstallerThread(WorkerThread):
            skip_branding=config.get("skip_branding"),
        )
        cmd = "rm -rf %s && %s" % (
            shlex.quote(output_dir),
            " ".join([shlex.quote(x) for x in lorax_cmd]),
            shlex_quote(output_dir),
            " ".join([shlex_quote(x) for x in lorax_cmd]),
        )

        runroot.run(

@@ -22,10 +22,8 @@ It automatically finds a signed copies according to *sigkey_ordering*.
import itertools
import json
import os
import pickle
import time
import pgpy
import rpm
from six.moves import cPickle as pickle
from functools import partial

import kobo.log
@@ -33,12 +31,11 @@ import kobo.pkgset
import kobo.rpmlib
from kobo.shortcuts import compute_file_checksums

from kobo.threads import ThreadPool
from kobo.threads import WorkerThread, ThreadPool

from pungi.util import pkg_is_srpm, copy_all
from pungi.arch import get_valid_arches, is_excluded
from pungi.errors import UnsignedPackagesError
from pungi.threading import TelemetryWorkerThread as WorkerThread


class ExtendedRpmWrapper(kobo.pkgset.SimpleRpmWrapper):
@@ -155,15 +152,9 @@ class PackageSetBase(kobo.log.LoggingBase):
        """

        def nvr_formatter(package_info):
            epoch_suffix = ''
            if package_info['epoch'] is not None:
                epoch_suffix = ':' + package_info['epoch']
            return (
                f"{package_info['name']}"
                f"{epoch_suffix}-"
                f"{package_info['version']}-"
                f"{package_info['release']}."
                f"{package_info['arch']}"
            # joins NVR parts of the package with '-' character.
            return "-".join(
                (package_info["name"], package_info["version"], package_info["release"])
            )

        def get_error(sigkeys, infos):
@@ -274,7 +265,7 @@ class PackageSetBase(kobo.log.LoggingBase):
        for arch in arch_list:
            self.rpms_by_arch.setdefault(arch, [])
            for i in other.rpms_by_arch.get(arch, []):
                if i.file_path in self.file_cache.file_cache:
                if i.file_path in self.file_cache:
                    # TODO: test if it really works
                    continue
                if inherit_to_noarch and exclusivearch_list and arch == "noarch":
@@ -512,8 +503,7 @@ class KojiPackageSet(PackageSetBase):

        response = None
        if self.cache_region:
            cache_key = "%s.get_latest_rpms_%s_%s_%s" % (
                str(self.__class__.__name__),
            cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (
                str(tag),
                str(event),
                str(inherit),
@@ -535,8 +525,6 @@ class KojiPackageSet(PackageSetBase):

        return response



    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item

@@ -548,14 +536,22 @@ class KojiPackageSet(PackageSetBase):
        pathinfo = self.koji_wrapper.koji_module.pathinfo
        paths = []

        def checksum_validator(keyname, pkg_path):
            checksums = self.koji_proxy.getRPMChecksums(
                rpm_info["id"], checksum_types=("sha256",)
            )
            if "sha256" in checksums.get(keyname, {}):
                computed = compute_file_checksums(pkg_path, ("sha256",))
                if computed["sha256"] != checksums[keyname]["sha256"]:
                    raise RuntimeError("Checksum mismatch for %s" % pkg_path)
        if "getRPMChecksums" in self.koji_proxy.system.listMethods():

            def checksum_validator(keyname, pkg_path):
                checksums = self.koji_proxy.getRPMChecksums(
                    rpm_info["id"], checksum_types=("sha256",)
                )
                if "sha256" in checksums.get(keyname, {}):
                    computed = compute_file_checksums(pkg_path, ("sha256",))
                    if computed["sha256"] != checksums[keyname]["sha256"]:
                        raise RuntimeError("Checksum mismatch for %s" % pkg_path)

        else:

            def checksum_validator(keyname, pkg_path):
                # Koji doesn't support checksums yet
                pass

        attempts_left = self.signed_packages_retries + 1
        while attempts_left > 0:
@@ -889,67 +885,6 @@ class KojiPackageSet(PackageSetBase):
        return False


class KojiMockPackageSet(KojiPackageSet):

    def _is_rpm_signed(self, rpm_path) -> bool:
        ts = rpm.TransactionSet()
        ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
        sigkeys = [
            sigkey.lower() for sigkey in self.sigkey_ordering
            if sigkey is not None
        ]
        if not sigkeys:
            return True
        with open(rpm_path, 'rb') as fd:
            header = ts.hdrFromFdno(fd)
            signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
        if signature is None:
            return False
        pgp_msg = pgpy.PGPMessage.from_blob(signature)
        return any(
            signature.signer.lower() in sigkeys
            for signature in pgp_msg.signatures
        )

    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item

        # Check if this RPM is coming from scratch task.
        # In this case, we already know the path.
        if "path_from_task" in rpm_info:
            return rpm_info["path_from_task"]

        # we replaced this part because pungi uses way
        # of guessing path of package on koji based on sigkey
        # we don't need that because all our packages will
        # be ready for release
        # signature verification is still done during deps resolution
        pathinfo = self.koji_wrapper.koji_module.pathinfo

        rpm_path = os.path.join(pathinfo.topdir, pathinfo.rpm(rpm_info))
        if os.path.isfile(rpm_path):
            if not self._is_rpm_signed(rpm_path):
                self._invalid_sigkey_rpms.append(rpm_info)
                self.log_error(
                    'RPM "%s" not found for sigs: "%s". Path checked: "%s"',
                    rpm_info, self.sigkey_ordering, rpm_path
                )
                return
            return rpm_path
        else:
            self.log_warning("RPM %s not found" % rpm_path)
            return None

    def populate(self, tag, event=None, inherit=True, include_packages=None):
        result = super().populate(
            tag=tag,
            event=event,
            inherit=inherit,
            include_packages=include_packages,
        )
        return result


def _is_src(rpm_info):
    """Check if rpm info object returned by Koji refers to source packages."""
    return rpm_info["arch"] in ("src", "nosrc")

@@ -15,10 +15,8 @@

from .source_koji import PkgsetSourceKoji
from .source_repos import PkgsetSourceRepos
from .source_kojimock import PkgsetSourceKojiMock

ALL_SOURCES = {
    "koji": PkgsetSourceKoji,
    "repos": PkgsetSourceRepos,
    "kojimock": PkgsetSourceKojiMock,
}

File diff suppressed because it is too large
@@ -15,6 +15,7 @@


import os
import shutil

from kobo.shortcuts import run

@@ -75,6 +76,7 @@ def get_pkgset_from_repos(compose):
    pungi_dir = compose.paths.work.pungi_download_dir(arch)

    backends = {
        "yum": pungi.get_pungi_cmd,
        "dnf": pungi.get_pungi_cmd_dnf,
    }
    get_cmd = backends[compose.conf["gather_backend"]]
@@ -91,6 +93,8 @@ def get_pkgset_from_repos(compose):
        cache_dir=compose.paths.work.pungi_cache_dir(arch=arch),
        profiler=profiler,
    )
    if compose.conf["gather_backend"] == "yum":
        cmd.append("--force")

    # TODO: runroot
    run(cmd, logfile=pungi_log, show_cmd=True, stdout=False)
@@ -107,6 +111,17 @@ def get_pkgset_from_repos(compose):
        flist.append(dst)
        pool.queue_put((src, dst))

    # Clean up tmp dir
    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
    yumroot_dir = os.path.join(pungi_dir, "work", arch, "yumroot")
    if os.path.isdir(yumroot_dir):
        try:
            shutil.rmtree(yumroot_dir)
        except Exception as e:
            compose.log_warning(
                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
            )

    msg = "Linking downloaded pkgset packages"
    compose.log_info("[BEGIN] %s" % msg)
    pool.start()

@@ -101,48 +101,27 @@ def run_repoclosure(compose):


def _delete_repoclosure_cache_dirs(compose):
    """Find any cached repodata and delete it. The cache is not going to be
    reused ever again, and would otherwise consume storage space.

    DNF will use a different directory depending on whether it is running as
    root or not. It is not easy to tell though if DNF 4 or 5 is being used, so
    let's be sure and check both locations. All our cached entries are prefixed
    by compose ID, so there's very limited amount of risk that we would delete
    something incorrect.
    """
    cache_dirs = []

    try:
        # DNF 4
    if "dnf" == compose.conf["repoclosure_backend"]:
        from dnf.const import SYSTEM_CACHEDIR
        from dnf.util import am_i_root
        from dnf.yum.misc import getCacheDir

        if am_i_root():
            cache_dirs.append(SYSTEM_CACHEDIR)
            top_cache_dir = SYSTEM_CACHEDIR
        else:
            cache_dirs.append(getCacheDir())
    except ImportError:
        pass
            top_cache_dir = getCacheDir()
    else:
        from yum.misc import getCacheDir

    try:
        # DNF 5 config works directly for root, no need for special case.
        import libdnf5
        top_cache_dir = getCacheDir()

        base = libdnf5.base.Base()
        config = base.get_config()
        cache_dirs.append(config.cachedir)
    except ImportError:
        pass

    for top_cache_dir in cache_dirs:
        for name in os.listdir(top_cache_dir):
            if name.startswith(compose.compose_id):
                cache_path = os.path.join(top_cache_dir, name)
                if os.path.isdir(cache_path):
                    shutil.rmtree(cache_path)
                else:
                    os.remove(cache_path)
    for name in os.listdir(top_cache_dir):
        if name.startswith(compose.compose_id):
            cache_path = os.path.join(top_cache_dir, name)
            if os.path.isdir(cache_path):
                shutil.rmtree(cache_path)
            else:
                os.remove(cache_path)


def _run_repoclosure_cmd(compose, repos, lookaside, arches, logfile):

@@ -95,7 +95,7 @@ def is_iso(f):


def has_mbr(f):
    return _check_magic(f, 0x1FE, b"\x55\xaa")
    return _check_magic(f, 0x1FE, b"\x55\xAA")


def has_gpt(f):

@@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-

from kobo import shortcuts
from kobo.threads import ThreadPool

from pungi.threading import TelemetryWorkerThread as WorkerThread
from kobo.threads import ThreadPool, WorkerThread


class WeaverPhase(object):

@@ -16,11 +16,12 @@
import contextlib
import os
import re
import shlex
import shutil
import tarfile

import requests
import six
from six.moves import shlex_quote
import kobo.log
from kobo.shortcuts import run

@@ -99,7 +100,7 @@ class Runroot(kobo.log.LoggingBase):
        log_file = os.path.join(log_dir, "program.log")
        try:
            with open(log_file) as f:
                for line in f.readlines():
                for line in f:
                    if "losetup: cannot find an unused loop device" in line:
                        return True
                    if re.match("losetup: .* failed to set up loop device", line):
@@ -156,7 +157,7 @@ class Runroot(kobo.log.LoggingBase):
        formatted_cmd = command.format(**fmt_dict) if fmt_dict else command
        ssh_cmd = ["ssh", "-oBatchMode=yes", "-n", "-l", user, hostname, formatted_cmd]
        output = run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
        if isinstance(output, bytes):
        if six.PY3 and isinstance(output, bytes):
            return output.decode()
        else:
            return output
@@ -183,7 +184,7 @@ class Runroot(kobo.log.LoggingBase):
        # If the output dir is defined, change the permissions of files generated
        # by the runroot task, so the Pungi user can access them.
        if chown_paths:
            paths = " ".join(shlex.quote(pth) for pth in chown_paths)
            paths = " ".join(shlex_quote(pth) for pth in chown_paths)
            command += " ; EXIT_CODE=$?"
            # Make the files world readable
            command += " ; chmod -R a+r %s" % paths
@@ -454,9 +455,6 @@ def download_and_extract_archive(compose, task_id, fname, destination):
    # So instead let's generate a path and attempt to convert it to a URL.
    server_path = os.path.join(koji.pathinfo.task(task_id), fname)
    archive_url = server_path.replace(koji.config.topdir, koji.config.topurl)
    tmp_dir = compose.mkdtemp(prefix="buildinstall-download")
    try:
    with util.temp_dir(prefix="buildinstall-download") as tmp_dir:
        local_path = _download_archive(task_id, fname, archive_url, tmp_dir)
        _extract_archive(task_id, fname, local_path, destination)
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)

@@ -4,12 +4,13 @@ from __future__ import absolute_import
from __future__ import print_function

import argparse
import configparser
import json
import os
import shutil
import sys

from six.moves import configparser

import kobo.conf
import pungi.checks
import pungi.util

@@ -8,6 +8,8 @@ import json
import os
import sys

import six

import pungi.checks
import pungi.compose
import pungi.paths
@@ -54,7 +56,7 @@ class ValidationCompose(pungi.compose.Compose):
def read_variants(compose, config):
    with pungi.util.temp_dir() as tmp_dir:
        scm_dict = compose.conf["variants_file"]
        if isinstance(scm_dict, str) and scm_dict[0] != "/":
        if isinstance(scm_dict, six.string_types) and scm_dict[0] != "/":
            config_dir = os.path.dirname(config)
            scm_dict = os.path.join(config_dir, scm_dict)
        files = pungi.wrappers.scm.get_file_from_scm(scm_dict, tmp_dir)
@@ -126,6 +128,7 @@ def run(config, topdir, has_old, offline, defined_variables, schema_overrides):
        pungi.phases.OSTreePhase(compose),
        pungi.phases.CreateisoPhase(compose, buildinstall_phase),
        pungi.phases.ExtraIsosPhase(compose, buildinstall_phase),
        pungi.phases.LiveImagesPhase(compose),
        pungi.phases.LiveMediaPhase(compose),
        pungi.phases.ImageBuildPhase(compose),
        pungi.phases.ImageChecksumPhase(compose),

@@ -1,441 +0,0 @@
# coding=utf-8

import argparse
import os
import subprocess
import tempfile
from shutil import rmtree
from typing import (
    AnyStr,
    List,
    Dict,
    Optional,
)

import createrepo_c as cr
import requests
import yaml
from dataclasses import dataclass, field

from .create_packages_json import (
    PackagesGenerator,
    RepoInfo,
    VariantInfo,
)


@dataclass
class ExtraVariantInfo(VariantInfo):

    modules: List[AnyStr] = field(default_factory=list)
    packages: List[AnyStr] = field(default_factory=list)


class CreateExtraRepo(PackagesGenerator):

    def __init__(
        self,
        variants: List[ExtraVariantInfo],
        bs_auth_token: AnyStr,
        local_repository_path: AnyStr,
        clear_target_repo: bool = True,
    ):
        self.variants = []  # type: List[ExtraVariantInfo]
        super().__init__(variants, [], [])
        self.auth_headers = {
            'Authorization': f'Bearer {bs_auth_token}',
        }
        # modules data of modules.yaml.gz from an existing local repo
        self.local_modules_data = []
        self.local_repository_path = local_repository_path
        # path to modules.yaml, which is generated by this class
        self.default_modules_yaml_path = os.path.join(
            local_repository_path,
            'modules.yaml',
        )
        if clear_target_repo:
            if os.path.exists(self.local_repository_path):
                rmtree(self.local_repository_path)
            os.makedirs(self.local_repository_path, exist_ok=True)
        else:
            self._read_local_modules_yaml()

    def _read_local_modules_yaml(self):
        """
        Read modules data from an existing local repo
        """
        repomd_file_path = os.path.join(
            self.local_repository_path,
            'repodata',
            'repomd.xml',
        )
        repomd_object = self._parse_repomd(repomd_file_path)
        for repomd_record in repomd_object.records:
            if repomd_record.type != 'modules':
                continue
            modules_yaml_path = os.path.join(
                self.local_repository_path,
                repomd_record.location_href,
            )
            self.local_modules_data = list(self._parse_modules_file(
                modules_yaml_path,
            ))
            break

    def _dump_local_modules_yaml(self):
        """
        Dump the merged modules data to a local repo
        """
        if self.local_modules_data:
            with open(self.default_modules_yaml_path, 'w') as yaml_file:
                yaml.dump_all(
                    self.local_modules_data,
                    yaml_file,
                )

    @staticmethod
    def get_repo_info_from_bs_repo(
        auth_token: AnyStr,
        build_id: AnyStr,
        arch: AnyStr,
        packages: Optional[List[AnyStr]] = None,
        modules: Optional[List[AnyStr]] = None,
    ) -> List[ExtraVariantInfo]:
        """
        Get info about a BS repo and save it to
        an object of class ExtraVariantInfo
        :param auth_token: auth token for the Build System
        :param build_id: ID of a build from the BS
        :param arch: architecture of the repo which will be used
        :param packages: list of names of packages which will be put into a
               local repo from a BS repo
        :param modules: list of names of modules which will be put into a
               local repo from a BS repo
        :return: list of ExtraVariantInfo with info about the BS repos
        """

        bs_url = 'https://build.cloudlinux.com'
        api_uri = 'api/v1'
        bs_repo_suffix = 'build_repos'

        variants_info = []

        # get the full info about a BS repo
        repo_request = requests.get(
            url=os.path.join(
                bs_url,
                api_uri,
                'builds',
                build_id,
            ),
            headers={
                'Authorization': f'Bearer {auth_token}',
            },
        )
        repo_request.raise_for_status()
        result = repo_request.json()
        for build_platform in result['build_platforms']:
            platform_name = build_platform['name']
            for architecture in build_platform['architectures']:
                # skip repos with an unsuitable architecture
                if architecture != arch:
                    continue
                variant_info = ExtraVariantInfo(
                    name=f'{build_id}-{platform_name}-{architecture}',
                    arch=architecture,
                    packages=packages,
                    modules=modules,
                    repos=[
                        RepoInfo(
                            path=os.path.join(
                                bs_url,
                                bs_repo_suffix,
                                build_id,
                                platform_name,
                            ),
                            folder=architecture,
                            is_remote=True,
                        )
                    ]
                )
                variants_info.append(variant_info)
        return variants_info

    def _create_local_extra_repo(self):
        """
        Call `createrepo_c <path_to_repo>` to create a local repo
        """
        subprocess.call(
            f'createrepo_c {self.local_repository_path}',
            shell=True,
        )
        # remove the unnecessary temporary modules.yaml
        if os.path.exists(self.default_modules_yaml_path):
            os.remove(self.default_modules_yaml_path)

    def get_remote_file_content(
            self,
            file_url: AnyStr,
    ) -> AnyStr:
        """
        Get content from a remote file and write it to a temp file
        :param file_url: url of a remote file
        :return: path to a temp file
        """

        file_request = requests.get(
            url=file_url,
            # for the case when we get a file from the BS
            headers=self.auth_headers,
        )
        file_request.raise_for_status()
        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
            file_stream.write(file_request.content)
            return file_stream.name

    def _download_rpm_to_local_repo(
            self,
            package_location: AnyStr,
            repo_info: RepoInfo,
    ) -> None:
        """
        Download an rpm package from a remote repo and save it to a local repo
        :param package_location: relative uri of a package in a remote repo
        :param repo_info: info about the remote repo which contains a specific
               rpm package
        """
        rpm_package_remote_path = os.path.join(
            repo_info.path,
            repo_info.folder,
            package_location,
        )
        rpm_package_local_path = os.path.join(
            self.local_repository_path,
            os.path.basename(package_location),
        )
        rpm_request = requests.get(
            url=rpm_package_remote_path,
            headers=self.auth_headers,
        )
        rpm_request.raise_for_status()
        with open(rpm_package_local_path, 'wb') as rpm_file:
            rpm_file.write(rpm_request.content)

    def _download_packages(
            self,
            packages: Dict[AnyStr, cr.Package],
            variant_info: ExtraVariantInfo
    ):
        """
        Download all defined packages from a remote repo
        :param packages: information about all packages (including
               modularity) in a remote repo
        :param variant_info: information about a remote variant
        """
        for package in packages.values():
            package_name = package.name
            # Skip the current package from a remote repo if we defined
            # the list of packages and the current package doesn't belong to it
            if variant_info.packages and \
                    package_name not in variant_info.packages:
                continue
            for repo_info in variant_info.repos:
                self._download_rpm_to_local_repo(
                    package_location=package.location_href,
                    repo_info=repo_info,
                )

    def _download_modules(
            self,
            modules_data: List[Dict],
            variant_info: ExtraVariantInfo,
            packages: Dict[AnyStr, cr.Package]
    ):
        """
        Download all defined modularity packages and their data from
        a remote repo
        :param modules_data: information about all modules in a remote repo
        :param variant_info: information about a remote variant
        :param packages: information about all packages (including
               modularity) in a remote repo
        """
        for module in modules_data:
            module_data = module['data']
            # Skip the current module from a remote repo if we defined
            # the list of modules and the current module doesn't belong to it
            if variant_info.modules and \
                    module_data['name'] not in variant_info.modules:
                continue
            # we should add info about a module if the local repodata
            # doesn't have it
            if module not in self.local_modules_data:
                self.local_modules_data.append(module)
            # just skip a module's record if it doesn't have rpm artifacts
            if module['document'] != 'modulemd' or \
                    'artifacts' not in module_data or \
                    'rpms' not in module_data['artifacts']:
                continue
            for rpm in module['data']['artifacts']['rpms']:
                # Empty variant_info.packages means that we will download
                # all packages from the repo, including
                # the modularity packages
                if not variant_info.packages:
                    break
                # skip an rpm if it doesn't belong to a processed repo
                if rpm not in packages:
                    continue
                for repo_info in variant_info.repos:
                    self._download_rpm_to_local_repo(
                        package_location=packages[rpm].location_href,
                        repo_info=repo_info,
                    )

    def create_extra_repo(self):
        """
        1. Get the specific (or all) packages/modules from the remote repos
        2. Save them to a local repo
        3. Save info about the modules to the local repo
        4. Call `createrepo_c`, which creates a local repo
           with the right repodata
        """
        for variant_info in self.variants:
            for repo_info in variant_info.repos:
                repomd_records = self._get_repomd_records(
                    repo_info=repo_info,
                )
                packages_iterator = self.get_packages_iterator(repo_info)
                # parse the repodata (including modules.yaml.gz)
                modules_data = self._parse_module_repomd_record(
                    repo_info=repo_info,
                    repomd_records=repomd_records,
                )
                # convert the packages dict to a more usable form
                # for future checking that an rpm from the module's artifacts
                # belongs to a processed repository
                packages = {
                    f'{package.name}-{package.epoch}:{package.version}-'
                    f'{package.release}.{package.arch}':
                        package for package in packages_iterator
                }
                self._download_modules(
                    modules_data=modules_data,
                    variant_info=variant_info,
                    packages=packages,
                )
                self._download_packages(
                    packages=packages,
                    variant_info=variant_info,
                )

        self._dump_local_modules_yaml()
        self._create_local_extra_repo()


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--bs-auth-token',
        help='Auth token for the Build System',
    )
    parser.add_argument(
        '--local-repo-path',
        help='Path to a local repo. E.g. /var/repo/test_repo',
        required=True,
    )
    parser.add_argument(
        '--clear-local-repo',
        help='Clear the local repo before creating a new one',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--repo',
        action='append',
        help='Path to a folder with repofolders or a build id. E.g. '
             '"http://koji.cloudlinux.com/mirrors/rhel_mirror" or '
             '"601809b3c2f5b0e458b14cd3"',
        required=True,
    )
    parser.add_argument(
        '--repo-folder',
        action='append',
        help='A folder which contains the folder repodata. E.g. "baseos-stream"',
        required=True,
    )
    parser.add_argument(
        '--repo-arch',
        action='append',
        help='What architecture of packages a repository contains. E.g. "x86_64"',
        required=True,
    )
    parser.add_argument(
        '--packages',
        action='append',
        type=str,
        default=[],
        help='A list of package names which we want to download to the local '
             'extra repo. We will download all packages if the param is empty',
        required=True,
    )
    parser.add_argument(
        '--modules',
        action='append',
        type=str,
        default=[],
        help='A list of module names which we want to download to the local '
             'extra repo. We will download all modules if the param is empty',
        required=True,
    )

    return parser


def cli_main():
    args = create_parser().parse_args()
    repos_info = []
    for repo, repo_folder, repo_arch, packages, modules in zip(
            args.repo,
            args.repo_folder,
            args.repo_arch,
            args.packages,
            args.modules,
    ):
        modules = modules.split()
        packages = packages.split()
        if repo.startswith('http://'):
            repos_info.append(
                ExtraVariantInfo(
                    name=repo_folder,
                    arch=repo_arch,
                    repos=[
                        RepoInfo(
                            path=repo,
                            folder=repo_folder,
                            is_remote=True,
                        )
                    ],
                    modules=modules,
                    packages=packages,
                )
            )
        else:
            repos_info.extend(
                CreateExtraRepo.get_repo_info_from_bs_repo(
                    auth_token=args.bs_auth_token,
                    build_id=repo,
                    arch=repo_arch,
                    modules=modules,
                    packages=packages,
                )
            )
    cer = CreateExtraRepo(
        variants=repos_info,
        bs_auth_token=args.bs_auth_token,
        local_repository_path=args.local_repo_path,
        clear_target_repo=args.clear_local_repo,
    )
    cer.create_extra_repo()


if __name__ == '__main__':
    cli_main()

@@ -1,514 +0,0 @@
# coding=utf-8
"""
The tool allows generating packages.json. This file is used by pungi
as the parameter `gather_prepopulate`.
A sample of parsing repodata files is taken from
https://github.com/rpm-software-management/createrepo_c/blob/master/examples/python/repodata_parsing.py
"""

import argparse
import gzip
import json
import logging
import lzma
import os
import re
import tempfile
from collections import defaultdict
from itertools import tee
from pathlib import Path
from typing import (
    AnyStr,
    Dict,
    List,
    Any,
    Iterator,
    Optional,
    Tuple,
    Union,
)

import binascii
from urllib.parse import urljoin

import requests
import rpm
import yaml
from createrepo_c import (
    Package,
    PackageIterator,
    Repomd,
    RepomdRecord,
)
from dataclasses import dataclass, field
from kobo.rpmlib import parse_nvra

logging.basicConfig(level=logging.INFO)


def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
    return binascii.hexlify(first_two_bytes) == initial_bytes


def is_gzip_file(first_two_bytes):
    return _is_compressed_file(
        first_two_bytes=first_two_bytes,
        initial_bytes=b'1f8b',
    )


def is_xz_file(first_two_bytes):
    return _is_compressed_file(
        first_two_bytes=first_two_bytes,
        initial_bytes=b'fd37',
    )


@dataclass
class RepoInfo:
    # path to a directory with repo directories. E.g. '/var/repos' contains
    # 'appstream', 'baseos', etc.
    # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
    # using a remote repo
    path: str
    # name of the folder with a repodata folder. E.g. 'baseos', 'appstream', etc.
    folder: str
    # whether the repo is remote or local
    is_remote: bool
    # whether this is a reference repository (usually a RHEL repo).
    # The layout of packages from such a repository will be taken as an example.
    # Only the layout of a specific package (which doesn't exist
    # in a reference repository) will be taken as an example
    is_reference: bool = False
    # The packages from a 'present' repo will be added to a variant.
    # The packages from an 'absent' repo will be removed from a variant.
    repo_type: str = 'present'


@dataclass
class VariantInfo:
    # name of the variant. E.g. 'BaseOS', 'AppStream', etc.
    name: AnyStr
    # architecture of the variant. E.g. 'x86_64', 'i686', etc.
    arch: AnyStr
    # the packages which will not be added to the variant
    excluded_packages: List[str] = field(default_factory=list)
    # repos of the variant
    repos: List[RepoInfo] = field(default_factory=list)


class PackagesGenerator:

    repo_arches = defaultdict(lambda: list(('noarch',)))
    addon_repos = {
        'x86_64': ['i686'],
        'ppc64le': [],
        'aarch64': [],
        's390x': [],
        'i686': [],
    }

    def __init__(
            self,
            variants: List[VariantInfo],
            excluded_packages: List[AnyStr],
            included_packages: List[AnyStr],
    ):
        self.variants = variants
        self.pkgs = dict()
        self.excluded_packages = excluded_packages
        self.included_packages = included_packages
        self.tmp_files = []  # type: list[Path]
        for arch, arch_list in self.addon_repos.items():
            self.repo_arches[arch].extend(arch_list)
            self.repo_arches[arch].append(arch)

    def __del__(self):
        for tmp_file in self.tmp_files:
            if tmp_file.exists():
                tmp_file.unlink()

    @staticmethod
    def _get_full_repo_path(repo_info: RepoInfo):
        result = os.path.join(
            repo_info.path,
            repo_info.folder
        )
        if repo_info.is_remote:
            result = urljoin(
                repo_info.path + '/',
                repo_info.folder,
            )
        return result

    @staticmethod
    def _warning_callback(warning_type, message):
        """
        Warning callback for createrepo_c parsing functions
        """
        print(f'Warning message: "{message}"; warning type: "{warning_type}"')
        return True

    def get_remote_file_content(self, file_url: AnyStr) -> AnyStr:
        """
        Get content from a remote file and write it to a temp file
        :param file_url: url of a remote file
        :return: path to a temp file
        """

        file_request = requests.get(
            url=file_url,
        )
        file_request.raise_for_status()
        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
            file_stream.write(file_request.content)
            self.tmp_files.append(Path(file_stream.name))
            return file_stream.name

    @staticmethod
    def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
        """
        Parse file repomd.xml and create a Repomd object
        :param repomd_file_path: path to a local repomd.xml
        """
        return Repomd(repomd_file_path)

    @classmethod
    def _parse_modules_file(
            cls,
            modules_file_path: AnyStr,
    ) -> Iterator[Any]:
        """
        Parse modules.yaml.gz and return the parsed data
        :param modules_file_path: path to a local modules.yaml.gz
        :return: list of dicts, one for each module in a repo
        """

        with open(modules_file_path, 'rb') as modules_file:
            data = modules_file.read()
            if is_gzip_file(data[:2]):
                data = gzip.decompress(data)
            elif is_xz_file(data[:2]):
                data = lzma.decompress(data)
            return yaml.load_all(
                data,
                Loader=yaml.BaseLoader,
            )

    def _get_repomd_records(
            self,
            repo_info: RepoInfo,
    ) -> List[RepomdRecord]:
        """
        Get and parse file repomd.xml and extract repomd records from it
        :param repo_info: structure which contains info about the current repo
        :return: list of repomd records
        """
        repomd_file_path = os.path.join(
            repo_info.path,
            repo_info.folder,
            'repodata',
            'repomd.xml',
        )
        if repo_info.is_remote:
            repomd_file_path = urljoin(
                urljoin(
                    repo_info.path + '/',
                    repo_info.folder
                ) + '/',
                'repodata/repomd.xml'
            )
            repomd_file_path = self.get_remote_file_content(repomd_file_path)

        repomd_object = self._parse_repomd(repomd_file_path)
        if repo_info.is_remote:
            os.remove(repomd_file_path)
        return repomd_object.records

    def _download_repomd_records(
            self,
            repo_info: RepoInfo,
            repomd_records: List[RepomdRecord],
            repomd_records_dict: Dict[str, str],
    ):
        """
        Download repomd records
        :param repo_info: structure which contains info about the current repo
        :param repomd_records: list of repomd records
        :param repomd_records_dict: dict with paths to repodata files
        """
        for repomd_record in repomd_records:
            if repomd_record.type not in (
                    'primary',
                    'filelists',
                    'other',
            ):
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
            repomd_records_dict[repomd_record.type] = repomd_record_file_path

    def _parse_module_repomd_record(
            self,
            repo_info: RepoInfo,
            repomd_records: List[RepomdRecord],
    ) -> List[Dict]:
        """
        Download and parse the modules repomd record
        :param repo_info: structure which contains info about the current repo
        :param repomd_records: list of repomd records
        """
        for repomd_record in repomd_records:
            if repomd_record.type != 'modules':
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
            return list(self._parse_modules_file(
                repomd_record_file_path,
            ))
        return []

    @staticmethod
    def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
        version_tuple_1 = (
            package_1.epoch,
            package_1.version,
            package_1.release,
        )
        version_tuple_2 = (
            package_2.epoch,
            package_2.version,
            package_2.release,
        )
        return rpm.labelCompare(version_tuple_1, version_tuple_2)

    def get_packages_iterator(
            self,
            repo_info: RepoInfo,
    ) -> Union[PackageIterator, Iterator]:
        full_repo_path = self._get_full_repo_path(repo_info)
        pkgs_iterator = self.pkgs.get(full_repo_path)
        if pkgs_iterator is None:
            repomd_records = self._get_repomd_records(
                repo_info=repo_info,
            )
            repomd_records_dict = {}  # type: Dict[str, str]
            self._download_repomd_records(
                repo_info=repo_info,
                repomd_records=repomd_records,
                repomd_records_dict=repomd_records_dict,
            )
            pkgs_iterator = PackageIterator(
                primary_path=repomd_records_dict['primary'],
                filelists_path=repomd_records_dict['filelists'],
                other_path=repomd_records_dict['other'],
                warningcb=self._warning_callback,
            )
        pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
        return pkgs_iterator

    def get_package_arch(
            self,
            package: Package,
            variant_arch: str,
    ) -> str:
        result = variant_arch
        if package.arch in self.repo_arches[variant_arch]:
            result = package.arch
        return result

    def is_skipped_module_package(
            self,
            package: Package,
            variant_arch: str,
    ) -> bool:
        package_key = self.get_package_key(package, variant_arch)
        # Even a module package will be added to packages.json if
        # it is present in the list of included packages
        return 'module' in package.release and not any(
            re.search(
                f'^{included_pkg}$',
                package_key,
            ) or included_pkg in (package.name, package_key)
            for included_pkg in self.included_packages
        )

    def is_excluded_package(
            self,
            package: Package,
            variant_arch: str,
            excluded_packages: List[str],
    ) -> bool:
        package_key = self.get_package_key(package, variant_arch)
        return any(
            re.search(
                f'^{excluded_pkg}$',
                package_key,
            ) or excluded_pkg in (package.name, package_key)
            for excluded_pkg in excluded_packages
        )

    @staticmethod
    def get_source_rpm_name(package: Package) -> str:
        source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
        return source_rpm_nvra['name']

    def get_package_key(self, package: Package, variant_arch: str) -> str:
        return (
            f'{package.name}.'
            f'{self.get_package_arch(package, variant_arch)}'
        )

    def generate_packages_json(
            self
    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
        """
        Generate packages.json
        """
        packages = defaultdict(lambda: defaultdict(lambda: {
            'variants': list(),
        }))
        for variant_info in self.variants:
            for repo_info in variant_info.repos:
                is_reference = repo_info.is_reference
                for package in self.get_packages_iterator(repo_info=repo_info):
                    if self.is_skipped_module_package(
                            package=package,
                            variant_arch=variant_info.arch,
                    ):
                        continue
                    if self.is_excluded_package(
                            package=package,
                            variant_arch=variant_info.arch,
                            excluded_packages=self.excluded_packages,
                    ):
                        continue
                    if self.is_excluded_package(
                            package=package,
                            variant_arch=variant_info.arch,
                            excluded_packages=variant_info.excluded_packages,
                    ):
                        continue
                    package_key = self.get_package_key(
                        package,
                        variant_info.arch,
                    )
                    source_rpm_name = self.get_source_rpm_name(package)
                    package_info = packages[source_rpm_name][package_key]
                    if 'is_reference' not in package_info:
                        package_info['variants'].append(variant_info.name)
                        package_info['is_reference'] = is_reference
                        package_info['package'] = package
                    elif not package_info['is_reference'] or \
                            package_info['is_reference'] == is_reference and \
                            self.compare_pkgs_version(
                                package_1=package,
                                package_2=package_info['package'],
                            ) > 0:
                        package_info['variants'] = [variant_info.name]
                        package_info['is_reference'] = is_reference
                        package_info['package'] = package
                    elif self.compare_pkgs_version(
                            package_1=package,
                            package_2=package_info['package'],
                    ) == 0 and repo_info.repo_type != 'absent':
                        package_info['variants'].append(variant_info.name)
        result = defaultdict(lambda: defaultdict(
            lambda: defaultdict(list),
        ))
        for variant_info in self.variants:
            for source_rpm_name, packages_info in packages.items():
                for package_key, package_info in packages_info.items():
                    variant_pkgs = result[variant_info.name][variant_info.arch]
                    if variant_info.name not in package_info['variants']:
                        continue
                    variant_pkgs[source_rpm_name].append(package_key)
        return result


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-c',
        '--config',
        type=Path,
        default=Path('config.yaml'),
        required=False,
        help='Path to a config',
    )
    parser.add_argument(
        '-o',
        '--json-output-path',
        type=str,
        help='Full path to the output json file',
        required=True,
    )

    return parser


def read_config(config_path: Path) -> Optional[Dict]:
    if not config_path.exists():
        logging.error('A config by path "%s" does not exist', config_path)
        exit(1)
    with config_path.open('r') as config_fd:
        return yaml.safe_load(config_fd)


def process_config(config_data: Dict) -> Tuple[
    List[VariantInfo],
    List[str],
    List[str],
]:
    excluded_packages = config_data.get('excluded_packages', [])
    included_packages = config_data.get('included_packages', [])
    variants = [VariantInfo(
        name=variant_name,
        arch=variant_info['arch'],
        excluded_packages=variant_info.get('excluded_packages', []),
        repos=[RepoInfo(
            path=variant_repo['path'],
            folder=variant_repo['folder'],
            is_remote=variant_repo['remote'],
            is_reference=variant_repo['reference'],
            repo_type=variant_repo.get('repo_type', 'present'),
        ) for variant_repo in variant_info['repos']]
    ) for variant_name, variant_info in config_data['variants'].items()]
    return variants, excluded_packages, included_packages


def cli_main():
    args = create_parser().parse_args()
    variants, excluded_packages, included_packages = process_config(
        config_data=read_config(args.config)
    )
    pg = PackagesGenerator(
        variants=variants,
        excluded_packages=excluded_packages,
        included_packages=included_packages,
    )
    result = pg.generate_packages_json()
    with open(args.json_output_path, 'w') as packages_file:
        json.dump(
            result,
            packages_file,
            indent=4,
            sort_keys=True,
        )


if __name__ == '__main__':
    cli_main()

@@ -1,255 +0,0 @@
import gzip
import lzma
import os
from argparse import ArgumentParser, FileType
from glob import iglob
from io import BytesIO
from pathlib import Path
from typing import List, AnyStr, Iterable, Union, Optional
import logging
from urllib.parse import urljoin

import yaml
import createrepo_c as cr
from typing import BinaryIO

from .create_packages_json import PackagesGenerator, is_gzip_file, is_xz_file

EMPTY_FILE = '.empty'


def read_modules_yaml(modules_yaml_path: Union[str, Path]) -> BytesIO:
    with open(modules_yaml_path, 'rb') as fp:
        return BytesIO(fp.read())


def grep_list_of_modules_yaml(repos_path: AnyStr) -> Iterable[BytesIO]:
    """
    Find all valid *modules.yaml.gz files in the repos
    :param repos_path: path to a directory which contains repo dirs
    :return: iterable object of contents from *modules.yaml.*
    """

    return (
        read_modules_yaml_from_specific_repo(repo_path=Path(path).parent)
        for path in iglob(
            str(Path(repos_path).joinpath('**/repodata')),
            recursive=True
        )
    )


def _is_remote(path: str):
    return any(str(path).startswith(protocol)
               for protocol in ('http', 'https'))


def read_modules_yaml_from_specific_repo(
        repo_path: Union[str, Path]
) -> Optional[BytesIO]:
    """
    Read modules_yaml from a specific repo (remote or local)
    :param repo_path: path/url to a specific repo
           (the final dir should contain the dir `repodata`)
    :return: iterable object of content from *modules.yaml.*
    """

    if _is_remote(repo_path):
        repomd_url = urljoin(
            repo_path + '/',
            'repodata/repomd.xml',
        )
        packages_generator = PackagesGenerator(
            variants=[],
            excluded_packages=[],
            included_packages=[],
        )
        repomd_file_path = packages_generator.get_remote_file_content(
            file_url=repomd_url
        )
    else:
        repomd_file_path = os.path.join(
            repo_path,
            'repodata/repomd.xml',
        )
    repomd_obj = cr.Repomd(str(repomd_file_path))
    for record in repomd_obj.records:
        if record.type != 'modules':
            continue
        else:
            if _is_remote(repo_path):
                modules_yaml_url = urljoin(
                    repo_path + '/',
                    record.location_href,
                )
                packages_generator = PackagesGenerator(
                    variants=[],
                    excluded_packages=[],
                    included_packages=[],
                )
                modules_yaml_path = packages_generator.get_remote_file_content(
                    file_url=modules_yaml_url
                )
            else:
                modules_yaml_path = os.path.join(
                    repo_path,
                    record.location_href,
                )
            return read_modules_yaml(modules_yaml_path=modules_yaml_path)
    else:
        return None


def _should_grep_defaults(
        document_type: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
) -> bool:
    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
    if document_type == 'modulemd' and (xor_flag or grep_only_modules_data):
        return True
    return False


def _should_grep_modules(
        document_type: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
) -> bool:
    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
    if document_type == 'modulemd-defaults' and \
            (xor_flag or grep_only_modules_defaults_data):
        return True
    return False


def collect_modules(
        modules_paths: List[BinaryIO],
        target_dir: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
):
    """
    Read the given modules.yaml.gz files and export modules
    and modulemd files from them.
    Returns:
        object:
    """
    xor_flag = grep_only_modules_defaults_data is grep_only_modules_data
    modules_path = os.path.join(target_dir, 'modules')
    module_defaults_path = os.path.join(target_dir, 'module_defaults')
    if grep_only_modules_data or xor_flag:
        os.makedirs(modules_path, exist_ok=True)
    if grep_only_modules_defaults_data or xor_flag:
        os.makedirs(module_defaults_path, exist_ok=True)
        # Defaults modules can be empty, but pungi detects
        # an empty folder while copying and raises an exception in this case
        Path(os.path.join(module_defaults_path, EMPTY_FILE)).touch()

    for module_file in modules_paths:
        data = module_file.read()
        if is_gzip_file(data[:2]):
            data = gzip.decompress(data)
        elif is_xz_file(data[:2]):
            data = lzma.decompress(data)
        documents = yaml.load_all(data, Loader=yaml.BaseLoader)
        for doc in documents:
            path = None
            if _should_grep_modules(
                    doc['document'],
                    grep_only_modules_data,
                    grep_only_modules_defaults_data,
            ):
                name = f"{doc['data']['module']}.yaml"
                path = os.path.join(module_defaults_path, name)
                logging.info('Found %s module defaults', name)
            elif _should_grep_defaults(
                    doc['document'],
                    grep_only_modules_data,
                    grep_only_modules_defaults_data,
            ):
                # pungi.phases.pkgset.sources.source_koji.get_koji_modules
                stream = doc['data']['stream'].replace('-', '_')
                doc_data = doc['data']
                name = f"{doc_data['name']}-{stream}-" \
                       f"{doc_data['version']}.{doc_data['context']}"
                arch_dir = os.path.join(
                    modules_path,
                    doc_data['arch']
                )
                os.makedirs(arch_dir, exist_ok=True)
                path = os.path.join(
                    arch_dir,
                    name,
                )
                logging.info('Found module %s', name)

                if 'artifacts' not in doc['data']:
                    logging.warning(
                        'RPM %s does not have an explicit list of artifacts',
                        name
                    )
            if path is not None:
                with open(path, 'w') as f:
                    yaml.dump(doc, f, default_flow_style=False)


def cli_main():
    parser = ArgumentParser()
    content_type_group = parser.add_mutually_exclusive_group(required=False)
    content_type_group.add_argument(
        '--get-only-modules-data',
        action='store_true',
        help='Parse and get only modules data',
    )
    content_type_group.add_argument(
        '--get-only-modules-defaults-data',
        action='store_true',
        help='Parse and get only modules_defaults data',
    )
    path_group = parser.add_mutually_exclusive_group(required=True)
    path_group.add_argument(
        '-p', '--path',
        type=FileType('rb'), nargs='+',
        help='Path to a modules.yaml.gz file. '
             'You may pass multiple files by passing -p path1 path2'
    )
    path_group.add_argument(
        '-rp', '--repo-path',
        required=False,
        type=str,
        default=None,
        help='Path to a directory which contains repodirs. E.g. /var/repos'
    )
    path_group.add_argument(
        '-rd', '--repodata-paths',
        required=False,
        type=str,
        nargs='+',
        default=[],
        help='Paths/urls to the directories with the directory `repodata`',
    )
    parser.add_argument('-t', '--target', required=True)

    namespace = parser.parse_args()
    if namespace.repodata_paths:
        modules = []
        for repodata_path in namespace.repodata_paths:
            modules.append(read_modules_yaml_from_specific_repo(
                repodata_path,
            ))
    elif namespace.path is not None:
        modules = namespace.path
    else:
        modules = grep_list_of_modules_yaml(namespace.repo_path)
    modules = list(filter(lambda i: i is not None, modules))
    collect_modules(
        modules,
        namespace.target,
        namespace.get_only_modules_data,
        namespace.get_only_modules_defaults_data,
    )


if __name__ == '__main__':
    cli_main()

@@ -1,96 +0,0 @@
import re
from argparse import ArgumentParser

import os
from glob import iglob
from typing import List
from pathlib import Path

from dataclasses import dataclass
from productmd.common import parse_nvra


@dataclass
class Package:
    nvra: dict
    path: Path


def search_rpms(top_dir: Path) -> List[Package]:
    """
    Search for all *.rpm files recursively
    in the given top directory
    Returns:
        list: list of paths
    """
    return [Package(
        nvra=parse_nvra(Path(path).stem),
        path=Path(path),
    ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]


def is_excluded_package(
        package: Package,
        excluded_packages: List[str],
) -> bool:
    package_key = f'{package.nvra["name"]}.{package.nvra["arch"]}'
    return any(
        re.search(
            f'^{excluded_pkg}$',
            package_key,
        ) or excluded_pkg in (package.nvra['name'], package_key)
        for excluded_pkg in excluded_packages
    )


def copy_rpms(
        packages: List[Package],
        target_top_dir: Path,
        excluded_packages: List[str],
):
    """
    Search synced repos for rpms and prepare
    a koji-like structure for pungi

    Instead of repos, use the following structure:
    # ls /mnt/koji/
    i686/  noarch/  x86_64/
    Returns:
        Nothing:
    """
    for package in packages:
        if is_excluded_package(package, excluded_packages):
            continue
        target_arch_dir = target_top_dir.joinpath(package.nvra['arch'])
        target_file = target_arch_dir.joinpath(package.path.name)
        os.makedirs(target_arch_dir, exist_ok=True)

        if not target_file.exists():
            try:
                os.link(package.path, target_file)
            except OSError:
                # hardlink failed, so make the target a symlink to the package
                target_file.symlink_to(package.path)


def cli_main():
    parser = ArgumentParser()
    parser.add_argument('-p', '--path', required=True, type=Path)
    parser.add_argument('-t', '--target', required=True, type=Path)
    parser.add_argument(
        '-e',
        '--excluded-packages',
        required=False,
        nargs='+',
        type=str,
        default=[],
    )

    namespace = parser.parse_args()

    rpms = search_rpms(namespace.path)
    copy_rpms(rpms, namespace.target, namespace.excluded_packages)


if __name__ == '__main__':
    cli_main()

513 pungi/scripts/pungi.py Normal file
@@ -0,0 +1,513 @@
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.

from __future__ import absolute_import
from __future__ import print_function

import os
import selinux
import sys

from argparse import ArgumentParser, Action

from pungi import get_full_version
import pungi.gather
import pungi.config
import pungi.ks


def get_arguments(config):
    parser = ArgumentParser()

    class SetConfig(Action):
        def __call__(self, parser, namespace, value, option_string=None):
            config.set("pungi", self.dest, value)

    parser.add_argument("--version", action="version", version=get_full_version())

    # Pulled in from config file to be cli options as part of pykickstart conversion
    parser.add_argument(
        "--name",
        dest="family",
        type=str,
        action=SetConfig,
        help='the name for your distribution (defaults to "Fedora"), DEPRECATED',
    )
    parser.add_argument(
        "--family",
        dest="family",
        action=SetConfig,
        help='the family name for your distribution (defaults to "Fedora")',
    )
    parser.add_argument(
        "--ver",
        dest="version",
        action=SetConfig,
        help="the version of your distribution (defaults to datestamp)",
    )
    parser.add_argument(
        "--flavor",
        dest="variant",
        action=SetConfig,
        help="the flavor of your distribution spin (optional), DEPRECATED",
    )
    parser.add_argument(
        "--variant",
        dest="variant",
        action=SetConfig,
        help="the variant of your distribution spin (optional)",
    )
    parser.add_argument(
        "--destdir",
        dest="destdir",
        action=SetConfig,
        help="destination directory (defaults to current directory)",
    )
    parser.add_argument(
        "--cachedir",
        dest="cachedir",
        action=SetConfig,
        help="package cache directory (defaults to /var/cache/pungi)",
    )
    parser.add_argument(
        "--bugurl",
        dest="bugurl",
        action=SetConfig,
        help="the url for your bug system (defaults to http://bugzilla.redhat.com)",
    )
    parser.add_argument(
        "--selfhosting",
        action="store_true",
        dest="selfhosting",
        help="build a self-hosting tree by following build dependencies (optional)",
    )
    parser.add_argument(
        "--fulltree",
        action="store_true",
        dest="fulltree",
        help="build a tree that includes all packages built from corresponding source rpms (optional)",  # noqa: E501
    )
    parser.add_argument(
        "--nosource",
        action="store_true",
        dest="nosource",
        help="disable gathering of source packages (optional)",
    )
    parser.add_argument(
        "--nodebuginfo",
        action="store_true",
        dest="nodebuginfo",
        help="disable gathering of debuginfo packages (optional)",
    )
    parser.add_argument(
        "--nodownload",
        action="store_true",
        dest="nodownload",
        help="disable downloading of packages. instead, print the package URLs (optional)",  # noqa: E501
    )
    parser.add_argument(
        "--norelnotes",
        action="store_true",
        dest="norelnotes",
        help="disable gathering of release notes (optional); DEPRECATED",
    )
    parser.add_argument(
        "--nogreedy",
        action="store_true",
        dest="nogreedy",
        help="disable pulling of all providers of package dependencies (optional)",
    )
    parser.add_argument(
        "--nodeps",
        action="store_false",
        dest="resolve_deps",
        default=True,
        help="disable resolving dependencies",
    )
    parser.add_argument(
        "--sourceisos",
        default=False,
        action="store_true",
        dest="sourceisos",
        help="Create the source isos (other arch runs must be done)",
    )
    parser.add_argument(
        "--force",
        default=False,
        action="store_true",
        help="Force reuse of an existing destination directory (will overwrite files)",
    )
    parser.add_argument(
        "--isfinal",
        default=False,
        action="store_true",
        help="Specify this is a GA tree, which causes betanag to be turned off during install",  # noqa: E501
    )
    parser.add_argument(
        "--nohash",
        default=False,
        action="store_true",
        help="disable hashing the Packages trees",
    )
    parser.add_argument(
        "--full-archlist",
        action="store_true",
        help="Use the full arch list for x86_64 (include i686, i386, etc.)",
    )
    parser.add_argument("--arch", help="Override default (uname based) arch")
    parser.add_argument(
        "--greedy", metavar="METHOD", help="Greedy method; none, all, build"
    )
    parser.add_argument(
        "--multilib",
        action="append",
        metavar="METHOD",
        help="Multilib method; can be specified multiple times; recommended: devel, runtime",  # noqa: E501
    )
    parser.add_argument(
        "--lookaside-repo",
        action="append",
        dest="lookaside_repos",
        metavar="NAME",
        help="Specify lookaside repo name(s) (packages will be used for depsolving but not be included in the output)",  # noqa: E501
    )
    parser.add_argument(
        "--workdirbase",
        dest="workdirbase",
        action=SetConfig,
        help="base working directory (defaults to destdir + /work)",
    )
    parser.add_argument(
        "--no-dvd",
        default=False,
        action="store_true",
        dest="no_dvd",
        help="Do not make an install DVD/CD, only the netinstall image and the tree",
    )
    parser.add_argument("--lorax-conf", help="Path to lorax.conf file (optional)")
    parser.add_argument(
        "-i",
        "--installpkgs",
        default=[],
        action="append",
        metavar="STRING",
        help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",  # noqa: E501
    )
    parser.add_argument(
        "--multilibconf",
        default=None,
        action=SetConfig,
        help="Path to multilib conf files. Default is /usr/share/pungi/multilib/",
    )

    parser.add_argument(
        "-c",
        "--config",
        dest="config",
        required=True,
        help="Path to kickstart config file",
    )
    parser.add_argument(
        "--all-stages",
        action="store_true",
        default=True,
        dest="do_all",
        help="Enable ALL stages",
    )
    parser.add_argument(
        "-G",
        action="store_true",
        default=False,
        dest="do_gather",
        help="Flag to enable processing the Gather stage",
    )
    parser.add_argument(
        "-C",
        action="store_true",
        default=False,
        dest="do_createrepo",
        help="Flag to enable processing the Createrepo stage",
    )
    parser.add_argument(
        "-B",
        action="store_true",
        default=False,
        dest="do_buildinstall",
        help="Flag to enable processing the BuildInstall stage",
    )
    parser.add_argument(
        "-I",
        action="store_true",
        default=False,
        dest="do_createiso",
        help="Flag to enable processing the CreateISO stage",
    )
    parser.add_argument(
        "--relnotepkgs",
        dest="relnotepkgs",
        action=SetConfig,
        help="Rpms which contain the release notes",
    )
    parser.add_argument(
        "--relnotefilere",
        dest="relnotefilere",
        action=SetConfig,
        help="Which files are the release notes -- GPL EULA",
    )
    parser.add_argument(
        "--nomacboot",
        action="store_true",
        dest="nomacboot",
        help="disable setting up macboot as no hfs support",
    )

    parser.add_argument(
        "--rootfs-size",
        dest="rootfs_size",
        action=SetConfig,
        default=False,
        help="Size of root filesystem in GiB. If not specified, use lorax default value",  # noqa: E501
    )

    parser.add_argument(
        "--pungirc",
        dest="pungirc",
        default="~/.pungirc",
        action=SetConfig,
        help="Read pungi options from config file",
    )

    opts = parser.parse_args()

    if (
        not config.get("pungi", "variant").isalnum()
        and not config.get("pungi", "variant") == ""
    ):
        parser.error("Variant must be alphanumeric")

    if (
        opts.do_gather
        or opts.do_createrepo
        or opts.do_buildinstall
        or opts.do_createiso
    ):
        opts.do_all = False

    if opts.arch and (opts.do_all or opts.do_buildinstall):
        parser.error("Cannot override arch while the BuildInstall stage is enabled")

    # set the iso_basename.
    if not config.get("pungi", "variant") == "":
        config.set(
            "pungi",
            "iso_basename",
            "%s-%s" % (config.get("pungi", "family"), config.get("pungi", "variant")),
        )
    else:
        config.set("pungi", "iso_basename", config.get("pungi", "family"))

    return opts


def main():
    config = pungi.config.Config()
    opts = get_arguments(config)

    # Read the config to create "new" defaults;
    # reparse the command line options so they take precedence
    config = pungi.config.Config(pungirc=opts.pungirc)
    opts = get_arguments(config)

    # You must be this high to ride if you're going to do root tasks
    if os.geteuid() != 0 and (opts.do_all or opts.do_buildinstall):
        print("You must run pungi as root", file=sys.stderr)
        return 1

    if opts.do_all or opts.do_buildinstall:
        try:
            enforcing = selinux.security_getenforce()
        except Exception:
            print("INFO: selinux disabled")
            enforcing = False
        if enforcing:
            print(
                "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."  # noqa: E501
            )
            print("Consider running with setenforce 0.")

    # Set up the kickstart parser and pass in the kickstart file we were handed
    ksparser = pungi.ks.get_ksparser(ks_path=opts.config)

    if opts.sourceisos:
        config.set("pungi", "arch", "source")

    for part in ksparser.handler.partition.partitions:
        if part.mountpoint == "iso":
            config.set("pungi", "cdsize", str(part.size))

    config.set("pungi", "force", str(opts.force))

    if config.get("pungi", "workdirbase") == "/work":
        config.set("pungi", "workdirbase", "%s/work" % config.get("pungi", "destdir"))
    # Set up our directories
    if not os.path.exists(config.get("pungi", "destdir")):
        try:
            os.makedirs(config.get("pungi", "destdir"))
        except OSError:
            print(
                "Error: Cannot create destination dir %s"
                % config.get("pungi", "destdir"),
                file=sys.stderr,
            )
            sys.exit(1)
    else:
        print("Warning: Reusing existing destination directory.")

    if not os.path.exists(config.get("pungi", "workdirbase")):
        try:
            os.makedirs(config.get("pungi", "workdirbase"))
        except OSError:
            print(
                "Error: Cannot create working base dir %s"
                % config.get("pungi", "workdirbase"),
                file=sys.stderr,
            )
            sys.exit(1)
    else:
        print("Warning: Reusing existing working base directory.")

    cachedir = config.get("pungi", "cachedir")

    if not os.path.exists(cachedir):
        try:
            os.makedirs(cachedir)
        except OSError:
            print("Error: Cannot create cache dir %s" % cachedir, file=sys.stderr)
            sys.exit(1)

    # Set debuginfo flag
    if opts.nodebuginfo:
        config.set("pungi", "debuginfo", "False")
    if opts.greedy:
        config.set("pungi", "greedy", opts.greedy)
    else:
        # XXX: compatibility
        if opts.nogreedy:
            config.set("pungi", "greedy", "none")
        else:
            config.set("pungi", "greedy", "all")
    config.set("pungi", "resolve_deps", str(bool(opts.resolve_deps)))
    if opts.isfinal:
        config.set("pungi", "isfinal", "True")
    if opts.nohash:
        config.set("pungi", "nohash", "True")
    if opts.full_archlist:
        config.set("pungi", "full_archlist", "True")
    if opts.arch:
        config.set("pungi", "arch", opts.arch)
    if opts.multilib:
        config.set("pungi", "multilib", " ".join(opts.multilib))
    if opts.lookaside_repos:
        config.set("pungi", "lookaside_repos", " ".join(opts.lookaside_repos))
    if opts.no_dvd:
        config.set("pungi", "no_dvd", "True")
    if opts.nomacboot:
        config.set("pungi", "nomacboot", "True")
    config.set("pungi", "fulltree", str(bool(opts.fulltree)))
    config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
    config.set("pungi", "nosource", str(bool(opts.nosource)))
    config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))

    if opts.lorax_conf:
        config.set("lorax", "conf_file", opts.lorax_conf)
    if opts.installpkgs:
        config.set("lorax", "installpkgs", " ".join(opts.installpkgs))

    # Actually do work.
    mypungi = pungi.gather.Pungi(config, ksparser)

    with mypungi.yumlock:
        if not opts.sourceisos:
            if opts.do_all or opts.do_gather or opts.do_buildinstall:
                mypungi._inityum()  # initialize the yum object for things that need it
            if opts.do_all or opts.do_gather:
                mypungi.gather()
                if opts.nodownload:
                    for line in mypungi.list_packages():
                        flags_str = ",".join(line["flags"])
                        if flags_str:
                            flags_str = "(%s)" % flags_str
                        sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
                    sys.stdout.flush()
                else:
                    mypungi.downloadPackages()
                mypungi.makeCompsFile()
                if not opts.nodebuginfo:
                    mypungi.getDebuginfoList()
                    if opts.nodownload:
                        for line in mypungi.list_debuginfo():
                            flags_str = ",".join(line["flags"])
                            if flags_str:
                                flags_str = "(%s)" % flags_str
                            sys.stdout.write(
                                "DEBUGINFO%s: %s\n" % (flags_str, line["path"])
                            )
                        sys.stdout.flush()
                    else:
                        mypungi.downloadDebuginfo()
                if not opts.nosource:
                    if opts.nodownload:
                        for line in mypungi.list_srpms():
                            flags_str = ",".join(line["flags"])
                            if flags_str:
                                flags_str = "(%s)" % flags_str
                            sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
                        sys.stdout.flush()
                    else:
                        mypungi.downloadSRPMs()

                print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
                if not opts.nodebuginfo:
                    print(
                        "DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2)
                    )
                if not opts.nosource:
                    print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))

    # Furthermore (but without the yumlock...)
    if not opts.sourceisos:
        if opts.do_all or opts.do_createrepo:
            mypungi.doCreaterepo()

        if opts.do_all or opts.do_buildinstall:
            if not opts.norelnotes:
                mypungi.doGetRelnotes()
            mypungi.doBuildinstall()

        if opts.do_all or opts.do_createiso:
            mypungi.doCreateIsos()

    # Do things slightly different for src.
    if opts.sourceisos:
        # we already have all the content gathered
        mypungi.topdir = os.path.join(
            config.get("pungi", "destdir"),
            config.get("pungi", "version"),
            config.get("pungi", "variant"),
            "source",
            "SRPMS",
        )
        mypungi.doCreaterepo(comps=False)
        if opts.do_all or opts.do_createiso:
            mypungi.doCreateIsos()

    print("All done!")
@@ -11,19 +11,19 @@ import locale
 import logging
 import os
 import socket
-import shlex
 import signal
 import sys
 import traceback
 import shutil
 import subprocess

+from six.moves import shlex_quote
+
 from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
 from pungi.errors import UnsignedPackagesError
 from pungi.wrappers import kojiwrapper
 from pungi.util import rmtree
-from pungi.otel import tracing


 # force C locales
@@ -252,15 +252,9 @@ def main():
     kobo.log.add_stderr_logger(logger)

     conf = util.load_config(opts.config)
-    compose_type = opts.compose_type or conf.get("compose_type", "production")
-    label = opts.label or conf.get("label")
-    if label:
-        try:
-            productmd.composeinfo.verify_label(label)
-        except ValueError as ex:
-            abort(str(ex))
-
-    if compose_type == "production" and not label and not opts.no_label:
+    compose_type = opts.compose_type or conf.get("compose_type", "production")
+    if compose_type == "production" and not opts.label and not opts.no_label:
         abort("must specify label for a production compose")

     if (
@@ -310,7 +304,7 @@ def main():
             opts.target_dir,
             conf,
             compose_type=compose_type,
-            compose_label=label,
+            compose_label=opts.label,
             parent_compose_ids=opts.parent_compose_id,
             respin_of=opts.respin_of,
         )
@@ -321,7 +315,7 @@ def main():
         ci = Compose.get_compose_info(
             conf,
             compose_type=compose_type,
-            compose_label=label,
+            compose_label=opts.label,
             parent_compose_ids=opts.parent_compose_id,
             respin_of=opts.respin_of,
         )
@@ -386,7 +380,7 @@ def run_compose(
     compose.log_info("User name: %s" % getpass.getuser())
     compose.log_info("Working directory: %s" % os.getcwd())
     compose.log_info(
-        "Command line: %s" % " ".join([shlex.quote(arg) for arg in sys.argv])
+        "Command line: %s" % " ".join([shlex_quote(arg) for arg in sys.argv])
     )
     compose.log_info("Compose top directory: %s" % compose.topdir)
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
@@ -426,11 +420,11 @@ def run_compose(
     ostree_container_phase = pungi.phases.OSTreeContainerPhase(compose, pkgset_phase)
     createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
     extra_isos_phase = pungi.phases.ExtraIsosPhase(compose, buildinstall_phase)
+    liveimages_phase = pungi.phases.LiveImagesPhase(compose)
     livemedia_phase = pungi.phases.LiveMediaPhase(compose)
     image_build_phase = pungi.phases.ImageBuildPhase(compose, buildinstall_phase)
     kiwibuild_phase = pungi.phases.KiwiBuildPhase(compose)
     osbuild_phase = pungi.phases.OSBuildPhase(compose)
-    imagebuilder_phase = pungi.phases.ImageBuilderPhase(compose)
     osbs_phase = pungi.phases.OSBSPhase(compose, pkgset_phase, buildinstall_phase)
     image_container_phase = pungi.phases.ImageContainerPhase(compose)
     image_checksum_phase = pungi.phases.ImageChecksumPhase(compose)
@@ -446,6 +440,7 @@ def run_compose(
         gather_phase,
         extrafiles_phase,
         createiso_phase,
+        liveimages_phase,
         livemedia_phase,
         image_build_phase,
         image_checksum_phase,
@@ -458,7 +453,6 @@ def run_compose(
         osbuild_phase,
         image_container_phase,
         kiwibuild_phase,
-        imagebuilder_phase,
     ):
         if phase.skip():
             continue
@@ -473,6 +467,50 @@ def run_compose(
             print(i)
         raise RuntimeError("Configuration is not valid")

+    # PREP
+
+    # Note: This may be put into a new method of phase classes (e.g. .prep())
+    # in same way as .validate() or .run()
+
+    # Prep for liveimages - Obtain a password for signing rpm wrapped images
+    if (
+        "signing_key_password_file" in compose.conf
+        and "signing_command" in compose.conf
+        and "%(signing_key_password)s" in compose.conf["signing_command"]
+        and not liveimages_phase.skip()
+    ):
+        # TODO: Don't require key if signing is turned off
+        # Obtain signing key password
+        signing_key_password = None
+
+        # Use appropriate method
+        if compose.conf["signing_key_password_file"] == "-":
+            # Use stdin (by getpass module)
+            try:
+                signing_key_password = getpass.getpass("Signing key password: ")
+            except EOFError:
+                compose.log_debug("Ignoring signing key password")
+                pass
+        else:
+            # Use text file with password
+            try:
+                signing_key_password = (
+                    open(compose.conf["signing_key_password_file"], "r")
+                    .readline()
+                    .rstrip("\n")
+                )
+            except IOError:
+                # Filename is not print intentionally in case someone puts
+                # password directly into the option
+                err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"  # noqa: E501
+                compose.log_error(err_msg)
+                print(err_msg)
+                raise RuntimeError(err_msg)
+
+        if signing_key_password:
+            # Store the password
+            compose.conf["signing_key_password"] = signing_key_password
+
     init_phase.start()
     init_phase.stop()
@@ -484,58 +522,51 @@ def run_compose(
         buildinstall_phase,
         (gather_phase, createrepo_phase),
         extrafiles_phase,
-        ostree_phase,
         (ostree_phase, ostree_installer_phase),
+        ostree_container_phase,
     )
     essentials_phase = pungi.phases.WeaverPhase(compose, essentials_schema)
     essentials_phase.start()
-    ostree_container_phase.start()
-    try:
-        essentials_phase.stop()
+    essentials_phase.stop()

-        # write treeinfo before ISOs are created
-        for variant in compose.get_variants():
-            for arch in variant.arches + ["src"]:
-                pungi.metadata.write_tree_info(
-                    compose, arch, variant, bi=buildinstall_phase
-                )
+    # write treeinfo before ISOs are created
+    for variant in compose.get_variants():
+        for arch in variant.arches + ["src"]:
+            pungi.metadata.write_tree_info(
+                compose, arch, variant, bi=buildinstall_phase
+            )

-        # write .discinfo and media.repo before ISOs are created
-        for variant in compose.get_variants():
-            if variant.type == "addon" or variant.is_empty:
-                continue
-            for arch in variant.arches + ["src"]:
-                timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
-                pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
+    # write .discinfo and media.repo before ISOs are created
+    for variant in compose.get_variants():
+        if variant.type == "addon" or variant.is_empty:
+            continue
+        for arch in variant.arches + ["src"]:
+            timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
+            pungi.metadata.write_media_repo(compose, arch, variant, timestamp)

-        # Run phases for image artifacts in parallel
-        compose_images_schema = (
-            createiso_phase,
-            extra_isos_phase,
-            image_build_phase,
-            livemedia_phase,
-            osbuild_phase,
-            kiwibuild_phase,
-            imagebuilder_phase,
-        )
-        compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
-        extra_phase_schema = (
-            (compose_images_phase, image_container_phase),
-            ostree_installer_phase,
-            osbs_phase,
-            repoclosure_phase,
-        )
-        extra_phase = pungi.phases.WeaverPhase(compose, extra_phase_schema)
+    # Run phases for image artifacts in parallel
+    compose_images_schema = (
+        createiso_phase,
+        extra_isos_phase,
+        liveimages_phase,
+        image_build_phase,
+        livemedia_phase,
+        osbuild_phase,
+        kiwibuild_phase,
+    )
+    post_image_phase = pungi.phases.WeaverPhase(
+        compose, (image_checksum_phase, image_container_phase)
+    )
+    compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
+    extra_phase_schema = (
+        (compose_images_phase, post_image_phase),
+        osbs_phase,
+        repoclosure_phase,
+    )
+    extra_phase = pungi.phases.WeaverPhase(compose, extra_phase_schema)

-        extra_phase.start()
-        extra_phase.stop()
-    finally:
-        # wait for ostree container phase here too - it can happily run in parallel with
-        # all of the other stuff, but we must ensure it always gets stopped
-        ostree_container_phase.stop()
-
-    # now we do checksums as all images are done
-    image_checksum_phase.start()
-    image_checksum_phase.stop()
+    extra_phase.start()
+    extra_phase.stop()

     pungi.metadata.write_compose_info(compose)
     if not (
@@ -543,10 +574,10 @@ def run_compose(
         and ostree_installer_phase.skip()
         and createiso_phase.skip()
         and extra_isos_phase.skip()
+        and liveimages_phase.skip()
         and livemedia_phase.skip()
         and image_build_phase.skip()
         and kiwibuild_phase.skip()
-        and imagebuilder_phase.skip()
         and osbuild_phase.skip()
         and ostree_container_phase.skip()
     ):
@@ -657,28 +688,22 @@ def cli_main():
     signal.signal(signal.SIGINT, sigterm_handler)
     signal.signal(signal.SIGTERM, sigterm_handler)

-    tracing.setup()
-
-    with tracing.span("run-compose"):
-        try:
-            main()
-        except (Exception, KeyboardInterrupt) as ex:
-            tracing.record_exception(ex)
-            if COMPOSE:
-                COMPOSE.log_error("Compose run failed: %s" % ex)
-                COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
-                COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
-                COMPOSE.write_status("DOOMED")
-            else:
-                print("Exception: %s" % ex)
-                raise
-            sys.stdout.flush()
-            sys.stderr.flush()
-            sys.exit(1)
-        finally:
-            # Remove repositories cloned during ExtraFiles phase
-            process_id = os.getpid()
-            directoy_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
-            rmtree(directoy_to_remove)
-            # Wait for all traces to be sent...
-            tracing.force_flush()
+    try:
+        main()
+    except (Exception, KeyboardInterrupt) as ex:
+        if COMPOSE:
+            COMPOSE.log_error("Compose run failed: %s" % ex)
+            COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
+            COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
+            COMPOSE.write_status("DOOMED")
+        else:
+            print("Exception: %s" % ex)
+            raise
+        sys.stdout.flush()
+        sys.stderr.flush()
+        sys.exit(1)
+    finally:
+        # Remove repositories cloned during ExtraFiles phase
+        process_id = os.getpid()
+        directoy_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
+        rmtree(directoy_to_remove)
@@ -1,21 +0,0 @@
-from kobo.threads import WorkerThread
-
-from .otel import tracing
-
-
-class TelemetryWorkerThread(WorkerThread):
-    """
-    Subclass of WorkerThread that captures current context when the thread is
-    created, and restores the context in the new thread.
-
-    A regular WorkerThread would start from an empty context, leading to any
-    spans created in the thread disconnected from the overall trace.
-    """
-
-    def __init__(self, *args, **kwargs):
-        self.traceparent = tracing.get_traceparent()
-        super(TelemetryWorkerThread, self).__init__(*args, **kwargs)
-
-    def run(self, *args, **kwargs):
-        tracing.set_context(self.traceparent)
-        super(TelemetryWorkerThread, self).run(*args, **kwargs)
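For context, the module deleted above exists only to propagate tracing context into kobo worker threads. A minimal standalone sketch of the same pattern using the public opentelemetry-api directly (ContextThread is an illustrative name, not part of this diff):

import threading
from opentelemetry import context

class ContextThread(threading.Thread):
    """Capture the caller's context at construction, restore it in run()."""

    def __init__(self, *args, **kwargs):
        self._ctx = context.get_current()  # snapshot in the creating thread
        super().__init__(*args, **kwargs)

    def run(self):
        token = context.attach(self._ctx)  # spans now parent to the original trace
        try:
            super().run()
        finally:
            context.detach(token)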
216  pungi/util.py
@@ -19,24 +19,22 @@ import subprocess
 import os
 import shutil
 import string
 import sys
 import hashlib
 import errno
 import re
 import contextlib
-import shlex
 import traceback
 import tempfile
 import time
-import urllib.parse
-import urllib.request
 import functools
+from six.moves import urllib, range, shlex_quote

 import kobo.conf
 from kobo.shortcuts import run, force_list
-from kobo.threads import ThreadPool
+from kobo.threads import WorkerThread, ThreadPool
 from productmd.common import get_major_version
 from pungi.module_util import Modulemd
-from pungi.otel import tracing
-from pungi.threading import TelemetryWorkerThread as WorkerThread

 # Patterns that match all names of debuginfo packages
 DEBUG_PATTERNS = ["*-debuginfo", "*-debuginfo-*", "*-debugsource"]
@@ -45,6 +43,132 @@ DEBUG_PATTERN_RE = re.compile(
 )


+def _doRunCommand(
+    command,
+    logger,
+    rundir="/tmp",
+    output=subprocess.PIPE,
+    error=subprocess.PIPE,
+    env=None,
+):
+    """Run a command and log the output. Error out if we get something on stderr"""
+
+    logger.info("Running %s" % subprocess.list2cmdline(command))
+
+    p1 = subprocess.Popen(
+        command,
+        cwd=rundir,
+        stdout=output,
+        stderr=error,
+        universal_newlines=True,
+        env=env,
+        close_fds=True,
+    )
+    (out, err) = p1.communicate()
+
+    if out:
+        logger.debug(out)
+
+    if p1.returncode != 0:
+        logger.error("Got an error from %s" % command[0])
+        logger.error(err)
+        raise OSError(
+            "Got an error (%d) from %s: %s" % (p1.returncode, command[0], err)
+        )
+
+
+def _link(local, target, logger, force=False):
+    """Simple function to link or copy a package, removing target optionally."""
+
+    if os.path.exists(target) and force:
+        os.remove(target)
+
+    # check for broken links
+    if force and os.path.islink(target):
+        if not os.path.exists(os.readlink(target)):
+            os.remove(target)
+
+    try:
+        os.link(local, target)
+    except OSError as e:
+        if e.errno != 18:  # EXDEV
+            logger.error("Got an error linking from cache: %s" % e)
+            raise OSError(e)
+
+        # Can't hardlink cross file systems
+        shutil.copy2(local, target)
+
+
+def _ensuredir(target, logger, force=False, clean=False):
+    """Ensure that a directory exists, if it already exists, only continue
+    if force is set."""
+
+    # We have to check existence of a logger, as setting the logger could
+    # itself cause an issue.
+    def whoops(func, path, exc_info):
+        message = "Could not remove %s" % path
+        if logger:
+            logger.error(message)
+        else:
+            sys.stderr(message)
+        sys.exit(1)
+
+    if os.path.exists(target) and not os.path.isdir(target):
+        message = "%s exists but is not a directory." % target
+        if logger:
+            logger.error(message)
+        else:
+            sys.stderr(message)
+        sys.exit(1)
+
+    if not os.path.isdir(target):
+        os.makedirs(target)
+    elif force and clean:
+        shutil.rmtree(target, onerror=whoops)
+        os.makedirs(target)
+    elif force:
+        return
+    else:
+        message = "Directory %s already exists. Use --force to overwrite." % target
+        if logger:
+            logger.error(message)
+        else:
+            sys.stderr(message)
+        sys.exit(1)
+
+
+def _doCheckSum(path, hash, logger):
+    """Generate a checksum hash from a provided path.
+    Return a string of type:hash"""
+
+    # Try to figure out what hash we want to do
+    try:
+        sum = hashlib.new(hash)
+    except ValueError:
+        logger.error("Invalid hash type: %s" % hash)
+        return False
+
+    # Try to open the file, using binary flag.
+    try:
+        myfile = open(path, "rb")
+    except IOError as e:
+        logger.error("Could not open file %s: %s" % (path, e))
+        return False
+
+    # Loop through the file reading chunks at a time as to not
+    # put the entire file in memory. That would suck for DVDs
+    while True:
+        chunk = myfile.read(
+            8192
+        )  # magic number! Taking suggestions for better blocksize
+        if not chunk:
+            break  # we're done with the file
+        sum.update(chunk)
+    myfile.close()
+
+    return "%s:%s" % (hash, sum.hexdigest())
+
+
 def makedirs(path, mode=0o775):
     try:
         os.makedirs(path, mode=mode)
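The chunked-read loop in _doCheckSum above is the standard way to hash a large file without loading it into memory. For reference, a minimal standalone equivalent (file_checksum is an illustrative name, not part of this diff):

import hashlib

def file_checksum(path, algo="sha256", blocksize=8192):
    """Return "algo:hexdigest" for a file, reading it in fixed-size chunks."""
    digest = hashlib.new(algo)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(blocksize), b""):
            digest.update(chunk)
    return "%s:%s" % (algo, digest.hexdigest())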
@@ -69,14 +193,14 @@ def explode_rpm_package(pkg_path, target_dir):
     try:
         # rpm2archive writes to stdout only if reading from stdin, thus the redirect
         run(
-            "rpm2archive - <%s | tar xfz - && chmod -R a+rX ." % shlex.quote(pkg_path),
+            "rpm2archive - <%s | tar xfz - && chmod -R a+rX ." % shlex_quote(pkg_path),
             workdir=target_dir,
         )
     except RuntimeError:
         # Fall back to rpm2cpio in case rpm2archive failed (most likely due to
         # not being present on the system).
         run(
-            "rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex.quote(pkg_path),
+            "rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex_quote(pkg_path),
             workdir=target_dir,
         )
@@ -250,38 +374,6 @@ class GitUrlResolver(object):
         return self.cache[key]


-class ContainerTagResolver(object):
-    """
-    A caching resolver for container image urls that replaces tags with digests.
-    """
-
-    def __init__(self, offline=False):
-        self.offline = offline
-        self.cache = {}
-
-    def __call__(self, url):
-        if self.offline:
-            # We're offline, nothing to do
-            return url
-        if re.match(".*@sha256:[a-z0-9]+", url):
-            # We already have a digest
-            return url
-        if url not in self.cache:
-            self.cache[url] = self._resolve(url)
-        return self.cache[url]
-
-    def _resolve(self, url):
-        m = re.match("^.+(:.+)$", url)
-        if not m:
-            raise RuntimeError("Failed to find tag name")
-        tag = m.group(1)
-
-        with tracing.span("skopeo-inspect", url=url):
-            data = _skopeo_inspect(url)
-        digest = data["Digest"]
-        return url.replace(tag, f"@{digest}")
-
-
 # format: {arch|*: [data]}
 def get_arch_data(conf, var_name, arch):
     result = []
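The resolver removed above pins a mutable container tag to its immutable digest via skopeo inspect. A rough standalone sketch of the same technique, assuming the skopeo CLI is installed (pin_to_digest is an illustrative name, not part of this diff):

import json
import re
import subprocess

def pin_to_digest(url):
    """Replace the trailing ":tag" in a container URL with "@sha256:..."."""
    if "@sha256:" in url:
        return url  # already pinned
    out = subprocess.run(
        ["skopeo", "inspect", url], stdout=subprocess.PIPE, check=True, encoding="utf-8"
    ).stdout
    digest = json.loads(out)["Digest"]
    return re.sub(r":[^:/]+$", "@" + digest, url)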
@@ -395,7 +487,10 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
         tried.add(volid)

     if volid and len(volid) > 32:
-        volid = volid[:32]
+        raise ValueError(
+            "Could not create volume ID longer than 32 bytes, options are %r",
+            sorted(tried, key=len),
+        )

     if compose.conf["restricted_volid"]:
         # Replace all non-alphanumeric characters and non-underscores) with
@@ -498,13 +593,7 @@ def failable(
     else:
         compose.require_deliverable(variant, arch, deliverable, subvariant)
     try:
-        with tracing.span(
-            f"generate-{deliverable}",
-            variant=variant.uid,
-            arch=arch,
-            subvariant=subvariant or "",
-        ):
-            yield
+        yield
     except Exception as exc:
         if not can_fail:
             raise
@@ -689,11 +778,7 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
     """
     for i in range(max_retries):
         proc = subprocess.Popen(
-            cmd,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            text=True,
-            errors="replace",
+            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
         )
         out, err = proc.communicate()
         if proc.returncode == 0:
@@ -715,8 +800,7 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
                     c,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT,
-                    text=True,
-                    errors="replace",
+                    universal_newlines=True,
                 )
                 out, _ = proc.communicate()
                 logger.debug(
@@ -917,12 +1001,11 @@ def retry(timeout=120, interval=30, wait_on=Exception):

 @retry(wait_on=RuntimeError)
 def git_ls_remote(baseurl, ref, credential_helper=None):
-    with tracing.span("git-ls-remote", baseurl=baseurl, ref=ref):
-        cmd = ["git"]
-        if credential_helper:
-            cmd.extend(["-c", "credential.useHttpPath=true"])
-            cmd.extend(["-c", "credential.helper=%s" % credential_helper])
-        return run(cmd + ["ls-remote", baseurl, ref], text=True, errors="replace")
+    cmd = ["git"]
+    if credential_helper:
+        cmd.extend(["-c", "credential.useHttpPath=true"])
+        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
+    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)


 def get_tz_offset():
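A hypothetical use of git_ls_remote as kept on this branch (requires pungi.util importable and network access; kobo's run() returns a (retcode, output) tuple):

# retcode, output = git_ls_remote("https://pagure.io/pungi.git", "HEAD")
# commit = output.split()[0]  # first whitespace-separated field is the hash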
@@ -1080,14 +1163,3 @@ def format_size(sz):
         unit += 1

     return "%.3g %sB" % (sz, UNITS[unit])
-
-
-@retry(interval=5, timeout=60, wait_on=RuntimeError)
-def _skopeo_inspect(url):
-    """Wrapper for running `skopeo inspect {url}` and parsing the output.
-    Retries on failure.
-    """
-    cp = subprocess.run(
-        ["skopeo", "inspect", url], stdout=subprocess.PIPE, check=True, encoding="utf-8"
-    )
-    return json.loads(cp.stdout)
@@ -306,8 +306,6 @@ class CompsWrapper(object):
         append_common_info(doc, group_node, group, force_description=True)
         append_bool(doc, group_node, "default", group.default)
         append_bool(doc, group_node, "uservisible", group.uservisible)
-        if group.display_order is not None:
-            append(doc, group_node, "display_order", str(group.display_order))

         if group.lang_only:
             append(doc, group_node, "langonly", group.lang_only)
@@ -88,12 +88,5 @@ def parse_output(output):
             packages.add((name, arch, frozenset(flags)))
         else:
             name, arch = nevra.rsplit(".", 1)
-            # replace dash by underscore in stream of module's nerva
-            # source of name looks like
-            # module:llvm-toolset:rhel8:8040020210411062713:9f9e2e7e.x86_64
-            name = ':'.join(
-                item.replace('-', '_') if i == 1 else item for
-                i, item in enumerate(name.split(':')[1:])
-            )
-            modules.add(name)
+            modules.add(name.split(":", 1)[1])
     return packages, modules
@@ -15,9 +15,9 @@


 import os
-import shlex
 from fnmatch import fnmatch
 import contextlib
+from six.moves import shlex_quote

 from kobo.shortcuts import force_list, relative_path, run
 from pungi import util
@@ -227,7 +227,7 @@ def get_checkisomd5_cmd(iso_path, just_print=False):

 def get_checkisomd5_data(iso_path, logger=None):
     cmd = get_checkisomd5_cmd(iso_path, just_print=True)
-    retcode, output = run(cmd, text=True, errors="replace")
+    retcode, output = run(cmd, universal_newlines=True)
     items = [line.strip().rsplit(":", 1) for line in output.splitlines()]
     items = dict([(k, v.strip()) for k, v in items])
     md5 = items.get(iso_path, "")
@@ -270,26 +270,26 @@ def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
                 tr -d "'" |
                 cut -c2- |
                 sort >> %s""" % (
-            shlex.quote(iso_name),
-            shlex.quote(output_file),
+            shlex_quote(iso_name),
+            shlex_quote(output_file),
         )
     else:
         return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
-            shlex.quote(iso_name),
-            shlex.quote(output_file),
+            shlex_quote(iso_name),
+            shlex_quote(output_file),
         )


 def get_volume_id(path, xorriso=False):
     if xorriso:
         cmd = ["xorriso", "-indev", path]
-        retcode, output = run(cmd, text=True, errors="replace")
+        retcode, output = run(cmd, universal_newlines=True)
         for line in output.splitlines():
             if line.startswith("Volume id"):
                 return line.split("'")[1]
     else:
         cmd = ["isoinfo", "-d", "-i", path]
-        retcode, output = run(cmd, text=True, errors="replace")
+        retcode, output = run(cmd, universal_newlines=True)

         for line in output.splitlines():
             line = line.strip()
@@ -500,7 +500,7 @@ def mount(image, logger=None, use_guestmount=True):
     else:
         env = {}
         cmd = ["mount", "-o", "loop", image, mount_dir]
-    ret, out = run(cmd, env=env, can_fail=True, text=True, errors="replace")
+    ret, out = run(cmd, env=env, can_fail=True, universal_newlines=True)
     if ret != 0:
         # The mount command failed, something is wrong.
         # Log the output and raise an exception.
@@ -516,21 +516,3 @@ def mount(image, logger=None, use_guestmount=True):
             util.run_unmount_cmd(["fusermount", "-u", mount_dir], path=mount_dir)
         else:
             util.run_unmount_cmd(["umount", mount_dir], path=mount_dir)
-
-
-def xorriso_commands(arch, input, output):
-    """List of xorriso commands to modify a bootable image."""
-    commands = [
-        ("-indev", input),
-        ("-outdev", output),
-        # isoinfo -J uses the Joliet tree, and it's used by virt-install
-        ("-joliet", "on"),
-        # Support long filenames in the Joliet trees. Repodata is particularly
-        # likely to run into this limit.
-        ("-compliance", "joliet_long_names"),
-        ("-boot_image", "any", "replay"),
-    ]
-    if arch == "ppc64le":
-        # This is needed for the image to be bootable.
-        commands.append(("-as", "mkisofs", "-U", "--"))
-    return commands
@@ -1,299 +0,0 @@
-import os
-import time
-from pathlib import Path
-
-from attr import dataclass
-from kobo.rpmlib import parse_nvra
-
-from pungi.module_util import Modulemd
-
-# just a random value which we don't
-# use in mock currently
-# originally builds are filtered by this value
-# to get consistent snapshot of tags and packages
-from pungi.scripts.gather_rpms import search_rpms
-
-LAST_EVENT_ID = 999999
-# last event time is not important but build
-# time should be less then it
-LAST_EVENT_TIME = time.time()
-BUILD_TIME = 0
-# virtual build that collects all
-# packages built for some arch
-RELEASE_BUILD_ID = 15270
-# tag that should have all packages available
-ALL_PACKAGES_TAG = 'dist-c8-compose'
-# tag that should have all modules available
-ALL_MODULES_TAG = 'dist-c8-module-compose'
-
-
-@dataclass
-class Module:
-    build_id: int
-    name: str
-    nvr: str
-    stream: str
-    version: str
-    context: str
-    arch: str
-
-
-class KojiMock:
-    """
-    Class that acts like real koji (for some needed methods)
-    but uses local storage as data source
-    """
-    def __init__(self, packages_dir, modules_dir, all_arches):
-        self._modules = self._gather_modules(modules_dir)
-        self._modules_dir = modules_dir
-        self._packages_dir = packages_dir
-        self._all_arches = all_arches
-
-    @staticmethod
-    def _gather_modules(modules_dir):
-        modules = {}
-        for index, (f, arch) in enumerate(
-            (sub_path.name, sub_path.parent.name)
-            for path in Path(modules_dir).glob('*')
-            for sub_path in path.iterdir()
-        ):
-            parsed = parse_nvra(f)
-            modules[index] = Module(
-                name=parsed['name'],
-                nvr=f,
-                version=parsed['release'],
-                context=parsed['arch'],
-                stream=parsed['version'],
-                build_id=index,
-                arch=arch,
-            )
-        return modules
-
-    @staticmethod
-    def getLastEvent(*args, **kwargs):
-        return {'id': LAST_EVENT_ID, 'ts': LAST_EVENT_TIME}
-
-    def listTagged(self, tag_name, *args, **kwargs):
-        """
-        Returns list of virtual 'builds' that contain packages by given tag
-        There are two kinds of tags: modular and distributive.
-        For now, only one kind, distributive one, is needed.
-        """
-        if tag_name != ALL_MODULES_TAG:
-            raise ValueError("I don't know what tag is %s" % tag_name)
-
-        builds = []
-        for module in self._modules.values():
-            builds.append({
-                'build_id': module.build_id,
-                'owner_name': 'centos',
-                'package_name': module.name,
-                'nvr': module.nvr,
-                'version': module.stream,
-                'release': '%s.%s' % (module.version, module.context),
-                'name': module.name,
-                'id': module.build_id,
-                'tag_name': tag_name,
-                'arch': module.arch,
-                # Following fields are currently not
-                # used but returned by real koji
-                # left them here just for reference
-                #
-                # 'task_id': None,
-                # 'state': 1,
-                # 'start_time': '2020-12-23 16:43:59',
-                # 'creation_event_id': 309485,
-                # 'creation_time': '2020-12-23 17:05:33.553748',
-                # 'epoch': None, 'tag_id': 533,
-                # 'completion_time': '2020-12-23 17:05:23',
-                # 'volume_id': 0,
-                # 'package_id': 3221,
-                # 'owner_id': 11,
-                # 'volume_name': 'DEFAULT',
-            })
-
-        return builds
-
-    @staticmethod
-    def getFullInheritance(*args, **kwargs):
-        """
-        Unneeded because we use local storage.
-        """
-        return []
-
-    def getBuild(self, build_id, *args, **kwargs):
-        """
-        Used to get information about build
-        (used in pungi only for modules currently)
-        """
-        module = self._modules[build_id]
-
-        result = {
-            'id': build_id,
-            'name': module.name,
-            'version': module.stream,
-            'release': '%s.%s' % (module.version, module.context),
-            'completion_ts': BUILD_TIME,
-            'state': 'COMPLETE',
-            'arch': module.arch,
-            'extra': {
-                'typeinfo': {
-                    'module': {
-                        'stream': module.stream,
-                        'version': module.version,
-                        'name': module.name,
-                        'context': module.context,
-                        'content_koji_tag': '-'.join([
-                            module.name,
-                            module.stream,
-                            module.version
-                        ]) + '.' + module.context
-                    }
-                }
-            }
-        }
-        return result
-
-    def listArchives(self, build_id, *args, **kwargs):
-        """
-        Originally lists artifacts for build, but in pungi used
-        only to get list of modulemd files for some module
-        """
-        module = self._modules[build_id]
-
-        return [
-            {
-                'build_id': module.build_id,
-                'filename': f'modulemd.{module.arch}.txt',
-                'btype': 'module'
-            },
-            # noone ever uses this file
-            # but it should be because pungi ignores builds
-            # with len(files) <= 1
-            {
-                'build_id': module.build_id,
-                'filename': 'modulemd.txt',
-                'btype': 'module'
-            }
-        ]
-
-    def listTaggedRPMS(self, tag_name, *args, **kwargs):
-        """
-        Get information about packages that are tagged by tag.
-        There are two kings of tags: per-module and per-distr.
-        """
-        if tag_name == ALL_PACKAGES_TAG:
-            builds, packages = self._get_release_packages()
-        else:
-            builds, packages = self._get_module_packages(tag_name)
-        return [
-            packages,
-            builds
-        ]
-
-    def _get_release_packages(self):
-        """
-        Search packages dir and keep only
-        packages that are non-modular.
-
-        This is quite the way how real koji works:
-        - modular packages are tagged by module-* tag
-        - all other packages are tagged with dist* tag
-        """
-        packages = []
-
-        # get all rpms in folder
-        rpms = search_rpms(Path(self._packages_dir))
-
-        for rpm in rpms:
-            info = parse_nvra(rpm.path.stem)
-            if 'module' in info['release']:
-                continue
-            packages.append({
-                "build_id": RELEASE_BUILD_ID,
-                "name": info['name'],
-                "extra": None,
-                "arch": info['arch'],
-                "epoch": info['epoch'] or None,
-                "version": info['version'],
-                "metadata_only": False,
-                "release": info['release'],
-                # not used currently
-                # "id": 262555,
-                # "size": 0
-            })
-        builds = []
-        return builds, packages
-
-    def _get_module_packages(self, tag_name):
-        """
-        Get list of builds for module and given module tag name.
-        """
-        builds = []
-        packages = []
-        modules = self._get_modules_by_name(tag_name)
-        for module in modules:
-            if module is None:
-                raise ValueError('Module %s is not found' % tag_name)
-            path = os.path.join(
-                self._modules_dir,
-                module.arch,
-                tag_name,
-            )
-
-            builds.append({
-                "build_id": module.build_id,
-                "package_name": module.name,
-                "nvr": module.nvr,
-                "tag_name": module.nvr,
-                "version": module.stream,
-                "release": module.version,
-                "id": module.build_id,
-                "name": module.name,
-                "volume_name": "DEFAULT",
-                # Following fields are currently not
-                # used but returned by real koji
-                # left them here just for reference
-                #
-                # "owner_name": "mbox-mbs-backend",
-                # "task_id": 195937,
-                # "state": 1,
-                # "start_time": "2020-12-22 19:20:12.504578",
-                # "creation_event_id": 306731,
-                # "creation_time": "2020-12-22 19:20:12.504578",
-                # "epoch": None,
-                # "tag_id": 1192,
-                # "completion_time": "2020-12-22 19:34:34.716615",
-                # "volume_id": 0,
-                # "package_id": 104,
-                # "owner_id": 6,
-            })
-
-            if os.path.exists(path):
-                info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
-                for art in info.get_rpm_artifacts():
-                    data = parse_nvra(art)
-                    packages.append({
-                        "build_id": module.build_id,
-                        "name": data['name'],
-                        "extra": None,
-                        "arch": data['arch'],
-                        "epoch": data['epoch'] or None,
-                        "version": data['version'],
-                        "metadata_only": False,
-                        "release": data['release'],
-                        "id": 262555,
-                        "size": 0
-                    })
-            else:
-                raise RuntimeError('Unable to find module %s' % path)
-        return builds, packages
-
-    def _get_modules_by_name(self, tag_name):
-        modules = []
-        for arch in self._all_arches:
-            for module in self._modules.values():
-                if module.nvr != tag_name or module.arch != arch:
-                    continue
-                modules.append(module)
-        return modules
@@ -14,27 +14,25 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-import configparser
 import contextlib
 import os
 import re
 import socket
-import shlex
 import shutil
 import time
 import threading
-import xmlrpc.client

 import requests

 import koji
 from kobo.shortcuts import run, force_list
+import six
+from six.moves import configparser, shlex_quote
+import six.moves.xmlrpc_client as xmlrpclib
 from flufl.lock import Lock
 from datetime import timedelta

-from .kojimock import KojiMock
 from .. import util
-from ..otel import tracing
 from ..arch_utils import getBaseArch
@@ -69,13 +67,13 @@ class KojiWrapper(object):
                 value = getattr(self.koji_module.config, key, None)
                 if value is not None:
                     session_opts[key] = value
-            self.koji_proxy = tracing.instrument_xmlrpc_proxy(
-                koji.ClientSession(self.koji_module.config.server, session_opts)
+            self.koji_proxy = koji.ClientSession(
+                self.koji_module.config.server, session_opts
             )

     # This retry should be removed once https://pagure.io/koji/issue/3170 is
     # fixed and released.
-    @util.retry(wait_on=(xmlrpc.client.ProtocolError, koji.GenericError))
+    @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
     def login(self):
         """Authenticate to the hub."""
         auth_type = self.koji_module.config.authtype
@@ -146,7 +144,7 @@ class KojiWrapper(object):
             cmd.append(arch)

         if isinstance(command, list):
-            command = " ".join([shlex.quote(i) for i in command])
+            command = " ".join([shlex_quote(i) for i in command])

         # HACK: remove rpmdb and yum cache
         command = (
@@ -154,7 +152,7 @@ class KojiWrapper(object):
         )

         if chown_paths:
-            paths = " ".join(shlex.quote(pth) for pth in chown_paths)
+            paths = " ".join(shlex_quote(pth) for pth in chown_paths)
             command += " ; EXIT_CODE=$?"
             # Make the files world readable
             command += " ; chmod -R a+r %s" % paths
@@ -288,38 +286,35 @@ class KojiWrapper(object):
         :return dict: {"retcode": 0, "output": "", "task_id": 1}
         """
         task_id = None
-        with tracing.span("run-runroot-cmd", command=command):
-            with self.get_koji_cmd_env() as env:
-                retcode, output = run(
-                    command,
-                    can_fail=True,
-                    logfile=log_file,
-                    show_cmd=True,
-                    env=env,
-                    buffer_size=-1,
-                    text=True,
-                    errors="replace",
-                )
+        with self.get_koji_cmd_env() as env:
+            retcode, output = run(
+                command,
+                can_fail=True,
+                logfile=log_file,
+                show_cmd=True,
+                env=env,
+                buffer_size=-1,
+                universal_newlines=True,
+            )

-            # Look for first line that contains only a number. This is the ID of
-            # the new task. Usually this should be the first line, but there may be
-            # warnings before it.
-            for line in output.splitlines():
-                match = re.search(r"^(\d+)$", line)
-                if match:
-                    task_id = int(match.groups()[0])
-                    break
+        # Look for first line that contains only a number. This is the ID of
+        # the new task. Usually this should be the first line, but there may be
+        # warnings before it.
+        for line in output.splitlines():
+            match = re.search(r"^(\d+)$", line)
+            if match:
+                task_id = int(match.groups()[0])
+                break

-            if not task_id:
-                raise RuntimeError(
-                    "Could not find task ID in output. Command '%s' returned '%s'."
-                    % (" ".join(command), output)
-                )
+        if not task_id:
+            raise RuntimeError(
+                "Could not find task ID in output. Command '%s' returned '%s'."
+                % (" ".join(command), output)
+            )

-            self.save_task_id(task_id)
-            tracing.set_attribute("task_id", task_id)
+        self.save_task_id(task_id)

-            retcode, output = self._wait_for_task(task_id, logfile=log_file)
+        retcode, output = self._wait_for_task(task_id, logfile=log_file)

         return {
             "retcode": retcode,
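The loop above scans the command output for the first line consisting solely of digits, since koji may print warnings before the task ID. A self-contained illustration of that parse:

import re

output = "Warning: profile not found\n12345\nWatching tasks...\n"
task_id = None
for line in output.splitlines():
    match = re.search(r"^(\d+)$", line)  # a line that is nothing but digits
    if match:
        task_id = int(match.group(1))
        break
assert task_id == 12345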
@@ -363,7 +358,7 @@ class KojiWrapper(object):
         for option, value in opts.items():
             if isinstance(value, list):
                 value = ",".join(value)
-            if not isinstance(value, str):
+            if not isinstance(value, six.string_types):
                 # Python 3 configparser will reject non-string values.
                 value = str(value)
             cfg_parser.set(section, option, value)
@@ -418,6 +413,92 @@ class KojiWrapper(object):

         return cmd

+    def get_create_image_cmd(
+        self,
+        name,
+        version,
+        target,
+        arch,
+        ks_file,
+        repos,
+        image_type="live",
+        image_format=None,
+        release=None,
+        wait=True,
+        archive=False,
+        specfile=None,
+        ksurl=None,
+    ):
+        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
+        # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>  # noqa: E501
+        # Examples:
+        #  * name: RHEL-7.0
+        #  * name: Satellite-6.0.1-RHEL-6
+        #  ** -<type>.<arch>
+        #  * version: YYYYMMDD[.n|.t].X
+        #  * release: 1
+
+        cmd = self._get_cmd()
+
+        if image_type == "live":
+            cmd.append("spin-livecd")
+        elif image_type == "appliance":
+            cmd.append("spin-appliance")
+        else:
+            raise ValueError("Invalid image type: %s" % image_type)
+
+        if not archive:
+            cmd.append("--scratch")
+
+        cmd.append("--noprogress")
+
+        if wait:
+            cmd.append("--wait")
+        else:
+            cmd.append("--nowait")
+
+        if specfile:
+            cmd.append("--specfile=%s" % specfile)
+
+        if ksurl:
+            cmd.append("--ksurl=%s" % ksurl)
+
+        if isinstance(repos, list):
+            for repo in repos:
+                cmd.append("--repo=%s" % repo)
+        else:
+            cmd.append("--repo=%s" % repos)
+
+        if image_format:
+            if image_type != "appliance":
+                raise ValueError("Format can be specified only for appliance images'")
+            supported_formats = ["raw", "qcow", "qcow2", "vmx"]
+            if image_format not in supported_formats:
+                raise ValueError(
+                    "Format is not supported: %s. Supported formats: %s"
+                    % (image_format, " ".join(sorted(supported_formats)))
+                )
+            cmd.append("--format=%s" % image_format)
+
+        if release is not None:
+            cmd.append("--release=%s" % release)
+
+        # IMPORTANT: all --opts have to be provided *before* args
+        # Usage:
+        # koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
+
+        cmd.append(name)
+        cmd.append(version)
+        cmd.append(target)
+
+        # i686 -> i386 etc.
+        arch = getBaseArch(arch)
+        cmd.append(arch)
+
+        cmd.append(ks_file)
+
+        return cmd
+
     def _has_connection_error(self, output):
         """Checks if output indicates connection error."""
         return re.search("error: failed to connect\n$", output)
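A hypothetical call showing the kind of argv the restored helper assembles (the values are illustrative, not taken from this diff):

# wrapper = KojiWrapper(compose)  # needs a compose with a configured koji_profile
# cmd = wrapper.get_create_image_cmd(
#     "Fedora-Live", "20240101.0", "f40-candidate", "x86_64",
#     "/path/to/livecd.ks", repos=["https://example.com/repo"],
# )
# -> [..., "spin-livecd", "--scratch", "--noprogress", "--wait",
#     "--repo=https://example.com/repo", "Fedora-Live", "20240101.0",
#     "f40-candidate", "x86_64", "/path/to/livecd.ks"]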
@@ -434,10 +515,9 @@ class KojiWrapper(object):
         attempt = 0

         while True:
-            with tracing.span("watch-task", task_id=task_id):
-                retcode, output = run(
-                    cmd, can_fail=True, logfile=logfile, text=True, errors="replace"
-                )
+            retcode, output = run(
+                cmd, can_fail=True, logfile=logfile, universal_newlines=True
+            )

             if retcode == 0 or not (
                 self._has_connection_error(output) or self._has_offline_error(output)
@@ -461,36 +541,33 @@ class KojiWrapper(object):
         its exit code and parsed task id. This method will block until the
         command finishes.
         """
-        with tracing.span("run-blocking-cmd", command=command):
-            with self.get_koji_cmd_env() as env:
-                retcode, output = run(
-                    command,
-                    can_fail=True,
-                    show_cmd=True,
-                    logfile=log_file,
-                    env=env,
-                    buffer_size=-1,
-                    text=True,
-                    errors="replace",
-                )
+        with self.get_koji_cmd_env() as env:
+            retcode, output = run(
+                command,
+                can_fail=True,
+                show_cmd=True,
+                logfile=log_file,
+                env=env,
+                buffer_size=-1,
+                universal_newlines=True,
+            )

-            match = re.search(r"Created task: (\d+)", output)
-            if not match:
-                raise RuntimeError(
-                    "Could not find task ID in output. Command '%s' returned '%s'."
-                    % (" ".join(command), output)
-                )
-            task_id = int(match.groups()[0])
-            tracing.set_attribute("task_id", task_id)
+        match = re.search(r"Created task: (\d+)", output)
+        if not match:
+            raise RuntimeError(
+                "Could not find task ID in output. Command '%s' returned '%s'."
+                % (" ".join(command), output)
+            )
+        task_id = int(match.groups()[0])

-            self.save_task_id(task_id)
+        self.save_task_id(task_id)

-            if retcode != 0 and (
-                self._has_connection_error(output) or self._has_offline_error(output)
-            ):
-                retcode, output = self._wait_for_task(
-                    task_id, logfile=log_file, max_retries=max_retries
-                )
+        if retcode != 0 and (
+            self._has_connection_error(output) or self._has_offline_error(output)
+        ):
+            retcode, output = self._wait_for_task(
+                task_id, logfile=log_file, max_retries=max_retries
+            )

         return {
             "retcode": retcode,
@@ -536,7 +613,6 @@ class KojiWrapper(object):
                 "createLiveMedia",
                 "createAppliance",
                 "createKiwiImage",
-                "imageBuilderBuildArch",
             ]:
                 continue
@@ -572,6 +648,126 @@ class KojiWrapper(object):

         return result

+    def get_image_path(self, task_id):
+        result = []
+        task_info_list = []
+        task_info_list.append(self.koji_proxy.getTaskInfo(task_id, request=True))
+        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))
+
+        # scan parent and child tasks for certain methods
+        task_info = None
+        for i in task_info_list:
+            if i["method"] in ("createAppliance", "createLiveCD", "createImage"):
+                task_info = i
+                break
+
+        scratch = task_info["request"][-1].get("scratch", False)
+        task_result = self.koji_proxy.getTaskResult(task_info["id"])
+        task_result.pop("rpmlist", None)
+
+        if scratch:
+            topdir = os.path.join(
+                self.koji_module.pathinfo.work(),
+                self.koji_module.pathinfo.taskrelpath(task_info["id"]),
+            )
+        else:
+            build = self.koji_proxy.getImageBuild(
+                "%(name)s-%(version)s-%(release)s" % task_result
+            )
+            build["name"] = task_result["name"]
+            build["version"] = task_result["version"]
+            build["release"] = task_result["release"]
+            build["arch"] = task_result["arch"]
+            topdir = self.koji_module.pathinfo.imagebuild(build)
+        for i in task_result["files"]:
+            result.append(os.path.join(topdir, i))
+        return result
+
+    def get_wrapped_rpm_path(self, task_id, srpm=False):
+        result = []
+        task_info_list = []
+        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))
+
+        # scan parent and child tasks for certain methods
+        task_info = None
+        for i in task_info_list:
+            if i["method"] in ("wrapperRPM"):
+                task_info = i
+                break
+
+        # Get results of wrapperRPM task
+        # {'buildroot_id': 2479520,
+        #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],
+        #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],
+        #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}
+        task_result = self.koji_proxy.getTaskResult(task_info["id"])
+
+        # Get koji dir with results (rpms, srpms, logs, ...)
+        topdir = os.path.join(
+            self.koji_module.pathinfo.work(),
+            self.koji_module.pathinfo.taskrelpath(task_info["id"]),
+        )
+
+        # TODO: Maybe use different approach for non-scratch
+        # builds - see get_image_path()
+
+        # Get list of filenames that should be returned
+        result_files = task_result["rpms"]
+        if srpm:
+            result_files += [task_result["srpm"]]
+
+        # Prepare list with paths to the required files
+        for i in result_files:
+            result.append(os.path.join(topdir, i))
+
+        return result
+
+    def get_signed_wrapped_rpms_paths(self, task_id, sigkey, srpm=False):
+        result = []
+        parent_task = self.koji_proxy.getTaskInfo(task_id, request=True)
+        task_info_list = []
+        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))
+
+        # scan parent and child tasks for certain methods
+        task_info = None
+        for i in task_info_list:
+            if i["method"] in ("wrapperRPM"):
+                task_info = i
+                break
+
+        # Check parent_task if it's scratch build
+        scratch = parent_task["request"][-1].get("scratch", False)
+        if scratch:
+            raise RuntimeError("Scratch builds cannot be signed!")
+
+        # Get results of wrapperRPM task
+        # {'buildroot_id': 2479520,
+        #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],
+        #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],
+        #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}
+        task_result = self.koji_proxy.getTaskResult(task_info["id"])
+
+        # Get list of filenames that should be returned
+        result_files = task_result["rpms"]
+        if srpm:
+            result_files += [task_result["srpm"]]
+
+        # Prepare list with paths to the required files
+        for i in result_files:
+            rpminfo = self.koji_proxy.getRPM(i)
+            build = self.koji_proxy.getBuild(rpminfo["build_id"])
+            path = os.path.join(
+                self.koji_module.pathinfo.build(build),
+                self.koji_module.pathinfo.signed(rpminfo, sigkey),
+            )
+            result.append(path)
+
+        return result
+
+    def get_build_nvrs(self, task_id):
+        builds = self.koji_proxy.listBuilds(taskID=task_id)
+        return [build.get("nvr") for build in builds if build.get("nvr")]
+
     def multicall_map(
         self, koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None
     ):
@@ -654,11 +850,11 @@ class KojiWrapper(object):

         return results

-    @util.retry(wait_on=(xmlrpc.client.ProtocolError, koji.GenericError))
+    @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
     def retrying_multicall_map(self, *args, **kwargs):
        """
        Retrying version of multicall_map. This tries to retry the Koji call
-        in case of koji.GenericError or xmlrpc.client.ProtocolError.
+        in case of koji.GenericError or xmlrpclib.ProtocolError.

        Please refer to koji_multicall_map for further specification of arguments.
        """
@@ -674,45 +870,6 @@ class KojiWrapper(object):
             pass


-class KojiMockWrapper(object):
-    lock = threading.Lock()
-
-    def __init__(self, compose, all_arches):
-        self.all_arches = all_arches
-        self.compose = compose
-        try:
-            self.profile = self.compose.conf["koji_profile"]
-        except KeyError:
-            raise RuntimeError("Koji profile must be configured")
-        with self.lock:
-            self.koji_module = koji.get_profile_module(self.profile)
-            session_opts = {}
-            for key in (
-                "timeout",
-                "keepalive",
-                "max_retries",
-                "retry_interval",
-                "anon_retry",
-                "offline_retry",
-                "offline_retry_interval",
-                "debug",
-                "debug_xmlrpc",
-                "serverca",
-                "use_fast_upload",
-            ):
-                value = getattr(self.koji_module.config, key, None)
-                if value is not None:
-                    session_opts[key] = value
-            self.koji_proxy = KojiMock(
-                packages_dir=self.koji_module.config.topdir,
-                modules_dir=os.path.join(
-                    self.koji_module.config.topdir,
-                    'modules',
-                ),
-                all_arches=self.all_arches,
-            )
-
-
 def get_buildroot_rpms(compose, task_id):
     """Get build root RPMs - either from runroot or local"""
     result = []
@@ -737,8 +894,7 @@ def get_buildroot_rpms(compose, task_id):
         # local
         retcode, output = run(
             "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
-            text=True,
-            errors="replace",
+            universal_newlines=True,
         )
         for i in output.splitlines():
             if not i:
@@ -898,8 +1054,7 @@ class KojiDownloadProxy:
             os.utime(destination_file)
             return destination_file

-        with tracing.span("download-rpm", url=url):
-            return self._atomic_download(url, destination_file, validator)
+        return self._atomic_download(url, destination_file, validator)

     def get_file(self, path, validator=None):
         """
@@ -46,7 +46,6 @@ class LoraxWrapper(object):
         skip_branding=False,
         squashfs_only=False,
         configuration_file=None,
-        rootfs_type=None,
     ):
         cmd = ["lorax"]
         cmd.append("--product=%s" % product)
@@ -107,9 +106,6 @@ class LoraxWrapper(object):
         output_dir = os.path.abspath(output_dir)
         cmd.append(output_dir)

-        if rootfs_type:
-            cmd.append("--rootfs-type=%s" % rootfs_type)
-
         # TODO: workdir

         return cmd
@@ -105,6 +105,85 @@ class PungiWrapper(object):

         kickstart.close()

+    def get_pungi_cmd(
+        self,
+        config,
+        destdir,
+        name,
+        version=None,
+        flavor=None,
+        selfhosting=False,
+        fulltree=False,
+        greedy=None,
+        nodeps=False,
+        nodownload=True,
+        full_archlist=False,
+        arch=None,
+        cache_dir=None,
+        lookaside_repos=None,
+        multilib_methods=None,
+        profiler=False,
+    ):
+        cmd = ["pungi"]
+
+        # Gather stage
+        cmd.append("-G")
+
+        # path to a kickstart file
+        cmd.append("--config=%s" % config)
+
+        # destdir is optional in Pungi (defaults to current dir), but
+        # want it mandatory here
+        cmd.append("--destdir=%s" % destdir)
+
+        # name
+        cmd.append("--name=%s" % name)
+
+        # version; optional, defaults to datestamp
+        if version:
+            cmd.append("--ver=%s" % version)
+
+        # rhel variant; optional
+        if flavor:
+            cmd.append("--flavor=%s" % flavor)
+
+        # turn selfhosting on
+        if selfhosting:
+            cmd.append("--selfhosting")
+
+        # NPLB
+        if fulltree:
+            cmd.append("--fulltree")
+
+        greedy = greedy or "none"
+        cmd.append("--greedy=%s" % greedy)
+
+        if nodeps:
+            cmd.append("--nodeps")
+
+        # don't download packages, just print paths
+        if nodownload:
+            cmd.append("--nodownload")
+
+        if full_archlist:
+            cmd.append("--full-archlist")
+
+        if arch:
+            cmd.append("--arch=%s" % arch)
+
+        if multilib_methods:
+            for i in multilib_methods:
+                cmd.append("--multilib=%s" % i)
+
+        if cache_dir:
+            cmd.append("--cachedir=%s" % cache_dir)
+
+        if lookaside_repos:
+            for i in lookaside_repos:
+                cmd.append("--lookaside-repo=%s" % i)
+
+        return cmd
+
     def get_pungi_cmd_dnf(
         self,
         config,
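A hypothetical call showing what the restored get_pungi_cmd assembles with default options (values are illustrative, not taken from this diff):

# PungiWrapper().get_pungi_cmd("comps.ks", "/tmp/out", "Fedora", arch="x86_64")
# -> ["pungi", "-G", "--config=comps.ks", "--destdir=/tmp/out",
#     "--name=Fedora", "--greedy=none", "--nodownload", "--arch=x86_64"]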
@@ -190,3 +269,70 @@ class PungiWrapper(object):
                     broken_deps.setdefault(match.group(2), set()).add(match.group(1))
 
         return packages, broken_deps, missing_comps
+
+    def run_pungi(
+        self,
+        ks_file,
+        destdir,
+        name,
+        selfhosting=False,
+        fulltree=False,
+        greedy="",
+        cache_dir=None,
+        arch="",
+        multilib_methods=[],
+        nodeps=False,
+        lookaside_repos=[],
+    ):
+        """
+        This is a replacement for get_pungi_cmd that runs it in-process. Not
+        all arguments are supported.
+        """
+        from .. import ks, gather, config
+
+        ksparser = ks.get_ksparser(ks_path=ks_file)
+        cfg = config.Config()
+        cfg.set("pungi", "destdir", destdir)
+        cfg.set("pungi", "family", name)
+        cfg.set("pungi", "iso_basename", name)
+        cfg.set("pungi", "fulltree", str(fulltree))
+        cfg.set("pungi", "selfhosting", str(selfhosting))
+        cfg.set("pungi", "cachedir", cache_dir)
+        cfg.set("pungi", "full_archlist", "True")
+        cfg.set("pungi", "workdirbase", "%s/work" % destdir)
+        cfg.set("pungi", "greedy", greedy)
+        cfg.set("pungi", "nosource", "False")
+        cfg.set("pungi", "nodebuginfo", "False")
+        cfg.set("pungi", "force", "False")
+        cfg.set("pungi", "resolve_deps", str(not nodeps))
+        if arch:
+            cfg.set("pungi", "arch", arch)
+        if multilib_methods:
+            cfg.set("pungi", "multilib", " ".join(multilib_methods))
+        if lookaside_repos:
+            cfg.set("pungi", "lookaside_repos", " ".join(lookaside_repos))
+
+        mypungi = gather.Pungi(cfg, ksparser)
+
+        with open(os.path.join(destdir, "out"), "w") as f:
+            with mypungi.yumlock:
+                mypungi._inityum()
+                mypungi.gather()
+
+                for line in mypungi.list_packages():
+                    flags_str = ",".join(line["flags"])
+                    if flags_str:
+                        flags_str = "(%s)" % flags_str
+                    f.write("RPM%s: %s\n" % (flags_str, line["path"]))
+                mypungi.makeCompsFile()
+                mypungi.getDebuginfoList()
+                for line in mypungi.list_debuginfo():
+                    flags_str = ",".join(line["flags"])
+                    if flags_str:
+                        flags_str = "(%s)" % flags_str
+                    f.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
+                for line in mypungi.list_srpms():
+                    flags_str = ",".join(line["flags"])
+                    if flags_str:
+                        flags_str = "(%s)" % flags_str
+                    f.write("SRPM%s: %s\n" % (flags_str, line["path"]))
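run_pungi, per its docstring, performs the same gather in-process instead of shelling out: it parses the kickstart, configures gather.Pungi, and writes the RPM/DEBUGINFO/SRPM listings to a file named out under destdir. A usage sketch with hypothetical paths:

    wrapper = PungiWrapper()
    wrapper.run_pungi(
        ks_file="/tmp/fedora.ks",
        destdir="/mnt/compose",
        name="Fedora",
        nodeps=True,
    )
    # the caller then reads /mnt/compose/out, which contains one
    # "RPM: <path>" / "DEBUGINFO: <path>" / "SRPM: <path>" line per package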
@@ -19,8 +19,13 @@ import os
 from kobo.shortcuts import force_list
 
 
-def get_repoclosure_cmd(backend="dnf", arch=None, repos=None, lookaside=None):
+def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
     cmds = {
+        "yum": {
+            "cmd": ["/usr/bin/repoclosure", "--tempcache"],
+            "repoarg": "--repoid=%s",
+            "lookaside": "--lookaside=%s",
+        },
         "dnf": {
             "cmd": ["dnf", "repoclosure"],
             "repoarg": "--repo=%s",
@@ -39,17 +44,18 @@ def get_repoclosure_cmd(backend="dnf", arch=None, repos=None, lookaside=None):
     for i in arches:
         cmd.append("--arch=%s" % i)
 
-    if arches:
+    if backend == "dnf" and arches:
         cmd.append("--forcearch=%s" % arches[0])
 
     repos = repos or {}
     for repo_id, repo_path in repos.items():
         cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
         cmd.append(cmds[backend]["repoarg"] % repo_id)
-        # For dnf we want to add all repos with the --repo option (which
-        # enables only those and not any system repo), and the repos to
-        # check are also listed with the --check option.
-        cmd.append("--check=%s" % repo_id)
+        if backend == "dnf":
+            # For dnf we want to add all repos with the --repo option (which
+            # enables only those and not any system repo), and the repos to
+            # check are also listed with the --check option.
+            cmd.append("--check=%s" % repo_id)
 
     lookaside = lookaside or {}
     for repo_id, repo_path in lookaside.items():
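To make the dnf branch concrete, here is roughly the command list the builder produces for a single repo; the repo id and path are made up, and this assumes _to_url turns a local path into a file:// URL, as its name suggests:

    cmd = get_repoclosure_cmd(
        backend="dnf",
        arch="x86_64",
        repos={"Server": "/mnt/compose/Server/os"},
    )
    # cmd is roughly:
    # ['dnf', 'repoclosure', '--arch=x86_64', '--forcearch=x86_64',
    #  '--repofrompath=Server,file:///mnt/compose/Server/os',
    #  '--repo=Server', '--check=Server']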
@@ -19,16 +19,16 @@ from __future__ import absolute_import
 import os
 import shutil
 import glob
-import shlex
+import six
 import threading
-from urllib.request import urlretrieve
+from six.moves import shlex_quote
+from six.moves.urllib.request import urlretrieve
 from fnmatch import fnmatch
 
 import kobo.log
 from kobo.shortcuts import run, force_list
 from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
 from .kojiwrapper import KojiWrapper
-from ..otel import tracing
 
 lock = threading.Lock()
 
@@ -57,8 +57,7 @@ class ScmBase(kobo.log.LoggingBase):
             workdir=cwd,
             can_fail=True,
             stdin_data="",
-            text=True,
-            errors="replace",
+            universal_newlines=True,
         )
         if retcode != 0:
             self.log_error("Output was: %r" % output)
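The text=True, errors="replace" to universal_newlines=True substitution repeats throughout this compare and is the same Python 2 compatibility theme as the six imports: the flags end up in the subprocess machinery, where text only exists on Python 3.7+ while universal_newlines is accepted on both major versions, and the two are aliases on modern Python. Note the 4.6 side also drops errors="replace", so undecodable output can still raise there. Illustrated with plain subprocess:

    import subprocess

    # Both spellings request str output instead of bytes (Python 3.7+).
    a = subprocess.run(["echo", "hi"], capture_output=True, text=True)
    b = subprocess.run(["echo", "hi"], capture_output=True, universal_newlines=True)
    assert a.stdout == b.stdout == "hi\n"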
@@ -80,7 +79,7 @@ class FileWrapper(ScmBase):
         for i in dirs:
             copy_all(i, target_dir)
 
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
+    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
         if scm_root:
             raise ValueError("FileWrapper: 'scm_root' should be empty.")
         self.log_debug(
@@ -119,7 +118,7 @@ class CvsWrapper(ScmBase):
         )
         copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
 
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
+    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
         scm_file = scm_file.lstrip("/")
         scm_branch = scm_branch or "HEAD"
         with temp_dir() as tmp_dir:
@@ -161,9 +160,6 @@ class GitWrapper(ScmBase):
         if "://" not in repo:
             repo = "file://%s" % repo
 
-        if repo.startswith("git+http"):
-            repo = repo[4:]
-
         git_cmd = ["git"]
         if "credential_helper" in self.options:
             git_cmd.extend(["-c", "credential.useHttpPath=true"])
@@ -202,17 +198,6 @@ class GitWrapper(ScmBase):
                 copy_all(destdir, debugdir)
             raise
 
-        if os.path.exists(os.path.join(destdir, ".gitmodules")):
-            try:
-                self.log_debug("Cloning submodules")
-                run(["git", "submodule", "init"], workdir=destdir)
-                run(["git", "submodule", "update"], workdir=destdir)
-            except RuntimeError as e:
-                self.log_error(
-                    "Failed to clone submodules: %s %s", e, getattr(e, "output", "")
-                )
-                # Ignore the error here, there may just be no submodules.
-
     def get_temp_repo_path(self, scm_root, scm_branch):
         scm_repo = scm_root.split("/")[-1]
         process_id = os.getpid()
@@ -230,8 +215,7 @@ class GitWrapper(ScmBase):
         tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
         if not os.path.isdir(tmp_dir):
             makedirs(tmp_dir)
-            with tracing.span("git-clone", repo=scm_root, ref=scm_branch):
-                self._clone(scm_root, scm_branch, tmp_dir)
+            self._clone(scm_root, scm_branch, tmp_dir)
             self.run_process_command(tmp_dir)
         return tmp_dir
 
@@ -249,7 +233,7 @@ class GitWrapper(ScmBase):
 
         copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
 
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
+    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
         scm_file = scm_file.lstrip("/")
         scm_branch = scm_branch or "master"
 
@@ -290,12 +274,12 @@ class RpmScmWrapper(ScmBase):
             run(
                 "cp -a %s %s/"
                 % (
-                    shlex.quote(os.path.join(tmp_dir, scm_dir)),
-                    shlex.quote(target_dir),
+                    shlex_quote(os.path.join(tmp_dir, scm_dir)),
+                    shlex_quote(target_dir),
                 )
             )
 
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
+    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
         for rpm in self._list_rpms(scm_root):
             scm_file = scm_file.lstrip("/")
             with temp_dir() as tmp_dir:
@@ -320,7 +304,7 @@ class KojiScmWrapper(ScmBase):
     def export_dir(self, *args, **kwargs):
         raise RuntimeError("Only files can be exported from Koji")
 
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
+    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
         if scm_branch:
             self._get_latest_from_tag(scm_branch, scm_root, scm_file, target_dir)
         else:
@@ -357,44 +341,6 @@ class KojiScmWrapper(ScmBase):
         urlretrieve(url, target_file)
 
 
-class SkopeoCopyTimeoutError(RuntimeError):
-    pass
-
-
-class ContainerImageScmWrapper(ScmBase):
-
-    def export_dir(self, *args, **kwargs):
-        raise RuntimeError("Containers can only be exported as files")
-
-    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
-        if arch == "src":
-            return
-        ARCHES = {"aarch64": "arm64", "x86_64": "amd64"}
-        arch = ARCHES.get(arch, arch)
-        cmd = [
-            "skopeo",
-            "--override-arch=" + arch,
-            "copy",
-            scm_root,
-            "oci:" + target_dir,
-            "--remove-signatures",
-        ]
-        try:
-            self.log_debug(
-                "Exporting container %s to %s: %s", scm_root, target_dir, cmd
-            )
-            with tracing.span("skopeo-copy", arch=arch, image=scm_root):
-                self.retry_run(cmd, can_fail=False)
-        except RuntimeError as e:
-            output = getattr(e, "output", "")
-            self.log_error("Failed to copy container image: %s %s", e, output)
-
-            if "connection timed out" in output:
-                raise SkopeoCopyTimeoutError(output) from e
-
-            raise
-
-
 def _get_wrapper(scm_type, *args, **kwargs):
     SCM_WRAPPERS = {
         "file": FileWrapper,
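For reference, this is the command the removed ContainerImageScmWrapper.export_file assembles, written out as the Python list it builds; the image reference is hypothetical, and arm64 appears because the wrapper first maps Pungi's "aarch64" to skopeo's name for it:

    cmd = [
        "skopeo",
        "--override-arch=arm64",
        "copy",
        "docker://registry.example.com/os/toolbox:latest",  # scm_root (made up)
        "oci:/work/target_dir",                             # "oci:" + target_dir
        "--remove-signatures",
    ]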
@@ -402,7 +348,6 @@ def _get_wrapper(scm_type, *args, **kwargs):
         "git": GitWrapper,
         "rpm": RpmScmWrapper,
         "koji": KojiScmWrapper,
-        "container-image": ContainerImageScmWrapper,
     }
     try:
         cls = SCM_WRAPPERS[scm_type]
@@ -411,7 +356,7 @@ def _get_wrapper(scm_type, *args, **kwargs):
     return cls(*args, **kwargs)
 
 
-def get_file_from_scm(scm_dict, target_path, compose=None, arch=None):
+def get_file_from_scm(scm_dict, target_path, compose=None):
     """
     Copy one or more files from source control to a target path. A list of files
     created in ``target_path`` is returned.
@@ -442,7 +387,7 @@ def get_file_from_scm(scm_dict, target_path, compose=None, arch=None):
     >>> get_file_from_scm(scm_dict, target_path)
     ['/tmp/path/share/variants.dtd']
     """
-    if isinstance(scm_dict, str):
+    if isinstance(scm_dict, six.string_types):
         scm_type = "file"
         scm_repo = None
         scm_file = os.path.abspath(scm_dict)
@@ -465,18 +410,8 @@ def get_file_from_scm(scm_dict, target_path, compose=None, arch=None):
     files_copied = []
     for i in force_list(scm_file):
         with temp_dir(prefix="scm_checkout_") as tmp_dir:
-            # Most SCM wrappers need a temporary directory: the git repo is
-            # cloned there, and only relevant files are copied out. But this
-            # doesn't work for the container image fetching. That pulls in only
-            # required files, and the final output needs to be done by skopeo
-            # to correctly handle multiple containers landing in the same OCI
-            # archive.
-            dest = target_path if scm_type == "container-image" else tmp_dir
-            scm.export_file(
-                scm_repo, i, scm_branch=scm_branch, target_dir=dest, arch=arch
-            )
-            if dest == tmp_dir:
-                files_copied += copy_all(tmp_dir, target_path)
+            scm.export_file(scm_repo, i, scm_branch=scm_branch, target_dir=tmp_dir)
+            files_copied += copy_all(tmp_dir, target_path)
     return files_copied
 
 
@@ -515,7 +450,7 @@ def get_file(source, destination, compose, overwrite=False):
     return destination
 
 
-def get_dir_from_scm(scm_dict, target_path, compose=None, arch=None):
+def get_dir_from_scm(scm_dict, target_path, compose=None):
     """
     Copy a directory from source control to a target path. A list of files
     created in ``target_path`` is returned.
@@ -545,7 +480,7 @@ def get_dir_from_scm(scm_dict, target_path, compose=None, arch=None):
     >>> get_dir_from_scm(scm_dict, target_path)
     ['/tmp/path/share/variants.dtd', '/tmp/path/share/rawhide-fedora.ks', ...]
     """
-    if isinstance(scm_dict, str):
+    if isinstance(scm_dict, six.string_types):
         scm_type = "file"
         scm_repo = None
         scm_dir = os.path.abspath(scm_dict)
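The doctest examples in these docstrings show the plain-string form of scm_dict; for an SCM-backed source the dict spells out the wrapper type, repository, branch, and path, along these lines (the repository URL is hypothetical):

    scm_dict = {
        "scm": "git",
        "repo": "https://pagure.io/pungi-fedora.git",
        "branch": "master",
        "dir": "share",
    }
    get_dir_from_scm(scm_dict, "/tmp/path")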
@@ -15,8 +15,8 @@
 from kobo import shortcuts
 import os
 import productmd
-import shlex
 import tempfile
+from six.moves import shlex_quote
 
 from pungi import util
 from pungi.phases.buildinstall import tweak_configs
@@ -24,8 +24,8 @@ from pungi.wrappers import iso
 
 
 def sh(log, cmd, *args, **kwargs):
-    log.info("Running: %s", " ".join(shlex.quote(x) for x in cmd))
-    ret, out = shortcuts.run(cmd, *args, text=True, errors="replace", **kwargs)
+    log.info("Running: %s", " ".join(shlex_quote(x) for x in cmd))
+    ret, out = shortcuts.run(cmd, *args, universal_newlines=True, **kwargs)
     if out:
         log.debug("%s", out)
     return ret, out
@@ -35,8 +35,7 @@ def get_lorax_dir(default="/usr/share/lorax"):
     try:
         _, out = shortcuts.run(
             ["python3", "-c" "import pylorax; print(pylorax.find_templates())"],
-            text=True,
-            errors="replace",
+            universal_newlines=True,
         )
         return out.strip()
     except Exception:
@@ -394,8 +394,7 @@ class UnifiedISO(object):
             iso.get_mkisofs_cmd(
                 iso_path, [source_dir], volid=volid, exclude=["./lost+found"]
             ),
-            text=True,
-            errors="replace",
+            universal_newlines=True,
        )
 
         # implant MD5
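One detail in the get_lorax_dir hunk that is identical on both sides: ["python3", "-c" "import pylorax; ..."] has no comma after "-c", so Python's adjacent-string-literal concatenation fuses the two literals into a single argument. That still works because the python3 binary accepts program text glued directly onto -c; it is a quirk of the original source, not a transcription error:

    args = ["python3", "-c" "import pylorax; print(pylorax.find_templates())"]
    assert args == ["python3", "-cimport pylorax; print(pylorax.find_templates())"]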
@@ -1,6 +1,9 @@
 # Some packages must be installed via dnf/yum first, see doc/contributing.rst
 dict.sorted
 dogpile.cache
-flufl.lock
+flufl.lock ; python_version >= '3.0'
+flufl.lock < 3.0 ; python_version <= '2.7'
+funcsigs
 jsonschema
 kobo
 koji
@@ -11,3 +14,4 @@ ordered_set
 productmd
 pykickstart
 python-multilib
+urlgrabber
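The ; suffixes introduced here are PEP 508 environment markers: pip evaluates the condition against the installing interpreter and silently skips any requirement whose marker is false, which is how one requirements.txt can pin flufl.lock < 3.0 for Python 2.7 while leaving Python 3 unpinned. The same evaluation is available programmatically via the packaging library:

    from packaging.markers import Marker

    assert Marker("python_version >= '3.0'").evaluate()       # True on Python 3
    assert not Marker("python_version <= '2.7'").evaluate()   # False on Python 3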
23 setup.py

@@ -20,7 +20,7 @@ packages = sorted(packages)
 
 setup(
     name="pungi",
-    version="4.10.1",
+    version="4.6.3",
     description="Distribution compose tool",
     url="https://pagure.io/pungi",
     author="Dennis Gilmore",
@@ -30,6 +30,7 @@ setup(
     entry_points={
         "console_scripts": [
             "comps_filter = pungi.scripts.comps_filter:main",
+            "pungi = pungi.scripts.pungi:main",
             "pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
             "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
             "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
@@ -41,27 +42,25 @@ setup(
             "pungi-config-dump = pungi.scripts.config_dump:cli_main",
             "pungi-config-validate = pungi.scripts.config_validate:cli_main",
             "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
-            "pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
-            "pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
-            "pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main",  # noqa: E501
-            "pungi-create-extra-repo = pungi.scripts.create_extra_repo:cli_main"
         ]
     },
     scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
     data_files=[
-        ("lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
-        ("share/pungi", glob.glob("share/*.xsl")),
-        ("share/pungi", glob.glob("share/*.ks")),
-        ("share/pungi", glob.glob("share/*.dtd")),
-        ("share/pungi/multilib", glob.glob("share/multilib/*")),
+        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
+        ("/usr/share/pungi", glob.glob("share/*.xsl")),
+        ("/usr/share/pungi", glob.glob("share/*.ks")),
+        ("/usr/share/pungi", glob.glob("share/*.dtd")),
+        ("/usr/share/pungi/multilib", glob.glob("share/multilib/*")),
     ],
     test_suite="tests",
     install_requires=[
         "jsonschema",
         "kobo",
         "lxml",
-        "productmd>=1.45",
+        "productmd>=1.23",
+        "six",
         "dogpile.cache",
     ],
-    tests_require=["pytest", "pytest-cov", "pyfakefs"],
+    extras_require={':python_version=="2.7"': ["enum34", "lockfile"]},
+    tests_require=["pytest", "pytest-cov"],
 )
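On the data_files change: relative paths (the master side) are resolved against the installation prefix at install time, so share/pungi still lands in /usr/share/pungi for a system install but also relocates into a virtualenv or wheel, whereas the absolute /usr/... paths kept on the 4.6 branch are written verbatim and only suit system packages. A toy setup() showing the relative form:

    from setuptools import setup

    setup(
        name="example",
        data_files=[
            # resolved against sys.prefix, e.g. /usr/share/example
            # or <venv>/share/example
            ("share/example", ["share/example.dtd"]),
        ],
    )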
1 sources

@@ -1 +0,0 @@
-SHA512 (pungi-4.10.1.tar.bz2) = 4ff1005ece77ac9b41ac31c3b0bcdd558afaaea4d99bf178d42b24a4318ccc9a5576ad4740446f1589a07f88f59f5cb4954d182f3f4e15b1a798e19d9a54fb22
@@ -1,3 +1,5 @@
+mock; python_version < '3.3'
 parameterized
 pytest
 pytest-cov
+unittest2; python_version < '3.0'
@@ -6,7 +6,6 @@ LABEL \
     license="MIT"
 
 RUN dnf -y update && dnf -y install \
-    --setopt=install_weak_deps=false \
     findutils \
     libmodulemd \
     git \
@@ -16,7 +15,6 @@ RUN dnf -y update && dnf -y install \
     python3-gobject-base \
     python3-tox \
     python3-urlgrabber \
-    python3-dnf \
     && dnf clean all
 
 WORKDIR /src
27 tests/Dockerfile-test-py2 Normal file

@@ -0,0 +1,27 @@
+FROM centos:7
+LABEL \
+    name="Pungi test" \
+    description="Run tests using tox with Python 2" \
+    vendor="Pungi developers" \
+    license="MIT"
+
+RUN yum -y update && yum -y install epel-release && yum -y install \
+    git \
+    libmodulemd2 \
+    make \
+    python3 \
+    python-createrepo_c \
+    python-gobject-base \
+    python-gssapi \
+    python-libcomps \
+    pykickstart \
+    && yum clean all
+
+# python-tox in yum repo is too old, let's install latest version
+RUN pip3 install tox
+
+WORKDIR /src
+
+COPY . .
+
+CMD ["tox", "-e", "py27"]
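This new image matches the Jenkinsfile change below: CI runs the prebuilt quay.io/exd-guild-compose/pungi-test-py2 image with the checkout mounted at /src and executes tox -r -e py27 in it. A rough local equivalent would be podman build -f tests/Dockerfile-test-py2 -t pungi-test-py2 . followed by podman run --rm -v .:/src:Z pungi-test-py2 (the local image tag is arbitrary).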
4 tests/Jenkinsfile vendored

@@ -1,3 +1,5 @@
+def DUFFY_SESSION_ID
+
 pipeline {
     agent {
         label 'cico-workspace'
@@ -15,7 +17,6 @@ pipeline {
                 if (params.REPO == "" || params.BRANCH == "") {
                     error "Please supply both params (REPO and BRANCH)"
                 }
-                def DUFFY_SESSION_ID
                 try {
                     echo "Requesting duffy node ..."
                     def session_str = sh returnStdout: true, script: "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY request-session pool=virt-ec2-t2-centos-9s-x86_64,quantity=1"
@@ -39,6 +40,7 @@ git fetch proposed
 git checkout origin/master
 git merge --no-ff "proposed/$params.BRANCH" -m "Merge PR"
 podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit
+podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test-py2 tox -r -e py27
 """
 sh "cat job.sh"
 sh "ssh -o StrictHostKeyChecking=no root@$hostname mkdir $remote_dir"
@@ -35,11 +35,6 @@ for spec in $DIR/*.spec; do
    if [ "$(basename $spec)" == "dummy-skype.spec" ]; then
        continue
    fi
-   if [ "$(basename $spec)" == "dummy-fcoe-target-utils.spec" ]; then
-       if [ "$target" == "ppc" -o "$target" == "s390" -o "$target" == "s390x" ]; then
-           continue
-       fi
-   fi
    echo "Building ${spec/.spec/} for $target"
    rpmbuild --quiet --target=$target -ba --nodeps --define "_srcrpmdir $DIR/../repo/src" --define "_rpmdir $DIR/../repo" $spec
 done
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
-  <revision>1612479076</revision>
-  <data type="primary">
-    <checksum type="sha256">08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb</checksum>
-    <open-checksum type="sha256">2a15e618f049a883d360ccbf3e764b30640255f47dc526c633b1722fe23cbcbc</open-checksum>
-    <location href="repodata/08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb-primary.xml.gz"/>
-    <timestamp>1612479075</timestamp>
-    <size>1240</size>
-    <open-size>3888</open-size>
-  </data>
-  <data type="filelists">
-    <checksum type="sha256">e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f</checksum>
-    <open-checksum type="sha256">b1782bc4207a5b7c3e64115d5a1d001802e8d363f022ea165df7cdab6f14651c</open-checksum>
-    <location href="repodata/e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f-filelists.xml.gz"/>
-    <timestamp>1612479075</timestamp>
-    <size>439</size>
-    <open-size>1295</open-size>
-  </data>
-  <data type="other">
-    <checksum type="sha256">92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1</checksum>
-    <open-checksum type="sha256">3b847919691ad32279b13463de6c08f1f8b32f51e87b7d8d7e95a3ec2f46ef51</open-checksum>
-    <location href="repodata/92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1-other.xml.gz"/>
-    <timestamp>1612479075</timestamp>
-    <size>630</size>
-    <open-size>1911</open-size>
-  </data>
-  <data type="modules">
-    <checksum type="sha256">e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57</checksum>
-    <open-checksum type="sha256">d59fee86c18018cc18bb7325aa74aa0abf923c64d29a4ec45e08dcd01a0c3966</open-checksum>
-    <location href="repodata/e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57-modules.yaml.gz"/>
-    <timestamp>1612479075</timestamp>
-    <size>920</size>
-    <open-size>3308</open-size>
-  </data>
-</repomd>
@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
-  <revision>1666177486</revision>
-  <data type="primary">
-    <checksum type="sha256">89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576</checksum>
-    <open-checksum type="sha256">07255d9856f7531b52a6459f6fc7701c6d93c6d6c29d1382d83afcc53f13494a</open-checksum>
-    <location href="repodata/89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576-primary.xml.gz"/>
-    <timestamp>1666177486</timestamp>
-    <size>1387</size>
-    <open-size>6528</open-size>
-  </data>
-  <data type="filelists">
-    <checksum type="sha256">f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d</checksum>
-    <open-checksum type="sha256">c2e1e674d7d48bccaa16cae0a5f70cb55ef4cd7352b4d9d4fdaa619075d07dbc</open-checksum>
-    <location href="repodata/f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d-filelists.xml.gz"/>
-    <timestamp>1666177486</timestamp>
-    <size>1252</size>
-    <open-size>5594</open-size>
-  </data>
-  <data type="other">
-    <checksum type="sha256">b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f</checksum>
-    <open-checksum type="sha256">9ce24c526239e349d023c577b2ae3872c8b0f1888aed1fb24b9b9aa12063fdf3</open-checksum>
-    <location href="repodata/b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f-other.xml.gz"/>
-    <timestamp>1666177486</timestamp>
-    <size>999</size>
-    <open-size>6320</open-size>
-  </data>
-  <data type="primary_db">
-    <checksum type="sha256">ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7</checksum>
-    <open-checksum type="sha256">2bce9554ce4496cef34b5cd69f186f7f3143c7cabae8fa384fc5c9eeab326f7f</open-checksum>
-    <location href="repodata/ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7-primary.sqlite.bz2"/>
-    <timestamp>1666177486</timestamp>
-    <size>3558</size>
-    <open-size>106496</open-size>
-    <database_version>10</database_version>
-  </data>
-  <data type="filelists_db">
-    <checksum type="sha256">8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b</checksum>
-    <open-checksum type="sha256">f7001d1df7f5f7e4898919b15710bea8ed9711ce42faf68e22b757e63169b1fb</open-checksum>
-    <location href="repodata/8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b-filelists.sqlite.bz2"/>
-    <timestamp>1666177486</timestamp>
-    <size>2360</size>
-    <open-size>28672</open-size>
-    <database_version>10</database_version>
-  </data>
-  <data type="other_db">
-    <checksum type="sha256">01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80</checksum>
-    <open-checksum type="sha256">07f5b9750af1e440d37ca216e719dd288149e79e9132f2fdccb6f73b2e5dd541</open-checksum>
-    <location href="repodata/01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80-other.sqlite.bz2"/>
-    <timestamp>1666177486</timestamp>
-    <size>2196</size>
-    <open-size>32768</open-size>
-    <database_version>10</database_version>
-  </data>
-</repomd>
Some files were not shown because too many files have changed in this diff.