Compare commits: pungi-4.7. ... master (402 commits)
1860.patch (new file, 25 lines)

@@ -0,0 +1,25 @@
From 3bd28f97b2991cf4e3b4ce9ce34c80cba2bf21ab Mon Sep 17 00:00:00 2001
From: Lubomír Sedlář <lsedlar@redhat.com>
Date: Aug 08 2025 11:54:39 +0000
Subject: repoclosure: Don't fail if cache doesn't exist


Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>

---

diff --git a/pungi/phases/repoclosure.py b/pungi/phases/repoclosure.py
index 1d3fad0..398802f 100644
--- a/pungi/phases/repoclosure.py
+++ b/pungi/phases/repoclosure.py
@@ -136,6 +136,9 @@ def _delete_repoclosure_cache_dirs(compose):
         pass

     for top_cache_dir in cache_dirs:
+        if not os.path.isdir(top_cache_dir):
+            # Skip if the cache doesn't exist.
+            continue
         for name in os.listdir(top_cache_dir):
             if name.startswith(compose.compose_id):
                 cache_path = os.path.join(top_cache_dir, name)
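The fix guards the cleanup loop: ``os.listdir`` raises ``FileNotFoundError`` when a cache directory was never created, which previously aborted the cleanup. A standalone sketch of the guarded pattern (paths are illustrative, not Pungi defaults)::

    import os

    # Illustrative cache locations; a compose may finish without creating them.
    cache_dirs = ["/tmp/pungi-cache/a", "/tmp/pungi-cache/b"]

    for top_cache_dir in cache_dirs:
        if not os.path.isdir(top_cache_dir):
            # Without this check, os.listdir() raises FileNotFoundError
            # for a missing directory and the whole cleanup fails.
            continue
        for name in os.listdir(top_cache_dir):
            print("inspecting", os.path.join(top_cache_dir, name))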
doc/_static/phases.svg (vendored, 512 lines changed; 23 KiB before, 22 KiB after)

The SVG source diff amounts to a regeneration of the phases diagram with
Inkscape 1.4 (previously 1.3.2): the canvas grows from roughly 610×327 to
698×367 units, the LiveImages box is removed, an OSTreeContainer box is
added, and the existing phase boxes (Pkgset, Buildinstall, Gather,
ExtraFiles, Createrepo, OSTree, OSTreeInstaller, ImageChecksum,
ImageContainer, OSBS, Repoclosure, KiwiBuild, ExtraIsos, Createiso,
LiveMedia) are repositioned on the wider canvas.
@@ -51,9 +51,9 @@ copyright = "2016, Red Hat, Inc."
 # built documents.
 #
 # The short X.Y version.
-version = "4.7"
+version = "4.10"
 # The full version, including alpha/beta/rc tags.
-release = "4.7.0"
+release = "4.10.1"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -629,6 +629,10 @@ Options
     * ``squashfs_only`` -- *bool* (default ``False``) pass the --squashfs_only to Lorax.
     * ``configuration_file`` -- (:ref:`scm_dict <scm_support>`) (default empty) pass the
       specified configuration file to Lorax using the -c option.
+    * ``rootfs_type`` -- *string* (default empty) pass the ``--rootfs-type``
+      option to Lorax with the provided value. If not specified, no type is
+      specified to Lorax, which will choose whatever default it is configured
+      with.

 **lorax_extra_sources**
     (*list*) -- a variant/arch mapping with urls for extra source repositories
     added to Lorax command line. Either one repo or a list can be specified.
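As a configuration sketch, the new knob slots into ``lorax_options`` like this (the variant regex and value are illustrative, not taken from the diff)::

    # Illustrative lorax_options entry exercising the new rootfs_type knob.
    lorax_options = [
        ("^Server$", {
            "*": {
                "rootfs_type": "squashfs",  # forwarded to Lorax as --rootfs-type
            }
        }),
    ]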
@@ -1003,6 +1007,8 @@ Example
     to track decisions.


+.. _koji-settings:
+
 Koji Settings
 =============

@@ -1017,6 +1023,11 @@ Options
     to set up your Koji client profile. In the examples, the profile name is
     "koji", which points to Fedora's koji.fedoraproject.org.

+**koji_cache**
+    (*str*) -- koji cache directory. Setting this causes Pungi to download
+    packages over HTTP into a cache, which is used in lieu of the Koji profile's
+    ``topdir`` setting. See :doc:`koji` for details on this behavior.
+
 **global_runroot_method**
     (*str*) -- global runroot method to use. If ``runroot_method`` is set
     per Pungi phase using a dictionary, this option defines the default
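Turning the cache on only needs the new option in the compose configuration; a minimal sketch (profile name and path are illustrative)::

    # Illustrative compose configuration enabling the HTTP download cache.
    koji_profile = "koji"
    # Packages are downloaded over HTTP into this directory and consumed
    # from there instead of from the Koji volume's topdir.
    koji_cache = "/mnt/compose/cache"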
@@ -1280,7 +1291,7 @@ Options
     (*int|str*) -- how much free space should be left on each disk. The format
     is the same as for ``iso_size`` option.

-**iso_hfs_ppc64le_compatible** = True
+**iso_hfs_ppc64le_compatible** = False
     (*bool*) -- when set to False, the Apple/HFS compatibility is turned off
     for ppc64le ISOs. This option only makes sense for bootable products, and
     affects images produced in *createiso* and *extra_isos* phases.
@@ -1570,6 +1581,19 @@ KiwiBuild Settings
     * ``type_attr`` -- (*[str]*) override default attributes for the build type
       from description.
     * ``bundle_name_format`` -- (*str*) override default bundle format name.
+    * ``version`` -- (*str*) override version. Follows the same rules as
+      described in :ref:`automatic versioning <auto-version>`.
+    * ``repo_releasever`` -- (*str*) Override default releasever of the output
+      image.
+    * ``manifest_type`` -- the image type that is put into the manifest by
+      pungi. If not supplied, an autodetected value will be provided. It may or
+      may not make sense.
+    * ``use_buildroot_repo = False`` -- (*bool*) whether the task should
+      automatically enable buildroot repository corresponding to the used
+      target.

     The options can be set either for the specific image, or at the phase level
     (see below). Version also falls back to ``global_version``.

 **kiwibuild_description_scm**
     (*str*) -- URL for scm containing the description files
@@ -1586,6 +1610,15 @@ KiwiBuild Settings
 **kiwibuild_bundle_name_format**
     (*str*) -- override default bundle format name.

+**kiwibuild_version**
+    (*str*) -- override version for all kiwibuild tasks.
+
+**kiwibuild_repo_releasever**
+    (*str*) -- override releasever for all kiwibuild tasks.
+
+**kiwibuild_use_buildroot_repo**
+    (*bool*) -- set enablement of a buildroot repo for all kiwibuild tasks.
+
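Taken together, the phase-level knobs might be combined as in this sketch (the variant regex, description repo, profile, and values are illustrative, not from the diff)::

    # Illustrative phase-level kiwibuild configuration using the new options.
    kiwibuild_description_scm = "https://example.com/kiwi-descriptions.git"
    kiwibuild_version = "43"             # version applied to all kiwibuild tasks
    kiwibuild_repo_releasever = "43"     # releasever used inside the built image
    kiwibuild_use_buildroot_repo = True  # enable the target's buildroot repo

    kiwibuild = {
        "^Cloud$": [
            {
                "description_path": "cloud/disk.kiwi",
                "kiwi_profile": "Cloud-Base-Generic",
                "manifest_type": "qcow2",
            },
        ]
    }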
 OSBuild Composer for building images
 ====================================

@@ -1693,6 +1726,102 @@ OSBuild Composer for building images
     arch.


+Image Builder Settings
+======================
+
+**imagebuilder**
+    (*dict*) -- configuration for building images with the ``koji-image-builder``
+    Koji plugin. Pungi will trigger a Koji task which will build the image with
+    the given configuration using the ``image-builder`` executable in the build
+    root.
+
+    Format: ``{variant_uid_regex: [{...}]}``.
+
+    Required keys in the configuration dict:
+
+    * ``name`` -- name of the Koji package
+    * ``types`` -- a list with a single image type string representing
+      the image type to build (e.g. ``qcow2``). Only a single image type
+      can be provided as an argument.
+
+    Optional keys:
+
+    * ``target`` -- which build target to use for the task. Either this option,
+      the global ``imagebuilder_target``, or ``global_target`` is required.
+    * ``version`` -- version for the final build (as a string). This option is
+      required if the global ``imagebuilder_version`` or its ``global_version``
+      equivalent are not specified.
+    * ``release`` -- release part of the final NVR. If neither this option nor
+      the global ``imagebuilder_release`` nor its ``global_release`` equivalent
+      are set, Koji will automatically generate a value.
+    * ``repos`` -- a list of repositories from which to consume packages for
+      building the image. By default only the variant repository is used.
+      The list items use the following formats:
+
+      * String with just the repository URL.
+      * Variant ID in the current compose.
+
+    * ``arches`` -- list of architectures for which to build the image. By
+      default, the variant arches are used. This option can only restrict it,
+      not add a new one.
+
+    * ``seed`` -- an integer that can be used to make builds more reproducible.
+      When ``image-builder`` builds images, various bits and bobs are generated
+      with a PRNG (partition uuids, etc.). Pinning the seed with this argument,
+      or globally with ``imagebuilder_seed``, will make builds use the same
+      random values each time. Note that using ``seed`` requires the Koji side
+      to have at least ``koji-image-builder >= 7`` deployed.
+
+    * ``scratch`` -- a boolean to instruct ``koji-image-builder`` to perform
+      scratch builds. This might have implications on garbage collection within
+      the ``koji`` instance you're targeting. Can also be set globally through
+      ``imagebuilder_scratch``.
+
+    * ``ostree`` -- a dictionary describing where to get ``ostree`` content when
+      applicable. The dictionary contains the following keys:
+
+      * ``url`` -- URL of the repository that's used to fetch the parent
+        commit from.
+      * ``ref`` -- name of an ostree branch or tag.
+
+    * ``blueprint`` -- a dictionary with a blueprint to use for the image build.
+      Blueprints can customize images beyond their initial definition. For the
+      list of supported customizations, see the external
+      `documentation <https://osbuild.org/docs/user-guide/blueprint-reference/>`__.
+
+    .. note::
+        There is initial support for having this task be failable without
+        aborting the whole compose. This can be enabled by setting
+        ``"failable": ["*"]`` in the config for the image. It is an on/off
+        switch without per-arch granularity.
+
+
+Example Config
+--------------
+::
+
+    imagebuilder_target = 'f43-image-builder'
+    imagebuilder_seed = 43
+    imagebuilder_scratch = True
+
+    imagebuilder = {
+        "^IoT$": [
+            {
+                "name": "%s-raw" % release_name,
+                "types": ["iot-raw-xz"],
+                "arches": ["x86_64"], #, "aarch64"],
+                "repos": ["https://kojipkgs.fedoraproject.org/compose/rawhide/latest-Fedora-Rawhide/compose/Everything/$arch/os/"],
+                "ostree": {
+                    "url": "https://kojipkgs.fedoraproject.org/compose/iot/repo/",
+                    "ref": "fedora/rawhide/$arch/iot",
+                },
+                "subvariant": "IoT",
+                "failable": ["*"],
+            },
+        ]
+    }
+
+
 Image container
 ===============
@@ -1870,6 +1999,15 @@ will thus create a new OCI archive image *from scratch*.
       reference will not be created.
 * ``runroot_packages`` -- (*list*) A list of additional package names to be
   installed in the runroot environment in Koji.
+* ``subvariant`` -- (*str*) The subvariant value to be used in the metadata
+  for the image. Also used in the image's filename, unless overridden by
+  ``name``. Defaults to being the same as the variant. If building more
+  than one ostree container in a variant, each must have a unique
+  subvariant.
+* ``name`` -- (*str*) The base for the image's filename. To produce the
+  complete filename, the image's architecture, the version string, and the
+  format suffix are appended to this. Defaults to the value of
+  ``release_short`` and the subvariant, joined by a dash.

 Example config
 --------------
@@ -28,7 +28,8 @@ It is possible now to run a compose from a Koji tag without direct access to
 Koji storage.

 Pungi can download the packages over HTTP protocol, store them in a local
-cache, and consume them from there.
+cache, and consume them from there. To enable this behavior, set the
+:ref:`koji_cache <koji-settings>` option in the compose configuration.

 The local cache has similar structure to what is on the Koji volume.

@@ -43,7 +44,8 @@ If it doesn't exist, it will be downloaded from Koji (by replacing the
 Koji URL     https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
 Local path   /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm

-The packages can be hardlinked from this cache directory.
+The packages can be hard- or softlinked from this cache directory
+(``/mnt/compose/cache`` in the example).
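The URL-to-path mapping above is a plain prefix substitution; a minimal sketch, reusing the example's top-level Koji URL and cache directory purely for illustration::

    # Sketch: map a Koji package URL to its local cache path by swapping
    # the server prefix for the cache directory. Values are illustrative.
    KOJI_TOP_URL = "https://kojipkgs.fedoraproject.org/"
    CACHE_DIR = "/mnt/compose/cache/"

    def local_cache_path(url):
        assert url.startswith(KOJI_TOP_URL)
        return CACHE_DIR + url[len(KOJI_TOP_URL):]

    print(local_cache_path(
        "https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/"
        "data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm"
    ))
    # -> /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm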

 Cleanup
@@ -124,6 +124,12 @@ OSBuild
 Similarly to image build, this phase creates a koji `osbuild` task. In the
 background it uses OSBuild Composer to create images.

+ImageBuilder
+------------
+
+Similarly to image build, this phase creates a koji `imageBuilderBuild`
+task. In the background it uses `image-builder` to create images.
+
 OSBS
 ----

@@ -18,6 +18,7 @@ which can contain following keys.
 * ``cvs`` -- copies files from a CVS repository
 * ``rpm`` -- copies files from a package in the compose
 * ``koji`` -- downloads archives from a given build in Koji build system
+* ``container-image`` -- downloads an artifact from a container registry

 * ``repo``

@@ -85,6 +86,24 @@ For ``extra_files`` phase either key is valid and should be chosen depending on
 what the actual use case is.


+``container-image`` example
+---------------------------
+
+Example of pulling a container image into the compose. ::
+
+    {
+        # Pull a container into an oci-archive tar file
+        "scm": "container-image",
+        # This is the pull spec including tag. It is passed directly to skopeo
+        # copy with no modification.
+        "repo": "docker://registry.access.redhat.com/ubi9/ubi-minimal:latest",
+        # Key `file` is required, but the value is ignored.
+        "file": "",
+        # Optional subdirectory under Server/<arch>/os
+        "target": "containers",
+    }
+
 Caveats
 -------
pungi.spec (1098 lines changed; diff suppressed because it is too large)
@@ -16,7 +16,8 @@ def get_full_version():
         proc = subprocess.Popen(
             ["git", "--git-dir=%s/.git" % location, "describe", "--tags"],
             stdout=subprocess.PIPE,
-            universal_newlines=True,
+            text=True,
+            errors="replace",
         )
         output, _ = proc.communicate()
         return re.sub(r"-1.fc\d\d?", "", output.strip().replace("pungi-", ""))
@@ -24,7 +25,7 @@ def get_full_version():
         import subprocess

         proc = subprocess.Popen(
-            ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, universal_newlines=True
+            ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, text=True, errors="replace"
         )
         (output, err) = proc.communicate()
         if not err:
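``text=True`` is the modern spelling of ``universal_newlines=True``; the functional change is ``errors="replace"``, which keeps the version lookup from crashing on undecodable bytes in the subprocess output. A standalone sketch, assuming a UTF-8 locale (the command is illustrative)::

    import subprocess

    # errors="replace" substitutes U+FFFD for bytes that are not valid in the
    # expected encoding, instead of raising UnicodeDecodeError on decode.
    proc = subprocess.Popen(
        ["printf", r"pungi-4.10.1\370"],  # \370 (0xF8) is not valid UTF-8
        stdout=subprocess.PIPE,
        text=True,
        errors="replace",
    )
    output, _ = proc.communicate()
    print(output)  # pungi-4.10.1 followed by the replacement character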
@@ -93,6 +93,11 @@ def split_name_arch(name_arch):

 def is_excluded(package, arches, logger=None):
     """Check if package is excluded from given architectures."""
+    if any(
+        getBaseArch(exc_arch) == 'x86_64' for exc_arch in package.exclusivearch
+    ) and 'x86_64_v2' not in package.exclusivearch:
+        package.exclusivearch.append('x86_64_v2')
+
     if package.excludearch and set(package.excludearch) & set(arches):
         if logger:
             logger.debug(
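In effect, a package whose ``ExclusiveArch`` names any x86_64-based arch is also treated as exclusive to the new ``x86_64_v2`` arch. A minimal sketch of the rule, with a hypothetical stand-in for ``getBaseArch`` and the package object::

    # Hypothetical stand-ins illustrating the ExclusiveArch widening above.
    BASEARCH = {"x86_64": "x86_64", "amd64": "x86_64", "x86_64_v2": "x86_64"}

    def getBaseArch(arch):
        return BASEARCH.get(arch, arch)

    class Pkg:
        def __init__(self, exclusivearch):
            self.exclusivearch = exclusivearch

    pkg = Pkg(["x86_64"])
    if any(
        getBaseArch(a) == "x86_64" for a in pkg.exclusivearch
    ) and "x86_64_v2" not in pkg.exclusivearch:
        pkg.exclusivearch.append("x86_64_v2")
    print(pkg.exclusivearch)  # ['x86_64', 'x86_64_v2']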
@@ -34,6 +34,8 @@ arches = {
     "x86_64": "athlon",
     "amd64": "x86_64",
     "ia32e": "x86_64",
+    # x86-64-v2
+    "x86_64_v2": "noarch",
     # ppc64le
     "ppc64le": "noarch",
     # ppc
@@ -82,6 +84,8 @@ arches = {
     "sh3": "noarch",
     # itanium
     "ia64": "noarch",
+    # riscv64
+    "riscv64": "noarch",
 }

 # Will contain information parsed from /proc/self/auxv via _parse_auxv().
pungi/checks.py (161 lines changed)
@@ -42,7 +42,6 @@ import platform
 import re

 import jsonschema
-import six
 from kobo.shortcuts import force_list
 from pungi.phases import PHASES_NAMES
 from pungi.runroot import RUNROOT_TYPES
@@ -236,8 +235,8 @@ def validate(config, offline=False, schema=None):
         schema,
         {
             "array": (tuple, list),
-            "regex": six.string_types,
-            "url": six.string_types,
+            "regex": str,
+            "url": str,
         },
     )
     errors = []
@@ -266,6 +265,28 @@ def validate(config, offline=False, schema=None):
             if error.validator in ("anyOf", "oneOf"):
                 for suberror in error.context:
                     errors.append("  Possible reason: %s" % suberror.message)

+    # Resolve container tags in extra_files
+    tag_resolver = util.ContainerTagResolver(offline=offline)
+    if config.get("extra_files"):
+        for _, arch_dict in config["extra_files"]:
+            for value in arch_dict.values():
+                if isinstance(value, dict):
+                    _resolve_container_tag(value, tag_resolver)
+                elif isinstance(value, list):
+                    for subinstance in value:
+                        _resolve_container_tag(subinstance, tag_resolver)
+    if config.get("extra_isos"):
+        for cfgs in config["extra_isos"].values():
+            if not isinstance(cfgs, list):
+                cfgs = [cfgs]
+            for cfg in cfgs:
+                if isinstance(cfg.get("extra_files"), dict):
+                    _resolve_container_tag(cfg["extra_files"], tag_resolver)
+                elif isinstance(cfg.get("extra_files"), list):
+                    for c in cfg["extra_files"]:
+                        _resolve_container_tag(c, tag_resolver)
+
     return (errors + _validate_requires(schema, config, CONFIG_DEPS), warnings)
@@ -462,7 +483,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
         return isinstance(instance, (tuple, list))

     def is_string_type(checker, instance):
-        return isinstance(instance, six.string_types)
+        return isinstance(instance, str)

     kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
         {"array": is_array, "regex": is_string_type, "url": is_string_type}
@@ -534,6 +555,18 @@ def make_schema():
         "str_or_scm_dict": {
             "anyOf": [{"type": "string"}, {"$ref": "#/definitions/scm_dict"}]
         },
+        "extra_file": {
+            "type": "object",
+            "properties": {
+                "scm": {"type": "string"},
+                "repo": {"type": "string"},
+                "branch": {"$ref": "#/definitions/optional_string"},
+                "file": {"$ref": "#/definitions/strings"},
+                "dir": {"$ref": "#/definitions/strings"},
+                "target": {"type": "string"},
+            },
+            "additionalProperties": False,
+        },
         "repo_dict": {
             "type": "object",
             "properties": {
@@ -588,6 +621,7 @@ def make_schema():
             "release_discinfo_description": {"type": "string"},
             "treeinfo_version": {"type": "string"},
             "compose_type": {"type": "string", "enum": COMPOSE_TYPES},
+            "label": {"type": "string"},
             "base_product_name": {"type": "string"},
             "base_product_short": {"type": "string"},
             "base_product_version": {"type": "string"},
@@ -665,7 +699,11 @@ def make_schema():
             "pkgset_allow_reuse": {"type": "boolean", "default": True},
             "createiso_allow_reuse": {"type": "boolean", "default": True},
             "extraiso_allow_reuse": {"type": "boolean", "default": True},
-            "pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
+            "pkgset_source": {"type": "string", "enum": [
+                "koji",
+                "repos",
+                "kojimock",
+            ]},
             "createrepo_c": {"type": "boolean", "default": True},
             "createrepo_checksum": {
                 "type": "string",
@@ -698,7 +736,6 @@ def make_schema():
             ),
             "repoclosure_backend": {
                 "type": "string",
-                # Gather and repoclosure both have the same backends: yum + dnf
                 "default": _get_default_gather_backend(),
                 "enum": _get_gather_backends(),
             },
@@ -765,7 +802,7 @@ def make_schema():
                 _variant_arch_mapping({"type": "number", "enum": [1, 2, 3, 4]}),
             ],
         },
-        "iso_hfs_ppc64le_compatible": {"type": "boolean", "default": True},
+        "iso_hfs_ppc64le_compatible": {"type": "boolean", "default": False},
         "multilib": _variant_arch_mapping(
             {"$ref": "#/definitions/list_of_strings"}
         ),
@@ -794,6 +831,14 @@ def make_schema():
             "type": "string",
             "enum": ["lorax"],
         },
+        # In phase `buildinstall` we should add to compose only the
+        # images that will be used only as netinstall
+        "netinstall_variants": {
+            "$ref": "#/definitions/list_of_strings",
+            "default": [
+                "BaseOS",
+            ],
+        },
         "buildinstall_topdir": {"type": "string"},
         "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
         "buildinstall_use_guestmount": {"type": "boolean", "default": True},
@@ -937,20 +982,7 @@ def make_schema():
                 "properties": {
                     "include_variants": {"$ref": "#/definitions/strings"},
                     "extra_files": _one_or_list(
-                        {
-                            "type": "object",
-                            "properties": {
-                                "scm": {"type": "string"},
-                                "repo": {"type": "string"},
-                                "branch": {
-                                    "$ref": "#/definitions/optional_string"
-                                },
-                                "file": {"$ref": "#/definitions/strings"},
-                                "dir": {"$ref": "#/definitions/strings"},
-                                "target": {"type": "string"},
-                            },
-                            "additionalProperties": False,
-                        }
+                        {"$ref": "#/definitions/extra_file"}
                     ),
                     "filename": {"type": "string"},
                     "volid": {"$ref": "#/definitions/strings"},
@@ -1101,6 +1133,8 @@ def make_schema():
                     "runroot_packages": {
                         "$ref": "#/definitions/list_of_strings",
                     },
+                    "subvariant": {"type": "string"},
+                    "name": {"type": "string"},
                 },
                 "required": [
                     "treefile",
@@ -1212,6 +1246,10 @@ def make_schema():
                     "type": {"type": "string"},
                     "type_attr": {"$ref": "#/definitions/list_of_strings"},
                     "bundle_name_format": {"type": "string"},
+                    "version": {"type": "string"},
+                    "repo_releasever": {"type": "string"},
+                    "manifest_type": {"type": "string"},
+                    "use_buildroot_repo": {"type": "boolean"},
                 },
                 "required": [
                     # description_scm and description_path
@@ -1233,6 +1271,9 @@ def make_schema():
             "kiwibuild_type": {"type": "string"},
             "kiwibuild_type_attr": {"$ref": "#/definitions/list_of_strings"},
             "kiwibuild_bundle_name_format": {"type": "string"},
+            "kiwibuild_version": {"type": "string"},
+            "kiwibuild_repo_releasever": {"type": "string"},
+            "kiwibuild_use_buildroot_repo": {"type": "boolean", "default": False},
             "osbuild_target": {"type": "string"},
             "osbuild_release": {"$ref": "#/definitions/optional_string"},
             "osbuild_version": {"type": "string"},
@ -1387,6 +1428,58 @@ def make_schema():
|
||||
},
|
||||
},
|
||||
},
|
||||
"imagebuilder": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
# Warning: this pattern is a variant uid regex, but the
|
||||
# format does not let us validate it as there is no regular
|
||||
# expression to describe all regular expressions.
|
||||
".+": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"arches": {"$ref": "#/definitions/list_of_strings"},
|
||||
"types": {"$ref": "#/definitions/list_of_strings"},
|
||||
"version": {"type": "string"},
|
||||
"repos": {"$ref": "#/definitions/list_of_strings"},
|
||||
"release": {"type": "string"},
|
||||
"distro": {"type": "string"},
|
||||
"scratch": {"type": "boolean"},
|
||||
"ostree": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parent": {"type": "string"},
|
||||
"ref": {"type": "string"},
|
||||
"url": {"type": "string"},
|
||||
},
|
||||
},
|
||||
"failable": {"$ref": "#/definitions/list_of_strings"},
|
||||
"subvariant": {"type": "string"},
|
||||
"blueprint": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
},
|
||||
"seed": {"type": "integer"},
|
||||
"manifest_type": {"type": "string"},
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"types",
|
||||
],
|
||||
"additionalProperties": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
"additionalProperties": False,
|
||||
},
|
||||
"imagebuilder_target": {"type": "string"},
|
||||
"imagebuilder_release": {"$ref": "#/definitions/optional_string"},
|
||||
"imagebuilder_version": {"type": "string"},
|
||||
"imagebuilder_seed": {"type": "integer"},
|
||||
"imagebuilder_scratch": {"type": "boolean"},
|
||||
"lorax_options": _variant_arch_mapping(
|
||||
{
|
||||
"type": "object",
|
||||
@@ -1406,6 +1499,7 @@ def make_schema():
"skip_branding": {"type": "boolean"},
"squashfs_only": {"type": "boolean"},
"configuration_file": {"$ref": "#/definitions/str_or_scm_dict"},
"rootfs_type": {"type": "string"},
},
"additionalProperties": False,
}
@@ -1465,21 +1559,7 @@ def make_schema():
"additionalProperties": False,
},
"extra_files": _variant_arch_mapping(
    {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "scm": {"type": "string"},
                "repo": {"type": "string"},
                "branch": {"$ref": "#/definitions/optional_string"},
                "file": {"$ref": "#/definitions/strings"},
                "dir": {"type": "string"},
                "target": {"type": "string"},
            },
            "additionalProperties": False,
        },
    }
    {"type": "array", "items": {"$ref": "#/definitions/extra_file"}}
),
"gather_lookaside_repos": _variant_arch_mapping(
    {"$ref": "#/definitions/strings"}
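The inlined object is replaced by a reference to the shared `extra_file` definition, which covers entries like the following sketch (the repo URL is made up):

```python
# One extra_files entry; fields match the removed inline schema above.
{
    "scm": "git",
    "repo": "https://example.com/release-files.git",  # illustrative URL
    "branch": None,
    "file": ["GPL", "RPM-GPG-KEY-example"],
    "target": "",
}
```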
@@ -1600,10 +1680,13 @@ def update_schema(schema, update_dict):


def _get_gather_backends():
    if six.PY2:
        return ["yum", "dnf"]
    return ["dnf"]


def _get_default_gather_backend():
    return "yum" if six.PY2 else "dnf"
    return "dnf"


def _resolve_container_tag(instance, tag_resolver):
    if instance.get("scm") == "container-image":
        instance["repo"] = tag_resolver(instance["repo"])

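A quick sketch of what `_resolve_container_tag` does: for `container-image` sources the `repo` value is passed through the resolver; everything else is left alone. The resolver below is a stand-in, not the real implementation:

```python
instance = {"scm": "container-image", "repo": "registry.example.com/os:latest"}
# Stand-in resolver that pins the tag to a made-up digest:
_resolve_container_tag(instance, lambda repo: repo + "@sha256:abc123")
# instance["repo"] is now "registry.example.com/os:latest@sha256:abc123"
```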
@ -50,6 +50,7 @@ from pungi.util import (
|
||||
translate_path_raw,
|
||||
)
|
||||
from pungi.metadata import compose_to_composeinfo
|
||||
from pungi.otel import tracing
|
||||
|
||||
try:
|
||||
# This is available since productmd >= 1.18
|
||||
@ -130,15 +131,16 @@ def cts_auth(pungi_conf):
|
||||
cts_oidc_client_id = os.environ.get(
|
||||
"CTS_OIDC_CLIENT_ID", ""
|
||||
) or pungi_conf.get("cts_oidc_client_id", "")
|
||||
token = retry_request(
|
||||
"post",
|
||||
cts_oidc_token_url,
|
||||
data={
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": cts_oidc_client_id,
|
||||
"client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
|
||||
},
|
||||
).json()["access_token"]
|
||||
with tracing.span("obtain-oidc-token"):
|
||||
token = retry_request(
|
||||
"post",
|
||||
cts_oidc_token_url,
|
||||
data={
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": cts_oidc_client_id,
|
||||
"client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
|
||||
},
|
||||
).json()["access_token"]
|
||||
auth = BearerAuth(token)
|
||||
del token
|
||||
|
||||
@ -194,8 +196,9 @@ def get_compose_info(
|
||||
"parent_compose_ids": parent_compose_ids,
|
||||
"respin_of": respin_of,
|
||||
}
|
||||
with cts_auth(conf) as authentication:
|
||||
rv = retry_request("post", url, json_data=data, auth=authentication)
|
||||
with tracing.span("create-compose-in-cts"):
|
||||
with cts_auth(conf) as authentication:
|
||||
rv = retry_request("post", url, json_data=data, auth=authentication)
|
||||
|
||||
# Update local ComposeInfo with received ComposeInfo.
|
||||
cts_ci = ComposeInfo()
|
||||
@ -231,8 +234,9 @@ def update_compose_url(compose_id, compose_dir, conf):
|
||||
"action": "set_url",
|
||||
"compose_url": compose_url,
|
||||
}
|
||||
with cts_auth(conf) as authentication:
|
||||
return retry_request("patch", url, json_data=data, auth=authentication)
|
||||
with tracing.span("update-compose-url"):
|
||||
with cts_auth(conf) as authentication:
|
||||
return retry_request("patch", url, json_data=data, auth=authentication)
|
||||
|
||||
|
||||
def get_compose_dir(
|
||||
@ -373,6 +377,7 @@ class Compose(kobo.log.LoggingBase):
|
||||
self.ci_base.load(
|
||||
os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
|
||||
)
|
||||
tracing.set_attribute("compose_id", self.compose_id)
|
||||
|
||||
self.supported = supported
|
||||
if (
|
||||
@ -466,13 +471,10 @@ class Compose(kobo.log.LoggingBase):
|
||||
|
||||
@property
|
||||
def should_create_yum_database(self):
|
||||
"""Explicit configuration trumps all. Otherwise check gather backend
|
||||
and only create it for Yum.
|
||||
"""Explicit configuration trumps all. Yum is no longer supported, so
|
||||
default to False.
|
||||
"""
|
||||
config = self.conf.get("createrepo_database")
|
||||
if config is not None:
|
||||
return config
|
||||
return self.conf["gather_backend"] == "yum"
|
||||
return self.conf.get("createrepo_database", False)
|
||||
|
||||
def read_variants(self):
|
||||
# TODO: move to phases/init ?
|
||||
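With the yum fallback gone, the sqlite database is only generated when the option is set explicitly. An illustrative configuration snippet:

```python
# Explicit opt-in is now the only way to get the createrepo sqlite database:
createrepo_database = True
```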
@@ -560,6 +562,7 @@ class Compose(kobo.log.LoggingBase):
        old_status = self.get_status()
        if stat_msg == old_status:
            return
        tracing.set_attribute("compose_status", stat_msg)
        if old_status == "FINISHED":
            msg = "Could not modify a FINISHED compose: %s" % self.topdir
            self.log_error(msg)

@@ -1,79 +0,0 @@
# -*- coding: utf-8 -*-

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import os
import sys
import time

from ConfigParser import SafeConfigParser

from .arch_utils import getBaseArch

# In development, `here` will point to the bin/ directory with scripts.
here = sys.path[0]
MULTILIBCONF = (
    os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
    if here != "/usr/bin"
    else "/usr/share/pungi/multilib"
)


class Config(SafeConfigParser):
    def __init__(self, pungirc=None):
        SafeConfigParser.__init__(self)

        self.add_section("pungi")
        self.add_section("lorax")

        self.set("pungi", "osdir", "os")
        self.set("pungi", "sourcedir", "source")
        self.set("pungi", "debugdir", "debug")
        self.set("pungi", "isodir", "iso")
        self.set("pungi", "multilibconf", MULTILIBCONF)
        self.set(
            "pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
        )
        self.set("pungi", "relnotedirre", "")
        self.set(
            "pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
        )
        self.set("pungi", "product_path", "Packages")
        self.set("pungi", "cachedir", "/var/cache/pungi")
        self.set("pungi", "compress_type", "xz")
        self.set("pungi", "arch", getBaseArch())
        self.set("pungi", "family", "Fedora")
        self.set("pungi", "iso_basename", "Fedora")
        self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
        self.set("pungi", "variant", "")
        self.set("pungi", "destdir", os.getcwd())
        self.set("pungi", "workdirbase", "/work")
        self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
        self.set("pungi", "cdsize", "695.0")
        self.set("pungi", "debuginfo", "True")
        self.set("pungi", "alldeps", "True")
        self.set("pungi", "isfinal", "False")
        self.set("pungi", "nohash", "False")
        self.set("pungi", "full_archlist", "False")
        self.set("pungi", "multilib", "")
        self.set("pungi", "lookaside_repos", "")
        self.set("pungi", "resolve_deps", "True")
        self.set("pungi", "no_dvd", "False")
        self.set("pungi", "nomacboot", "False")
        self.set("pungi", "rootfs_size", "False")

        # if missing, self.read() is a noop, else change 'defaults'
        if pungirc:
            self.read(os.path.expanduser(pungirc))
@@ -3,10 +3,9 @@
from __future__ import print_function

import os
import six
import shlex
from collections import namedtuple
from kobo.shortcuts import run
from six.moves import shlex_quote

from .wrappers import iso
from .wrappers.jigdo import JigdoWrapper
@@ -41,13 +40,13 @@ def quote(str):
    expanded.
    """
    if str.startswith("$TEMPLATE"):
        return "$TEMPLATE%s" % shlex_quote(str.replace("$TEMPLATE", "", 1))
    return shlex_quote(str)
        return "$TEMPLATE%s" % shlex.quote(str.replace("$TEMPLATE", "", 1))
    return shlex.quote(str)


def emit(f, cmd):
    """Print line of shell code into the stream."""
    if isinstance(cmd, six.string_types):
    if isinstance(cmd, str):
        print(cmd, file=f)
    else:
        print(" ".join([quote(x) for x in cmd]), file=f)

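The behaviour of `quote()` is easiest to see on examples: `shlex.quote` escapes everything, while a leading `$TEMPLATE` is deliberately left unquoted so the shell can still expand it.

```python
quote("$TEMPLATE/scripts/post.sh")  # -> "$TEMPLATE/scripts/post.sh"
quote("a file with spaces")         # -> "'a file with spaces'"
```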
pungi/gather.py: 2297 lines changed (diff suppressed because it is too large)
@@ -35,11 +35,6 @@ from pungi.profiler import Profiler
from pungi.util import DEBUG_PATTERNS


def get_source_name(pkg):
    # Workaround for rhbz#1418298
    return pkg.sourcerpm.rsplit("-", 2)[0]


def filter_dotarch(queue, pattern, **kwargs):
    """Filter queue for packages matching the pattern. If pattern matches the
    dotarch format of <name>.<arch>, it is processed as such. Otherwise it is
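The `pkg.source_name` attribute replaces the open-coded `sourcerpm.rsplit("-", 2)[0]` trick. The rsplit worked because an SRPM file name always ends in `<name>-<version>-<release>.src.rpm`, so splitting twice from the right strips version and release. A sketch with an illustrative NVR:

```python
sourcerpm = "python-requests-2.31.0-1.fc39.src.rpm"  # illustrative NVR
sourcerpm.rsplit("-", 2)[0]  # -> "python-requests"
```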
@@ -389,7 +384,7 @@ class Gather(GatherBase):
            # lookaside
            if self.is_from_lookaside(i):
                self._set_flag(i, PkgFlag.lookaside)
            if i.sourcerpm.rsplit("-", 2)[0] in self.opts.fulltree_excludes:
            if i.source_name in self.opts.fulltree_excludes:
                self._set_flag(i, PkgFlag.fulltree_exclude)

    def _get_package_deps(self, pkg, debuginfo=False):
@@ -839,7 +834,7 @@ class Gather(GatherBase):
                continue
            if self.is_from_lookaside(i):
                self._set_flag(i, PkgFlag.lookaside)
            srpm_name = i.sourcerpm.rsplit("-", 2)[0]
            srpm_name = i.source_name
            if srpm_name in self.opts.fulltree_excludes:
                self._set_flag(i, PkgFlag.fulltree_exclude)
            if PkgFlag.input in self.result_package_flags.get(srpm_name, set()):
@@ -871,7 +866,7 @@ class Gather(GatherBase):
        for pkg in sorted(self.result_binary_packages):
            assert pkg is not None

            if get_source_name(pkg) in self.opts.fulltree_excludes:
            if pkg.source_name in self.opts.fulltree_excludes:
                self.logger.debug("No fulltree for %s due to exclude list", pkg)
                continue

@@ -1085,7 +1080,7 @@ class Gather(GatherBase):
                if ex.errno == errno.EEXIST:
                    self.logger.warning("Downloaded package exists in %s", target)
                else:
                    self.logger.error("Unable to link %s from the yum cache.", pkg.name)
                    self.logger.error("Unable to link %s from the dnf cache.", pkg.name)
                    raise

    def log_count(self, msg, method, *args):

@@ -228,20 +228,7 @@ class Linker(kobo.log.LoggingBase):
            raise ValueError("Unknown link_type: %s" % link_type)

    def link(self, src, dst, link_type="hardlink-or-copy"):
        """Link directories recursively."""
        if os.path.isfile(src) or os.path.islink(src):
            self._link_file(src, dst, link_type)
            return
        if os.path.isdir(src):
            raise RuntimeError("Linking directories recursively is not supported")

        if os.path.isfile(dst):
            raise OSError(errno.EEXIST, "File exists")

        if not self.test:
            if not os.path.exists(dst):
                makedirs(dst)
            shutil.copystat(src, dst)

        for i in os.listdir(src):
            src_path = os.path.join(src, i)
            dst_path = os.path.join(dst, i)
            self.link(src_path, dst_path, link_type)
        self._link_file(src, dst, link_type)

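After this change `Linker.link()` only handles single files and refuses directories; callers that relied on the old recursive behaviour now have to walk the tree themselves. A sketch, assuming a default-constructed `Linker`:

```python
linker = Linker()
linker.link("/tmp/src/a.rpm", "/tmp/dst/a.rpm", link_type="hardlink-or-copy")
linker.link("/tmp/src", "/tmp/dst")  # a directory now raises RuntimeError
```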
@@ -1,295 +0,0 @@
# -*- coding: utf-8 -*-

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import re
import fnmatch

import pungi.pathmatch
import pungi.gather
import pungi.util


LINE_PATTERN_RE = re.compile(r"^\s*(?P<line>[^#]+)(:?\s+(?P<comment>#.*))?$")
RUNTIME_PATTERN_SPLIT_RE = re.compile(
    r"^\s*(?P<path>[^\s]+)\s+(?P<pattern>[^\s]+)(:?\s+(?P<comment>#.*))?$"
)
SONAME_PATTERN_RE = re.compile(r"^(.+\.so\.[a-zA-Z0-9_\.]+).*$")


def read_lines(lines):
    result = []
    for i in lines:
        i = i.strip()

        if not i:
            continue

        # skip comments
        if i.startswith("#"):
            continue

        match = LINE_PATTERN_RE.match(i)
        if match is None:
            raise ValueError("Couldn't parse line: %s" % i)
        gd = match.groupdict()
        result.append(gd["line"])
    return result


def read_lines_from_file(path):
    lines = open(path, "r").readlines()
    lines = read_lines(lines)
    return lines


def read_runtime_patterns(lines):
    result = []
    for i in read_lines(lines):
        match = RUNTIME_PATTERN_SPLIT_RE.match(i)
        if match is None:
            raise ValueError("Couldn't parse pattern: %s" % i)
        gd = match.groupdict()
        result.append((gd["path"], gd["pattern"]))
    return result


def read_runtime_patterns_from_file(path):
    lines = open(path, "r").readlines()
    return read_runtime_patterns(lines)


def expand_runtime_patterns(patterns):
    pm = pungi.pathmatch.PathMatch()
    for path, pattern in patterns:
        for root in ("", "/opt/*/*/root"):
            # include Software Collections: /opt/<vendor>/<scl_name>/root/...
            if "$LIBDIR" in path:
                for lib_dir in ("/lib", "/lib64", "/usr/lib", "/usr/lib64"):
                    path_pattern = path.replace("$LIBDIR", lib_dir)
                    path_pattern = "%s/%s" % (root, path_pattern.lstrip("/"))
                    pm[path_pattern] = (path_pattern, pattern)
            else:
                path_pattern = "%s/%s" % (root, path.lstrip("/"))
                pm[path_pattern] = (path_pattern, pattern)
    return pm


class MultilibMethodBase(object):
    """a base class for multilib methods"""

    name = "base"

    def __init__(self, config_path):
        self.config_path = config_path

    def select(self, po):
        raise NotImplementedError

    def skip(self, po):
        if (
            pungi.gather.is_noarch(po)
            or pungi.gather.is_source(po)
            or pungi.util.pkg_is_debug(po)
        ):
            return True
        return False

    def is_kernel(self, po):
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name == "kernel":
                return True
        return False

    def is_kernel_devel(self, po):
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name == "kernel-devel":
                return True
        return False

    def is_kernel_or_kernel_devel(self, po):
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name in ("kernel", "kernel-devel"):
                return True
        return False


class NoneMultilibMethod(MultilibMethodBase):
    """multilib disabled"""

    name = "none"

    def select(self, po):
        return False


class AllMultilibMethod(MultilibMethodBase):
    """all packages are multilib"""

    name = "all"

    def select(self, po):
        if self.skip(po):
            return False
        return True


class RuntimeMultilibMethod(MultilibMethodBase):
    """pre-defined paths to libs"""

    name = "runtime"

    def __init__(self, *args, **kwargs):
        super(RuntimeMultilibMethod, self).__init__(*args, **kwargs)
        self.blacklist = read_lines_from_file(
            self.config_path + "runtime-blacklist.conf"
        )
        self.whitelist = read_lines_from_file(
            self.config_path + "runtime-whitelist.conf"
        )
        self.patterns = expand_runtime_patterns(
            read_runtime_patterns_from_file(self.config_path + "runtime-patterns.conf")
        )

    def select(self, po):
        if self.skip(po):
            return False
        if po.name in self.blacklist:
            return False
        if po.name in self.whitelist:
            return True
        if self.is_kernel(po):
            return False

        # gather all *.so.* provides from the RPM header
        provides = set()
        for i in po.provides:
            match = SONAME_PATTERN_RE.match(i[0])
            if match is not None:
                provides.add(match.group(1))

        for path in po.returnFileEntries() + po.returnFileEntries("ghost"):
            dirname, filename = path.rsplit("/", 1)
            dirname = dirname.rstrip("/")

            patterns = self.patterns[dirname]
            if not patterns:
                continue
            for dir_pattern, file_pattern in patterns:
                if file_pattern == "-":
                    return True
                if fnmatch.fnmatch(filename, file_pattern):
                    if ".so.*" in file_pattern:
                        if filename in provides:
                            # return only if the lib is provided in RPM header
                            # (some libs may be private, hence not exposed in Provides)
                            return True
                    else:
                        return True
        return False


class KernelMultilibMethod(MultilibMethodBase):
    """kernel and kernel-devel"""

    name = "kernel"

    def __init__(self, *args, **kwargs):
        super(KernelMultilibMethod, self).__init__(*args, **kwargs)

    def select(self, po):
        if self.is_kernel_or_kernel_devel(po):
            return True
        return False


class YabootMultilibMethod(MultilibMethodBase):
    """yaboot on ppc"""

    name = "yaboot"

    def __init__(self, *args, **kwargs):
        super(YabootMultilibMethod, self).__init__(*args, **kwargs)

    def select(self, po):
        if po.arch in ["ppc"]:
            if po.name.startswith("yaboot"):
                return True
        return False


class DevelMultilibMethod(MultilibMethodBase):
    """all -devel and -static packages"""

    name = "devel"

    def __init__(self, *args, **kwargs):
        super(DevelMultilibMethod, self).__init__(*args, **kwargs)
        self.blacklist = read_lines_from_file(self.config_path + "devel-blacklist.conf")
        self.whitelist = read_lines_from_file(self.config_path + "devel-whitelist.conf")

    def select(self, po):
        if self.skip(po):
            return False
        if po.name in self.blacklist:
            return False
        if po.name in self.whitelist:
            return True
        if self.is_kernel_devel(po):
            return False
        # HACK: exclude ghc*
        if po.name.startswith("ghc-"):
            return False
        if po.name.endswith("-devel"):
            return True
        if po.name.endswith("-static"):
            return True
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name.endswith("-devel"):
                return True
            if p_name.endswith("-static"):
                return True
        return False


DEFAULT_METHODS = ["devel", "runtime"]
METHOD_MAP = {}


def init(config_path="/usr/share/pungi/multilib/"):
    global METHOD_MAP

    if not config_path.endswith("/"):
        config_path += "/"

    for cls in (
        AllMultilibMethod,
        DevelMultilibMethod,
        KernelMultilibMethod,
        NoneMultilibMethod,
        RuntimeMultilibMethod,
        YabootMultilibMethod,
    ):
        method = cls(config_path)
        METHOD_MAP[method.name] = method


def po_is_multilib(po, methods):
    for method_name in methods:
        if not method_name:
            continue
        method = METHOD_MAP[method_name]
        if method.select(po):
            return method_name
    return None
@@ -104,7 +104,8 @@ class PungiNotifier(object):
            workdir=workdir,
            return_stdout=False,
            show_cmd=True,
            universal_newlines=True,
            text=True,
            errors="replace",
            logfile=logfile,
        )
        if ret != 0:

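`universal_newlines=True` is the legacy spelling of `text=True` in the `subprocess` module, and adding `errors="replace"` keeps the run from crashing on undecodable bytes in the output. A standalone sketch of the same keyword arguments:

```python
import subprocess

# Same effect as universal_newlines=True, plus tolerance for bad bytes:
out = subprocess.run(
    ["echo", "hello"], capture_output=True, text=True, errors="replace"
).stdout
```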
@@ -16,8 +16,7 @@

import os
import json
import six
from six.moves import shlex_quote
import shlex


from .base import OSTree
@@ -26,10 +25,10 @@ from .utils import tweak_treeconf

def emit(cmd):
    """Print line of shell code into the stream."""
    if isinstance(cmd, six.string_types):
    if isinstance(cmd, str):
        print(cmd)
    else:
        print(" ".join([shlex_quote(x) for x in cmd]))
        print(" ".join([shlex.quote(x) for x in cmd]))


class Container(OSTree):

@@ -64,7 +64,8 @@ class Tree(OSTree):
                show_cmd=True,
                stdout=True,
                logfile=log_file,
                universal_newlines=True,
                text=True,
                errors="replace",
            )
        finally:
            os.umask(oldumask)
@@ -77,7 +78,8 @@ class Tree(OSTree):
                show_cmd=True,
                stdout=True,
                logfile=log_file,
                universal_newlines=True,
                text=True,
                errors="replace",
            )

    def _update_ref(self):

pungi/otel.py: new file, 229 lines (shown below)
@@ -0,0 +1,229 @@
import itertools
import os
from contextlib import contextmanager

"""
This module contains two classes with the same interface. An instance of one of
them is available as `tracing`. Which class is instantiated depends on whether
the environment variables configuring OTel are set.
"""


class DummyTracing:
    """A dummy tracing module that doesn't actually do anything."""

    def setup(self):
        pass

    @contextmanager
    def span(self, *args, **kwargs):
        yield

    def set_attribute(self, name, value):
        pass

    def force_flush(self):
        pass

    def instrument_xmlrpc_proxy(self, proxy):
        return proxy

    def get_traceparent(self):
        return None

    def set_context(self, traceparent):
        pass

    def record_exception(self, exc, set_error_status=True):
        pass


class OtelTracing:
    """This class implements the actual integration with opentelemetry."""

    def setup(self):
        """Configure opentelemetry tracing based on environment variables. This
        setup is optional as it may not be desirable when pungi is used as a
        library.
        """
        from opentelemetry import trace
        from opentelemetry.sdk.resources import Resource
        from opentelemetry.sdk.trace import TracerProvider
        from opentelemetry.sdk.trace.export import (
            BatchSpanProcessor,
            ConsoleSpanExporter,
        )
        from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
            OTLPSpanExporter,
        )

        otel_endpoint = os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"]
        provider = TracerProvider(
            resource=Resource(attributes={"service.name": "pungi"})
        )
        if "console" == otel_endpoint:
            # This is for debugging the tracing locally.
            self.processor = BatchSpanProcessor(ConsoleSpanExporter())
        else:
            self.processor = BatchSpanProcessor(OTLPSpanExporter())
        provider.add_span_processor(self.processor)
        trace.set_tracer_provider(provider)

        traceparent = os.environ.get("TRACEPARENT")
        if traceparent:
            self.set_context(traceparent)

        try:
            from opentelemetry.instrumentation.requests import RequestsInstrumentor

            RequestsInstrumentor().instrument()
        except ImportError:
            pass

    @property
    def tracer(self):
        from opentelemetry import trace

        return trace.get_tracer(__name__)

    @contextmanager
    def span(self, name, **attributes):
        """Create a new span as a child of the current one. Attributes can be
        passed via kwargs."""
        with self.tracer.start_as_current_span(name, attributes=attributes) as span:
            yield span

    def get_traceparent(self):
        from opentelemetry.trace.propagation.tracecontext import (
            TraceContextTextMapPropagator,
        )

        carrier = {}
        TraceContextTextMapPropagator().inject(carrier)
        return carrier["traceparent"]

    def set_attribute(self, name, value):
        """Set an attribute on the current span."""
        from opentelemetry import trace

        span = trace.get_current_span()
        span.set_attribute(name, value)

    def force_flush(self):
        """Ensure all spans and traces are sent out. Call this before the
        process exits."""
        self.processor.force_flush()

    def instrument_xmlrpc_proxy(self, proxy):
        return InstrumentedClientSession(proxy)

    def set_context(self, traceparent):
        """Configure current context to match the given traceparent."""
        from opentelemetry import context
        from opentelemetry.trace.propagation.tracecontext import (
            TraceContextTextMapPropagator,
        )

        ctx = TraceContextTextMapPropagator().extract(
            carrier={"traceparent": traceparent}
        )
        context.attach(ctx)

    def record_exception(self, exc, set_error_status=True):
        """Records an exception for the current span and optionally marks the
        span as failed."""
        from opentelemetry import trace

        span = trace.get_current_span()
        span.record_exception(exc)

        if set_error_status:
            span.set_status(trace.status.StatusCode.ERROR)


class InstrumentedClientSession:
    """Wrapper around koji.ClientSession that creates spans for each API call.
    RequestsInstrumentor can create spans at the HTTP request level, but since
    those all go to the same XML-RPC endpoint, they are not very informative.

    Multicall is not handled very well here. The spans will only have a
    `multicall` boolean attribute, but they don't carry any additional data
    that could group them.

    Koji ClientSession supports three ways of making multicalls, but Pungi only
    uses one, and that one is supported here.

    Supported:

        c.multicall = True
        c.getBuild(1)
        c.getBuild(2)
        results = c.multiCall()

    Not supported:

        with c.multicall() as m:
            r1 = m.getBuild(1)
            r2 = m.getBuild(2)

    Also not supported:

        m = c.multicall()
        r1 = m.getBuild(1)
        r2 = m.getBuild(2)
        m.call_all()
    """

    def __init__(self, session):
        self.session = session

    def _name(self, name):
        """Helper for generating span names."""
        return "%s.%s" % (self.session.__class__.__name__, name)

    @property
    def system(self):
        """This is only ever used to get the list of available API calls. It is
        rather awkward though. Ideally we wouldn't really trace this at all,
        but there's the underlying POST request to the hub, which is quite
        confusing in the trace if there is no additional context."""
        return self.session.system

    @property
    def multicall(self):
        return self.session.multicall

    @multicall.setter
    def multicall(self, value):
        self.session.multicall = value

    def __getattr__(self, name):
        return self._instrument_method(name, getattr(self.session, name))

    def _instrument_method(self, name, callable):
        def wrapper(*args, **kwargs):
            with tracing.span(self._name(name)) as span:
                span.set_attribute("arguments", _format_args(args, kwargs))
                if self.session.multicall:
                    tracing.set_attribute("multicall", True)
                return callable(*args, **kwargs)

        return wrapper


def _format_args(args, kwargs):
    """Turn args+kwargs into a single string. OTel could choke on more
    complicated data."""
    return ", ".join(
        itertools.chain(
            (repr(arg) for arg in args),
            (f"{key}={value!r}" for key, value in kwargs.items()),
        )
    )


if "OTEL_EXPORTER_OTLP_ENDPOINT" in os.environ:
    tracing = OtelTracing()
else:
    tracing = DummyTracing()
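Typical usage based on the interface above; the span name and attributes are illustrative. With no `OTEL_EXPORTER_OTLP_ENDPOINT` in the environment, the `DummyTracing` no-ops make this safe to call unconditionally:

```python
from pungi.otel import tracing

tracing.setup()  # no-op for DummyTracing; reads env vars for OtelTracing
with tracing.span("run-compose", compose_id="Fedora-42-20250101.0"):
    tracing.set_attribute("phase", "gather")
    # ... do the actual work ...
tracing.force_flush()  # make sure spans are exported before the process exits
```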
@@ -1,73 +0,0 @@
# -*- coding: utf-8 -*-

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import fnmatch


def head_tail_split(name):
    name_split = name.strip("/").split("/", 1)
    if len(name_split) == 2:
        head = name_split[0]
        tail = name_split[1].strip("/")
    else:
        head, tail = name_split[0], None
    return head, tail


class PathMatch(object):
    def __init__(self, parent=None, desc=None):
        self._patterns = {}
        self._final_patterns = {}
        self._values = []

    def __setitem__(self, name, value):
        head, tail = head_tail_split(name)

        if tail is not None:
            # recursion
            if head not in self._patterns:
                self._patterns[head] = PathMatch(parent=self, desc=head)
            self._patterns[head][tail] = value
        else:
            if head not in self._final_patterns:
                self._final_patterns[head] = PathMatch(parent=self, desc=head)
            if value not in self._final_patterns[head]._values:
                self._final_patterns[head]._values.append(value)

    def __getitem__(self, name):
        result = []
        head, tail = head_tail_split(name)
        for pattern in self._patterns:
            if fnmatch.fnmatch(head, pattern):
                if tail is None:
                    values = self._patterns[pattern]._values
                else:
                    values = self._patterns[pattern][tail]
                for value in values:
                    if value not in result:
                        result.append(value)

        for pattern in self._final_patterns:
            if tail is None:
                x = head
            else:
                x = "%s/%s" % (head, tail)
            if fnmatch.fnmatch(x, pattern):
                values = self._final_patterns[pattern]._values
                for value in values:
                    if value not in result:
                        result.append(value)
        return result
@@ -29,6 +29,7 @@ from .image_build import ImageBuildPhase  # noqa
from .image_container import ImageContainerPhase  # noqa
from .kiwibuild import KiwiBuildPhase  # noqa
from .osbuild import OSBuildPhase  # noqa
from .imagebuilder import ImageBuilderPhase  # noqa
from .repoclosure import RepoclosurePhase  # noqa
from .test import TestPhase  # noqa
from .image_checksum import ImageChecksumPhase  # noqa

@@ -16,17 +16,17 @@

import errno
import os
import pickle
import time
import shlex
import shutil
import re
from six.moves import cPickle as pickle
from copy import copy

from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from kobo.shortcuts import run, force_list
import kobo.rpmlib
from productmd.images import Image
from six.moves import shlex_quote

from pungi.arch import get_valid_arches
from pungi.util import get_volid, get_arch_variant_data
@@ -39,6 +39,7 @@ from pungi.wrappers.scm import get_file_from_scm
from pungi.wrappers import kojiwrapper
from pungi.phases.base import PhaseBase
from pungi.runroot import Runroot, download_and_extract_archive
from pungi.threading import TelemetryWorkerThread as WorkerThread


class BuildinstallPhase(PhaseBase):
@@ -94,6 +95,7 @@ class BuildinstallPhase(PhaseBase):
        squashfs_only = False
        configuration_file = None
        configuration_file_source = None
        rootfs_type = None
        version = self.compose.conf.get(
            "treeinfo_version", self.compose.conf["release_version"]
        )
@@ -116,6 +118,7 @@ class BuildinstallPhase(PhaseBase):
            skip_branding = data.get("skip_branding", False)
            configuration_file_source = data.get("configuration_file")
            squashfs_only = data.get("squashfs_only", False)
            rootfs_type = data.get("rootfs_type", None)
            if "version" in data:
                version = data["version"]
        output_dir = os.path.join(output_dir, variant.uid)
@@ -171,6 +174,7 @@ class BuildinstallPhase(PhaseBase):
                "skip_branding": skip_branding,
                "squashfs_only": squashfs_only,
                "configuration_file": configuration_file,
                "rootfs-type": rootfs_type,
            }
        else:
            # If the buildinstall_topdir is set, it means Koji is used for
@@ -205,10 +209,11 @@ class BuildinstallPhase(PhaseBase):
            skip_branding=skip_branding,
            squashfs_only=squashfs_only,
            configuration_file=configuration_file,
            rootfs_type=rootfs_type,
        )
        return "rm -rf %s && %s" % (
            shlex_quote(output_topdir),
            " ".join([shlex_quote(x) for x in lorax_cmd]),
            shlex.quote(output_topdir),
            " ".join([shlex.quote(x) for x in lorax_cmd]),
        )

    def get_repos(self, arch):
@@ -413,8 +418,8 @@ def tweak_buildinstall(
    # copy src to temp
    # TODO: place temp on the same device as buildinstall dir so we can hardlink
    cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
        shlex_quote(src),
        shlex_quote(tmp_dir),
        shlex.quote(src),
        shlex.quote(tmp_dir),
    )
    run(cmd)

@@ -452,12 +457,12 @@ def tweak_buildinstall(
    run(cmd)

    # HACK: make buildinstall files world readable
    run("chmod -R a+rX %s" % shlex_quote(tmp_dir))
    run("chmod -R a+rX %s" % shlex.quote(tmp_dir))

    # copy temp to dst
    cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
        shlex_quote(tmp_dir),
        shlex_quote(dst),
        shlex.quote(tmp_dir),
        shlex.quote(dst),
    )
    run(cmd)

@@ -527,7 +532,14 @@ def link_boot_iso(compose, arch, variant, can_fail):
        )
    except RuntimeError:
        pass
    compose.im.add(variant.uid, arch, img)
    # In this phase we should add to the compose only the images that will
    # be used purely as netinstall media. In this step lorax generates the
    # environment for creating ISOs and creates them. In the `extra_isos`
    # step the unneeded minimal boot ISO is overwritten by a new ISO that
    # already contains the necessary packages from the included variants.
    if variant.uid in compose.conf['netinstall_variants']:
        compose.im.add(variant.uid, arch, img)
    compose.log_info("[DONE ] %s" % msg)

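Whether lorax's boot.iso ends up in the image metadata is now driven by the `netinstall_variants` option. An illustrative configuration snippet:

```python
# Only these variants keep their lorax boot.iso in the metadata; for the
# rest, the extra_isos phase is expected to replace the minimal boot ISO.
netinstall_variants = ["BaseOS"]
```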
@@ -17,15 +17,15 @@
import itertools
import os
import random
import shlex
import shutil
import stat
import json

import productmd.treeinfo
from productmd.images import Image
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from kobo.shortcuts import run, relative_path, compute_file_checksums
from six.moves import shlex_quote

from pungi.wrappers import iso
from pungi.wrappers.createrepo import CreaterepoWrapper
@@ -43,6 +43,7 @@ from pungi.util import (
from pungi.media_split import MediaSplitter, convert_media_size
from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
from pungi.runroot import Runroot
from pungi.threading import TelemetryWorkerThread as WorkerThread

from .. import createiso

@@ -655,7 +656,7 @@ def fix_treeinfo_checksums(compose, iso_path, arch):
    )
    # The modified ISO no longer has implanted MD5, so that needs to be
    # fixed again.
    compose.log_debug("Implanting new MD5 to %s fixed_path")
    compose.log_debug("Implanting new MD5 to %s", fixed_path)
    run(
        iso.get_implantisomd5_cmd(fixed_path, compose.supported),
        logfile=compose.paths.log.log_file(
@@ -782,7 +783,7 @@ def prepare_iso(

    if file_list_content:
        # write modified repodata only if there are packages available
        run("cp -a %s/repodata %s/" % (shlex_quote(tree_dir), shlex_quote(iso_dir)))
        run("cp -a %s/repodata %s/" % (shlex.quote(tree_dir), shlex.quote(iso_dir)))
        with open(file_list, "w") as f:
            f.write("\n".join(file_list_content))
        cmd = repo.get_createrepo_cmd(

@@ -27,7 +27,7 @@ import xml.dom.minidom
import productmd.modules
import productmd.rpms
from kobo.shortcuts import relative_path, run
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool

from ..module_util import Modulemd, collect_module_defaults, collect_module_obsoletes
from ..util import (
@@ -38,6 +38,7 @@ from ..util import (
from ..wrappers.createrepo import CreaterepoWrapper
from ..wrappers.scm import get_dir_from_scm
from .base import PhaseBase
from ..threading import TelemetryWorkerThread as WorkerThread

CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"
createrepo_lock = threading.Lock()

@@ -112,7 +112,7 @@ def copy_extra_files(
        target_path = os.path.join(
            extra_files_dir, scm_dict.get("target", "").lstrip("/")
        )
        getter(scm_dict, target_path, compose=compose)
        getter(scm_dict, target_path, compose=compose, arch=arch)

    if os.listdir(extra_files_dir):
        metadata.populate_extra_files_metadata(

@@ -18,7 +18,8 @@ import hashlib
import json

from kobo.shortcuts import force_list
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from pungi.threading import TelemetryWorkerThread as WorkerThread
import productmd.treeinfo
from productmd.extra_files import ExtraFiles

@@ -342,23 +343,24 @@ def get_extra_files(compose, variant, arch, extra_files):
    included in the ISO.
    """
    extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
    filelist = []
    for scm_dict in extra_files:
        getter = get_file_from_scm if "file" in scm_dict else get_dir_from_scm
        target = scm_dict.get("target", "").lstrip("/")
        target_path = os.path.join(extra_files_dir, target).rstrip("/")
        filelist.extend(
            os.path.join(target, f)
            for f in getter(scm_dict, target_path, compose=compose)
        )
        getter(scm_dict, target_path, compose=compose, arch=arch)

    filelist = [
        os.path.relpath(os.path.join(root, f), extra_files_dir)
        for root, _, files in os.walk(extra_files_dir)
        for f in files
    ]
    if filelist:
        metadata.populate_extra_files_metadata(
            ExtraFiles(),
            variant,
            arch,
            extra_files_dir,
            filelist,
            sorted(filelist),
            compose.conf["media_checksums"],
        )

@@ -429,6 +431,12 @@ def get_iso_contents(
        original_treeinfo,
        os.path.join(extra_files_dir, ".treeinfo"),
    )
    tweak_repo_treeinfo(
        compose,
        include_variants,
        original_treeinfo,
        original_treeinfo,
    )

    # Add extra files specific for the ISO
    files.update(
@@ -440,6 +448,45 @@ def get_iso_contents(
    return gp


def tweak_repo_treeinfo(compose, include_variants, source_file, dest_file):
    """
    Add the variants listed in the `extra_isos -> include_variants` option
    to the .treeinfo file of a variant.
    """
    ti = productmd.treeinfo.TreeInfo()
    ti.load(source_file)
    main_variant = next(iter(ti.variants))
    for variant_uid in include_variants:
        variant = compose.all_variants[variant_uid]
        var = productmd.treeinfo.Variant(ti)
        var.id = variant.id
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type
        ti.variants.add(var)

    for variant_id in ti.variants:
        var = ti.variants[variant_id]
        if variant_id == main_variant:
            var.paths.packages = 'Packages'
            var.paths.repository = '.'
        else:
            var.paths.packages = os.path.join(
                '../../..',
                var.uid,
                var.arch,
                'os/Packages',
            )
            var.paths.repository = os.path.join(
                '../../..',
                var.uid,
                var.arch,
                'os',
            )
    ti.dump(dest_file, main_variant=main_variant)

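The relative paths produced above climb out of the per-variant ISO tree. For an included variant `AppStream` on x86_64 the rewritten entries resolve as follows (illustrative values):

```python
import os

os.path.join('../../..', 'AppStream', 'x86_64', 'os/Packages')
# -> '../../../AppStream/x86_64/os/Packages'
os.path.join('../../..', 'AppStream', 'x86_64', 'os')
# -> '../../../AppStream/x86_64/os'
```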
def tweak_treeinfo(compose, include_variants, source_file, dest_file):
    ti = load_and_tweak_treeinfo(source_file)
    for variant_uid in include_variants:
@@ -455,7 +502,6 @@ def tweak_treeinfo(compose, include_variants, source_file, dest_file):
        var = ti.variants[variant_id]
        var.paths.packages = os.path.join(var.uid, "Packages")
        var.paths.repository = var.uid

    ti.dump(dest_file)


@@ -17,13 +17,14 @@
import glob
import json
import os
import pickle
import shutil
import threading

from kobo.rpmlib import parse_nvra
from kobo.shortcuts import run
from productmd.rpms import Rpms
from six.moves import cPickle as pickle
from pungi.phases.pkgset.common import get_all_arches

try:
    from queue import Queue
@@ -649,6 +650,11 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
            pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
        ).rstrip("/")
        + "/",
        "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
            compose,
            get_all_arches(compose),
        ).koji_module.config.topdir.rstrip("/")
        + "/",
    }
    path_prefix = prefixes[compose.conf["pkgset_source"]]()
    package_list = set()

@@ -87,7 +87,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))
        pool.queue_put((os.path.realpath(pkg["path"]), dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
@@ -116,7 +116,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))
        pool.queue_put((os.path.realpath(pkg["path"]), dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
@@ -146,7 +146,7 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))
        pool.queue_put((os.path.realpath(pkg["path"]), dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
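The `os.path.realpath` wrapper matters when the queued package path is a symlink, for instance into a download cache: queuing the resolved path makes the linker operate on the underlying file rather than the symlink. That motivation is our reading of the change, not stated in the diff. A standalone sketch with made-up paths:

```python
import os

# /tmp/cache/foo.rpm is a symlink to /srv/storage/foo.rpm (illustrative):
os.path.realpath("/tmp/cache/foo.rpm")  # -> "/srv/storage/foo.rpm"
```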
@@ -15,7 +15,6 @@


import os
import shutil

from kobo.shortcuts import run
from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
@@ -220,9 +219,7 @@ def resolve_deps(compose, arch, variant, source_name=None):
    yum_arch = tree_arch_to_yum_arch(arch)
    tmp_dir = compose.paths.work.tmp_dir(arch, variant)
    cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
    # TODO: remove YUM code, fully migrate to DNF
    backends = {
        "yum": pungi_wrapper.get_pungi_cmd,
        "dnf": pungi_wrapper.get_pungi_cmd_dnf,
    }
    get_cmd = backends[compose.conf["gather_backend"]]
@@ -245,17 +242,6 @@ def resolve_deps(compose, arch, variant, source_name=None):
    with temp_dir(prefix="pungi_") as work_dir:
        run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)

    # Clean up tmp dir
    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
    yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
    if os.path.isdir(yumroot_dir):
        try:
            shutil.rmtree(yumroot_dir)
        except Exception as e:
            compose.log_warning(
                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
            )

    with open(pungi_log, "r") as f:
        packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)


@@ -16,7 +16,6 @@
import os
from pprint import pformat
import re
import six

import pungi.arch
from pungi.util import pkg_is_rpm, pkg_is_srpm, pkg_is_debug
@@ -74,7 +73,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
            if not pkg_is_rpm(pkg):
                continue
            for gathered_pkg, pkg_arch in packages:
                if isinstance(gathered_pkg, six.string_types) and not re.match(
                if isinstance(gathered_pkg, str) and not re.match(
                    gathered_pkg.replace(".", "\\.")
                    .replace("+", "\\+")
                    .replace("*", ".*")

@@ -13,7 +13,8 @@ from pungi.util import as_local_file, translate_path, get_repo_urls, version_generator
from pungi.phases import base
from pungi.linker import Linker
from pungi.wrappers.kojiwrapper import KojiWrapper
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from pungi.threading import TelemetryWorkerThread as WorkerThread
from kobo.shortcuts import force_list
from productmd.images import Image
from productmd.rpms import Rpms
@@ -22,10 +23,13 @@ from productmd.rpms import Rpms

# This is a mapping from formats to file extensions. The format is what koji
# image-build command expects as argument, and the extension is what the file
# name will be ending with. The extensions are used to filter out which task
# results will be pulled into the compose.
# results will be pulled into the compose. This dict is also used later in
# the process to set the image 'type' in productmd metadata terms - the type
# is set as the first key in this dict which has the file's extension in its
# values. This dict is imported and extended for similar purposes by other
# phases (at least osbuild and kiwibuild).
EXTENSIONS = {
    "docker": ["tar.gz", "tar.xz"],
    "iso": ["iso"],
    "docker": ["tar.xz"],
    "liveimg-squashfs": ["liveimg.squashfs"],
    "qcow": ["qcow"],
    "qcow2": ["qcow2"],
@@ -40,7 +44,6 @@ EXTENSIONS = {
    "vdi": ["vdi"],
    "vmdk": ["vmdk"],
    "vpc": ["vhd"],
    "vhd-compressed": ["vhd.gz", "vhd.xz"],
    "vsphere-ova": ["vsphere.ova"],
}
|
||||
|
||||
import os
|
||||
import re
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
from kobo.threads import ThreadPool
|
||||
|
||||
from .base import ConfigGuardedPhase, PhaseLoggerMixin
|
||||
from .. import util
|
||||
from ..wrappers import kojiwrapper
|
||||
from ..phases.osbs import add_metadata
|
||||
from ..threading import TelemetryWorkerThread as WorkerThread
|
||||
|
||||
|
||||
class ImageContainerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
|
||||
|
||||
pungi/phases/imagebuilder.py: new file, 263 lines (shown below)
@ -0,0 +1,263 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
from kobo.threads import ThreadPool
|
||||
from kobo import shortcuts
|
||||
from productmd.images import Image
|
||||
|
||||
from . import base
|
||||
from .. import util
|
||||
from ..linker import Linker
|
||||
from ..wrappers import kojiwrapper
|
||||
from .image_build import EXTENSIONS
|
||||
from ..threading import TelemetryWorkerThread as WorkerThread
|
||||
|
||||
|
||||
IMAGEBUILDEREXTENSIONS = [
|
||||
("vagrant-libvirt", ["vagrant.libvirt.box"], "vagrant-libvirt.box"),
|
||||
(
|
||||
"vagrant-virtualbox",
|
||||
["vagrant.virtualbox.box"],
|
||||
"vagrant-virtualbox.box",
|
||||
),
|
||||
("container", ["oci.tar.xz"], "tar.xz"),
|
||||
("wsl2", ["wsl"], "wsl"),
|
||||
# .iso images can be of many types - boot, cd, dvd, live... -
|
||||
# so 'boot' is just a default guess. 'iso' is not a valid
|
||||
# productmd image type
|
||||
("boot", [".iso"], "iso"),
|
||||
]
|
||||
|
||||
|
||||
class ImageBuilderPhase(
|
||||
base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
|
||||
):
|
||||
name = "imagebuilder"
|
||||
|
||||
def __init__(self, compose):
|
||||
super(ImageBuilderPhase, self).__init__(compose)
|
||||
self.pool = ThreadPool(logger=self.logger)
|
||||
|
||||
def _get_arches(self, image_conf, arches):
|
||||
"""Get an intersection of arches in the config dict and the given ones."""
|
||||
if "arches" in image_conf:
|
||||
arches = set(image_conf["arches"]) & arches
|
||||
return sorted(arches)
|
||||
|
||||
@staticmethod
|
||||
def _get_repo_urls(compose, repos, arch="$basearch"):
|
||||
"""
|
||||
Get list of repos with resolved repo URLs. Preserve repos defined
|
||||
as dicts.
|
||||
"""
|
||||
resolved_repos = []
|
||||
|
||||
for repo in repos:
|
||||
repo = util.get_repo_url(compose, repo, arch=arch)
|
||||
if repo is None:
|
||||
raise RuntimeError("Failed to resolve repo URL for %s" % repo)
|
||||
resolved_repos.append(repo)
|
||||
|
||||
return resolved_repos
|
||||
|
||||
def _get_repo(self, image_conf, variant):
|
||||
"""
|
||||
Get a list of repos. First included are those explicitly listed in
|
||||
config, followed by by repo for current variant if it's not included in
|
||||
the list already.
|
||||
"""
|
||||
repos = shortcuts.force_list(image_conf.get("repos", []))
|
||||
|
||||
if not variant.is_empty and variant.uid not in repos:
|
||||
repos.append(variant.uid)
|
||||
|
||||
return ImageBuilderPhase._get_repo_urls(self.compose, repos, arch="$arch")
|
||||
|
||||
def run(self):
|
||||
for variant in self.compose.get_variants():
|
||||
arches = set([x for x in variant.arches if x != "src"])
|
||||
|
||||
for image_conf in self.get_config_block(variant):
|
||||
build_arches = self._get_arches(image_conf, arches)
|
||||
if not build_arches:
|
||||
self.log_debug("skip: no arches")
|
||||
continue
|
||||
|
||||
# these properties can be set per-image *or* as e.g.
|
||||
# imagebuilder_release or global_release in the config
|
||||
generics = {
|
||||
"release": self.get_release(image_conf),
|
||||
"target": self.get_config(image_conf, "target"),
|
||||
"types": self.get_config(image_conf, "types"),
|
||||
"seed": self.get_config(image_conf, "seed"),
|
||||
"scratch": self.get_config(image_conf, "scratch"),
|
||||
"version": self.get_version(image_conf),
|
||||
}
|
||||
|
||||
repo = self._get_repo(image_conf, variant)
|
||||
|
||||
failable_arches = image_conf.pop("failable", [])
|
||||
if failable_arches == ["*"]:
|
||||
failable_arches = image_conf["arches"]
|
||||
|
||||
self.pool.add(RunImageBuilderThread(self.pool))
|
||||
self.pool.queue_put(
|
||||
(
|
||||
self.compose,
|
||||
variant,
|
||||
image_conf,
|
||||
build_arches,
|
||||
generics,
|
||||
repo,
|
||||
failable_arches,
|
||||
)
|
||||
)
|
||||
|
||||
self.pool.start()
|
||||
|
||||
|
||||
class RunImageBuilderThread(WorkerThread):
|
||||
def process(self, item, num):
|
||||
(compose, variant, config, arches, generics, repo, failable_arches) = item
|
||||
self.failable_arches = []
|
||||
            # the Koji task as a whole can only fail if *all* arches are failable
            can_task_fail = set(self.failable_arches).issuperset(set(arches))
            self.num = num
            with util.failable(
                compose,
                can_task_fail,
                variant,
                "*",
                "imageBuilderBuild",
                logger=self.pool._logger,
            ):
                self.worker(compose, variant, config, arches, generics, repo)

    def worker(self, compose, variant, config, arches, generics, repo):
        msg = "imageBuilderBuild task for variant %s" % variant.uid
        self.pool.log_info("[BEGIN] %s" % msg)
        koji = kojiwrapper.KojiWrapper(compose)
        koji.login()

        opts = {}
        opts["repos"] = repo

        if generics.get("release"):
            opts["release"] = generics["release"]

        if generics.get("seed"):
            opts["seed"] = generics["seed"]

        if generics.get("scratch"):
            opts["scratch"] = generics["scratch"]

        if config.get("ostree"):
            opts["ostree"] = config["ostree"]

        if config.get("blueprint"):
            opts["blueprint"] = config["blueprint"]

        task_id = koji.koji_proxy.imageBuilderBuild(
            generics["target"],
            arches,
            types=generics["types"],
            name=config["name"],
            version=generics["version"],
            opts=opts,
        )

        koji.save_task_id(task_id)

        # Wait for it to finish and capture the output into log file.
        log_dir = os.path.join(compose.paths.log.topdir(), "imageBuilderBuild")
        util.makedirs(log_dir)
        log_file = os.path.join(
            log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
        )
        if koji.watch_task(task_id, log_file) != 0:
            raise RuntimeError(
                "imageBuilderBuild task failed: %s. See %s for details"
                % (task_id, log_file)
            )

        # Refresh koji session which may have timed out while the task was
        # running. Watching is done via a subprocess, so the session is
        # inactive.
        koji = kojiwrapper.KojiWrapper(compose)

        linker = Linker(logger=self.pool._logger)

        # Process all images in the build. There should be one for each
        # architecture, but we don't verify that.
        paths = koji.get_image_paths(task_id)

        for arch, paths in paths.items():
            for path in paths:
                type_, format_ = _find_type_and_format(path)
                if not format_:
                    # Path doesn't match any known type.
                    continue

                # image_dir is absolute path to which the image should be copied.
                # We also need the same path as relative to compose directory for
                # including in the metadata.
                if format_ == "iso":
                    # If the produced image is actually an ISO, it should go to
                    # iso/ subdirectory.
                    image_dir = compose.paths.compose.iso_dir(arch, variant)
                    rel_image_dir = compose.paths.compose.iso_dir(
                        arch, variant, relative=True
                    )
                else:
                    image_dir = compose.paths.compose.image_dir(variant) % {
                        "arch": arch
                    }
                    rel_image_dir = compose.paths.compose.image_dir(
                        variant, relative=True
                    ) % {"arch": arch}
                util.makedirs(image_dir)

                filename = os.path.basename(path)

                image_dest = os.path.join(image_dir, filename)

                src_file = compose.koji_downloader.get_file(path)

                linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

                # Update image manifest
                img = Image(compose.im)

                # If user configured exact type, use it, otherwise try to
                # figure it out based on the koji output.
                img.type = config.get("manifest_type", type_)
                img.format = format_
                img.path = os.path.join(rel_image_dir, filename)
                img.mtime = util.get_mtime(image_dest)
                img.size = util.get_file_size(image_dest)
                img.arch = arch
                img.disc_number = 1  # We don't expect multiple disks
                img.disc_count = 1

                img.bootable = format_ == "iso"
                img.subvariant = config.get("subvariant", variant.uid)
                setattr(img, "can_fail", arch in self.failable_arches)
                setattr(img, "deliverable", "imageBuilderBuild")
                compose.im.add(variant=variant.uid, arch=arch, image=img)

        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, task_id))


def _find_type_and_format(path):
    # these are our image-builder-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory. they come first as we
    # want our oci.tar.xz mapping to win over the tar.xz one in
    # EXTENSIONS
    for type_, suffixes, format_ in IMAGEBUILDEREXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    return None, None
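A quick sanity check of the lookup above, as a trimmed, self-contained rendition. The table entries and file names here are invented for illustration; they are not the module's real mappings:

# Illustrative only: a cut-down version of _find_type_and_format.
IMAGEBUILDEREXTENSIONS = [
    # (productmd type, suffixes to match, productmd format)
    ("container", ["oci.tar.xz"], "tar.xz"),
]
EXTENSIONS = {
    # plain mapping: type -> list of suffixes (the suffix doubles as format)
    "docker": ["tar.xz"],
    "qcow2": ["qcow2"],
}

def find_type_and_format(path):
    # exclusive mappings win first, so "oci.tar.xz" is not misread as "tar.xz"
    for type_, suffixes, format_ in IMAGEBUILDEREXTENSIONS:
        if any(path.endswith(s) for s in suffixes):
            return type_, format_
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    return None, None

print(find_type_and_format("Fedora-42.oci.tar.xz"))  # ('container', 'tar.xz')
print(find_type_and_format("Fedora-42.qcow2"))       # ('qcow2', 'qcow2')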
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from kobo import shortcuts
from productmd.images import Image

@@ -10,11 +10,24 @@ from .. import util
from ..linker import Linker
from ..wrappers import kojiwrapper
from .image_build import EXTENSIONS
from ..threading import TelemetryWorkerThread as WorkerThread

KIWIEXTENSIONS = [
    ("vhd-compressed", ["vhdfixed.xz"], "vhd.xz"),
    ("vagrant-libvirt", ["vagrant.libvirt.box"], "vagrant-libvirt.box"),
    ("vagrant-virtualbox", ["vagrant.virtualbox.box"], "vagrant-virtualbox.box"),
    # .iso images can be of many types - boot, cd, dvd, live... -
    # so 'boot' is just a default guess. 'iso' is not a valid
    # productmd image type
    ("boot", [".iso"], "iso"),
    ("fex", ["erofs.xz"], "erofs.xz"),
    ("fex", ["erofs.gz"], "erofs.gz"),
    ("fex", ["erofs"], "erofs"),
    ("fex", ["squashfs.xz"], "squashfs.xz"),
    ("fex", ["squashfs.gz"], "squashfs.gz"),
    ("fex", ["squashfs"], "squashfs"),
    ("container", ["oci.tar.xz"], "tar.xz"),
    ("wsl2", ["wsl"], "wsl"),
]


@@ -84,6 +97,11 @@ class KiwiBuildPhase(
            "bundle_name_format": self.get_config(
                image_conf, "bundle_name_format"
            ),
            "version": self.get_version(image_conf),
            "repo_releasever": self.get_config(image_conf, "repo_releasever"),
            "use_buildroot_repo": self.get_config(
                image_conf, "use_buildroot_repo"
            ),
        }

        repo = self._get_repo(image_conf, variant)
@@ -144,6 +162,9 @@ class RunKiwiBuildThread(WorkerThread):
            result_bundle_name_format=generics["bundle_name_format"],
            # this ensures the task won't fail if only failable arches fail
            optional_arches=self.failable_arches,
            version=generics["version"],
            repo_releasever=generics["repo_releasever"],
            use_buildroot_repo=generics["use_buildroot_repo"],
        )

        koji.save_task_id(task_id)
@@ -180,10 +201,20 @@ class RunKiwiBuildThread(WorkerThread):
                # image_dir is absolute path to which the image should be copied.
                # We also need the same path as relative to compose directory for
                # including in the metadata.
                image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
                rel_image_dir = compose.paths.compose.image_dir(
                    variant, relative=True
                ) % {"arch": arch}
                if format_ == "iso":
                    # If the produced image is actually an ISO, it should go to
                    # iso/ subdirectory.
                    image_dir = compose.paths.compose.iso_dir(arch, variant)
                    rel_image_dir = compose.paths.compose.iso_dir(
                        arch, variant, relative=True
                    )
                else:
                    image_dir = compose.paths.compose.image_dir(variant) % {
                        "arch": arch
                    }
                    rel_image_dir = compose.paths.compose.image_dir(
                        variant, relative=True
                    ) % {"arch": arch}
                util.makedirs(image_dir)

                filename = os.path.basename(path)
@@ -197,9 +228,9 @@ class RunKiwiBuildThread(WorkerThread):
                # Update image manifest
                img = Image(compose.im)

                # Get the manifest type from the config if supplied, otherwise we
                # determine the manifest type based on the koji output
                img.type = type_
                # If user configured exact type, use it, otherwise try to
                # figure it out based on the koji output.
                img.type = config.get("manifest_type", type_)
                img.format = format_
                img.path = os.path.join(rel_image_dir, filename)
                img.mtime = util.get_mtime(image_dest)
@@ -207,7 +238,8 @@ class RunKiwiBuildThread(WorkerThread):
                img.arch = arch
                img.disc_number = 1  # We don't expect multiple disks
                img.disc_count = 1
                img.bootable = False
                # Kiwi produces only bootable ISOs. Other kinds of images are
                img.bootable = format_ == "iso"
                img.subvariant = config.get("subvariant", variant.uid)
                setattr(img, "can_fail", arch in self.failable_arches)
                setattr(img, "deliverable", "kiwibuild")
@@ -217,13 +249,15 @@ class RunKiwiBuildThread(WorkerThread):


def _find_type_and_format(path):
    # these are our kiwi-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory. they come first as we
    # want our oci.tar.xz mapping to win over the tar.xz one in
    # EXTENSIONS
    for type_, suffixes, format_ in KIWIEXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    for type_, suffixes in EXTENSIONS.items():
        for suffix in suffixes:
            if path.endswith(suffix):
                return type_, suffix
    # these are our kiwi-exclusive mappings for images whose extensions
    # aren't quite the same as imagefactory
    for type_, suffixes, format_ in KIWIEXTENSIONS:
        if any(path.endswith(suffix) for suffix in suffixes):
            return type_, format_
    return None, None
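A recurring change across these files is swapping kobo's WorkerThread for pungi.threading.TelemetryWorkerThread under the same name, so call sites stay untouched. A minimal sketch of what such a drop-in wrapper can look like; the span helper here is a stand-in, not pungi's real telemetry code:

import contextlib

from kobo.threads import WorkerThread


@contextlib.contextmanager
def start_span(name):
    # Stand-in for a real tracer (e.g. OpenTelemetry); just logs here.
    print("span start:", name)
    try:
        yield
    finally:
        print("span end:", name)


class TelemetryWorkerThread(WorkerThread):
    # Subclassing means existing code only needs to change its import:
    #   from ..threading import TelemetryWorkerThread as WorkerThread
    def run(self, *args, **kwargs):
        with start_span(type(self).__name__):
            return super().run(*args, **kwargs)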
@@ -9,8 +9,9 @@ from pungi.util import translate_path, get_repo_urls
from pungi.phases.base import ConfigGuardedPhase, ImageConfigMixin, PhaseLoggerMixin
from pungi.linker import Linker
from pungi.wrappers.kojiwrapper import KojiWrapper
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from productmd.images import Image
from pungi.threading import TelemetryWorkerThread as WorkerThread


class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
@@ -1,18 +1,19 @@
# -*- coding: utf-8 -*-

import configparser
import copy
import fnmatch
import json
import os
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from kobo import shortcuts
from productmd.rpms import Rpms
from six.moves import configparser

from .base import ConfigGuardedPhase, PhaseLoggerMixin
from .. import util
from ..wrappers import kojiwrapper
from ..wrappers.scm import get_file_from_scm
from ..threading import TelemetryWorkerThread as WorkerThread


class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from kobo import shortcuts
from productmd.images import Image

@@ -10,6 +10,22 @@ from .. import util
from ..linker import Linker
from ..wrappers import kojiwrapper
from .image_build import EXTENSIONS
from ..threading import TelemetryWorkerThread as WorkerThread

# copy and modify EXTENSIONS with some that osbuild produces but which
# do not exist as `koji image-build` formats
OSBUILDEXTENSIONS = EXTENSIONS.copy()
OSBUILDEXTENSIONS.update(
    # The key is the type_name as used in Koji archive, the second is a list of
    # expected file extensions.
    {
        "iso": ["iso"],
        "vhd-compressed": ["vhd.gz", "vhd.xz"],
        # The image is technically wsl2, but the type_name in Koji is set to
        # wsl.
        "wsl": ["wsl"],
    }
)


class OSBuildPhase(
@@ -203,7 +219,7 @@ class RunOSBuildThread(WorkerThread):
        # architecture, but we don't verify that.
        build_info = koji.koji_proxy.getBuild(build_id)
        for archive in koji.koji_proxy.listArchives(buildID=build_id):
            if archive["type_name"] not in EXTENSIONS:
            if archive["type_name"] not in OSBUILDEXTENSIONS:
                # Ignore values that are not of required types.
                continue

@@ -241,7 +257,7 @@ class RunOSBuildThread(WorkerThread):

            linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

            for suffix in EXTENSIONS[archive["type_name"]]:
            for suffix in OSBUILDEXTENSIONS[archive["type_name"]]:
                if archive["filename"].endswith(suffix):
                    break
            else:
@@ -258,7 +274,13 @@ class RunOSBuildThread(WorkerThread):
            # determine the manifest type based on the koji output
            img.type = config.get("manifest_type")
            if not img.type:
                if archive["type_name"] != "iso":
                if archive["type_name"] == "wsl":
                    # productmd only knows wsl2 as type, so let's translate
                    # from the koji type so that users don't need to set the
                    # type explicitly. There really is no other possible type
                    # here anyway.
                    img.type = "wsl2"
                elif archive["type_name"] != "iso":
                    img.type = archive["type_name"]
                else:
                    fn = archive["filename"].lower()
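A minimal sketch of the type decision above, showing how a Koji archive's type_name maps onto a productmd image type. The archive dicts are hypothetical:

def manifest_type(archive, configured_type=None):
    # Mirrors the decision above: explicit config wins, then the wsl -> wsl2
    # translation, then the type_name itself for anything that isn't an ISO.
    if configured_type:
        return configured_type
    if archive["type_name"] == "wsl":
        return "wsl2"  # productmd only knows wsl2
    if archive["type_name"] != "iso":
        return archive["type_name"]
    return None  # ISOs fall through to the filename heuristics instead

print(manifest_type({"type_name": "wsl"}))    # wsl2
print(manifest_type({"type_name": "qcow2"}))  # qcow2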
@@ -4,7 +4,7 @@ import copy
import json
import os
from kobo import shortcuts
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
from collections import OrderedDict

from pungi.arch_utils import getBaseArch
@@ -14,6 +14,7 @@ from .. import util
from ..ostree.utils import get_ref_from_treefile, get_commitid_from_commitid_file
from ..util import get_repo_dicts, translate_path
from ..wrappers import scm
from ..threading import TelemetryWorkerThread as WorkerThread


class OSTreePhase(ConfigGuardedPhase):
@@ -119,14 +119,12 @@ class OSTreeContainerThread(WorkerThread):
    def _run_ostree_container_cmd(
        self, compose, variant, arch, config, config_repo, extra_config_file=None
    ):
        subvariant = config.get("subvariant", variant.uid)
        target_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
        util.makedirs(target_dir)
        version = util.version_generator(compose, config.get("version"))
        archive_name = "%s-%s-%s" % (
            compose.conf["release_short"],
            variant.uid,
            version,
        )
        anb = config.get("name", "%s-%s" % (compose.conf["release_short"], subvariant))
        archive_name = "%s-%s-%s" % (anb, arch, version)

        # Run the pungi-make-ostree command locally to create a script to
        # execute in runroot environment.
@@ -141,7 +139,7 @@ class OSTreeContainerThread(WorkerThread):
            "--version=%s" % version,
        ]

        _, runroot_script = shortcuts.run(cmd, universal_newlines=True)
        _, runroot_script = shortcuts.run(cmd, text=True, errors="replace")

        default_packages = ["ostree", "rpm-ostree", "selinux-policy-targeted"]
        additional_packages = config.get("runroot_packages", [])
@@ -166,9 +164,9 @@ class OSTreeContainerThread(WorkerThread):
        # Update image manifest
        img = Image(compose.im)

        # Get the manifest type from the config if supplied, otherwise we
        # determine the manifest type based on the koji output
        img.type = "ociarchive"
        # these are hardcoded as they should always be correct, we
        # could potentially allow overriding them via config though
        img.type = "bootable-container"
        img.format = "ociarchive"
        img.path = os.path.relpath(fullpath, compose.paths.compose.topdir())
        img.mtime = util.get_mtime(fullpath)
@@ -177,7 +175,7 @@ class OSTreeContainerThread(WorkerThread):
        img.disc_number = 1
        img.disc_count = 1
        img.bootable = False
        img.subvariant = config.get("subvariant", variant.uid)
        img.subvariant = subvariant
        setattr(img, "can_fail", self.can_fail)
        setattr(img, "deliverable", "ostree-container")
        compose.im.add(variant=variant.uid, arch=arch, image=img)
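A small illustration of the archive naming change above, with made-up config values; the old scheme used release_short plus variant uid, the new one uses a configurable name (defaulting to release_short plus subvariant) plus the arch:

release_short, variant_uid, subvariant, arch, version = (
    "F", "Silverblue", "Silverblue", "x86_64", "42.20250101.0"
)
config = {}  # may carry a "name" override

old_name = "%s-%s-%s" % (release_short, variant_uid, version)
anb = config.get("name", "%s-%s" % (release_short, subvariant))
new_name = "%s-%s-%s" % (anb, arch, version)

print(old_name)  # F-Silverblue-42.20250101.0
print(new_name)  # F-Silverblue-x86_64-42.20250101.0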
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-

import os
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool
import shlex
import shutil
from productmd import images
from six.moves import shlex_quote
from kobo import shortcuts

from .base import ConfigGuardedPhase, PhaseLoggerMixin
@@ -20,6 +20,7 @@ from ..util import (
)
from ..wrappers import iso, lorax, scm
from ..runroot import Runroot
from ..threading import TelemetryWorkerThread as WorkerThread


class OstreeInstallerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
@@ -275,8 +276,8 @@ class OstreeInstallerThread(WorkerThread):
            skip_branding=config.get("skip_branding"),
        )
        cmd = "rm -rf %s && %s" % (
            shlex_quote(output_dir),
            " ".join([shlex_quote(x) for x in lorax_cmd]),
            shlex.quote(output_dir),
            " ".join([shlex.quote(x) for x in lorax_cmd]),
        )

        runroot.run(
@@ -22,8 +22,10 @@ It automatically finds signed copies according to *sigkey_ordering*.
import itertools
import json
import os
import pickle
import time
from six.moves import cPickle as pickle
import pgpy
import rpm
from functools import partial

import kobo.log
@@ -31,11 +33,12 @@ import kobo.pkgset
import kobo.rpmlib
from kobo.shortcuts import compute_file_checksums

from kobo.threads import WorkerThread, ThreadPool
from kobo.threads import ThreadPool

from pungi.util import pkg_is_srpm, copy_all
from pungi.arch import get_valid_arches, is_excluded
from pungi.errors import UnsignedPackagesError
from pungi.threading import TelemetryWorkerThread as WorkerThread


class ExtendedRpmWrapper(kobo.pkgset.SimpleRpmWrapper):
@@ -152,9 +155,15 @@ class PackageSetBase(kobo.log.LoggingBase):
        """

        def nvr_formatter(package_info):
            # joins NVR parts of the package with '-' character.
            return "-".join(
                (package_info["name"], package_info["version"], package_info["release"])
            epoch_suffix = ''
            if package_info['epoch'] is not None:
                epoch_suffix = ':' + package_info['epoch']
            return (
                f"{package_info['name']}"
                f"{epoch_suffix}-"
                f"{package_info['version']}-"
                f"{package_info['release']}."
                f"{package_info['arch']}"
            )

        def get_error(sigkeys, infos):
@@ -265,7 +274,7 @@ class PackageSetBase(kobo.log.LoggingBase):
        for arch in arch_list:
            self.rpms_by_arch.setdefault(arch, [])
            for i in other.rpms_by_arch.get(arch, []):
                if i.file_path in self.file_cache:
                if i.file_path in self.file_cache.file_cache:
                    # TODO: test if it really works
                    continue
                if inherit_to_noarch and exclusivearch_list and arch == "noarch":
@@ -503,7 +512,8 @@ class KojiPackageSet(PackageSetBase):

        response = None
        if self.cache_region:
            cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (
            cache_key = "%s.get_latest_rpms_%s_%s_%s" % (
                str(self.__class__.__name__),
                str(tag),
                str(event),
                str(inherit),
@@ -525,6 +535,8 @@ class KojiPackageSet(PackageSetBase):

        return response


    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item

@@ -536,22 +548,14 @@ class KojiPackageSet(PackageSetBase):
        pathinfo = self.koji_wrapper.koji_module.pathinfo
        paths = []

        if "getRPMChecksums" in self.koji_proxy.system.listMethods():

            def checksum_validator(keyname, pkg_path):
                checksums = self.koji_proxy.getRPMChecksums(
                    rpm_info["id"], checksum_types=("sha256",)
                )
                if "sha256" in checksums.get(keyname, {}):
                    computed = compute_file_checksums(pkg_path, ("sha256",))
                    if computed["sha256"] != checksums[keyname]["sha256"]:
                        raise RuntimeError("Checksum mismatch for %s" % pkg_path)

        else:

            def checksum_validator(keyname, pkg_path):
                # Koji doesn't support checksums yet
                pass
        def checksum_validator(keyname, pkg_path):
            checksums = self.koji_proxy.getRPMChecksums(
                rpm_info["id"], checksum_types=("sha256",)
            )
            if "sha256" in checksums.get(keyname, {}):
                computed = compute_file_checksums(pkg_path, ("sha256",))
                if computed["sha256"] != checksums[keyname]["sha256"]:
                    raise RuntimeError("Checksum mismatch for %s" % pkg_path)

        attempts_left = self.signed_packages_retries + 1
        while attempts_left > 0:
@@ -885,6 +889,67 @@ class KojiPackageSet(PackageSetBase):
        return False


class KojiMockPackageSet(KojiPackageSet):

    def _is_rpm_signed(self, rpm_path) -> bool:
        ts = rpm.TransactionSet()
        ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
        sigkeys = [
            sigkey.lower() for sigkey in self.sigkey_ordering
            if sigkey is not None
        ]
        if not sigkeys:
            return True
        with open(rpm_path, 'rb') as fd:
            header = ts.hdrFromFdno(fd)
        signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
        if signature is None:
            return False
        pgp_msg = pgpy.PGPMessage.from_blob(signature)
        return any(
            signature.signer.lower() in sigkeys
            for signature in pgp_msg.signatures
        )

    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item

        # Check if this RPM is coming from scratch task.
        # In this case, we already know the path.
        if "path_from_task" in rpm_info:
            return rpm_info["path_from_task"]

        # we replaced this part because pungi uses way
        # of guessing path of package on koji based on sigkey
        # we don't need that because all our packages will
        # be ready for release
        # signature verification is still done during deps resolution
        pathinfo = self.koji_wrapper.koji_module.pathinfo

        rpm_path = os.path.join(pathinfo.topdir, pathinfo.rpm(rpm_info))
        if os.path.isfile(rpm_path):
            if not self._is_rpm_signed(rpm_path):
                self._invalid_sigkey_rpms.append(rpm_info)
                self.log_error(
                    'RPM "%s" not found for sigs: "%s". Path checked: "%s"',
                    rpm_info, self.sigkey_ordering, rpm_path
                )
                return
            return rpm_path
        else:
            self.log_warning("RPM %s not found" % rpm_path)
        return None

    def populate(self, tag, event=None, inherit=True, include_packages=None):
        result = super().populate(
            tag=tag,
            event=event,
            inherit=inherit,
            include_packages=include_packages,
        )
        return result


def _is_src(rpm_info):
    """Check if rpm info object returned by Koji refers to source packages."""
    return rpm_info["arch"] in ("src", "nosrc")
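The new nvr_formatter above emits full NEVRA strings instead of bare NVRs, which disambiguates packages that differ only in epoch or arch. A quick illustration with a hypothetical package_info dict:

def nvr_formatter(package_info):
    epoch_suffix = ''
    if package_info['epoch'] is not None:
        epoch_suffix = ':' + package_info['epoch']
    return (
        f"{package_info['name']}"
        f"{epoch_suffix}-"
        f"{package_info['version']}-"
        f"{package_info['release']}."
        f"{package_info['arch']}"
    )

pkg = {"name": "bash", "epoch": "1", "version": "5.2.26",
       "release": "3.fc40", "arch": "x86_64"}
print(nvr_formatter(pkg))  # bash:1-5.2.26-3.fc40.x86_64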
@@ -15,8 +15,10 @@

from .source_koji import PkgsetSourceKoji
from .source_repos import PkgsetSourceRepos
from .source_kojimock import PkgsetSourceKojiMock

ALL_SOURCES = {
    "koji": PkgsetSourceKoji,
    "repos": PkgsetSourceRepos,
    "kojimock": PkgsetSourceKojiMock,
}

pungi/phases/pkgset/sources/source_kojimock.py (new file, 1024 lines)
File diff suppressed because it is too large
@@ -15,7 +15,6 @@


import os
import shutil

from kobo.shortcuts import run

@@ -76,7 +75,6 @@ def get_pkgset_from_repos(compose):
    pungi_dir = compose.paths.work.pungi_download_dir(arch)

    backends = {
        "yum": pungi.get_pungi_cmd,
        "dnf": pungi.get_pungi_cmd_dnf,
    }
    get_cmd = backends[compose.conf["gather_backend"]]
@@ -93,8 +91,6 @@ def get_pkgset_from_repos(compose):
        cache_dir=compose.paths.work.pungi_cache_dir(arch=arch),
        profiler=profiler,
    )
    if compose.conf["gather_backend"] == "yum":
        cmd.append("--force")

    # TODO: runroot
    run(cmd, logfile=pungi_log, show_cmd=True, stdout=False)
@@ -111,17 +107,6 @@ def get_pkgset_from_repos(compose):
        flist.append(dst)
        pool.queue_put((src, dst))

    # Clean up tmp dir
    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
    yumroot_dir = os.path.join(pungi_dir, "work", arch, "yumroot")
    if os.path.isdir(yumroot_dir):
        try:
            shutil.rmtree(yumroot_dir)
        except Exception as e:
            compose.log_warning(
                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
            )

    msg = "Linking downloaded pkgset packages"
    compose.log_info("[BEGIN] %s" % msg)
    pool.start()
@@ -101,27 +101,48 @@ def run_repoclosure(compose):


def _delete_repoclosure_cache_dirs(compose):
    if "dnf" == compose.conf["repoclosure_backend"]:
    """Find any cached repodata and delete it. The cache is not going to be
    reused ever again, and would otherwise consume storage space.

    DNF will use a different directory depending on whether it is running as
    root or not. It is not easy to tell though if DNF 4 or 5 is being used, so
    let's be sure and check both locations. All our cached entries are prefixed
    by compose ID, so there's very limited amount of risk that we would delete
    something incorrect.
    """
    cache_dirs = []

    try:
        # DNF 4
        from dnf.const import SYSTEM_CACHEDIR
        from dnf.util import am_i_root
        from dnf.yum.misc import getCacheDir

        if am_i_root():
            top_cache_dir = SYSTEM_CACHEDIR
            cache_dirs.append(SYSTEM_CACHEDIR)
        else:
            top_cache_dir = getCacheDir()
        else:
            from yum.misc import getCacheDir
            cache_dirs.append(getCacheDir())
    except ImportError:
        pass

            top_cache_dir = getCacheDir()
    try:
        # DNF 5 config works directly for root, no need for special case.
        import libdnf5

        for name in os.listdir(top_cache_dir):
            if name.startswith(compose.compose_id):
                cache_path = os.path.join(top_cache_dir, name)
                if os.path.isdir(cache_path):
                    shutil.rmtree(cache_path)
                else:
                    os.remove(cache_path)
        base = libdnf5.base.Base()
        config = base.get_config()
        cache_dirs.append(config.cachedir)
    except ImportError:
        pass

    for top_cache_dir in cache_dirs:
        for name in os.listdir(top_cache_dir):
            if name.startswith(compose.compose_id):
                cache_path = os.path.join(top_cache_dir, name)
                if os.path.isdir(cache_path):
                    shutil.rmtree(cache_path)
                else:
                    os.remove(cache_path)


def _run_repoclosure_cmd(compose, repos, lookaside, arches, logfile):
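A standalone sketch of the two-location discovery logic above; it collects whatever cache directories are reachable and degrades gracefully when neither DNF 4 nor DNF 5 bindings are importable:

def candidate_cache_dirs():
    dirs = []
    try:
        # DNF 4: root and non-root use different locations.
        from dnf.const import SYSTEM_CACHEDIR
        from dnf.util import am_i_root
        from dnf.yum.misc import getCacheDir
        dirs.append(SYSTEM_CACHEDIR if am_i_root() else getCacheDir())
    except ImportError:
        pass
    try:
        # DNF 5: a single configured cachedir, regardless of user.
        import libdnf5
        dirs.append(libdnf5.base.Base().get_config().cachedir)
    except ImportError:
        pass
    return dirs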
@@ -95,7 +95,7 @@ def is_iso(f):


def has_mbr(f):
    return _check_magic(f, 0x1FE, b"\x55\xAA")
    return _check_magic(f, 0x1FE, b"\x55\xaa")


def has_gpt(f):
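A minimal sketch of the kind of magic-byte check has_mbr relies on; the _check_magic helper here is a guess at its shape, based only on the call above:

import io

def _check_magic(f, offset, bytes_):
    # Read len(bytes_) bytes at the given offset and compare.
    f.seek(offset)
    return f.read(len(bytes_)) == bytes_

# An MBR ends with the boot signature 0x55 0xAA at offset 0x1FE.
disk = io.BytesIO(b"\x00" * 0x1FE + b"\x55\xaa")
print(_check_magic(disk, 0x1FE, b"\x55\xaa"))  # True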
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-

from kobo import shortcuts
from kobo.threads import ThreadPool, WorkerThread
from kobo.threads import ThreadPool

from pungi.threading import TelemetryWorkerThread as WorkerThread


class WeaverPhase(object):
@@ -16,12 +16,11 @@
import contextlib
import os
import re
import shlex
import shutil
import tarfile

import requests
import six
from six.moves import shlex_quote
import kobo.log
from kobo.shortcuts import run

@@ -100,7 +99,7 @@ class Runroot(kobo.log.LoggingBase):
        log_file = os.path.join(log_dir, "program.log")
        try:
            with open(log_file) as f:
                for line in f:
                for line in f.readlines():
                    if "losetup: cannot find an unused loop device" in line:
                        return True
                    if re.match("losetup: .* failed to set up loop device", line):
@@ -157,7 +156,7 @@ class Runroot(kobo.log.LoggingBase):
        formatted_cmd = command.format(**fmt_dict) if fmt_dict else command
        ssh_cmd = ["ssh", "-oBatchMode=yes", "-n", "-l", user, hostname, formatted_cmd]
        output = run(ssh_cmd, show_cmd=True, logfile=log_file)[1]
        if six.PY3 and isinstance(output, bytes):
        if isinstance(output, bytes):
            return output.decode()
        else:
            return output
@@ -184,7 +183,7 @@ class Runroot(kobo.log.LoggingBase):
        # If the output dir is defined, change the permissions of files generated
        # by the runroot task, so the Pungi user can access them.
        if chown_paths:
            paths = " ".join(shlex_quote(pth) for pth in chown_paths)
            paths = " ".join(shlex.quote(pth) for pth in chown_paths)
            command += " ; EXIT_CODE=$?"
            # Make the files world readable
            command += " ; chmod -R a+r %s" % paths
@@ -455,6 +454,9 @@ def download_and_extract_archive(compose, task_id, fname, destination):
    # So instead let's generate a path and attempt to convert it to a URL.
    server_path = os.path.join(koji.pathinfo.task(task_id), fname)
    archive_url = server_path.replace(koji.config.topdir, koji.config.topurl)
    with util.temp_dir(prefix="buildinstall-download") as tmp_dir:
    tmp_dir = compose.mkdtemp(prefix="buildinstall-download")
    try:
        local_path = _download_archive(task_id, fname, archive_url, tmp_dir)
        _extract_archive(task_id, fname, local_path, destination)
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
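The shlex_quote to shlex.quote moves above are part of dropping the six compatibility layer; both provide the same shell quoting. A short illustration with invented paths:

import shlex

chown_paths = ["/mnt/out/my image.iso", "/mnt/out/$(evil)"]
paths = " ".join(shlex.quote(p) for p in chown_paths)
command = "chmod -R a+r %s" % paths
print(command)
# chmod -R a+r '/mnt/out/my image.iso' '/mnt/out/$(evil)'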
@@ -4,13 +4,12 @@ from __future__ import absolute_import
from __future__ import print_function

import argparse
import configparser
import json
import os
import shutil
import sys

from six.moves import configparser

import kobo.conf
import pungi.checks
import pungi.util

@@ -8,8 +8,6 @@ import json
import os
import sys

import six

import pungi.checks
import pungi.compose
import pungi.paths
@@ -56,7 +54,7 @@ class ValidationCompose(pungi.compose.Compose):
def read_variants(compose, config):
    with pungi.util.temp_dir() as tmp_dir:
        scm_dict = compose.conf["variants_file"]
        if isinstance(scm_dict, six.string_types) and scm_dict[0] != "/":
        if isinstance(scm_dict, str) and scm_dict[0] != "/":
            config_dir = os.path.dirname(config)
            scm_dict = os.path.join(config_dir, scm_dict)
        files = pungi.wrappers.scm.get_file_from_scm(scm_dict, tmp_dir)
pungi/scripts/create_extra_repo.py (new file, 441 lines)
@@ -0,0 +1,441 @@
# coding=utf-8

import argparse
import os
import subprocess
import tempfile
from shutil import rmtree
from typing import (
    AnyStr,
    List,
    Dict,
    Optional,
)

import createrepo_c as cr
import requests
import yaml
from dataclasses import dataclass, field

from .create_packages_json import (
    PackagesGenerator,
    RepoInfo,
    VariantInfo,
)


@dataclass
class ExtraVariantInfo(VariantInfo):

    modules: List[AnyStr] = field(default_factory=list)
    packages: List[AnyStr] = field(default_factory=list)


class CreateExtraRepo(PackagesGenerator):

    def __init__(
        self,
        variants: List[ExtraVariantInfo],
        bs_auth_token: AnyStr,
        local_repository_path: AnyStr,
        clear_target_repo: bool = True,
    ):
        self.variants = []  # type: List[ExtraVariantInfo]
        super().__init__(variants, [], [])
        self.auth_headers = {
            'Authorization': f'Bearer {bs_auth_token}',
        }
        # modules data of modules.yaml.gz from an existing local repo
        self.local_modules_data = []
        self.local_repository_path = local_repository_path
        # path to modules.yaml, which is generated by the class
        self.default_modules_yaml_path = os.path.join(
            local_repository_path,
            'modules.yaml',
        )
        if clear_target_repo:
            if os.path.exists(self.local_repository_path):
                rmtree(self.local_repository_path)
            os.makedirs(self.local_repository_path, exist_ok=True)
        else:
            self._read_local_modules_yaml()

    def _read_local_modules_yaml(self):
        """
        Read modules data from an existing local repo
        """
        repomd_file_path = os.path.join(
            self.local_repository_path,
            'repodata',
            'repomd.xml',
        )
        repomd_object = self._parse_repomd(repomd_file_path)
        for repomd_record in repomd_object.records:
            if repomd_record.type != 'modules':
                continue
            modules_yaml_path = os.path.join(
                self.local_repository_path,
                repomd_record.location_href,
            )
            self.local_modules_data = list(self._parse_modules_file(
                modules_yaml_path,
            ))
            break

    def _dump_local_modules_yaml(self):
        """
        Dump merged modules data to a local repo
        """
        if self.local_modules_data:
            with open(self.default_modules_yaml_path, 'w') as yaml_file:
                yaml.dump_all(
                    self.local_modules_data,
                    yaml_file,
                )

    @staticmethod
    def get_repo_info_from_bs_repo(
        auth_token: AnyStr,
        build_id: AnyStr,
        arch: AnyStr,
        packages: Optional[List[AnyStr]] = None,
        modules: Optional[List[AnyStr]] = None,
    ) -> List[ExtraVariantInfo]:
        """
        Get info about a BS repo and save it to
        an object of class ExtraRepoInfo
        :param auth_token: Auth token to Build System
        :param build_id: ID of a build from BS
        :param arch: an architecture of repo which will be used
        :param packages: list of names of packages which will be put to a
               local repo from a BS repo
        :param modules: list of names of modules which will be put to a
               local repo from a BS repo
        :return: list of ExtraRepoInfo with info about the BS repos
        """

        bs_url = 'https://build.cloudlinux.com'
        api_uri = 'api/v1'
        bs_repo_suffix = 'build_repos'

        variants_info = []

        # get the full info about a BS repo
        repo_request = requests.get(
            url=os.path.join(
                bs_url,
                api_uri,
                'builds',
                build_id,
            ),
            headers={
                'Authorization': f'Bearer {auth_token}',
            },
        )
        repo_request.raise_for_status()
        result = repo_request.json()
        for build_platform in result['build_platforms']:
            platform_name = build_platform['name']
            for architecture in build_platform['architectures']:
                # skip repo with unsuitable architecture
                if architecture != arch:
                    continue
                variant_info = ExtraVariantInfo(
                    name=f'{build_id}-{platform_name}-{architecture}',
                    arch=architecture,
                    packages=packages,
                    modules=modules,
                    repos=[
                        RepoInfo(
                            path=os.path.join(
                                bs_url,
                                bs_repo_suffix,
                                build_id,
                                platform_name,
                            ),
                            folder=architecture,
                            is_remote=True,
                        )
                    ]
                )
                variants_info.append(variant_info)
        return variants_info

    def _create_local_extra_repo(self):
        """
        Call `createrepo_c <path_to_repo>` for creating a local repo
        """
        subprocess.call(
            f'createrepo_c {self.local_repository_path}',
            shell=True,
        )
        # remove an unnecessary temporary modules.yaml
        if os.path.exists(self.default_modules_yaml_path):
            os.remove(self.default_modules_yaml_path)

    def get_remote_file_content(
        self,
        file_url: AnyStr,
    ) -> AnyStr:
        """
        Get content from a remote file and write it to a temp file
        :param file_url: url of a remote file
        :return: path to a temp file
        """

        file_request = requests.get(
            url=file_url,
            # for the case when we get a file from BS
            headers=self.auth_headers,
        )
        file_request.raise_for_status()
        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
            file_stream.write(file_request.content)
            return file_stream.name

    def _download_rpm_to_local_repo(
        self,
        package_location: AnyStr,
        repo_info: RepoInfo,
    ) -> None:
        """
        Download a rpm package from a remote repo and save it to a local repo
        :param package_location: relative uri of a package in a remote repo
        :param repo_info: info about a remote repo which contains a specific
               rpm package
        """
        rpm_package_remote_path = os.path.join(
            repo_info.path,
            repo_info.folder,
            package_location,
        )
        rpm_package_local_path = os.path.join(
            self.local_repository_path,
            os.path.basename(package_location),
        )
        rpm_request = requests.get(
            url=rpm_package_remote_path,
            headers=self.auth_headers,
        )
        rpm_request.raise_for_status()
        with open(rpm_package_local_path, 'wb') as rpm_file:
            rpm_file.write(rpm_request.content)

    def _download_packages(
        self,
        packages: Dict[AnyStr, cr.Package],
        variant_info: ExtraVariantInfo
    ):
        """
        Download all defined packages from a remote repo
        :param packages: information about all packages (including
               modularity) in a remote repo
        :param variant_info: information about a remote variant
        """
        for package in packages.values():
            package_name = package.name
            # Skip a current package from a remote repo if we defined
            # the list packages and a current package doesn't belong to it
            if variant_info.packages and \
                    package_name not in variant_info.packages:
                continue
            for repo_info in variant_info.repos:
                self._download_rpm_to_local_repo(
                    package_location=package.location_href,
                    repo_info=repo_info,
                )

    def _download_modules(
        self,
        modules_data: List[Dict],
        variant_info: ExtraVariantInfo,
        packages: Dict[AnyStr, cr.Package]
    ):
        """
        Download all defined modularity packages and their data from
        a remote repo
        :param modules_data: information about all modules in a remote repo
        :param variant_info: information about a remote variant
        :param packages: information about all packages (including
               modularity) in a remote repo
        """
        for module in modules_data:
            module_data = module['data']
            # Skip a current module from a remote repo if we defined
            # the list modules and a current module doesn't belong to it
            if variant_info.modules and \
                    module_data['name'] not in variant_info.modules:
                continue
            # we should add info about a module if the local repodata
            # doesn't have it
            if module not in self.local_modules_data:
                self.local_modules_data.append(module)
            # just skip a module's record if it doesn't have rpm artifact
            if module['document'] != 'modulemd' or \
                    'artifacts' not in module_data or \
                    'rpms' not in module_data['artifacts']:
                continue
            for rpm in module['data']['artifacts']['rpms']:
                # Empty repo_info.packages means that we will download
                # all packages from repo including
                # the modularity packages
                if not variant_info.packages:
                    break
                # skip a rpm if it doesn't belong to a processed repo
                if rpm not in packages:
                    continue
                for repo_info in variant_info.repos:
                    self._download_rpm_to_local_repo(
                        package_location=packages[rpm].location_href,
                        repo_info=repo_info,
                    )

    def create_extra_repo(self):
        """
        1. Get from the remote repos the specific (or all) packages/modules
        2. Save them to a local repo
        3. Save info about the modules to a local repo
        4. Call `createrepo_c` which creates a local repo
           with the right repodata
        """
        for variant_info in self.variants:
            for repo_info in variant_info.repos:
                repomd_records = self._get_repomd_records(
                    repo_info=repo_info,
                )
                packages_iterator = self.get_packages_iterator(repo_info)
                # parse the repodata (including modules.yaml.gz)
                modules_data = self._parse_module_repomd_record(
                    repo_info=repo_info,
                    repomd_records=repomd_records,
                )
                # convert the packages dict to more usable form
                # for future checking that a rpm from the module's artifacts
                # belongs to a processed repository
                packages = {
                    f'{package.name}-{package.epoch}:{package.version}-'
                    f'{package.release}.{package.arch}':
                        package for package in packages_iterator
                }
                self._download_modules(
                    modules_data=modules_data,
                    variant_info=variant_info,
                    packages=packages,
                )
                self._download_packages(
                    packages=packages,
                    variant_info=variant_info,
                )

        self._dump_local_modules_yaml()
        self._create_local_extra_repo()


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--bs-auth-token',
        help='Auth token for Build System',
    )
    parser.add_argument(
        '--local-repo-path',
        help='Path to a local repo. E.g. /var/repo/test_repo',
        required=True,
    )
    parser.add_argument(
        '--clear-local-repo',
        help='Clear a local repo before creating a new',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--repo',
        action='append',
        help='Path to a folder with repofolders or build id. E.g. '
             '"http://koji.cloudlinux.com/mirrors/rhel_mirror" or '
             '"601809b3c2f5b0e458b14cd3"',
        required=True,
    )
    parser.add_argument(
        '--repo-folder',
        action='append',
        help='A folder which contains folder repodata . E.g. "baseos-stream"',
        required=True,
    )
    parser.add_argument(
        '--repo-arch',
        action='append',
        help='What architecture packages a repository contains. E.g. "x86_64"',
        required=True,
    )
    parser.add_argument(
        '--packages',
        action='append',
        type=str,
        default=[],
        help='A list of packages names which we want to download to local '
             'extra repo. We will download all of packages if param is empty',
        required=True,
    )
    parser.add_argument(
        '--modules',
        action='append',
        type=str,
        default=[],
        help='A list of modules names which we want to download to local '
             'extra repo. We will download all of modules if param is empty',
        required=True,
    )

    return parser


def cli_main():
    args = create_parser().parse_args()
    repos_info = []
    for repo, repo_folder, repo_arch, packages, modules in zip(
        args.repo,
        args.repo_folder,
        args.repo_arch,
        args.packages,
        args.modules,
    ):
        modules = modules.split()
        packages = packages.split()
        if repo.startswith('http://'):
            repos_info.append(
                ExtraVariantInfo(
                    name=repo_folder,
                    arch=repo_arch,
                    repos=[
                        RepoInfo(
                            path=repo,
                            folder=repo_folder,
                            is_remote=True,
                        )
                    ],
                    modules=modules,
                    packages=packages,
                )
            )
        else:
            repos_info.extend(
                CreateExtraRepo.get_repo_info_from_bs_repo(
                    auth_token=args.bs_auth_token,
                    build_id=repo,
                    arch=repo_arch,
                    modules=modules,
                    packages=packages,
                )
            )
    cer = CreateExtraRepo(
        variants=repos_info,
        bs_auth_token=args.bs_auth_token,
        local_repository_path=args.local_repo_path,
        clear_target_repo=args.clear_local_repo,
    )
    cer.create_extra_repo()


if __name__ == '__main__':
    cli_main()
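A hedged usage sketch of the new script above, driving it from Python rather than the CLI. The mirror URL, repo folder, and target path are invented; running this would create /var/repo/test_repo and hit the network:

from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo
from pungi.scripts.create_packages_json import RepoInfo

variant = ExtraVariantInfo(
    name='baseos-stream',
    arch='x86_64',
    repos=[RepoInfo(
        path='http://example.invalid/mirrors/rhel_mirror',
        folder='baseos-stream',
        is_remote=True,
    )],
    packages=['bash'],  # an empty list would mean "download everything"
    modules=[],
)
cer = CreateExtraRepo(
    variants=[variant],
    bs_auth_token='',  # only needed for Build System repos
    local_repository_path='/var/repo/test_repo',
)
cer.create_extra_repo()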
pungi/scripts/create_packages_json.py (new file, 514 lines)
@@ -0,0 +1,514 @@
# coding=utf-8
"""
The tool allows generating packages.json. This file is used by pungi
as the `gather_prepopulate` parameter.
Sample of using repodata files taken from
https://github.com/rpm-software-management/createrepo_c/blob/master/examples/python/repodata_parsing.py
"""

import argparse
import gzip
import json
import logging
import lzma
import os
import re
import tempfile
from collections import defaultdict
from itertools import tee
from pathlib import Path
from typing import (
    AnyStr,
    Dict,
    List,
    Any,
    Iterator,
    Optional,
    Tuple,
    Union,
)

import binascii
from urllib.parse import urljoin

import requests
import rpm
import yaml
from createrepo_c import (
    Package,
    PackageIterator,
    Repomd,
    RepomdRecord,
)
from dataclasses import dataclass, field
from kobo.rpmlib import parse_nvra

logging.basicConfig(level=logging.INFO)


def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
    return binascii.hexlify(first_two_bytes) == initial_bytes


def is_gzip_file(first_two_bytes):
    return _is_compressed_file(
        first_two_bytes=first_two_bytes,
        initial_bytes=b'1f8b',
    )


def is_xz_file(first_two_bytes):
    return _is_compressed_file(
        first_two_bytes=first_two_bytes,
        initial_bytes=b'fd37',
    )


@dataclass
class RepoInfo:
    # path to a directory with repo directories. E.g. '/var/repos' contains
    # 'appstream', 'baseos', etc.
    # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
    # using remote repo
    path: str
    # name of folder with a repodata folder. E.g. 'baseos', 'appstream', etc
    folder: str
    # Is a repo remote or local
    is_remote: bool
    # Is a reference repository (usually it's a RHEL repo)
    # Layout of packages from such repository will be taken as example
    # Only layout of specific package (which doesn't exist
    # in a reference repository) will be taken as example
    is_reference: bool = False
    # The packages from 'present' repo will be added to a variant.
    # The packages from 'absent' repo will be removed from a variant.
    repo_type: str = 'present'


@dataclass
class VariantInfo:
    # name of variant. E.g. 'BaseOS', 'AppStream', etc
    name: AnyStr
    # architecture of variant. E.g. 'x86_64', 'i686', etc
    arch: AnyStr
    # The packages which will be not added to a variant
    excluded_packages: List[str] = field(default_factory=list)
    # Repos of a variant
    repos: List[RepoInfo] = field(default_factory=list)


class PackagesGenerator:

    repo_arches = defaultdict(lambda: list(('noarch',)))
    addon_repos = {
        'x86_64': ['i686'],
        'ppc64le': [],
        'aarch64': [],
        's390x': [],
        'i686': [],
    }

    def __init__(
        self,
        variants: List[VariantInfo],
        excluded_packages: List[AnyStr],
        included_packages: List[AnyStr],
    ):
        self.variants = variants
        self.pkgs = dict()
        self.excluded_packages = excluded_packages
        self.included_packages = included_packages
        self.tmp_files = []  # type: list[Path]
        for arch, arch_list in self.addon_repos.items():
            self.repo_arches[arch].extend(arch_list)
            self.repo_arches[arch].append(arch)

    def __del__(self):
        for tmp_file in self.tmp_files:
            if tmp_file.exists():
                tmp_file.unlink()

    @staticmethod
    def _get_full_repo_path(repo_info: RepoInfo):
        result = os.path.join(
            repo_info.path,
            repo_info.folder
        )
        if repo_info.is_remote:
            result = urljoin(
                repo_info.path + '/',
                repo_info.folder,
            )
        return result

    @staticmethod
    def _warning_callback(warning_type, message):
        """
        Warning callback for createrepo_c parsing functions
        """
        print(f'Warning message: "{message}"; warning type: "{warning_type}"')
        return True

    def get_remote_file_content(self, file_url: AnyStr) -> AnyStr:
        """
        Get content from a remote file and write it to a temp file
        :param file_url: url of a remote file
        :return: path to a temp file
        """

        file_request = requests.get(
            url=file_url,
        )
        file_request.raise_for_status()
        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
            file_stream.write(file_request.content)
            self.tmp_files.append(Path(file_stream.name))
            return file_stream.name

    @staticmethod
    def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
        """
        Parse file repomd.xml and create object Repomd
        :param repomd_file_path: path to local repomd.xml
        """
        return Repomd(repomd_file_path)

    @classmethod
    def _parse_modules_file(
        cls,
        modules_file_path: AnyStr,
    ) -> Iterator[Any]:
        """
        Parse modules.yaml.gz and returns parsed data
        :param modules_file_path: path to local modules.yaml.gz
        :return: List of dict for each module in a repo
        """

        with open(modules_file_path, 'rb') as modules_file:
            data = modules_file.read()
            if is_gzip_file(data[:2]):
                data = gzip.decompress(data)
            elif is_xz_file(data[:2]):
                data = lzma.decompress(data)
            return yaml.load_all(
                data,
                Loader=yaml.BaseLoader,
            )

    def _get_repomd_records(
        self,
        repo_info: RepoInfo,
    ) -> List[RepomdRecord]:
        """
        Get, parse file repomd.xml and extract from it repomd records
        :param repo_info: structure which contains info about a current repo
        :return: list with repomd records
        """
        repomd_file_path = os.path.join(
            repo_info.path,
            repo_info.folder,
            'repodata',
            'repomd.xml',
        )
        if repo_info.is_remote:
            repomd_file_path = urljoin(
                urljoin(
                    repo_info.path + '/',
                    repo_info.folder
                ) + '/',
                'repodata/repomd.xml'
            )
            repomd_file_path = self.get_remote_file_content(repomd_file_path)

        repomd_object = self._parse_repomd(repomd_file_path)
        if repo_info.is_remote:
            os.remove(repomd_file_path)
        return repomd_object.records

    def _download_repomd_records(
        self,
        repo_info: RepoInfo,
        repomd_records: List[RepomdRecord],
        repomd_records_dict: Dict[str, str],
    ):
        """
        Download repomd records
        :param repo_info: structure which contains info about a current repo
        :param repomd_records: list with repomd records
        :param repomd_records_dict: dict with paths to repodata files
        """
        for repomd_record in repomd_records:
            if repomd_record.type not in (
                'primary',
                'filelists',
                'other',
            ):
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
            repomd_records_dict[repomd_record.type] = repomd_record_file_path

    def _parse_module_repomd_record(
        self,
        repo_info: RepoInfo,
        repomd_records: List[RepomdRecord],
    ) -> List[Dict]:
        """
        Download repomd records
        :param repo_info: structure which contains info about a current repo
        :param repomd_records: list with repomd records
        """
        for repomd_record in repomd_records:
            if repomd_record.type != 'modules':
                continue
            repomd_record_file_path = os.path.join(
                repo_info.path,
                repo_info.folder,
                repomd_record.location_href,
            )
            if repo_info.is_remote:
                repomd_record_file_path = self.get_remote_file_content(
                    repomd_record_file_path)
            return list(self._parse_modules_file(
                repomd_record_file_path,
            ))
        return []

    @staticmethod
    def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
        version_tuple_1 = (
            package_1.epoch,
            package_1.version,
            package_1.release,
        )
        version_tuple_2 = (
            package_2.epoch,
            package_2.version,
            package_2.release,
        )
        return rpm.labelCompare(version_tuple_1, version_tuple_2)
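compare_pkgs_version above delegates to rpm.labelCompare, which returns -1, 0, or 1 like a classic cmp function. A small illustration (requires the rpm Python bindings):

import rpm

# (epoch, version, release) triples, as strings.
older = ("0", "1.0", "1.el8")
newer = ("0", "1.0", "2.el8")

print(rpm.labelCompare(older, newer))  # -1: first is older
print(rpm.labelCompare(newer, older))  # 1: first is newer
print(rpm.labelCompare(older, older))  # 0: same version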
def get_packages_iterator(
|
||||
self,
|
||||
repo_info: RepoInfo,
|
||||
) -> Union[PackageIterator, Iterator]:
|
||||
full_repo_path = self._get_full_repo_path(repo_info)
|
||||
pkgs_iterator = self.pkgs.get(full_repo_path)
|
||||
if pkgs_iterator is None:
|
||||
repomd_records = self._get_repomd_records(
|
||||
repo_info=repo_info,
|
||||
)
|
||||
repomd_records_dict = {} # type: Dict[str, str]
|
||||
self._download_repomd_records(
|
||||
repo_info=repo_info,
|
||||
repomd_records=repomd_records,
|
||||
repomd_records_dict=repomd_records_dict,
|
||||
)
|
||||
pkgs_iterator = PackageIterator(
|
||||
primary_path=repomd_records_dict['primary'],
|
||||
filelists_path=repomd_records_dict['filelists'],
|
||||
other_path=repomd_records_dict['other'],
|
||||
warningcb=self._warning_callback,
|
||||
)
|
||||
pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
|
||||
return pkgs_iterator
|
||||
|
||||
def get_package_arch(
|
||||
self,
|
||||
package: Package,
|
||||
variant_arch: str,
|
||||
) -> str:
|
||||
result = variant_arch
|
||||
if package.arch in self.repo_arches[variant_arch]:
|
||||
result = package.arch
|
||||
return result
|
||||
|
||||
def is_skipped_module_package(
|
||||
self,
|
||||
package: Package,
|
||||
variant_arch: str,
|
||||
) -> bool:
|
||||
package_key = self.get_package_key(package, variant_arch)
|
||||
# Even a module package will be added to packages.json if
|
||||
# it presents in the list of included packages
|
||||
return 'module' in package.release and not any(
|
||||
re.search(
|
||||
f'^{included_pkg}$',
|
||||
package_key,
|
||||
) or included_pkg in (package.name, package_key)
|
||||
for included_pkg in self.included_packages
|
||||
)
|
||||
|
||||
def is_excluded_package(
|
||||
self,
|
||||
package: Package,
|
||||
            variant_arch: str,
            excluded_packages: List[str],
    ) -> bool:
        package_key = self.get_package_key(package, variant_arch)
        return any(
            re.search(
                f'^{excluded_pkg}$',
                package_key,
            ) or excluded_pkg in (package.name, package_key)
            for excluded_pkg in excluded_packages
        )

    @staticmethod
    def get_source_rpm_name(package: Package) -> str:
        source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
        return source_rpm_nvra['name']

    def get_package_key(self, package: Package, variant_arch: str) -> str:
        return (
            f'{package.name}.'
            f'{self.get_package_arch(package, variant_arch)}'
        )

    def generate_packages_json(
            self
    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
        """
        Generate packages.json
        """
        packages = defaultdict(lambda: defaultdict(lambda: {
            'variants': list(),
        }))
        for variant_info in self.variants:
            for repo_info in variant_info.repos:
                is_reference = repo_info.is_reference
                for package in self.get_packages_iterator(repo_info=repo_info):
                    if self.is_skipped_module_package(
                        package=package,
                        variant_arch=variant_info.arch,
                    ):
                        continue
                    if self.is_excluded_package(
                        package=package,
                        variant_arch=variant_info.arch,
                        excluded_packages=self.excluded_packages,
                    ):
                        continue
                    if self.is_excluded_package(
                        package=package,
                        variant_arch=variant_info.arch,
                        excluded_packages=variant_info.excluded_packages,
                    ):
                        continue
                    package_key = self.get_package_key(
                        package,
                        variant_info.arch,
                    )
                    source_rpm_name = self.get_source_rpm_name(package)
                    package_info = packages[source_rpm_name][package_key]
                    if 'is_reference' not in package_info:
                        package_info['variants'].append(variant_info.name)
                        package_info['is_reference'] = is_reference
                        package_info['package'] = package
                    elif not package_info['is_reference'] or \
                            package_info['is_reference'] == is_reference and \
                            self.compare_pkgs_version(
                                package_1=package,
                                package_2=package_info['package'],
                            ) > 0:
                        package_info['variants'] = [variant_info.name]
                        package_info['is_reference'] = is_reference
                        package_info['package'] = package
                    elif self.compare_pkgs_version(
                        package_1=package,
                        package_2=package_info['package'],
                    ) == 0 and repo_info.repo_type != 'absent':
                        package_info['variants'].append(variant_info.name)
        result = defaultdict(lambda: defaultdict(
            lambda: defaultdict(list),
        ))
        for variant_info in self.variants:
            for source_rpm_name, packages_info in packages.items():
                for package_key, package_info in packages_info.items():
                    variant_pkgs = result[variant_info.name][variant_info.arch]
                    if variant_info.name not in package_info['variants']:
                        continue
                    variant_pkgs[source_rpm_name].append(package_key)
        return result


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-c',
        '--config',
        type=Path,
        default=Path('config.yaml'),
        required=False,
        help='Path to a config',
    )
    parser.add_argument(
        '-o',
        '--json-output-path',
        type=str,
        help='Full path to output json file',
        required=True,
    )

    return parser


def read_config(config_path: Path) -> Optional[Dict]:
    if not config_path.exists():
        logging.error('A config by path "%s" does not exist', config_path)
        exit(1)
    with config_path.open('r') as config_fd:
        return yaml.safe_load(config_fd)


def process_config(config_data: Dict) -> Tuple[
    List[VariantInfo],
    List[str],
    List[str],
]:
    excluded_packages = config_data.get('excluded_packages', [])
    included_packages = config_data.get('included_packages', [])
    variants = [VariantInfo(
        name=variant_name,
        arch=variant_info['arch'],
        excluded_packages=variant_info.get('excluded_packages', []),
        repos=[RepoInfo(
            path=variant_repo['path'],
            folder=variant_repo['folder'],
            is_remote=variant_repo['remote'],
            is_reference=variant_repo['reference'],
            repo_type=variant_repo.get('repo_type', 'present'),
        ) for variant_repo in variant_info['repos']]
    ) for variant_name, variant_info in config_data['variants'].items()]
    return variants, excluded_packages, included_packages
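
# For reference, a minimal sketch of the structure process_config consumes.
# The variant and repo values below are hypothetical; only the keys actually
# read above ('variants', 'arch', 'repos', 'path', 'folder', 'remote',
# 'reference', 'repo_type', 'excluded_packages', 'included_packages')
# are assumed.
#
# example_config = {
#     'excluded_packages': ['badpkg.*'],
#     'included_packages': [],
#     'variants': {
#         'AppStream': {
#             'arch': 'x86_64',
#             'excluded_packages': [],
#             'repos': [{
#                 'path': '/mnt/repos',
#                 'folder': 'appstream',
#                 'remote': False,
#                 'reference': True,
#                 # 'repo_type' is optional and defaults to 'present'
#             }],
#         },
#     },
# }
# variants, excluded, included = process_config(example_config)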


def cli_main():
    args = create_parser().parse_args()
    variants, excluded_packages, included_packages = process_config(
        config_data=read_config(args.config)
    )
    pg = PackagesGenerator(
        variants=variants,
        excluded_packages=excluded_packages,
        included_packages=included_packages,
    )
    result = pg.generate_packages_json()
    with open(args.json_output_path, 'w') as packages_file:
        json.dump(
            result,
            packages_file,
            indent=4,
            sort_keys=True,
        )


if __name__ == '__main__':
    cli_main()
255 pungi/scripts/gather_modules.py Normal file
@@ -0,0 +1,255 @@
import gzip
import lzma
import os
from argparse import ArgumentParser, FileType
from glob import iglob
from io import BytesIO
from pathlib import Path
from typing import List, AnyStr, Iterable, Union, Optional, BinaryIO
import logging
from urllib.parse import urljoin

import yaml
import createrepo_c as cr

from .create_packages_json import PackagesGenerator, is_gzip_file, is_xz_file

EMPTY_FILE = '.empty'


def read_modules_yaml(modules_yaml_path: Union[str, Path]) -> BytesIO:
    with open(modules_yaml_path, 'rb') as fp:
        return BytesIO(fp.read())


def grep_list_of_modules_yaml(repos_path: AnyStr) -> Iterable[BytesIO]:
    """
    Find all valid *modules.yaml.gz files in the repos
    :param repos_path: path to a directory which contains repo dirs
    :return: iterable object of content from *modules.yaml.*
    """
    return (
        read_modules_yaml_from_specific_repo(repo_path=Path(path).parent)
        for path in iglob(
            str(Path(repos_path).joinpath('**/repodata')),
            recursive=True
        )
    )


def _is_remote(path: str):
    return any(str(path).startswith(protocol)
               for protocol in ('http', 'https'))


def read_modules_yaml_from_specific_repo(
        repo_path: Union[str, Path]
) -> Optional[BytesIO]:
    """
    Read modules_yaml from a specific repo (remote or local)
    :param repo_path: path/url to a specific repo
        (the final dir should contain a `repodata` dir)
    :return: content from *modules.yaml.*, or None if the repo
        has no modules record
    """
    if _is_remote(repo_path):
        repomd_url = urljoin(
            repo_path + '/',
            'repodata/repomd.xml',
        )
        packages_generator = PackagesGenerator(
            variants=[],
            excluded_packages=[],
            included_packages=[],
        )
        repomd_file_path = packages_generator.get_remote_file_content(
            file_url=repomd_url
        )
    else:
        repomd_file_path = os.path.join(
            repo_path,
            'repodata/repomd.xml',
        )
    repomd_obj = cr.Repomd(str(repomd_file_path))
    for record in repomd_obj.records:
        if record.type != 'modules':
            continue
        if _is_remote(repo_path):
            modules_yaml_url = urljoin(
                repo_path + '/',
                record.location_href,
            )
            packages_generator = PackagesGenerator(
                variants=[],
                excluded_packages=[],
                included_packages=[],
            )
            modules_yaml_path = packages_generator.get_remote_file_content(
                file_url=modules_yaml_url
            )
        else:
            modules_yaml_path = os.path.join(
                repo_path,
                record.location_href,
            )
        return read_modules_yaml(modules_yaml_path=modules_yaml_path)
    return None


def _should_grep_defaults(
        document_type: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
) -> bool:
    # NB: despite its name, this helper matches 'modulemd' documents;
    # its counterpart below matches 'modulemd-defaults' ones.
    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
    if document_type == 'modulemd' and (xor_flag or grep_only_modules_data):
        return True
    return False


def _should_grep_modules(
        document_type: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
) -> bool:
    # NB: this helper matches 'modulemd-defaults' documents.
    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
    if document_type == 'modulemd-defaults' and \
            (xor_flag or grep_only_modules_defaults_data):
        return True
    return False
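
# The xor_flag expressions above are easy to misread; a small sketch of the
# behavior they encode (both flags unset means both document kinds match):
#
#   grep_only_modules_data  grep_only_modules_defaults_data  matched
#   False                   False                            both kinds
#   True                    False                            'modulemd' only
#   False                   True                             'modulemd-defaults' only
#
# assert _should_grep_defaults('modulemd') is True
# assert _should_grep_modules('modulemd-defaults') is True
# assert _should_grep_defaults(
#     'modulemd', grep_only_modules_defaults_data=True) is False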


def collect_modules(
        modules_paths: List[BinaryIO],
        target_dir: str,
        grep_only_modules_data: bool = False,
        grep_only_modules_defaults_data: bool = False,
):
    """
    Read the given modules.yaml.* files and export the modulemd and
    modulemd-defaults documents from them into target_dir.
    """
    xor_flag = grep_only_modules_defaults_data is grep_only_modules_data
    modules_path = os.path.join(target_dir, 'modules')
    module_defaults_path = os.path.join(target_dir, 'module_defaults')
    if grep_only_modules_data or xor_flag:
        os.makedirs(modules_path, exist_ok=True)
    if grep_only_modules_defaults_data or xor_flag:
        os.makedirs(module_defaults_path, exist_ok=True)
        # Defaults modules can be empty, but pungi detects
        # an empty folder while copying and raises an exception in this case
        Path(os.path.join(module_defaults_path, EMPTY_FILE)).touch()

    for module_file in modules_paths:
        data = module_file.read()
        if is_gzip_file(data[:2]):
            data = gzip.decompress(data)
        elif is_xz_file(data[:2]):
            data = lzma.decompress(data)
        documents = yaml.load_all(data, Loader=yaml.BaseLoader)
        for doc in documents:
            path = None
            if _should_grep_modules(
                    doc['document'],
                    grep_only_modules_data,
                    grep_only_modules_defaults_data,
            ):
                name = f"{doc['data']['module']}.yaml"
                path = os.path.join(module_defaults_path, name)
                logging.info('Found %s module defaults', name)
            elif _should_grep_defaults(
                    doc['document'],
                    grep_only_modules_data,
                    grep_only_modules_defaults_data,
            ):
                # pungi.phases.pkgset.sources.source_koji.get_koji_modules
                stream = doc['data']['stream'].replace('-', '_')
                doc_data = doc['data']
                name = f"{doc_data['name']}-{stream}-" \
                       f"{doc_data['version']}.{doc_data['context']}"
                arch_dir = os.path.join(
                    modules_path,
                    doc_data['arch']
                )
                os.makedirs(arch_dir, exist_ok=True)
                path = os.path.join(
                    arch_dir,
                    name,
                )
                logging.info('Found module %s', name)

                if 'artifacts' not in doc['data']:
                    logging.warning(
                        'Module %s does not have an explicit list of artifacts',
                        name
                    )
            if path is not None:
                with open(path, 'w') as f:
                    yaml.dump(doc, f, default_flow_style=False)
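
# A minimal usage sketch of collect_modules; the paths here are hypothetical.
#
# with open('/tmp/repo/repodata/abc-modules.yaml.gz', 'rb') as fp:
#     blob = BytesIO(fp.read())
# collect_modules([blob], '/tmp/koji-like-storage')
#
# Result: N-S-V.C files under /tmp/koji-like-storage/modules/<arch>/ and
# <module>.yaml files under /tmp/koji-like-storage/module_defaults/.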


def cli_main():
    parser = ArgumentParser()
    content_type_group = parser.add_mutually_exclusive_group(required=False)
    content_type_group.add_argument(
        '--get-only-modules-data',
        action='store_true',
        help='Parse and get only modules data',
    )
    content_type_group.add_argument(
        '--get-only-modules-defaults-data',
        action='store_true',
        help='Parse and get only modules_defaults data',
    )
    path_group = parser.add_mutually_exclusive_group(required=True)
    path_group.add_argument(
        '-p', '--path',
        type=FileType('rb'), nargs='+',
        help='Path to modules.yaml.gz file. '
             'You may pass multiple files by passing -p path1 path2',
    )
    path_group.add_argument(
        '-rp', '--repo-path',
        required=False,
        type=str,
        default=None,
        help='Path to a directory which contains repodirs. E.g. /var/repos',
    )
    path_group.add_argument(
        '-rd', '--repodata-paths',
        required=False,
        type=str,
        nargs='+',
        default=[],
        help='Paths/urls to the directories with directory `repodata`',
    )
    parser.add_argument('-t', '--target', required=True)

    namespace = parser.parse_args()
    if namespace.repodata_paths:
        modules = []
        for repodata_path in namespace.repodata_paths:
            modules.append(read_modules_yaml_from_specific_repo(
                repodata_path,
            ))
    elif namespace.path is not None:
        modules = namespace.path
    else:
        modules = grep_list_of_modules_yaml(namespace.repo_path)
    modules = list(filter(lambda i: i is not None, modules))
    collect_modules(
        modules,
        namespace.target,
        namespace.get_only_modules_data,
        namespace.get_only_modules_defaults_data,
    )


if __name__ == '__main__':
    cli_main()
96 pungi/scripts/gather_rpms.py Normal file
@@ -0,0 +1,96 @@
import re
from argparse import ArgumentParser

import os
from glob import iglob
from typing import List
from pathlib import Path

from dataclasses import dataclass
from productmd.common import parse_nvra


@dataclass
class Package:
    nvra: dict
    path: Path


def search_rpms(top_dir: Path) -> List[Package]:
    """
    Search for all *.rpm files recursively
    in the given top directory
    Returns:
        list: list of Package objects
    """
    return [Package(
        nvra=parse_nvra(Path(path).stem),
        path=Path(path),
    ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]


def is_excluded_package(
        package: Package,
        excluded_packages: List[str],
) -> bool:
    package_key = f'{package.nvra["name"]}.{package.nvra["arch"]}'
    return any(
        re.search(
            f'^{excluded_pkg}$',
            package_key,
        ) or excluded_pkg in (package.nvra['name'], package_key)
        for excluded_pkg in excluded_packages
    )


def copy_rpms(
        packages: List[Package],
        target_top_dir: Path,
        excluded_packages: List[str],
):
    """
    Search synced repos for rpms and prepare
    a koji-like structure for pungi.

    Instead of repos, use the following structure:
    # ls /mnt/koji/
    i686/  noarch/  x86_64/
    Returns:
        Nothing
    """
    for package in packages:
        if is_excluded_package(package, excluded_packages):
            continue
        target_arch_dir = target_top_dir.joinpath(package.nvra['arch'])
        target_file = target_arch_dir.joinpath(package.path.name)
        os.makedirs(target_arch_dir, exist_ok=True)

        if not target_file.exists():
            try:
                os.link(package.path, target_file)
            except OSError:
                # hardlink failed (e.g. across filesystems),
                # fall back to a symlink pointing at the source rpm
                target_file.symlink_to(package.path)
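
# A small sketch of the koji-like layout copy_rpms produces; the paths and
# package names are hypothetical.
#
# rpms = search_rpms(Path('/mnt/synced-repos'))
# copy_rpms(rpms, Path('/mnt/koji'), excluded_packages=['badpkg'])
#
# /mnt/koji/x86_64/foo-1.0-1.el8.x86_64.rpm
# /mnt/koji/noarch/bar-2.0-3.el8.noarch.rpm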


def cli_main():
    parser = ArgumentParser()
    parser.add_argument('-p', '--path', required=True, type=Path)
    parser.add_argument('-t', '--target', required=True, type=Path)
    parser.add_argument(
        '-e',
        '--excluded-packages',
        required=False,
        nargs='+',
        type=str,
        default=[],
    )

    namespace = parser.parse_args()

    rpms = search_rpms(namespace.path)
    copy_rpms(rpms, namespace.target, namespace.excluded_packages)


if __name__ == '__main__':
    cli_main()
@@ -1,513 +0,0 @@
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.

from __future__ import absolute_import
from __future__ import print_function

import os
import selinux
import sys

from argparse import ArgumentParser, Action

from pungi import get_full_version
import pungi.gather
import pungi.config
import pungi.ks


def get_arguments(config):
    parser = ArgumentParser()

    class SetConfig(Action):
        def __call__(self, parser, namespace, value, option_string=None):
            config.set("pungi", self.dest, value)

    parser.add_argument("--version", action="version", version=get_full_version())

    # Pulled in from config file to be cli options as part of pykickstart conversion
    parser.add_argument(
        "--name",
        dest="family",
        type=str,
        action=SetConfig,
        help='the name for your distribution (defaults to "Fedora"), DEPRECATED',
    )
    parser.add_argument(
        "--family",
        dest="family",
        action=SetConfig,
        help='the family name for your distribution (defaults to "Fedora")',
    )
    parser.add_argument(
        "--ver",
        dest="version",
        action=SetConfig,
        help="the version of your distribution (defaults to datestamp)",
    )
    parser.add_argument(
        "--flavor",
        dest="variant",
        action=SetConfig,
        help="the flavor of your distribution spin (optional), DEPRECATED",
    )
    parser.add_argument(
        "--variant",
        dest="variant",
        action=SetConfig,
        help="the variant of your distribution spin (optional)",
    )
    parser.add_argument(
        "--destdir",
        dest="destdir",
        action=SetConfig,
        help="destination directory (defaults to current directory)",
    )
    parser.add_argument(
        "--cachedir",
        dest="cachedir",
        action=SetConfig,
        help="package cache directory (defaults to /var/cache/pungi)",
    )
    parser.add_argument(
        "--bugurl",
        dest="bugurl",
        action=SetConfig,
        help="the url for your bug system (defaults to http://bugzilla.redhat.com)",
    )
    parser.add_argument(
        "--selfhosting",
        action="store_true",
        dest="selfhosting",
        help="build a self-hosting tree by following build dependencies (optional)",
    )
    parser.add_argument(
        "--fulltree",
        action="store_true",
        dest="fulltree",
        help="build a tree that includes all packages built from corresponding source rpms (optional)",  # noqa: E501
    )
    parser.add_argument(
        "--nosource",
        action="store_true",
        dest="nosource",
        help="disable gathering of source packages (optional)",
    )
    parser.add_argument(
        "--nodebuginfo",
        action="store_true",
        dest="nodebuginfo",
        help="disable gathering of debuginfo packages (optional)",
    )
    parser.add_argument(
        "--nodownload",
        action="store_true",
        dest="nodownload",
        help="disable downloading of packages. instead, print the package URLs (optional)",  # noqa: E501
    )
    parser.add_argument(
        "--norelnotes",
        action="store_true",
        dest="norelnotes",
        help="disable gathering of release notes (optional); DEPRECATED",
    )
    parser.add_argument(
        "--nogreedy",
        action="store_true",
        dest="nogreedy",
        help="disable pulling of all providers of package dependencies (optional)",
    )
    parser.add_argument(
        "--nodeps",
        action="store_false",
        dest="resolve_deps",
        default=True,
        help="disable resolving dependencies",
    )
    parser.add_argument(
        "--sourceisos",
        default=False,
        action="store_true",
        dest="sourceisos",
        help="Create the source isos (other arch runs must be done)",
    )
    parser.add_argument(
        "--force",
        default=False,
        action="store_true",
        help="Force reuse of an existing destination directory (will overwrite files)",
    )
    parser.add_argument(
        "--isfinal",
        default=False,
        action="store_true",
        help="Specify this is a GA tree, which causes betanag to be turned off during install",  # noqa: E501
    )
    parser.add_argument(
        "--nohash",
        default=False,
        action="store_true",
        help="disable hashing the Packages trees",
    )
    parser.add_argument(
        "--full-archlist",
        action="store_true",
        help="Use the full arch list for x86_64 (include i686, i386, etc.)",
    )
    parser.add_argument("--arch", help="Override default (uname based) arch")
    parser.add_argument(
        "--greedy", metavar="METHOD", help="Greedy method; none, all, build"
    )
    parser.add_argument(
        "--multilib",
        action="append",
        metavar="METHOD",
        help="Multilib method; can be specified multiple times; recommended: devel, runtime",  # noqa: E501
    )
    parser.add_argument(
        "--lookaside-repo",
        action="append",
        dest="lookaside_repos",
        metavar="NAME",
        help="Specify lookaside repo name(s) (packages will be used for depsolving but not be included in the output)",  # noqa: E501
    )
    parser.add_argument(
        "--workdirbase",
        dest="workdirbase",
        action=SetConfig,
        help="base working directory (defaults to destdir + /work)",
    )
    parser.add_argument(
        "--no-dvd",
        default=False,
        action="store_true",
        dest="no_dvd",
        help="Do not make an install DVD/CD; only the netinstall image and the tree",
    )
    parser.add_argument("--lorax-conf", help="Path to lorax.conf file (optional)")
    parser.add_argument(
        "-i",
        "--installpkgs",
        default=[],
        action="append",
        metavar="STRING",
        help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)",  # noqa: E501
    )
    parser.add_argument(
        "--multilibconf",
        default=None,
        action=SetConfig,
        help="Path to multilib conf files. Default is /usr/share/pungi/multilib/",
    )

    parser.add_argument(
        "-c",
        "--config",
        dest="config",
        required=True,
        help="Path to kickstart config file",
    )
    parser.add_argument(
        "--all-stages",
        action="store_true",
        default=True,
        dest="do_all",
        help="Enable ALL stages",
    )
    parser.add_argument(
        "-G",
        action="store_true",
        default=False,
        dest="do_gather",
        help="Flag to enable processing the Gather stage",
    )
    parser.add_argument(
        "-C",
        action="store_true",
        default=False,
        dest="do_createrepo",
        help="Flag to enable processing the Createrepo stage",
    )
    parser.add_argument(
        "-B",
        action="store_true",
        default=False,
        dest="do_buildinstall",
        help="Flag to enable processing the BuildInstall stage",
    )
    parser.add_argument(
        "-I",
        action="store_true",
        default=False,
        dest="do_createiso",
        help="Flag to enable processing the CreateISO stage",
    )
    parser.add_argument(
        "--relnotepkgs",
        dest="relnotepkgs",
        action=SetConfig,
        help="Rpms which contain the release notes",
    )
    parser.add_argument(
        "--relnotefilere",
        dest="relnotefilere",
        action=SetConfig,
        help="Which files are the release notes -- GPL EULA",
    )
    parser.add_argument(
        "--nomacboot",
        action="store_true",
        dest="nomacboot",
        help="disable setting up macboot as no hfs support",
    )

    parser.add_argument(
        "--rootfs-size",
        dest="rootfs_size",
        action=SetConfig,
        default=False,
        help="Size of root filesystem in GiB. If not specified, use lorax default value",  # noqa: E501
    )

    parser.add_argument(
        "--pungirc",
        dest="pungirc",
        default="~/.pungirc",
        action=SetConfig,
        help="Read pungi options from config file",
    )

    opts = parser.parse_args()

    if (
        not config.get("pungi", "variant").isalnum()
        and not config.get("pungi", "variant") == ""
    ):
        parser.error("Variant must be alphanumeric")

    if (
        opts.do_gather
        or opts.do_createrepo
        or opts.do_buildinstall
        or opts.do_createiso
    ):
        opts.do_all = False

    if opts.arch and (opts.do_all or opts.do_buildinstall):
        parser.error("Cannot override arch while the BuildInstall stage is enabled")

    # set the iso_basename.
    if not config.get("pungi", "variant") == "":
        config.set(
            "pungi",
            "iso_basename",
            "%s-%s" % (config.get("pungi", "family"), config.get("pungi", "variant")),
        )
    else:
        config.set("pungi", "iso_basename", config.get("pungi", "family"))

    return opts


def main():
    config = pungi.config.Config()
    opts = get_arguments(config)

    # Read the config to create "new" defaults
    # reparse command line options so they take precedence
    config = pungi.config.Config(pungirc=opts.pungirc)
    opts = get_arguments(config)

    # You must be this high to ride if you're going to do root tasks
    if os.geteuid() != 0 and (opts.do_all or opts.do_buildinstall):
        print("You must run pungi as root", file=sys.stderr)
        return 1

    if opts.do_all or opts.do_buildinstall:
        try:
            enforcing = selinux.security_getenforce()
        except Exception:
            print("INFO: selinux disabled")
            enforcing = False
        if enforcing:
            print(
                "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."  # noqa: E501
            )
            print("Consider running with setenforce 0.")

    # Set up the kickstart parser and pass in the kickstart file we were handed
    ksparser = pungi.ks.get_ksparser(ks_path=opts.config)

    if opts.sourceisos:
        config.set("pungi", "arch", "source")

    for part in ksparser.handler.partition.partitions:
        if part.mountpoint == "iso":
            config.set("pungi", "cdsize", str(part.size))

    config.set("pungi", "force", str(opts.force))

    if config.get("pungi", "workdirbase") == "/work":
        config.set("pungi", "workdirbase", "%s/work" % config.get("pungi", "destdir"))
    # Set up our directories
    if not os.path.exists(config.get("pungi", "destdir")):
        try:
            os.makedirs(config.get("pungi", "destdir"))
        except OSError:
            print(
                "Error: Cannot create destination dir %s"
                % config.get("pungi", "destdir"),
                file=sys.stderr,
            )
            sys.exit(1)
    else:
        print("Warning: Reusing existing destination directory.")

    if not os.path.exists(config.get("pungi", "workdirbase")):
        try:
            os.makedirs(config.get("pungi", "workdirbase"))
        except OSError:
            print(
                "Error: Cannot create working base dir %s"
                % config.get("pungi", "workdirbase"),
                file=sys.stderr,
            )
            sys.exit(1)
    else:
        print("Warning: Reusing existing working base directory.")

    cachedir = config.get("pungi", "cachedir")

    if not os.path.exists(cachedir):
        try:
            os.makedirs(cachedir)
        except OSError:
            print("Error: Cannot create cache dir %s" % cachedir, file=sys.stderr)
            sys.exit(1)

    # Set debuginfo flag
    if opts.nodebuginfo:
        config.set("pungi", "debuginfo", "False")
    if opts.greedy:
        config.set("pungi", "greedy", opts.greedy)
    else:
        # XXX: compatibility
        if opts.nogreedy:
            config.set("pungi", "greedy", "none")
        else:
            config.set("pungi", "greedy", "all")
    config.set("pungi", "resolve_deps", str(bool(opts.resolve_deps)))
    if opts.isfinal:
        config.set("pungi", "isfinal", "True")
    if opts.nohash:
        config.set("pungi", "nohash", "True")
    if opts.full_archlist:
        config.set("pungi", "full_archlist", "True")
    if opts.arch:
        config.set("pungi", "arch", opts.arch)
    if opts.multilib:
        config.set("pungi", "multilib", " ".join(opts.multilib))
    if opts.lookaside_repos:
        config.set("pungi", "lookaside_repos", " ".join(opts.lookaside_repos))
    if opts.no_dvd:
        config.set("pungi", "no_dvd", "True")
    if opts.nomacboot:
        config.set("pungi", "nomacboot", "True")
    config.set("pungi", "fulltree", str(bool(opts.fulltree)))
    config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
    config.set("pungi", "nosource", str(bool(opts.nosource)))
    config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))

    if opts.lorax_conf:
        config.set("lorax", "conf_file", opts.lorax_conf)
    if opts.installpkgs:
        config.set("lorax", "installpkgs", " ".join(opts.installpkgs))

    # Actually do work.
    mypungi = pungi.gather.Pungi(config, ksparser)

    with mypungi.yumlock:
        if not opts.sourceisos:
            if opts.do_all or opts.do_gather or opts.do_buildinstall:
                mypungi._inityum()  # initialize the yum object for things that need it
            if opts.do_all or opts.do_gather:
                mypungi.gather()
                if opts.nodownload:
                    for line in mypungi.list_packages():
                        flags_str = ",".join(line["flags"])
                        if flags_str:
                            flags_str = "(%s)" % flags_str
                        sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
                    sys.stdout.flush()
                else:
                    mypungi.downloadPackages()
                mypungi.makeCompsFile()
                if not opts.nodebuginfo:
                    mypungi.getDebuginfoList()
                    if opts.nodownload:
                        for line in mypungi.list_debuginfo():
                            flags_str = ",".join(line["flags"])
                            if flags_str:
                                flags_str = "(%s)" % flags_str
                            sys.stdout.write(
                                "DEBUGINFO%s: %s\n" % (flags_str, line["path"])
                            )
                        sys.stdout.flush()
                    else:
                        mypungi.downloadDebuginfo()
                if not opts.nosource:
                    if opts.nodownload:
                        for line in mypungi.list_srpms():
                            flags_str = ",".join(line["flags"])
                            if flags_str:
                                flags_str = "(%s)" % flags_str
                            sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
                        sys.stdout.flush()
                    else:
                        mypungi.downloadSRPMs()

                print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
                if not opts.nodebuginfo:
                    print(
                        "DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2)
                    )
                if not opts.nosource:
                    print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))

    # Furthermore (but without the yumlock...)
    if not opts.sourceisos:
        if opts.do_all or opts.do_createrepo:
            mypungi.doCreaterepo()

        if opts.do_all or opts.do_buildinstall:
            if not opts.norelnotes:
                mypungi.doGetRelnotes()
            mypungi.doBuildinstall()

        if opts.do_all or opts.do_createiso:
            mypungi.doCreateIsos()

    # Do things slightly different for src.
    if opts.sourceisos:
        # we already have all the content gathered
        mypungi.topdir = os.path.join(
            config.get("pungi", "destdir"),
            config.get("pungi", "version"),
            config.get("pungi", "variant"),
            "source",
            "SRPMS",
        )
        mypungi.doCreaterepo(comps=False)
        if opts.do_all or opts.do_createiso:
            mypungi.doCreateIsos()

    print("All done!")
@@ -11,19 +11,19 @@ import locale
 import logging
 import os
 import socket
+import shlex
 import signal
 import sys
 import traceback
 import shutil
 import subprocess

-from six.moves import shlex_quote
-
 from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
 from pungi.errors import UnsignedPackagesError
 from pungi.wrappers import kojiwrapper
 from pungi.util import rmtree
+from pungi.otel import tracing


 # force C locales
@@ -252,9 +252,15 @@ def main():
     kobo.log.add_stderr_logger(logger)

     conf = util.load_config(opts.config)

     compose_type = opts.compose_type or conf.get("compose_type", "production")
-    if compose_type == "production" and not opts.label and not opts.no_label:
+    label = opts.label or conf.get("label")
+    if label:
+        try:
+            productmd.composeinfo.verify_label(label)
+        except ValueError as ex:
+            abort(str(ex))
+
+    if compose_type == "production" and not label and not opts.no_label:
         abort("must specify label for a production compose")

     if (
@@ -304,7 +310,7 @@ def main():
             opts.target_dir,
             conf,
             compose_type=compose_type,
-            compose_label=opts.label,
+            compose_label=label,
             parent_compose_ids=opts.parent_compose_id,
             respin_of=opts.respin_of,
         )
@@ -315,7 +321,7 @@ def main():
         ci = Compose.get_compose_info(
             conf,
             compose_type=compose_type,
-            compose_label=opts.label,
+            compose_label=label,
             parent_compose_ids=opts.parent_compose_id,
             respin_of=opts.respin_of,
         )
@@ -380,7 +386,7 @@ def run_compose(
     compose.log_info("User name: %s" % getpass.getuser())
     compose.log_info("Working directory: %s" % os.getcwd())
     compose.log_info(
-        "Command line: %s" % " ".join([shlex_quote(arg) for arg in sys.argv])
+        "Command line: %s" % " ".join([shlex.quote(arg) for arg in sys.argv])
     )
     compose.log_info("Compose top directory: %s" % compose.topdir)
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
@@ -424,6 +430,7 @@ def run_compose(
     image_build_phase = pungi.phases.ImageBuildPhase(compose, buildinstall_phase)
     kiwibuild_phase = pungi.phases.KiwiBuildPhase(compose)
     osbuild_phase = pungi.phases.OSBuildPhase(compose)
+    imagebuilder_phase = pungi.phases.ImageBuilderPhase(compose)
     osbs_phase = pungi.phases.OSBSPhase(compose, pkgset_phase, buildinstall_phase)
     image_container_phase = pungi.phases.ImageContainerPhase(compose)
     image_checksum_phase = pungi.phases.ImageChecksumPhase(compose)
@@ -451,6 +458,7 @@ def run_compose(
         osbuild_phase,
         image_container_phase,
         kiwibuild_phase,
+        imagebuilder_phase,
     ):
         if phase.skip():
             continue
@@ -476,50 +484,58 @@ def run_compose(
         buildinstall_phase,
         (gather_phase, createrepo_phase),
         extrafiles_phase,
-        (ostree_phase, ostree_installer_phase),
-        ostree_container_phase,
+        ostree_phase,
     )
     essentials_phase = pungi.phases.WeaverPhase(compose, essentials_schema)
     essentials_phase.start()
-    essentials_phase.stop()
+    ostree_container_phase.start()
+    try:
+        essentials_phase.stop()

-    # write treeinfo before ISOs are created
-    for variant in compose.get_variants():
-        for arch in variant.arches + ["src"]:
-            pungi.metadata.write_tree_info(
-                compose, arch, variant, bi=buildinstall_phase
-            )
+        # write treeinfo before ISOs are created
+        for variant in compose.get_variants():
+            for arch in variant.arches + ["src"]:
+                pungi.metadata.write_tree_info(
+                    compose, arch, variant, bi=buildinstall_phase
+                )

-    # write .discinfo and media.repo before ISOs are created
-    for variant in compose.get_variants():
-        if variant.type == "addon" or variant.is_empty:
-            continue
-        for arch in variant.arches + ["src"]:
-            timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
-            pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
+        # write .discinfo and media.repo before ISOs are created
+        for variant in compose.get_variants():
+            if variant.type == "addon" or variant.is_empty:
+                continue
+            for arch in variant.arches + ["src"]:
+                timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
+                pungi.metadata.write_media_repo(compose, arch, variant, timestamp)

-    # Run phases for image artifacts in parallel
-    compose_images_schema = (
-        createiso_phase,
-        extra_isos_phase,
-        image_build_phase,
-        livemedia_phase,
-        osbuild_phase,
-        kiwibuild_phase,
-    )
-    post_image_phase = pungi.phases.WeaverPhase(
-        compose, (image_checksum_phase, image_container_phase)
-    )
-    compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
-    extra_phase_schema = (
-        (compose_images_phase, post_image_phase),
-        osbs_phase,
-        repoclosure_phase,
-    )
-    extra_phase = pungi.phases.WeaverPhase(compose, extra_phase_schema)
+        # Run phases for image artifacts in parallel
+        compose_images_schema = (
+            createiso_phase,
+            extra_isos_phase,
+            image_build_phase,
+            livemedia_phase,
+            osbuild_phase,
+            kiwibuild_phase,
+            imagebuilder_phase,
+        )
+        compose_images_phase = pungi.phases.WeaverPhase(compose, compose_images_schema)
+        extra_phase_schema = (
+            (compose_images_phase, image_container_phase),
+            ostree_installer_phase,
+            osbs_phase,
+            repoclosure_phase,
+        )
+        extra_phase = pungi.phases.WeaverPhase(compose, extra_phase_schema)

-    extra_phase.start()
-    extra_phase.stop()
+        extra_phase.start()
+        extra_phase.stop()
+    finally:
+        # wait for ostree container phase here too - it can happily run in parallel with
+        # all of the other stuff, but we must ensure it always gets stopped
+        ostree_container_phase.stop()

+    # now we do checksums as all images are done
+    image_checksum_phase.start()
+    image_checksum_phase.stop()

     pungi.metadata.write_compose_info(compose)
     if not (
@@ -530,6 +546,7 @@ def run_compose(
         and livemedia_phase.skip()
         and image_build_phase.skip()
         and kiwibuild_phase.skip()
+        and imagebuilder_phase.skip()
         and osbuild_phase.skip()
         and ostree_container_phase.skip()
     ):
@@ -640,22 +657,28 @@ def cli_main():
     signal.signal(signal.SIGINT, sigterm_handler)
     signal.signal(signal.SIGTERM, sigterm_handler)

-    try:
-        main()
-    except (Exception, KeyboardInterrupt) as ex:
-        if COMPOSE:
-            COMPOSE.log_error("Compose run failed: %s" % ex)
-            COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
-            COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
-            COMPOSE.write_status("DOOMED")
-        else:
-            print("Exception: %s" % ex)
-            raise
-        sys.stdout.flush()
-        sys.stderr.flush()
-        sys.exit(1)
-    finally:
-        # Remove repositories cloned during ExtraFiles phase
-        process_id = os.getpid()
-        directory_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
-        rmtree(directory_to_remove)
+    tracing.setup()
+
+    with tracing.span("run-compose"):
+        try:
+            main()
+        except (Exception, KeyboardInterrupt) as ex:
+            tracing.record_exception(ex)
+            if COMPOSE:
+                COMPOSE.log_error("Compose run failed: %s" % ex)
+                COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
+                COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
+                COMPOSE.write_status("DOOMED")
+            else:
+                print("Exception: %s" % ex)
+                raise
+            sys.stdout.flush()
+            sys.stderr.flush()
+            sys.exit(1)
+        finally:
+            # Remove repositories cloned during ExtraFiles phase
+            process_id = os.getpid()
+            directory_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
+            rmtree(directory_to_remove)
+            # Wait for all traces to be sent...
+            tracing.force_flush()
21 pungi/threading.py Normal file
@@ -0,0 +1,21 @@
from kobo.threads import WorkerThread

from .otel import tracing


class TelemetryWorkerThread(WorkerThread):
    """
    Subclass of WorkerThread that captures the current context when the thread
    is created, and restores that context in the new thread.

    A regular WorkerThread would start from an empty context, leaving any
    spans created in the thread disconnected from the overall trace.
    """

    def __init__(self, *args, **kwargs):
        self.traceparent = tracing.get_traceparent()
        super(TelemetryWorkerThread, self).__init__(*args, **kwargs)

    def run(self, *args, **kwargs):
        tracing.set_context(self.traceparent)
        super(TelemetryWorkerThread, self).run(*args, **kwargs)
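
# A hedged usage sketch: swap TelemetryWorkerThread in wherever a kobo
# WorkerThread subclass is used, so spans created inside process() stay
# attached to the parent trace. The pool API shown assumes the usual
# kobo.threads interface (ThreadPool(logger), add(), queue_put(),
# start(), stop()).
#
# import logging
# from kobo.threads import ThreadPool
#
# class EchoWorker(TelemetryWorkerThread):
#     def process(self, item, num):
#         # runs in the worker thread with the captured trace context restored
#         logging.getLogger(__name__).info("processing %s", item)
#
# pool = ThreadPool(logging.getLogger(__name__))
# pool.add(EchoWorker(pool))
# pool.queue_put("task-1")
# pool.start()
# pool.stop()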
216 pungi/util.py
@@ -19,22 +19,24 @@ import subprocess
 import os
 import shutil
 import string
 import sys
 import hashlib
 import errno
 import re
 import contextlib
+import shlex
 import traceback
 import tempfile
 import time
+import urllib.parse
+import urllib.request
 import functools

-from six.moves import urllib, range, shlex_quote

 import kobo.conf
 from kobo.shortcuts import run, force_list
-from kobo.threads import WorkerThread, ThreadPool
+from kobo.threads import ThreadPool
 from productmd.common import get_major_version
 from pungi.module_util import Modulemd
+from pungi.otel import tracing
+from pungi.threading import TelemetryWorkerThread as WorkerThread

 # Patterns that match all names of debuginfo packages
 DEBUG_PATTERNS = ["*-debuginfo", "*-debuginfo-*", "*-debugsource"]
@@ -43,132 +45,6 @@ DEBUG_PATTERN_RE = re.compile(
 )


-def _doRunCommand(
-    command,
-    logger,
-    rundir="/tmp",
-    output=subprocess.PIPE,
-    error=subprocess.PIPE,
-    env=None,
-):
-    """Run a command and log the output.  Error out if we get something on stderr"""
-
-    logger.info("Running %s" % subprocess.list2cmdline(command))
-
-    p1 = subprocess.Popen(
-        command,
-        cwd=rundir,
-        stdout=output,
-        stderr=error,
-        universal_newlines=True,
-        env=env,
-        close_fds=True,
-    )
-    (out, err) = p1.communicate()
-
-    if out:
-        logger.debug(out)
-
-    if p1.returncode != 0:
-        logger.error("Got an error from %s" % command[0])
-        logger.error(err)
-        raise OSError(
-            "Got an error (%d) from %s: %s" % (p1.returncode, command[0], err)
-        )
-
-
-def _link(local, target, logger, force=False):
-    """Simple function to link or copy a package, removing target optionally."""
-
-    if os.path.exists(target) and force:
-        os.remove(target)
-
-    # check for broken links
-    if force and os.path.islink(target):
-        if not os.path.exists(os.readlink(target)):
-            os.remove(target)
-
-    try:
-        os.link(local, target)
-    except OSError as e:
-        if e.errno != 18:  # EXDEV
-            logger.error("Got an error linking from cache: %s" % e)
-            raise OSError(e)
-
-        # Can't hardlink cross file systems
-        shutil.copy2(local, target)
-
-
-def _ensuredir(target, logger, force=False, clean=False):
-    """Ensure that a directory exists, if it already exists, only continue
-    if force is set."""
-
-    # We have to check existence of a logger, as setting the logger could
-    # itself cause an issue.
-    def whoops(func, path, exc_info):
-        message = "Could not remove %s" % path
-        if logger:
-            logger.error(message)
-        else:
-            sys.stderr(message)
-        sys.exit(1)
-
-    if os.path.exists(target) and not os.path.isdir(target):
-        message = "%s exists but is not a directory." % target
-        if logger:
-            logger.error(message)
-        else:
-            sys.stderr(message)
-        sys.exit(1)
-
-    if not os.path.isdir(target):
-        os.makedirs(target)
-    elif force and clean:
-        shutil.rmtree(target, onerror=whoops)
-        os.makedirs(target)
-    elif force:
-        return
-    else:
-        message = "Directory %s already exists.  Use --force to overwrite." % target
-        if logger:
-            logger.error(message)
-        else:
-            sys.stderr(message)
-        sys.exit(1)
-
-
-def _doCheckSum(path, hash, logger):
-    """Generate a checksum hash from a provided path.
-    Return a string of type:hash"""
-
-    # Try to figure out what hash we want to do
-    try:
-        sum = hashlib.new(hash)
-    except ValueError:
-        logger.error("Invalid hash type: %s" % hash)
-        return False
-
-    # Try to open the file, using binary flag.
-    try:
-        myfile = open(path, "rb")
-    except IOError as e:
-        logger.error("Could not open file %s: %s" % (path, e))
-        return False
-
-    # Loop through the file reading chunks at a time as to not
-    # put the entire file in memory.  That would suck for DVDs
-    while True:
-        chunk = myfile.read(
-            8192
-        )  # magic number!  Taking suggestions for better blocksize
-        if not chunk:
-            break  # we're done with the file
-        sum.update(chunk)
-    myfile.close()
-
-    return "%s:%s" % (hash, sum.hexdigest())
-
-
 def makedirs(path, mode=0o775):
     try:
         os.makedirs(path, mode=mode)
@@ -193,14 +69,14 @@ def explode_rpm_package(pkg_path, target_dir):
     try:
         # rpm2archive writes to stdout only if reading from stdin, thus the redirect
         run(
-            "rpm2archive - <%s | tar xfz - && chmod -R a+rX ." % shlex_quote(pkg_path),
+            "rpm2archive - <%s | tar xfz - && chmod -R a+rX ." % shlex.quote(pkg_path),
             workdir=target_dir,
         )
     except RuntimeError:
         # Fall back to rpm2cpio in case rpm2archive failed (most likely due to
         # not being present on the system).
         run(
-            "rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex_quote(pkg_path),
+            "rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex.quote(pkg_path),
             workdir=target_dir,
         )

@@ -374,6 +250,38 @@ class GitUrlResolver(object):
         return self.cache[key]


+class ContainerTagResolver(object):
+    """
+    A caching resolver for container image urls that replaces tags with digests.
+    """
+
+    def __init__(self, offline=False):
+        self.offline = offline
+        self.cache = {}
+
+    def __call__(self, url):
+        if self.offline:
+            # We're offline, nothing to do
+            return url
+        if re.match(".*@sha256:[a-z0-9]+", url):
+            # We already have a digest
+            return url
+        if url not in self.cache:
+            self.cache[url] = self._resolve(url)
+        return self.cache[url]
+
+    def _resolve(self, url):
+        m = re.match("^.+(:.+)$", url)
+        if not m:
+            raise RuntimeError("Failed to find tag name")
+        tag = m.group(1)
+
+        with tracing.span("skopeo-inspect", url=url):
+            data = _skopeo_inspect(url)
+        digest = data["Digest"]
+        return url.replace(tag, f"@{digest}")
+
+
 # format: {arch|*: [data]}
 def get_arch_data(conf, var_name, arch):
     result = []
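
# Usage sketch for ContainerTagResolver; the registry URL and digest below
# are hypothetical:
#
# resolver = ContainerTagResolver()
# resolver("registry.example.com/os/image:latest")
# -> "registry.example.com/os/image@sha256:<digest reported by skopeo>"
# resolver("registry.example.com/os/image@sha256:abcd1234")
# -> returned as-is, since the url is already pinned to a digest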
@@ -487,10 +395,7 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
         tried.add(volid)

     if volid and len(volid) > 32:
-        raise ValueError(
-            "Could not create volume ID longer than 32 bytes, options are %r",
-            sorted(tried, key=len),
-        )
+        volid = volid[:32]

     if compose.conf["restricted_volid"]:
         # Replace all non-alphanumeric characters (and non-underscores) with
@@ -593,7 +498,13 @@ def failable(
     else:
         compose.require_deliverable(variant, arch, deliverable, subvariant)
     try:
-        yield
+        with tracing.span(
+            f"generate-{deliverable}",
+            variant=variant.uid,
+            arch=arch,
+            subvariant=subvariant or "",
+        ):
+            yield
     except Exception as exc:
         if not can_fail:
             raise
@@ -778,7 +689,11 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
     """
     for i in range(max_retries):
         proc = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
+            cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True,
+            errors="replace",
         )
         out, err = proc.communicate()
         if proc.returncode == 0:
@@ -800,7 +715,8 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
             c,
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
-            universal_newlines=True,
+            text=True,
+            errors="replace",
         )
         out, _ = proc.communicate()
         logger.debug(
@@ -1001,11 +917,12 @@ def retry(timeout=120, interval=30, wait_on=Exception):

 @retry(wait_on=RuntimeError)
 def git_ls_remote(baseurl, ref, credential_helper=None):
-    cmd = ["git"]
-    if credential_helper:
-        cmd.extend(["-c", "credential.useHttpPath=true"])
-        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
-    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)
+    with tracing.span("git-ls-remote", baseurl=baseurl, ref=ref):
+        cmd = ["git"]
+        if credential_helper:
+            cmd.extend(["-c", "credential.useHttpPath=true"])
+            cmd.extend(["-c", "credential.helper=%s" % credential_helper])
+        return run(cmd + ["ls-remote", baseurl, ref], text=True, errors="replace")


 def get_tz_offset():
@@ -1163,3 +1080,14 @@ def format_size(sz):
         unit += 1

     return "%.3g %sB" % (sz, UNITS[unit])
+
+
+@retry(interval=5, timeout=60, wait_on=RuntimeError)
+def _skopeo_inspect(url):
+    """Wrapper for running `skopeo inspect {url}` and parsing the output.
+    Retries on failure.
+    """
+    cp = subprocess.run(
+        ["skopeo", "inspect", url], stdout=subprocess.PIPE, check=True, encoding="utf-8"
+    )
+    return json.loads(cp.stdout)
@@ -306,6 +306,8 @@ class CompsWrapper(object):
             append_common_info(doc, group_node, group, force_description=True)
             append_bool(doc, group_node, "default", group.default)
             append_bool(doc, group_node, "uservisible", group.uservisible)
+            if group.display_order is not None:
+                append(doc, group_node, "display_order", str(group.display_order))

             if group.lang_only:
                 append(doc, group_node, "langonly", group.lang_only)
@@ -88,5 +88,12 @@ def parse_output(output):
             packages.add((name, arch, frozenset(flags)))
         else:
             name, arch = nevra.rsplit(".", 1)
-            modules.add(name.split(":", 1)[1])
+            # replace dash by underscore in the stream of the module's nevra;
+            # the source name looks like
+            # module:llvm-toolset:rhel8:8040020210411062713:9f9e2e7e.x86_64
+            name = ':'.join(
+                item.replace('-', '_') if i == 1 else item
+                for i, item in enumerate(name.split(':')[1:])
+            )
+            modules.add(name)
     return packages, modules
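
# A small worked instance of the stream mangling added above (the dashed
# stream 'rhel-8' is hypothetical, chosen to exercise the replacement):
#
# name = 'module:llvm-toolset:rhel-8:8040020210411062713:9f9e2e7e'
# mangled = ':'.join(
#     item.replace('-', '_') if i == 1 else item
#     for i, item in enumerate(name.split(':')[1:])
# )
# assert mangled == 'llvm-toolset:rhel_8:8040020210411062713:9f9e2e7e'
#
# Only the stream field (index 1 after dropping the 'module:' prefix) has
# its dashes replaced; dashes in the module name itself are preserved.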
@@ -15,9 +15,9 @@


 import os
+import shlex
 from fnmatch import fnmatch
 import contextlib
-from six.moves import shlex_quote

 from kobo.shortcuts import force_list, relative_path, run
 from pungi import util
@@ -227,7 +227,7 @@ def get_checkisomd5_cmd(iso_path, just_print=False):

 def get_checkisomd5_data(iso_path, logger=None):
     cmd = get_checkisomd5_cmd(iso_path, just_print=True)
-    retcode, output = run(cmd, universal_newlines=True)
+    retcode, output = run(cmd, text=True, errors="replace")
     items = [line.strip().rsplit(":", 1) for line in output.splitlines()]
     items = dict([(k, v.strip()) for k, v in items])
     md5 = items.get(iso_path, "")
@@ -270,26 +270,26 @@ def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
            tr -d "'" |
            cut -c2- |
            sort >> %s""" % (
-            shlex_quote(iso_name),
-            shlex_quote(output_file),
+            shlex.quote(iso_name),
+            shlex.quote(output_file),
         )
     else:
         return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
-            shlex_quote(iso_name),
-            shlex_quote(output_file),
+            shlex.quote(iso_name),
+            shlex.quote(output_file),
         )


 def get_volume_id(path, xorriso=False):
     if xorriso:
         cmd = ["xorriso", "-indev", path]
-        retcode, output = run(cmd, universal_newlines=True)
+        retcode, output = run(cmd, text=True, errors="replace")
         for line in output.splitlines():
             if line.startswith("Volume id"):
                 return line.split("'")[1]
     else:
         cmd = ["isoinfo", "-d", "-i", path]
-        retcode, output = run(cmd, universal_newlines=True)
+        retcode, output = run(cmd, text=True, errors="replace")

         for line in output.splitlines():
             line = line.strip()
@@ -500,7 +500,7 @@ def mount(image, logger=None, use_guestmount=True):
     else:
         env = {}
     cmd = ["mount", "-o", "loop", image, mount_dir]
-    ret, out = run(cmd, env=env, can_fail=True, universal_newlines=True)
+    ret, out = run(cmd, env=env, can_fail=True, text=True, errors="replace")
     if ret != 0:
         # The mount command failed, something is wrong.
         # Log the output and raise an exception.
299 pungi/wrappers/kojimock.py Normal file
@@ -0,0 +1,299 @@
import os
import time
from pathlib import Path

from attr import dataclass
from kobo.rpmlib import parse_nvra

from pungi.module_util import Modulemd
from pungi.scripts.gather_rpms import search_rpms

# just a random value which we don't use in the mock currently;
# originally builds are filtered by this value
# to get a consistent snapshot of tags and packages
LAST_EVENT_ID = 999999
# last event time is not important, but build
# time should be less than it
LAST_EVENT_TIME = time.time()
BUILD_TIME = 0
# virtual build that collects all
# packages built for some arch
RELEASE_BUILD_ID = 15270
# tag that should have all packages available
ALL_PACKAGES_TAG = 'dist-c8-compose'
# tag that should have all modules available
ALL_MODULES_TAG = 'dist-c8-module-compose'


@dataclass
class Module:
    build_id: int
    name: str
    nvr: str
    stream: str
    version: str
    context: str
    arch: str


class KojiMock:
    """
    Class that acts like real koji (for some needed methods)
    but uses local storage as the data source.
    """
    def __init__(self, packages_dir, modules_dir, all_arches):
        self._modules = self._gather_modules(modules_dir)
        self._modules_dir = modules_dir
        self._packages_dir = packages_dir
        self._all_arches = all_arches

    @staticmethod
    def _gather_modules(modules_dir):
        modules = {}
        for index, (f, arch) in enumerate(
                (sub_path.name, sub_path.parent.name)
                for path in Path(modules_dir).glob('*')
                for sub_path in path.iterdir()
        ):
            parsed = parse_nvra(f)
            modules[index] = Module(
                name=parsed['name'],
                nvr=f,
                version=parsed['release'],
                context=parsed['arch'],
                stream=parsed['version'],
                build_id=index,
                arch=arch,
            )
        return modules
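
# Hypothetical modules_dir layout _gather_modules expects: one directory per
# arch, each containing N-S-V.C module files, e.g.
#   modules/x86_64/perl-5.30-8030020210120.abcdef12
#
# parsed = parse_nvra('perl-5.30-8030020210120.abcdef12')
#
# parse_nvra treats N-S-V.C as name-version-release.arch, which the
# constructor above remaps: stream=parsed['version'] ('5.30'),
# version=parsed['release'] ('8030020210120'), context=parsed['arch']
# ('abcdef12').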
|
||||
@staticmethod
|
||||
def getLastEvent(*args, **kwargs):
|
||||
return {'id': LAST_EVENT_ID, 'ts': LAST_EVENT_TIME}
|
||||
|
||||
def listTagged(self, tag_name, *args, **kwargs):
|
||||
"""
|
||||
Returns list of virtual 'builds' that contain packages by given tag
|
||||
There are two kinds of tags: modular and distributive.
|
||||
For now, only one kind, distributive one, is needed.
|
||||
"""
|
||||
if tag_name != ALL_MODULES_TAG:
|
||||
raise ValueError("I don't know what tag is %s" % tag_name)
|
||||
|
||||
builds = []
|
||||
for module in self._modules.values():
|
||||
builds.append({
|
||||
'build_id': module.build_id,
|
||||
'owner_name': 'centos',
|
||||
'package_name': module.name,
|
||||
'nvr': module.nvr,
|
||||
'version': module.stream,
|
||||
'release': '%s.%s' % (module.version, module.context),
|
||||
'name': module.name,
|
||||
'id': module.build_id,
|
||||
'tag_name': tag_name,
|
||||
'arch': module.arch,
|
||||
# Following fields are currently not
|
||||
# used but returned by real koji
|
||||
# left them here just for reference
|
||||
#
|
||||
# 'task_id': None,
|
||||
# 'state': 1,
|
||||
# 'start_time': '2020-12-23 16:43:59',
|
||||
# 'creation_event_id': 309485,
|
||||
# 'creation_time': '2020-12-23 17:05:33.553748',
|
||||
# 'epoch': None, 'tag_id': 533,
|
||||
# 'completion_time': '2020-12-23 17:05:23',
|
||||
# 'volume_id': 0,
|
||||
# 'package_id': 3221,
|
||||
# 'owner_id': 11,
|
||||
# 'volume_name': 'DEFAULT',
|
||||
})
|
||||
|
||||
return builds
|
||||
|
||||
@staticmethod
|
||||
def getFullInheritance(*args, **kwargs):
|
||||
"""
|
||||
Unneeded because we use local storage.
|
||||
"""
|
||||
return []
|
||||
|
||||
def getBuild(self, build_id, *args, **kwargs):
|
||||
"""
|
||||
Used to get information about build
|
||||
(used in pungi only for modules currently)
|
||||
"""
|
||||
module = self._modules[build_id]
|
||||
|
||||
result = {
|
||||
'id': build_id,
|
||||
'name': module.name,
|
||||
'version': module.stream,
|
||||
'release': '%s.%s' % (module.version, module.context),
|
||||
'completion_ts': BUILD_TIME,
|
||||
'state': 'COMPLETE',
|
||||
'arch': module.arch,
|
||||
'extra': {
|
||||
'typeinfo': {
|
||||
'module': {
|
||||
'stream': module.stream,
|
||||
'version': module.version,
|
||||
'name': module.name,
|
||||
'context': module.context,
|
||||
'content_koji_tag': '-'.join([
|
||||
module.name,
|
||||
module.stream,
|
||||
module.version
|
||||
]) + '.' + module.context
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result

    def listArchives(self, build_id, *args, **kwargs):
        """
        Originally lists artifacts for a build, but in pungi it is used
        only to get the list of modulemd files for a module.
        """
        module = self._modules[build_id]

        return [
            {
                'build_id': module.build_id,
                'filename': f'modulemd.{module.arch}.txt',
                'btype': 'module'
            },
            # No one ever uses this file, but it has to be present,
            # because pungi ignores builds with len(files) <= 1.
            {
                'build_id': module.build_id,
                'filename': 'modulemd.txt',
                'btype': 'module'
            }
        ]

    def listTaggedRPMS(self, tag_name, *args, **kwargs):
        """
        Get information about packages that are tagged with the given tag.
        There are two kinds of tags: per-module and per-distr.
        """
        if tag_name == ALL_PACKAGES_TAG:
            builds, packages = self._get_release_packages()
        else:
            builds, packages = self._get_module_packages(tag_name)
        return [
            packages,
            builds
        ]

    def _get_release_packages(self):
        """
        Search the packages dir and keep only non-modular packages.

        This mirrors how real koji works:
        - modular packages are tagged with a module-* tag
        - all other packages are tagged with a dist* tag
        """
        packages = []

        # get all rpms in the folder
        rpms = search_rpms(Path(self._packages_dir))

        for rpm in rpms:
            info = parse_nvra(rpm.path.stem)
            if 'module' in info['release']:
                continue
            packages.append({
                "build_id": RELEASE_BUILD_ID,
                "name": info['name'],
                "extra": None,
                "arch": info['arch'],
                "epoch": info['epoch'] or None,
                "version": info['version'],
                "metadata_only": False,
                "release": info['release'],
                # not used currently
                # "id": 262555,
                # "size": 0
            })
        builds = []
        return builds, packages

    def _get_module_packages(self, tag_name):
        """
        Get the list of builds for the module with the given module tag name.
        """
        builds = []
        packages = []
        modules = self._get_modules_by_name(tag_name)
        for module in modules:
            if module is None:
                raise ValueError('Module %s is not found' % tag_name)
            path = os.path.join(
                self._modules_dir,
                module.arch,
                tag_name,
            )

            builds.append({
                "build_id": module.build_id,
                "package_name": module.name,
                "nvr": module.nvr,
                "tag_name": module.nvr,
                "version": module.stream,
                "release": module.version,
                "id": module.build_id,
                "name": module.name,
                "volume_name": "DEFAULT",
                # The following fields are currently not used but are
                # returned by real koji; left here just for reference.
                #
                # "owner_name": "mbox-mbs-backend",
                # "task_id": 195937,
                # "state": 1,
                # "start_time": "2020-12-22 19:20:12.504578",
                # "creation_event_id": 306731,
                # "creation_time": "2020-12-22 19:20:12.504578",
                # "epoch": None,
                # "tag_id": 1192,
                # "completion_time": "2020-12-22 19:34:34.716615",
                # "volume_id": 0,
                # "package_id": 104,
                # "owner_id": 6,
            })

            if os.path.exists(path):
                info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
                for art in info.get_rpm_artifacts():
                    data = parse_nvra(art)
                    packages.append({
                        "build_id": module.build_id,
                        "name": data['name'],
                        "extra": None,
                        "arch": data['arch'],
                        "epoch": data['epoch'] or None,
                        "version": data['version'],
                        "metadata_only": False,
                        "release": data['release'],
                        "id": 262555,
                        "size": 0
                    })
            else:
                raise RuntimeError('Unable to find module %s' % path)
        return builds, packages

    def _get_modules_by_name(self, tag_name):
        modules = []
        for arch in self._all_arches:
            for module in self._modules.values():
                if module.nvr != tag_name or module.arch != arch:
                    continue
                modules.append(module)
        return modules
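
For orientation, a minimal usage sketch of the mock. The directory layout, arch list and paths are assumptions; the tag constants are module-level names in pungi.wrappers.kojimock, as used unqualified above:

    from pungi.wrappers.kojimock import (
        KojiMock, ALL_MODULES_TAG, ALL_PACKAGES_TAG,
    )

    mock = KojiMock(
        packages_dir="/mnt/koji",          # flat directory of built RPMs (assumed)
        modules_dir="/mnt/koji/modules",   # <arch>/<module NVR> files (assumed)
        all_arches=["x86_64", "aarch64"],
    )
    print(mock.getLastEvent())
    for build in mock.listTagged(ALL_MODULES_TAG):
        print(build["nvr"], build["arch"])
    # listTaggedRPMS returns [packages, builds], in that order
    packages, builds = mock.listTaggedRPMS(ALL_PACKAGES_TAG)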

@@ -14,25 +14,27 @@
# along with this program; if not, see <https://gnu.org/licenses/>.


import configparser
import contextlib
import os
import re
import socket
import shlex
import shutil
import time
import threading
import xmlrpc.client

import requests

import koji
from kobo.shortcuts import run, force_list
import six
from six.moves import configparser, shlex_quote
import six.moves.xmlrpc_client as xmlrpclib
from flufl.lock import Lock
from datetime import timedelta

from .kojimock import KojiMock
from .. import util
from ..otel import tracing
from ..arch_utils import getBaseArch


@@ -67,13 +69,13 @@ class KojiWrapper(object):
                value = getattr(self.koji_module.config, key, None)
                if value is not None:
                    session_opts[key] = value
            self.koji_proxy = koji.ClientSession(
                self.koji_module.config.server, session_opts
            self.koji_proxy = tracing.instrument_xmlrpc_proxy(
                koji.ClientSession(self.koji_module.config.server, session_opts)
            )
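
        # tracing.instrument_xmlrpc_proxy comes from pungi.otel and is not
        # shown in this diff. Conceptually it wraps the ClientSession so
        # that every RPC runs inside a span; a rough sketch (names here are
        # illustrative assumptions, not the real implementation):
        #
        #     class InstrumentedProxy:
        #         def __init__(self, proxy):
        #             self._proxy = proxy
        #
        #         def __getattr__(self, name):
        #             method = getattr(self._proxy, name)
        #             def wrapped(*args, **kwargs):
        #                 with tracing.span("koji-call", method=name):
        #                     return method(*args, **kwargs)
        #             return wrapped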

    # This retry should be removed once https://pagure.io/koji/issue/3170 is
    # fixed and released.
    @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
    @util.retry(wait_on=(xmlrpc.client.ProtocolError, koji.GenericError))
    def login(self):
        """Authenticate to the hub."""
        auth_type = self.koji_module.config.authtype
@@ -144,7 +146,7 @@ class KojiWrapper(object):
            cmd.append(arch)

        if isinstance(command, list):
            command = " ".join([shlex_quote(i) for i in command])
            command = " ".join([shlex.quote(i) for i in command])

        # HACK: remove rpmdb and yum cache
        command = (
@@ -152,7 +154,7 @@ class KojiWrapper(object):
        )

        if chown_paths:
            paths = " ".join(shlex_quote(pth) for pth in chown_paths)
            paths = " ".join(shlex.quote(pth) for pth in chown_paths)
            command += " ; EXIT_CODE=$?"
            # Make the files world readable
            command += " ; chmod -R a+r %s" % paths
@@ -286,35 +288,38 @@ class KojiWrapper(object):
        :return dict: {"retcode": 0, "output": "", "task_id": 1}
        """
        task_id = None
        with self.get_koji_cmd_env() as env:
            retcode, output = run(
                command,
                can_fail=True,
                logfile=log_file,
                show_cmd=True,
                env=env,
                buffer_size=-1,
                universal_newlines=True,
            )
        with tracing.span("run-runroot-cmd", command=command):
            with self.get_koji_cmd_env() as env:
                retcode, output = run(
                    command,
                    can_fail=True,
                    logfile=log_file,
                    show_cmd=True,
                    env=env,
                    buffer_size=-1,
                    text=True,
                    errors="replace",
                )

        # Look for first line that contains only a number. This is the ID of
        # the new task. Usually this should be the first line, but there may be
        # warnings before it.
        for line in output.splitlines():
            match = re.search(r"^(\d+)$", line)
            if match:
                task_id = int(match.groups()[0])
                break
        # Look for first line that contains only a number. This is the ID of
        # the new task. Usually this should be the first line, but there may be
        # warnings before it.
        for line in output.splitlines():
            match = re.search(r"^(\d+)$", line)
            if match:
                task_id = int(match.groups()[0])
                break
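        # e.g. for output "Warning: using plugin X\n12345\nWatching tasks..."
        # this loop sets task_id == 12345, the first all-digit line
        # (the example output is made up).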

        if not task_id:
            raise RuntimeError(
                "Could not find task ID in output. Command '%s' returned '%s'."
                % (" ".join(command), output)
            )
        if not task_id:
            raise RuntimeError(
                "Could not find task ID in output. Command '%s' returned '%s'."
                % (" ".join(command), output)
            )

        self.save_task_id(task_id)
        self.save_task_id(task_id)
        tracing.set_attribute("task_id", task_id)

        retcode, output = self._wait_for_task(task_id, logfile=log_file)
        retcode, output = self._wait_for_task(task_id, logfile=log_file)

        return {
            "retcode": retcode,
@@ -358,7 +363,7 @@ class KojiWrapper(object):
        for option, value in opts.items():
            if isinstance(value, list):
                value = ",".join(value)
            if not isinstance(value, six.string_types):
            if not isinstance(value, str):
                # Python 3 configparser will reject non-string values.
                value = str(value)
            cfg_parser.set(section, option, value)
@@ -429,9 +434,10 @@ class KojiWrapper(object):
        attempt = 0

        while True:
            retcode, output = run(
                cmd, can_fail=True, logfile=logfile, universal_newlines=True
            )
            with tracing.span("watch-task", task_id=task_id):
                retcode, output = run(
                    cmd, can_fail=True, logfile=logfile, text=True, errors="replace"
                )

            if retcode == 0 or not (
                self._has_connection_error(output) or self._has_offline_error(output)
@@ -455,33 +461,36 @@ class KojiWrapper(object):
        its exit code and parsed task id. This method will block until the
        command finishes.
        """
        with self.get_koji_cmd_env() as env:
            retcode, output = run(
                command,
                can_fail=True,
                show_cmd=True,
                logfile=log_file,
                env=env,
                buffer_size=-1,
                universal_newlines=True,
            )
        with tracing.span("run-blocking-cmd", command=command):
            with self.get_koji_cmd_env() as env:
                retcode, output = run(
                    command,
                    can_fail=True,
                    show_cmd=True,
                    logfile=log_file,
                    env=env,
                    buffer_size=-1,
                    text=True,
                    errors="replace",
                )

        match = re.search(r"Created task: (\d+)", output)
        if not match:
            raise RuntimeError(
                "Could not find task ID in output. Command '%s' returned '%s'."
                % (" ".join(command), output)
            )
        task_id = int(match.groups()[0])
        match = re.search(r"Created task: (\d+)", output)
        if not match:
            raise RuntimeError(
                "Could not find task ID in output. Command '%s' returned '%s'."
                % (" ".join(command), output)
            )
        task_id = int(match.groups()[0])
        tracing.set_attribute("task_id", task_id)

        self.save_task_id(task_id)
        self.save_task_id(task_id)

        if retcode != 0 and (
            self._has_connection_error(output) or self._has_offline_error(output)
        ):
            retcode, output = self._wait_for_task(
                task_id, logfile=log_file, max_retries=max_retries
            )
        if retcode != 0 and (
            self._has_connection_error(output) or self._has_offline_error(output)
        ):
            retcode, output = self._wait_for_task(
                task_id, logfile=log_file, max_retries=max_retries
            )

        return {
            "retcode": retcode,
@@ -527,6 +536,7 @@ class KojiWrapper(object):
                "createLiveMedia",
                "createAppliance",
                "createKiwiImage",
                "imageBuilderBuildArch",
            ]:
                continue

@@ -562,126 +572,6 @@ class KojiWrapper(object):

        return result

    def get_image_path(self, task_id):
        result = []
        task_info_list = []
        task_info_list.append(self.koji_proxy.getTaskInfo(task_id, request=True))
        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))

        # scan parent and child tasks for certain methods
        task_info = None
        for i in task_info_list:
            if i["method"] in ("createAppliance", "createLiveCD", "createImage"):
                task_info = i
                break

        scratch = task_info["request"][-1].get("scratch", False)
        task_result = self.koji_proxy.getTaskResult(task_info["id"])
        task_result.pop("rpmlist", None)

        if scratch:
            topdir = os.path.join(
                self.koji_module.pathinfo.work(),
                self.koji_module.pathinfo.taskrelpath(task_info["id"]),
            )
        else:
            build = self.koji_proxy.getImageBuild(
                "%(name)s-%(version)s-%(release)s" % task_result
            )
            build["name"] = task_result["name"]
            build["version"] = task_result["version"]
            build["release"] = task_result["release"]
            build["arch"] = task_result["arch"]
            topdir = self.koji_module.pathinfo.imagebuild(build)
        for i in task_result["files"]:
            result.append(os.path.join(topdir, i))
        return result

    def get_wrapped_rpm_path(self, task_id, srpm=False):
        result = []
        task_info_list = []
        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))

        # scan parent and child tasks for certain methods
        task_info = None
        for i in task_info_list:
            if i["method"] in ("wrapperRPM"):
                task_info = i
                break

        # Get results of wrapperRPM task
        # {'buildroot_id': 2479520,
        #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],
        #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],
        #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}
        task_result = self.koji_proxy.getTaskResult(task_info["id"])

        # Get koji dir with results (rpms, srpms, logs, ...)
        topdir = os.path.join(
            self.koji_module.pathinfo.work(),
            self.koji_module.pathinfo.taskrelpath(task_info["id"]),
        )

        # TODO: Maybe use different approach for non-scratch
        # builds - see get_image_path()

        # Get list of filenames that should be returned
        result_files = task_result["rpms"]
        if srpm:
            result_files += [task_result["srpm"]]

        # Prepare list with paths to the required files
        for i in result_files:
            result.append(os.path.join(topdir, i))

        return result

    def get_signed_wrapped_rpms_paths(self, task_id, sigkey, srpm=False):
        result = []
        parent_task = self.koji_proxy.getTaskInfo(task_id, request=True)
        task_info_list = []
        task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))

        # scan parent and child tasks for certain methods
        task_info = None
        for i in task_info_list:
            if i["method"] in ("wrapperRPM"):
                task_info = i
                break

        # Check parent_task if it's scratch build
        scratch = parent_task["request"][-1].get("scratch", False)
        if scratch:
            raise RuntimeError("Scratch builds cannot be signed!")

        # Get results of wrapperRPM task
        # {'buildroot_id': 2479520,
        #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],
        #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],
        #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}
        task_result = self.koji_proxy.getTaskResult(task_info["id"])

        # Get list of filenames that should be returned
        result_files = task_result["rpms"]
        if srpm:
            result_files += [task_result["srpm"]]

        # Prepare list with paths to the required files
        for i in result_files:
            rpminfo = self.koji_proxy.getRPM(i)
            build = self.koji_proxy.getBuild(rpminfo["build_id"])
            path = os.path.join(
                self.koji_module.pathinfo.build(build),
                self.koji_module.pathinfo.signed(rpminfo, sigkey),
            )
            result.append(path)

        return result

    def get_build_nvrs(self, task_id):
        builds = self.koji_proxy.listBuilds(taskID=task_id)
        return [build.get("nvr") for build in builds if build.get("nvr")]

    def multicall_map(
        self, koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None
    ):
@@ -764,11 +654,11 @@ class KojiWrapper(object):

        return results

    @util.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError))
    @util.retry(wait_on=(xmlrpc.client.ProtocolError, koji.GenericError))
    def retrying_multicall_map(self, *args, **kwargs):
        """
        Retrying version of multicall_map. This tries to retry the Koji call
        in case of koji.GenericError or xmlrpclib.ProtocolError.
        in case of koji.GenericError or xmlrpc.client.ProtocolError.

        Please refer to koji_multicall_map for further specification of arguments.
        """

@@ -784,6 +674,45 @@ class KojiWrapper(object):
            pass


class KojiMockWrapper(object):
    lock = threading.Lock()

    def __init__(self, compose, all_arches):
        self.all_arches = all_arches
        self.compose = compose
        try:
            self.profile = self.compose.conf["koji_profile"]
        except KeyError:
            raise RuntimeError("Koji profile must be configured")
        with self.lock:
            self.koji_module = koji.get_profile_module(self.profile)
            session_opts = {}
            for key in (
                "timeout",
                "keepalive",
                "max_retries",
                "retry_interval",
                "anon_retry",
                "offline_retry",
                "offline_retry_interval",
                "debug",
                "debug_xmlrpc",
                "serverca",
                "use_fast_upload",
            ):
                value = getattr(self.koji_module.config, key, None)
                if value is not None:
                    session_opts[key] = value
            self.koji_proxy = KojiMock(
                packages_dir=self.koji_module.config.topdir,
                modules_dir=os.path.join(
                    self.koji_module.config.topdir,
                    'modules',
                ),
                all_arches=self.all_arches,
            )
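
    # Sketch of the intended wiring (the topdir value below is made up):
    # with a koji profile whose topdir is /mnt/koji, the mock reads RPMs
    # from /mnt/koji and module metadata from /mnt/koji/modules, so
    # KojiMockWrapper(compose, ["x86_64"]).koji_proxy behaves like a koji
    # session backed by local storage.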


def get_buildroot_rpms(compose, task_id):
    """Get build root RPMs - either from runroot or local"""
    result = []
@@ -808,7 +737,8 @@ def get_buildroot_rpms(compose, task_id):
        # local
        retcode, output = run(
            "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
            universal_newlines=True,
            text=True,
            errors="replace",
        )
        for i in output.splitlines():
            if not i:
@@ -968,7 +898,8 @@ class KojiDownloadProxy:
            os.utime(destination_file)
            return destination_file

        return self._atomic_download(url, destination_file, validator)
        with tracing.span("download-rpm", url=url):
            return self._atomic_download(url, destination_file, validator)

    def get_file(self, path, validator=None):
        """

@@ -46,6 +46,7 @@ class LoraxWrapper(object):
        skip_branding=False,
        squashfs_only=False,
        configuration_file=None,
        rootfs_type=None,
    ):
        cmd = ["lorax"]
        cmd.append("--product=%s" % product)
@@ -106,6 +107,9 @@ class LoraxWrapper(object):
        output_dir = os.path.abspath(output_dir)
        cmd.append(output_dir)

        if rootfs_type:
            cmd.append("--rootfs-type=%s" % rootfs_type)

        # TODO: workdir

        return cmd

@@ -105,85 +105,6 @@ class PungiWrapper(object):

        kickstart.close()

    def get_pungi_cmd(
        self,
        config,
        destdir,
        name,
        version=None,
        flavor=None,
        selfhosting=False,
        fulltree=False,
        greedy=None,
        nodeps=False,
        nodownload=True,
        full_archlist=False,
        arch=None,
        cache_dir=None,
        lookaside_repos=None,
        multilib_methods=None,
        profiler=False,
    ):
        cmd = ["pungi"]

        # Gather stage
        cmd.append("-G")

        # path to a kickstart file
        cmd.append("--config=%s" % config)

        # destdir is optional in Pungi (defaults to the current dir), but we
        # want it to be mandatory here
        cmd.append("--destdir=%s" % destdir)

        # name
        cmd.append("--name=%s" % name)

        # version; optional, defaults to datestamp
        if version:
            cmd.append("--ver=%s" % version)

        # rhel variant; optional
        if flavor:
            cmd.append("--flavor=%s" % flavor)

        # turn selfhosting on
        if selfhosting:
            cmd.append("--selfhosting")

        # NPLB
        if fulltree:
            cmd.append("--fulltree")

        greedy = greedy or "none"
        cmd.append("--greedy=%s" % greedy)

        if nodeps:
            cmd.append("--nodeps")

        # don't download packages, just print paths
        if nodownload:
            cmd.append("--nodownload")

        if full_archlist:
            cmd.append("--full-archlist")

        if arch:
            cmd.append("--arch=%s" % arch)

        if multilib_methods:
            for i in multilib_methods:
                cmd.append("--multilib=%s" % i)

        if cache_dir:
            cmd.append("--cachedir=%s" % cache_dir)

        if lookaside_repos:
            for i in lookaside_repos:
                cmd.append("--lookaside-repo=%s" % i)

        return cmd

    def get_pungi_cmd_dnf(
        self,
        config,
@@ -269,70 +190,3 @@ class PungiWrapper(object):
            broken_deps.setdefault(match.group(2), set()).add(match.group(1))

        return packages, broken_deps, missing_comps

    def run_pungi(
        self,
        ks_file,
        destdir,
        name,
        selfhosting=False,
        fulltree=False,
        greedy="",
        cache_dir=None,
        arch="",
        multilib_methods=[],
        nodeps=False,
        lookaside_repos=[],
    ):
        """
        This is a replacement for get_pungi_cmd that runs it in-process. Not
        all arguments are supported.
        """
        from .. import ks, gather, config

        ksparser = ks.get_ksparser(ks_path=ks_file)
        cfg = config.Config()
        cfg.set("pungi", "destdir", destdir)
        cfg.set("pungi", "family", name)
        cfg.set("pungi", "iso_basename", name)
        cfg.set("pungi", "fulltree", str(fulltree))
        cfg.set("pungi", "selfhosting", str(selfhosting))
        cfg.set("pungi", "cachedir", cache_dir)
        cfg.set("pungi", "full_archlist", "True")
        cfg.set("pungi", "workdirbase", "%s/work" % destdir)
        cfg.set("pungi", "greedy", greedy)
        cfg.set("pungi", "nosource", "False")
        cfg.set("pungi", "nodebuginfo", "False")
        cfg.set("pungi", "force", "False")
        cfg.set("pungi", "resolve_deps", str(not nodeps))
        if arch:
            cfg.set("pungi", "arch", arch)
        if multilib_methods:
            cfg.set("pungi", "multilib", " ".join(multilib_methods))
        if lookaside_repos:
            cfg.set("pungi", "lookaside_repos", " ".join(lookaside_repos))

        mypungi = gather.Pungi(cfg, ksparser)

        with open(os.path.join(destdir, "out"), "w") as f:
            with mypungi.yumlock:
                mypungi._inityum()
                mypungi.gather()

                for line in mypungi.list_packages():
                    flags_str = ",".join(line["flags"])
                    if flags_str:
                        flags_str = "(%s)" % flags_str
                    f.write("RPM%s: %s\n" % (flags_str, line["path"]))
                mypungi.makeCompsFile()
                mypungi.getDebuginfoList()
                for line in mypungi.list_debuginfo():
                    flags_str = ",".join(line["flags"])
                    if flags_str:
                        flags_str = "(%s)" % flags_str
                    f.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
                for line in mypungi.list_srpms():
                    flags_str = ",".join(line["flags"])
                    if flags_str:
                        flags_str = "(%s)" % flags_str
                    f.write("SRPM%s: %s\n" % (flags_str, line["path"]))

@@ -19,13 +19,8 @@ import os
from kobo.shortcuts import force_list


def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
def get_repoclosure_cmd(backend="dnf", arch=None, repos=None, lookaside=None):
    cmds = {
        "yum": {
            "cmd": ["/usr/bin/repoclosure", "--tempcache"],
            "repoarg": "--repoid=%s",
            "lookaside": "--lookaside=%s",
        },
        "dnf": {
            "cmd": ["dnf", "repoclosure"],
            "repoarg": "--repo=%s",
@@ -44,18 +39,17 @@ def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
    for i in arches:
        cmd.append("--arch=%s" % i)

    if backend == "dnf" and arches:
    if arches:
        cmd.append("--forcearch=%s" % arches[0])

    repos = repos or {}
    for repo_id, repo_path in repos.items():
        cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
        cmd.append(cmds[backend]["repoarg"] % repo_id)
        if backend == "dnf":
            # For dnf we want to add all repos with the --repo option (which
            # enables only those and not any system repo), and the repos to
            # check are also listed with the --check option.
            cmd.append("--check=%s" % repo_id)
        # For dnf we want to add all repos with the --repo option (which
        # enables only those and not any system repo), and the repos to
        # check are also listed with the --check option.
        cmd.append("--check=%s" % repo_id)

    lookaside = lookaside or {}
    for repo_id, repo_path in lookaside.items():

@@ -19,16 +19,16 @@ from __future__ import absolute_import
import os
import shutil
import glob
import six
import shlex
import threading
from six.moves import shlex_quote
from six.moves.urllib.request import urlretrieve
from urllib.request import urlretrieve
from fnmatch import fnmatch

import kobo.log
from kobo.shortcuts import run, force_list
from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
from .kojiwrapper import KojiWrapper
from ..otel import tracing

lock = threading.Lock()

@@ -57,7 +57,8 @@ class ScmBase(kobo.log.LoggingBase):
            workdir=cwd,
            can_fail=True,
            stdin_data="",
            universal_newlines=True,
            text=True,
            errors="replace",
        )
        if retcode != 0:
            self.log_error("Output was: %r" % output)
@@ -79,7 +80,7 @@ class FileWrapper(ScmBase):
        for i in dirs:
            copy_all(i, target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        if scm_root:
            raise ValueError("FileWrapper: 'scm_root' should be empty.")
        self.log_debug(
@@ -118,7 +119,7 @@ class CvsWrapper(ScmBase):
        )
        copy_all(os.path.join(tmp_dir, scm_dir), target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        scm_file = scm_file.lstrip("/")
        scm_branch = scm_branch or "HEAD"
        with temp_dir() as tmp_dir:
@@ -160,6 +161,9 @@ class GitWrapper(ScmBase):
        if "://" not in repo:
            repo = "file://%s" % repo

        if repo.startswith("git+http"):
            repo = repo[4:]
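        # e.g. "git+https://example.com/repo.git" becomes
        # "https://example.com/repo.git" (illustrative URL)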

        git_cmd = ["git"]
        if "credential_helper" in self.options:
            git_cmd.extend(["-c", "credential.useHttpPath=true"])
@@ -198,6 +202,17 @@ class GitWrapper(ScmBase):
                copy_all(destdir, debugdir)
            raise

        if os.path.exists(os.path.join(destdir, ".gitmodules")):
            try:
                self.log_debug("Cloning submodules")
                run(["git", "submodule", "init"], workdir=destdir)
                run(["git", "submodule", "update"], workdir=destdir)
            except RuntimeError as e:
                self.log_error(
                    "Failed to clone submodules: %s %s", e, getattr(e, "output", "")
                )
                # Ignore the error here, there may just be no submodules.

    def get_temp_repo_path(self, scm_root, scm_branch):
        scm_repo = scm_root.split("/")[-1]
        process_id = os.getpid()
@@ -215,7 +230,8 @@ class GitWrapper(ScmBase):
        tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
        if not os.path.isdir(tmp_dir):
            makedirs(tmp_dir)
            self._clone(scm_root, scm_branch, tmp_dir)
            with tracing.span("git-clone", repo=scm_root, ref=scm_branch):
                self._clone(scm_root, scm_branch, tmp_dir)
            self.run_process_command(tmp_dir)
        return tmp_dir

@@ -233,7 +249,7 @@ class GitWrapper(ScmBase):

        copy_all(os.path.join(tmp_dir, scm_dir), target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        scm_file = scm_file.lstrip("/")
        scm_branch = scm_branch or "master"

@@ -274,12 +290,12 @@ class RpmScmWrapper(ScmBase):
            run(
                "cp -a %s %s/"
                % (
                    shlex_quote(os.path.join(tmp_dir, scm_dir)),
                    shlex_quote(target_dir),
                    shlex.quote(os.path.join(tmp_dir, scm_dir)),
                    shlex.quote(target_dir),
                )
            )

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        for rpm in self._list_rpms(scm_root):
            scm_file = scm_file.lstrip("/")
            with temp_dir() as tmp_dir:
@@ -304,7 +320,7 @@ class KojiScmWrapper(ScmBase):
    def export_dir(self, *args, **kwargs):
        raise RuntimeError("Only files can be exported from Koji")

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        if scm_branch:
            self._get_latest_from_tag(scm_branch, scm_root, scm_file, target_dir)
        else:
@@ -341,6 +357,44 @@ class KojiScmWrapper(ScmBase):
        urlretrieve(url, target_file)


class SkopeoCopyTimeoutError(RuntimeError):
    pass


class ContainerImageScmWrapper(ScmBase):

    def export_dir(self, *args, **kwargs):
        raise RuntimeError("Containers can only be exported as files")

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, arch=None):
        if arch == "src":
            return
        ARCHES = {"aarch64": "arm64", "x86_64": "amd64"}
        arch = ARCHES.get(arch, arch)
        cmd = [
            "skopeo",
            "--override-arch=" + arch,
            "copy",
            scm_root,
            "oci:" + target_dir,
            "--remove-signatures",
        ]
        try:
            self.log_debug(
                "Exporting container %s to %s: %s", scm_root, target_dir, cmd
            )
            with tracing.span("skopeo-copy", arch=arch, image=scm_root):
                self.retry_run(cmd, can_fail=False)
        except RuntimeError as e:
            output = getattr(e, "output", "")
            self.log_error("Failed to copy container image: %s %s", e, output)

            if "connection timed out" in output:
                raise SkopeoCopyTimeoutError(output) from e

            raise
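
As a concrete illustration (the image reference and target directory are made up), exporting for arch "x86_64" maps it to "amd64" and invokes roughly:

    skopeo --override-arch=amd64 copy \
        docker://registry.example.com/os/foo:latest \
        oci:/work/oci-target --remove-signatures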


def _get_wrapper(scm_type, *args, **kwargs):
    SCM_WRAPPERS = {
        "file": FileWrapper,
@@ -348,6 +402,7 @@ def _get_wrapper(scm_type, *args, **kwargs):
        "git": GitWrapper,
        "rpm": RpmScmWrapper,
        "koji": KojiScmWrapper,
        "container-image": ContainerImageScmWrapper,
    }
    try:
        cls = SCM_WRAPPERS[scm_type]
@@ -356,7 +411,7 @@ def _get_wrapper(scm_type, *args, **kwargs):
    return cls(*args, **kwargs)


def get_file_from_scm(scm_dict, target_path, compose=None):
def get_file_from_scm(scm_dict, target_path, compose=None, arch=None):
    """
    Copy one or more files from source control to a target path. A list of files
    created in ``target_path`` is returned.
@@ -387,7 +442,7 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
    >>> get_file_from_scm(scm_dict, target_path)
    ['/tmp/path/share/variants.dtd']
    """
    if isinstance(scm_dict, six.string_types):
    if isinstance(scm_dict, str):
        scm_type = "file"
        scm_repo = None
        scm_file = os.path.abspath(scm_dict)
@@ -410,8 +465,18 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
    files_copied = []
    for i in force_list(scm_file):
        with temp_dir(prefix="scm_checkout_") as tmp_dir:
            scm.export_file(scm_repo, i, scm_branch=scm_branch, target_dir=tmp_dir)
            files_copied += copy_all(tmp_dir, target_path)
            # Most SCM wrappers need a temporary directory: the git repo is
            # cloned there, and only the relevant files are copied out. But
            # this doesn't work for container image fetching: that pulls in
            # only the required files, and the final output needs to be
            # written by skopeo itself to correctly handle multiple
            # containers landing in the same OCI archive.
            dest = target_path if scm_type == "container-image" else tmp_dir
            scm.export_file(
                scm_repo, i, scm_branch=scm_branch, target_dir=dest, arch=arch
            )
            if dest == tmp_dir:
                files_copied += copy_all(tmp_dir, target_path)
    return files_copied
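
A hedged usage example for the new container path. The scm_dict keys follow pungi's usual scm_dict convention (an assumption here); the image reference, file name and paths are made up. Note that for "container-image" skopeo writes directly into the target, so the returned list is empty by the code above:

    files = get_file_from_scm(
        {
            "scm": "container-image",
            "repo": "docker://registry.example.com/os/foo:latest",
            "file": "foo",
        },
        "/work/oci",
        arch="x86_64",
    )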


@@ -450,7 +515,7 @@ def get_file(source, destination, compose, overwrite=False):
    return destination


def get_dir_from_scm(scm_dict, target_path, compose=None):
def get_dir_from_scm(scm_dict, target_path, compose=None, arch=None):
    """
    Copy a directory from source control to a target path. A list of files
    created in ``target_path`` is returned.
@@ -480,7 +545,7 @@ def get_dir_from_scm(scm_dict, target_path, compose=None):
    >>> get_dir_from_scm(scm_dict, target_path)
    ['/tmp/path/share/variants.dtd', '/tmp/path/share/rawhide-fedora.ks', ...]
    """
    if isinstance(scm_dict, six.string_types):
    if isinstance(scm_dict, str):
        scm_type = "file"
        scm_repo = None
        scm_dir = os.path.abspath(scm_dict)

@@ -15,8 +15,8 @@
from kobo import shortcuts
import os
import productmd
import shlex
import tempfile
from six.moves import shlex_quote

from pungi import util
from pungi.phases.buildinstall import tweak_configs
@@ -24,8 +24,8 @@ from pungi.wrappers import iso


def sh(log, cmd, *args, **kwargs):
    log.info("Running: %s", " ".join(shlex_quote(x) for x in cmd))
    ret, out = shortcuts.run(cmd, *args, universal_newlines=True, **kwargs)
    log.info("Running: %s", " ".join(shlex.quote(x) for x in cmd))
    ret, out = shortcuts.run(cmd, *args, text=True, errors="replace", **kwargs)
    if out:
        log.debug("%s", out)
    return ret, out
@@ -35,7 +35,8 @@ def get_lorax_dir(default="/usr/share/lorax"):
    try:
        _, out = shortcuts.run(
            ["python3", "-c" "import pylorax; print(pylorax.find_templates())"],
            universal_newlines=True,
            text=True,
            errors="replace",
        )
        return out.strip()
    except Exception:

@@ -394,7 +394,8 @@ class UnifiedISO(object):
            iso.get_mkisofs_cmd(
                iso_path, [source_dir], volid=volid, exclude=["./lost+found"]
            ),
            universal_newlines=True,
            text=True,
            errors="replace",
        )

        # implant MD5

@@ -1,7 +1,6 @@
# Some packages must be installed via dnf/yum first, see doc/contributing.rst
dogpile.cache
flufl.lock ; python_version >= '3.0'
flufl.lock < 3.0 ; python_version <= '2.7'
flufl.lock
jsonschema
kobo
koji
@@ -12,4 +11,3 @@ ordered_set
productmd
pykickstart
python-multilib
urlgrabber ; python_version < '3.0'
setup.py
@@ -20,7 +20,7 @@ packages = sorted(packages)

setup(
    name="pungi",
    version="4.7.0",
    version="4.10.1",
    description="Distribution compose tool",
    url="https://pagure.io/pungi",
    author="Dennis Gilmore",
@@ -30,7 +30,6 @@ setup(
    entry_points={
        "console_scripts": [
            "comps_filter = pungi.scripts.comps_filter:main",
            "pungi = pungi.scripts.pungi:main",
            "pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
            "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
            "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
@@ -42,25 +41,27 @@ setup(
            "pungi-config-dump = pungi.scripts.config_dump:cli_main",
            "pungi-config-validate = pungi.scripts.config_validate:cli_main",
            "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
            "pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
            "pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
            "pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main",  # noqa: E501
            "pungi-create-extra-repo = pungi.scripts.create_extra_repo:cli_main"
        ]
    },
    scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
    data_files=[
        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
        ("/usr/share/pungi", glob.glob("share/*.xsl")),
        ("/usr/share/pungi", glob.glob("share/*.ks")),
        ("/usr/share/pungi", glob.glob("share/*.dtd")),
        ("/usr/share/pungi/multilib", glob.glob("share/multilib/*")),
        ("lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
        ("share/pungi", glob.glob("share/*.xsl")),
        ("share/pungi", glob.glob("share/*.ks")),
        ("share/pungi", glob.glob("share/*.dtd")),
        ("share/pungi/multilib", glob.glob("share/multilib/*")),
    ],
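        # Note: relative data_files paths install under the active prefix
        # (e.g. /usr on a system install, or a virtualenv root) instead of
        # a hardcoded /usr, which is presumably the point of this change.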
    test_suite="tests",
    install_requires=[
        "jsonschema",
        "kobo",
        "lxml",
        "productmd>=1.23",
        "six",
        "productmd>=1.45",
        "dogpile.cache",
    ],
    extras_require={':python_version=="2.7"': ["enum34", "lockfile"]},
    tests_require=["pytest", "pytest-cov"],
    tests_require=["pytest", "pytest-cov", "pyfakefs"],
)

sources (new file)
@@ -0,0 +1 @@
SHA512 (pungi-4.10.1.tar.bz2) = 4ff1005ece77ac9b41ac31c3b0bcdd558afaaea4d99bf178d42b24a4318ccc9a5576ad4740446f1589a07f88f59f5cb4954d182f3f4e15b1a798e19d9a54fb22
@@ -1,5 +1,3 @@
mock; python_version < '3.3'
parameterized
pytest
pytest-cov
unittest2; python_version < '3.0'

@@ -6,6 +6,7 @@ LABEL \
    license="MIT"

RUN dnf -y update && dnf -y install \
    --setopt=install_weak_deps=false \
    findutils \
    libmodulemd \
    git \
@@ -15,6 +16,7 @@ RUN dnf -y update && dnf -y install \
    python3-gobject-base \
    python3-tox \
    python3-urlgrabber \
    python3-dnf \
    && dnf clean all

WORKDIR /src

@@ -1,27 +0,0 @@
FROM centos:7
LABEL \
    name="Pungi test" \
    description="Run tests using tox with Python 2" \
    vendor="Pungi developers" \
    license="MIT"

RUN yum -y update && yum -y install epel-release && yum -y install \
    git \
    libmodulemd2 \
    make \
    python3 \
    python-createrepo_c \
    python-gobject-base \
    python-gssapi \
    python-libcomps \
    pykickstart \
    && yum clean all

# python-tox in the yum repo is too old, so install the latest version
RUN pip3 install tox

WORKDIR /src

COPY . .

CMD ["tox", "-e", "py27"]

tests/Jenkinsfile (vendored)
@@ -1,5 +1,3 @@
def DUFFY_SESSION_ID

pipeline {
    agent {
        label 'cico-workspace'
@@ -17,6 +15,7 @@ pipeline {
                if (params.REPO == "" || params.BRANCH == "") {
                    error "Please supply both params (REPO and BRANCH)"
                }
                def DUFFY_SESSION_ID
                try {
                    echo "Requesting duffy node ..."
                    def session_str = sh returnStdout: true, script: "set +x; duffy client --url https://duffy.ci.centos.org/api/v1 --auth-name fedora-infra --auth-key $CICO_API_KEY request-session pool=virt-ec2-t2-centos-9s-x86_64,quantity=1"
@@ -40,7 +39,6 @@ git fetch proposed
git checkout origin/master
git merge --no-ff "proposed/$params.BRANCH" -m "Merge PR"
podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test tox -r -e flake8,black,py3,bandit
podman run --rm -v .:/src:Z quay.io/exd-guild-compose/pungi-test-py2 tox -r -e py27
"""
sh "cat job.sh"
sh "ssh -o StrictHostKeyChecking=no root@$hostname mkdir $remote_dir"

@@ -35,6 +35,11 @@ for spec in $DIR/*.spec; do
    if [ "$(basename $spec)" == "dummy-skype.spec" ]; then
        continue
    fi
    if [ "$(basename $spec)" == "dummy-fcoe-target-utils.spec" ]; then
        if [ "$target" == "ppc" -o "$target" == "s390" -o "$target" == "s390x" ]; then
            continue
        fi
    fi
    echo "Building ${spec/.spec/} for $target"
    rpmbuild --quiet --target=$target -ba --nodeps --define "_srcrpmdir $DIR/../repo/src" --define "_rpmdir $DIR/../repo" $spec
done
Binary files not shown.
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1612479076</revision>
  <data type="primary">
    <checksum type="sha256">08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb</checksum>
    <open-checksum type="sha256">2a15e618f049a883d360ccbf3e764b30640255f47dc526c633b1722fe23cbcbc</open-checksum>
    <location href="repodata/08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb-primary.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>1240</size>
    <open-size>3888</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f</checksum>
    <open-checksum type="sha256">b1782bc4207a5b7c3e64115d5a1d001802e8d363f022ea165df7cdab6f14651c</open-checksum>
    <location href="repodata/e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f-filelists.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>439</size>
    <open-size>1295</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1</checksum>
    <open-checksum type="sha256">3b847919691ad32279b13463de6c08f1f8b32f51e87b7d8d7e95a3ec2f46ef51</open-checksum>
    <location href="repodata/92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1-other.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>630</size>
    <open-size>1911</open-size>
  </data>
  <data type="modules">
    <checksum type="sha256">e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57</checksum>
    <open-checksum type="sha256">d59fee86c18018cc18bb7325aa74aa0abf923c64d29a4ec45e08dcd01a0c3966</open-checksum>
    <location href="repodata/e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57-modules.yaml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>920</size>
    <open-size>3308</open-size>
  </data>
</repomd>
Binary files not shown.
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1666177486</revision>
  <data type="primary">
    <checksum type="sha256">89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576</checksum>
    <open-checksum type="sha256">07255d9856f7531b52a6459f6fc7701c6d93c6d6c29d1382d83afcc53f13494a</open-checksum>
    <location href="repodata/89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576-primary.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>1387</size>
    <open-size>6528</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d</checksum>
    <open-checksum type="sha256">c2e1e674d7d48bccaa16cae0a5f70cb55ef4cd7352b4d9d4fdaa619075d07dbc</open-checksum>
    <location href="repodata/f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d-filelists.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>1252</size>
    <open-size>5594</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f</checksum>
    <open-checksum type="sha256">9ce24c526239e349d023c577b2ae3872c8b0f1888aed1fb24b9b9aa12063fdf3</open-checksum>
    <location href="repodata/b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f-other.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>999</size>
    <open-size>6320</open-size>
  </data>
  <data type="primary_db">
    <checksum type="sha256">ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7</checksum>
    <open-checksum type="sha256">2bce9554ce4496cef34b5cd69f186f7f3143c7cabae8fa384fc5c9eeab326f7f</open-checksum>
    <location href="repodata/ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7-primary.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>3558</size>
    <open-size>106496</open-size>
    <database_version>10</database_version>
  </data>
  <data type="filelists_db">
    <checksum type="sha256">8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b</checksum>
    <open-checksum type="sha256">f7001d1df7f5f7e4898919b15710bea8ed9711ce42faf68e22b757e63169b1fb</open-checksum>
    <location href="repodata/8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b-filelists.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>2360</size>
    <open-size>28672</open-size>
    <database_version>10</database_version>
  </data>
  <data type="other_db">
    <checksum type="sha256">01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80</checksum>
    <open-checksum type="sha256">07f5b9750af1e440d37ca216e719dd288149e79e9132f2fdccb6f73b2e5dd541</open-checksum>
    <location href="repodata/01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80-other.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>2196</size>
    <open-size>32768</open-size>
    <database_version>10</database_version>
  </data>
</repomd>
Binary files not shown.
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1666177500</revision>
  <data type="primary">
    <checksum type="sha256">a1d342aa7cef3a2034fc3f9d6ee02d63572780bc76e61749a57e50b6b3ca9869</checksum>
    <open-checksum type="sha256">a9e3eae447dd44282d7d96db5f15f049b757925397adb752f4df982176bab7e0</open-checksum>
    <location href="repodata/a1d342aa7cef3a2034fc3f9d6ee02d63572780bc76e61749a57e50b6b3ca9869-primary.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>3501</size>
    <open-size>37296</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">6778922d5853d20f213ae7702699a76f1e87e55d6bfb5e4ac6a117d904d47b3c</checksum>
    <open-checksum type="sha256">e30b666d9d88a70de69a08f45e6696bcd600c45485d856bd0213395d7da7bd49</open-checksum>
    <location href="repodata/6778922d5853d20f213ae7702699a76f1e87e55d6bfb5e4ac6a117d904d47b3c-filelists.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>27624</size>
    <open-size>318187</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">5a60d79d8bce6a805f4fdb22fd891524359dce8ccc665c0b54e7299e79debe84</checksum>
    <open-checksum type="sha256">b18138f4a3de45714e578fb1f30b7ec54fdcdaf1a22585891625b6af0894388e</open-checksum>
    <location href="repodata/5a60d79d8bce6a805f4fdb22fd891524359dce8ccc665c0b54e7299e79debe84-other.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>1876</size>
    <open-size>28701</open-size>
  </data>
  <data type="primary_db">
    <checksum type="sha256">c27bc2ce947173aba305041552c3c6d8db71442c1a2e5dcaf35ff750fe0469fc</checksum>
    <open-checksum type="sha256">586e1af8934229925adb9e746ae5ced119859dfd97f4e3237399bb36a7d7f071</open-checksum>
    <location href="repodata/c27bc2ce947173aba305041552c3c6d8db71442c1a2e5dcaf35ff750fe0469fc-primary.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>11528</size>
    <open-size>126976</open-size>
    <database_version>10</database_version>
  </data>
  <data type="filelists_db">
    <checksum type="sha256">ed350865982e7a1e45b144839b56eac888e5d8f680571dd2cd06b37dc83e0fd8</checksum>
    <open-checksum type="sha256">697903989d0f77de2d44a2b603e75c9b4ca23b3795eb136d175caf5666ce6459</open-checksum>
    <location href="repodata/ed350865982e7a1e45b144839b56eac888e5d8f680571dd2cd06b37dc83e0fd8-filelists.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>20440</size>
    <open-size>163840</open-size>
    <database_version>10</database_version>
  </data>
  <data type="other_db">
    <checksum type="sha256">35eff699131e0976429144c6f4514d21568177dc64bb4091c3ff62f76b293725</checksum>
    <open-checksum type="sha256">3bd999a1bdf300df836a4607b7b75f845d8e1432e3e4e1ab6f0c7cc8a853db39</open-checksum>
    <location href="repodata/35eff699131e0976429144c6f4514d21568177dc64bb4091c3ff62f76b293725-other.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>4471</size>
    <open-size>49152</open-size>
    <database_version>10</database_version>
  </data>
</repomd>
Some files were not shown because too many files have changed in this diff.