Compare commits
2680 Commits
pungi-0.3.
...
master
Author | SHA1 | Date | |
---|---|---|---|
e17a6d7f42 | |||
5152dfa764 | |||
b61614969d | |||
38cc2f79a0 | |||
d8b7f9210e | |||
69ec4df8f0 | |||
20841cfd4c | |||
cb53de3c46 | |||
72635cf5c1 | |||
9ce519426d | |||
208c71c194 | |||
71c4e3c178 | |||
1308986569 | |||
|
e05a11f99a | ||
|
cb9dede604 | ||
|
ce2c222dc2 | ||
|
be4fd75a7a | ||
|
33bb0ceceb | ||
|
aef48c0ab4 | ||
|
bd91ef1d10 | ||
|
32d5d32a6e | ||
|
5bcb3f5ac1 | ||
|
78bfbef206 | ||
|
88b6d8ebf5 | ||
|
6223baa2ba | ||
|
9d6226b436 | ||
|
927a0d35ab | ||
|
d81ee0f553 | ||
|
e601345a38 | ||
|
1fe075e7e4 | ||
|
a8fc1b183b | ||
|
8f171b81a1 | ||
|
ee8a56e64d | ||
|
2bf6c216bc | ||
|
99a6dfe8ad | ||
|
c63f9f41b6 | ||
|
ab1960de6d | ||
|
c17b820490 | ||
|
36133b71da | ||
|
50b217145c | ||
|
57f2b428d5 | ||
|
3cdc8d0ba7 | ||
|
07829f2229 | ||
|
bdf06ea038 | ||
|
bcab3431e1 | ||
|
b181b08033 | ||
|
e05b1bcd78 | ||
|
a97488721d | ||
|
4d858ef958 | ||
|
744b00499d | ||
|
583547c6ee | ||
|
f28053eecc | ||
|
a196e9c895 | ||
|
a6f6199910 | ||
|
a3dcec5059 | ||
|
6aa674fbb3 | ||
|
05d9651eba | ||
|
75ab6a14b2 | ||
|
533ea641d8 | ||
|
185a53d56b | ||
|
305deab9ed | ||
|
6af11d5747 | ||
|
58f96531c7 | ||
|
e570aa7726 | ||
|
d8a553163f | ||
|
a9839d8078 | ||
|
dc05d1fbba | ||
|
dc4e8b2fb7 | ||
|
27d055992e | ||
|
34fcd550b6 | ||
|
4c0059e91b | ||
|
bb2e32132e | ||
|
dca3be5861 | ||
|
38ec4ca159 | ||
|
c589ccb56f | ||
|
e413955849 | ||
|
e70e1841c7 | ||
|
fc86e03e44 | ||
|
548441644b | ||
|
ca369df0df | ||
|
67ae4202c4 | ||
|
aba5a7a093 | ||
|
323d1c1eb6 | ||
|
b0964ff555 | ||
|
79bc4e0c3a | ||
|
8772ccca23 | ||
|
3bb34225a9 | ||
|
daea6cabdf | ||
|
35b720e87a | ||
|
5a6ee9f8eb | ||
|
9a64db0485 | ||
|
de7210f69a | ||
|
24418ef74d | ||
f4765fbe3a | |||
|
80b9add9f7 | ||
|
b241545ca6 | ||
|
2e536228ae | ||
|
ff7950b9d1 | ||
|
6971624f83 | ||
|
b7d371d1c3 | ||
bc8c776872 | |||
91d282708e | |||
ccaf31bc87 | |||
5fe0504265 | |||
d79f163685 | |||
793fb23958 | |||
65d0c09e97 | |||
0a9e5df66c | |||
ae527a2e01 | |||
|
4991144a01 | ||
|
68d94ff488 | ||
|
ce45fdc39a | ||
|
b625ccea06 | ||
|
8eccfc5a03 | ||
|
f5a0e06af5 | ||
|
f6f54b56ca | ||
|
fcee346c7c | ||
|
82ec38ad60 | ||
|
c9cbd80569 | ||
|
035fca1e6d | ||
|
0f8cae69b7 | ||
|
f17628dd5f | ||
|
f3485410ad | ||
|
cccfaea14e | ||
|
e2057b75c5 | ||
|
44ea4d4419 | ||
|
d4425f7935 | ||
|
c8118527ea | ||
|
a8ea322907 | ||
|
c4995c8f4b | ||
|
997e372f25 | ||
|
42f1c62528 | ||
|
3fd29d0ee0 | ||
|
c1f2fa5035 | ||
|
85c9e9e776 | ||
|
33012ab31e | ||
|
72ddf65e62 | ||
|
c402ff3d60 | ||
|
8dd344f9ee | ||
|
d07f517a90 | ||
|
48366177cc | ||
|
4cb8671fe4 | ||
|
135bbbfe7e | ||
|
5624829564 | ||
|
5fb4f86312 | ||
|
e891fe7b09 | ||
|
4cd7d39914 | ||
|
5de829d05b | ||
|
2930a1cc54 | ||
|
9c4d3d496d | ||
|
4637fd6697 | ||
|
2ff8132eaf | ||
|
f9190d1fd1 | ||
|
80ad0448ec | ||
|
027380f969 | ||
|
41048f60b7 | ||
|
9f8f6a7956 | ||
|
3d3e4bafdf | ||
|
8fe0257e93 | ||
|
d7b5fd2278 | ||
|
8b49d4ad61 | ||
|
57443cd0aa | ||
|
1d146bb8d5 | ||
|
790091b7d7 | ||
|
28aad3ea40 | ||
|
7373b4dbbf | ||
|
218b11f1b7 | ||
|
bfbe9095d2 | ||
|
eb17182c04 | ||
f91f90cf64 | |||
49931082b2 | |||
8ba8609bda | |||
6f495a8133 | |||
2b4bddbfe0 | |||
032cf725de | |||
8b11bb81af | |||
|
114a73f100 | ||
|
1c3e5dce5e | ||
|
e55abb17f1 | ||
|
e81d78a1d1 | ||
|
68915d04f8 | ||
|
a25bf72fb8 | ||
|
68aee1fa2d | ||
|
6592735aec | ||
|
943fd8e77d | ||
|
004fc4382f | ||
|
596c5c0b7f | ||
|
141d00e941 | ||
|
4b64d20826 | ||
|
0747e967b0 | ||
|
6d58bc2ed8 | ||
|
60a347a4a2 | ||
|
53ed7386f3 | ||
|
ed43f0038e | ||
|
fcc9b4f1ca | ||
|
d32c293bca | ||
|
f0bd1af999 | ||
|
1b4747b915 | ||
|
6aabfc9285 | ||
|
9e014fed6a | ||
|
7ccb1d4849 | ||
|
abec28256d | ||
|
46216b4f17 | ||
|
02b3adbaeb | ||
|
d17e578645 | ||
|
6c1c9d9efd | ||
|
8dd7d8326f | ||
|
d7b173cae5 | ||
|
fa4640f03e | ||
|
d66eb0dea8 | ||
|
d56227ab4a | ||
|
12433157dd | ||
|
623955cb1f | ||
|
4e0d2d14c9 | ||
|
b61e59d676 | ||
|
eb35d7baac | ||
|
54209f3643 | ||
|
80c4536eaa | ||
|
9bb5550d36 | ||
|
364ed6c3af | ||
|
0b965096ee | ||
|
d914626d92 | ||
|
32215d955a | ||
|
d711f8a2d6 | ||
|
bd9d800b52 | ||
|
e03648589d | ||
|
b5fe2e8129 | ||
|
b14e85324c | ||
|
5a19ad2258 | ||
|
9ae49dae5b | ||
|
c82cbfdc32 | ||
|
ee9c9a74e6 | ||
|
ea0f933315 | ||
|
323d31df2b | ||
|
9acd7f5fa4 | ||
|
a2b16eb44f | ||
|
ff946d3f7b | ||
|
ede91bcd03 | ||
|
0fa459eb9e | ||
|
b49ffee06d | ||
|
fce5493f09 | ||
|
479849042f | ||
|
8cd19605bd | ||
|
750499eda1 | ||
|
d999960235 | ||
|
6edece449d | ||
|
dd22d94a9e | ||
|
b157a1825a | ||
|
fd298d4f17 | ||
|
fa967f79b5 | ||
|
57739c238f | ||
|
805a1083a2 | ||
|
57ea640916 | ||
|
c7121f9378 | ||
|
146b88e1e9 | ||
|
8aba2363e2 | ||
|
779793386c | ||
|
603c61a033 | ||
|
11fa342507 | ||
|
13ea8e5834 | ||
|
0abf937b0e | ||
|
778dcfa587 | ||
|
ea8020473d | ||
|
b0b494fff0 | ||
|
19cb013fec | ||
|
b27301641a | ||
|
da336f75f8 | ||
|
960c85efde | ||
|
d7aebfc7f9 | ||
|
ca185aaea8 | ||
|
895b3982d7 | ||
|
c4aa45beab | ||
|
f21ed6f607 | ||
|
cfe6ec3f4e | ||
|
e6c6f74176 | ||
|
8676941655 | ||
|
5f74175c33 | ||
|
1e18e8995d | ||
|
38ea822260 | ||
|
34eb45c7ec | ||
|
7422d1e045 | ||
|
97801e772e | ||
|
dff346eedb | ||
|
de53dd0bbd | ||
|
80957f5205 | ||
|
e8d79e9269 | ||
|
c5cdd498ac | ||
|
e490764985 | ||
|
707a2c8d10 | ||
|
f8c7ad28e4 | ||
|
bebbefe46e | ||
|
d55770898c | ||
|
903ab076ba | ||
|
88121619bc | ||
|
b805ce3d12 | ||
|
0e82663327 | ||
|
ecb1646042 | ||
|
6c280f2c46 | ||
|
aabf8faea0 | ||
|
38810b3f13 | ||
|
330ba9b9c4 | ||
|
52c9816755 | ||
|
32221e8f36 | ||
|
fe986d68b9 | ||
|
42f668d969 | ||
|
894cce6a5a | ||
|
0484426e0c | ||
|
b9d86b90e1 | ||
|
58a16e5688 | ||
|
260b3fce8d | ||
|
20c2e59218 | ||
|
5e6248e3e0 | ||
|
f681956cf1 | ||
|
cfb9882269 | ||
|
f2ed64d952 | ||
|
b652119d54 | ||
|
33d7290d78 | ||
|
9bae86a51e | ||
|
1d654522be | ||
|
80bd254347 | ||
|
94ffa1c5c6 | ||
|
9d02f87c99 | ||
|
7b9e08ab28 | ||
|
e2b3002726 | ||
|
e8305f3978 | ||
|
ac66c3d7f3 | ||
|
eb61c97cdb | ||
|
b03490bf18 | ||
|
ab19043773 | ||
|
204d88a351 | ||
|
8133676270 | ||
|
e42e65783d | ||
|
7475d2a3a9 | ||
|
ac061b2ea8 | ||
|
0530cf2712 | ||
|
9612241396 | ||
|
ba6f7429ee | ||
|
72bcee01be | ||
|
a1ebd234a4 | ||
|
5c26aa9127 | ||
|
195bfbefa4 | ||
|
20dc4beb6b | ||
|
d8d1cc520b | ||
|
904a1c3271 | ||
|
e8ddacd10e | ||
|
b7666ba4a4 | ||
|
3d9335e90e | ||
|
7c3e8d4276 | ||
|
9cd42a2b5e | ||
|
980c7ba8fb | ||
|
66dacb21e0 | ||
|
795bbe31e3 | ||
|
1bb038ca72 | ||
|
efff2c9504 | ||
|
a7c111643d | ||
|
5831d4ae1e | ||
|
3349585d78 | ||
|
5a8df7b69c | ||
|
6afcfef919 | ||
|
2a679dcb81 | ||
|
8a2d0162d9 | ||
|
01a52447bc | ||
|
cf761633f4 | ||
|
446334fb95 | ||
|
56a55db966 | ||
|
a435fd58da | ||
|
edb091b7b1 | ||
|
9a5e901cfe | ||
|
b2c49dcaf6 | ||
|
14dd6a195f | ||
|
084321dd97 | ||
|
941d6b064a | ||
|
aaeee7132d | ||
|
cc4d99441c | ||
|
bf28e8d50c | ||
|
7fe32ae758 | ||
|
a435eeed06 | ||
|
b9f554bf39 | ||
|
c27bfe0c59 | ||
|
76d13d0062 | ||
|
ebf028ca3b | ||
|
305103a38e | ||
|
da791ed15c | ||
|
01bce26275 | ||
|
00a9861367 | ||
|
e866d22c04 | ||
|
ab1b5b48ec | ||
|
c8091899b2 | ||
|
035b37c566 | ||
|
edb4517e80 | ||
|
535034ef91 | ||
|
2769232b72 | ||
|
b217470464 | ||
|
4d763514c1 | ||
|
735bfaa0d6 | ||
|
41381df6a5 | ||
|
02686d7bdf | ||
|
2e48c9a56f | ||
|
5b5069175d | ||
|
477dcf37d9 | ||
|
98359654cf | ||
|
64897d7d48 | ||
|
b3a8c3f28a | ||
|
40133074b3 | ||
|
5434d24027 | ||
|
61e90fd7e0 | ||
|
36373479db | ||
|
44f7eff1b7 | ||
|
daa0ca6106 | ||
|
d4ee42ec23 | ||
|
49a5661521 | ||
|
3b5501b4bf | ||
|
cea8d92906 | ||
|
1a29de435e | ||
|
c87fce30ac | ||
|
0f4b0577f7 | ||
|
83458f26c2 | ||
|
69ed7699e8 | ||
|
103c3dc608 | ||
|
94ad7603b8 | ||
|
903db91c0f | ||
|
552343fffe | ||
|
5806217041 | ||
67eacf8483 | |||
|
38789d07ee | ||
|
3735aaa443 | ||
|
2c1603c414 | ||
|
f2fd10b0ab | ||
|
39b847094a | ||
|
9ea1098eae | ||
|
f518c1bb7c | ||
|
f470599f6c | ||
|
ac601ab8ea | ||
|
757a6ed653 | ||
|
b2e439e561 | ||
|
cda67776d9 | ||
|
62a97c0e1b | ||
|
98ddc74c16 | ||
|
4a048d4a85 | ||
|
6998ffe694 | ||
|
9accf5ecf4 | ||
|
e7af6d2ac2 | ||
|
27bab19a5e | ||
|
4c88e7dc0e | ||
|
c27e21ccf8 | ||
|
e3a500ca50 | ||
|
4562fba459 | ||
|
bb8cd030ec | ||
|
94bc5e286d | ||
|
295a60a704 | ||
|
99c1e2eb5e | ||
|
4c4c816e70 | ||
|
a45f4969f3 | ||
|
609a555597 | ||
|
d1eac95cda | ||
|
4c297beb65 | ||
|
f27b120cfc | ||
|
9df3f42a44 | ||
|
327019264f | ||
|
1595e188a9 | ||
|
866b881072 | ||
|
4623536b24 | ||
|
27a825de48 | ||
|
fdb2449c0e | ||
|
59727f84b1 | ||
|
22efe15379 | ||
|
b3a55fd863 | ||
|
c6312b34d0 | ||
|
e12331db78 | ||
|
160fc4f7df | ||
|
a6a96e40db | ||
|
e628bb91ec | ||
|
40bbb4325a | ||
|
8d4fea7890 | ||
|
981b69c699 | ||
|
b557bf160f | ||
|
1e1c8533ac | ||
|
29761d1656 | ||
|
2657a12c96 | ||
|
7a6d8303dc | ||
|
c273350fe5 | ||
|
4dcb6dee0d | ||
|
b899126b7e | ||
|
9920aa7a74 | ||
|
a294a05726 | ||
|
3c72755814 | ||
|
495a4c48b2 | ||
|
05a5e2b1f0 | ||
|
7e6bed9713 | ||
|
f7167fa3b6 | ||
|
f5e33950c1 | ||
|
7d00942d13 | ||
|
0ab6f48de3 | ||
|
b193fa0ab7 | ||
|
d9f111edae | ||
|
54882a0fc4 | ||
|
b6573fab92 | ||
|
658a5f805f | ||
|
ad1a3360bc | ||
|
b6605827b3 | ||
|
634d30fac5 | ||
|
e35c250700 | ||
|
4a15d1351a | ||
|
b59bdcea92 | ||
|
649ff095c0 | ||
|
3bb1e3df11 | ||
|
6ac12af343 | ||
|
f5bfd509ab | ||
|
b973657197 | ||
|
0196d7fd00 | ||
|
ffb65e8770 | ||
|
7c2743fb50 | ||
|
d6caf0785b | ||
|
b8c3ca1abe | ||
|
f1eea0b5a6 | ||
|
59e2aa9607 | ||
|
694b7f3d28 | ||
|
4ba65710c2 | ||
|
5ed5646bca | ||
|
d3acb0fa9e | ||
|
d4efe17328 | ||
|
fe4b2dd302 | ||
|
20ba1a7639 | ||
|
5d9dcf61fb | ||
|
9fced77140 | ||
|
9a1b9dd154 | ||
|
0525768519 | ||
|
153eb628e8 | ||
|
f7944a406e | ||
|
1a5cd9e0bf | ||
|
30f4771db1 | ||
|
5395af416c | ||
|
3509d7a36c | ||
|
477b43d4e9 | ||
|
e187b5ea79 | ||
|
4cf11906e8 | ||
|
fdfaae8b71 | ||
|
c5e59fa732 | ||
|
65251d983a | ||
|
63ec1adc22 | ||
|
4a78514162 | ||
|
4efdacd0a0 | ||
|
a209bda73c | ||
|
9391ce3065 | ||
|
3543f8fb3e | ||
|
169fa5b453 | ||
|
a32bf4046f | ||
|
fb7f7396be | ||
|
e70ad8aaa5 | ||
|
afcb3e969b | ||
|
fc3b5063ca | ||
|
887c68d05d | ||
|
16d6e5b0dd | ||
|
4734f9859a | ||
|
145c3adbef | ||
|
3cd94a4aa5 | ||
|
af5ee7030d | ||
|
e4f878a498 | ||
|
d23b576a60 | ||
|
5808733270 | ||
|
ff269a8675 | ||
|
3a31d47c83 | ||
|
9b12be7300 | ||
|
56fea60595 | ||
|
65aa8fde2f | ||
|
c0193c9fca | ||
|
3eddcfccd8 | ||
|
41a629969c | ||
|
38142d30ba | ||
|
ef33d00f5b | ||
|
6f23c7b8ba | ||
|
46ea0743a9 | ||
|
4c6396f491 | ||
|
116c617b86 | ||
|
3cde5c3a87 | ||
|
6eb6511aa6 | ||
|
bce57c2e66 | ||
|
52f82ccc6e | ||
|
e7a58ccd07 | ||
|
93ed40ad2d | ||
|
c624aab945 | ||
|
01ab1d2e24 | ||
|
aa6a213c8d | ||
|
2c2462970d | ||
|
fcf1442f71 | ||
|
02ace28fe4 | ||
|
3f111b559f | ||
|
3750d6795f | ||
|
794d151bef | ||
|
38f6162b46 | ||
|
817acdbbac | ||
|
b415e31f9d | ||
|
b8bb4f7daa | ||
|
38c24b3038 | ||
|
b043ac66dc | ||
|
12828849d6 | ||
|
9b101d554f | ||
|
0ed70fc8b6 | ||
|
3cf16eb42d | ||
|
ce16e55ebd | ||
|
0f53506765 | ||
|
fa8b2094da | ||
|
ea2cd448a0 | ||
|
aefe9b186d | ||
|
51b1144b70 | ||
|
7f35ac622a | ||
|
6afbe6d20a | ||
|
0c040e0a69 | ||
|
242100eb72 | ||
|
90187298f2 | ||
|
e0dd20dffe | ||
|
c87d299a20 | ||
|
4473b05f10 | ||
|
722411dfcd | ||
|
d34b0d7900 | ||
|
254d0cebff | ||
|
6be2cf5118 | ||
|
2cbd75803a | ||
|
cba3f1c88f | ||
|
20c3614fb3 | ||
|
39e8f6f710 | ||
|
b05295c202 | ||
|
114df77c6b | ||
|
5751de1096 | ||
|
8829fc32ab | ||
|
6771a21916 | ||
|
4f96164ec7 | ||
|
3901c227f0 | ||
|
f8dcda9dcb | ||
|
2ff1f2fac3 | ||
|
014560f8bd | ||
|
72bf795bd4 | ||
|
c346492df4 | ||
|
908a6a759d | ||
|
562b770b8d | ||
|
2f54745715 | ||
|
43fb06e00f | ||
|
26962d94ca | ||
|
4413b0df24 | ||
|
89fcb79aca | ||
|
428f8297e1 | ||
|
c7f3f38822 | ||
|
ff526d1dd2 | ||
|
8ab7d9f7ba | ||
|
f822ee324a | ||
|
21d45eb243 | ||
|
44e551317a | ||
|
2b112d53f7 | ||
|
260df24859 | ||
|
150f5f0cb6 | ||
|
3e263f33f2 | ||
|
e814e1445a | ||
|
2d99edc8d8 | ||
|
dfd4ff6016 | ||
|
b8e41d9b1b | ||
|
7aa65f00c2 | ||
|
1423105802 | ||
|
465ecf229c | ||
|
63a8b7b6c9 | ||
|
cec3efed51 | ||
|
a426a83ed6 | ||
|
2d0ffb56ca | ||
|
26ddd46acb | ||
|
ab9122be2a | ||
|
3824eab15b | ||
|
054b91c798 | ||
|
11779f6644 | ||
|
fe2df01e8b | ||
|
6fa478e688 | ||
|
a446593698 | ||
|
eed2aa2753 | ||
|
3f665937b2 | ||
|
be61da0192 | ||
|
90393c4b49 | ||
|
a99bf8c828 | ||
|
b420986aa4 | ||
|
53a93f016a | ||
|
82349626c6 | ||
|
29224b02ff | ||
|
048698d885 | ||
|
eeaee1c20f | ||
|
51d638d5db | ||
|
e674c2f574 | ||
|
8994aa5d88 | ||
|
c00162413c | ||
|
e9a363bfde | ||
|
27d015543f | ||
|
6efaae19fd | ||
|
0891bfbe59 | ||
|
b514e20833 | ||
|
c6d6367932 | ||
|
3811c0a176 | ||
|
cf52665a8d | ||
|
2dfb1cd4c8 | ||
|
3097019338 | ||
|
61e3cb0ef1 | ||
|
62c6d4ddcf | ||
|
eeec62756f | ||
|
da78b99fc0 | ||
|
04baa2a4db | ||
|
a31fe998d5 | ||
|
a340663f00 | ||
|
8d00f56117 | ||
|
fc362c5347 | ||
|
46333b01ad | ||
|
bf1b3e8421 | ||
|
49d0ab797c | ||
|
c4ed2bf3b2 | ||
|
920eceaa5a | ||
|
32a6415e58 | ||
|
d063217d6f | ||
|
4e59c7595e | ||
|
acd3c19618 | ||
|
32624c59b1 | ||
|
2f05a71c61 | ||
|
e6a26571e0 | ||
|
82580ed5b3 | ||
|
f1263eeacb | ||
|
d01084337e | ||
|
2c3e6a5a74 | ||
|
6ec206f9ae | ||
|
e68e17d8fe | ||
|
f784bbd519 | ||
|
2a9490526a | ||
|
0207260b9f | ||
|
ce066707c1 | ||
|
4e5f74b78d | ||
|
f6162b90dd | ||
|
c55ed742cd | ||
|
68115f3502 | ||
|
fa6197246b | ||
|
0a90f18b1f | ||
|
163d69713d | ||
|
217fcd6c02 | ||
|
33471c38bb | ||
|
551f52922f | ||
|
1951b0a521 | ||
|
f2bbf35429 | ||
|
8acd2c9689 | ||
|
a74470c18d | ||
|
187ce8df79 | ||
|
fe723a2094 | ||
|
283bae11da | ||
|
cbe8457377 | ||
|
a33cb0bf91 | ||
|
d0e8472ab5 | ||
|
cfb7b71fca | ||
|
2dd30008ae | ||
|
dc69281025 | ||
|
9da70ee7a9 | ||
|
c1a03c259b | ||
|
b0f0579a9e | ||
|
9517df44c7 | ||
|
a9b9ec97fb | ||
|
2ae742af04 | ||
|
f858cea466 | ||
|
b73d2d7f11 | ||
|
e550686e06 | ||
|
72b4969832 | ||
|
1e7ec68bbd | ||
|
0cd089802f | ||
|
479d17c033 | ||
|
49d137b444 | ||
|
d521711957 | ||
|
959d6979d4 | ||
|
2f8717ec97 | ||
|
b165866f39 | ||
|
59a9c01304 | ||
|
724255a984 | ||
|
fd0117f38c | ||
|
e71e91982b | ||
|
1f95c33e2a | ||
|
c6a86c444a | ||
|
c2c36dc3c2 | ||
|
5f8b519941 | ||
|
31ef7736aa | ||
|
d7ef86293e | ||
|
66a127c313 | ||
|
9f9b784e64 | ||
|
ac15f21135 | ||
|
9939d09643 | ||
|
9229699078 | ||
|
358fdd50ce | ||
|
75bb48a882 | ||
|
45cdbb2faf | ||
|
088ea7fe37 | ||
|
86fb93d603 | ||
|
b80efbfd97 | ||
|
b5bef68c60 | ||
|
039b8d44b3 | ||
|
0fc797a06f | ||
|
2d39490909 | ||
|
9de036b401 | ||
|
b3bf6cd9cd | ||
|
9541c75b7e | ||
|
4431ea8a0c | ||
|
cc456d3e75 | ||
|
a206a73db1 | ||
|
357f556d69 | ||
|
6bd1e9da2a | ||
|
2d694272c0 | ||
|
5a7ced5b7d | ||
|
ff4e6d4782 | ||
|
ef6bb20a0e | ||
|
afd2d3ae66 | ||
|
8b84aa384a | ||
|
fa47d9edba | ||
|
42a8965e87 | ||
|
95596f1c69 | ||
|
3efcfede6a | ||
|
8c82cfc1c7 | ||
|
f33973ee65 | ||
|
071d11a883 | ||
|
59c9c150ef | ||
|
5a251ff24c | ||
|
e21c49d9e2 | ||
|
6c6d4759f5 | ||
|
7693e562b1 | ||
|
23bf01bb45 | ||
|
e573246a2a | ||
|
c3aa297d8c | ||
|
31bafa29c5 | ||
|
940a581bd9 | ||
|
8065239e04 | ||
|
f59034b22d | ||
|
da1ea83561 | ||
|
f92c71683f | ||
|
7320bf4943 | ||
|
86314fdc83 | ||
|
3fe4beb20c | ||
|
6a32b3b741 | ||
|
c9c5fcac94 | ||
|
dbc0d5736c | ||
|
90c60f8e64 | ||
|
ba260c24e8 | ||
|
682f959ee0 | ||
|
70f46dfb62 | ||
|
610bf4b125 | ||
|
b7fa03dffd | ||
|
f41c32e413 | ||
|
c69bc13068 | ||
|
27d5073b98 | ||
|
7c45157a92 | ||
|
5f6dcb37f6 | ||
|
7c48b808f1 | ||
|
df35f26910 | ||
|
ca7d6256e5 | ||
|
444af0396e | ||
|
4864a0f58e | ||
|
7e923d3823 | ||
|
7c2e701a74 | ||
|
c2a4700446 | ||
|
605c9ca435 | ||
|
f809cac0b2 | ||
|
324b371cff | ||
|
07d08627c6 | ||
|
1d020dbedd | ||
|
8e88373a82 | ||
|
dc692bc604 | ||
|
7c7f997d74 | ||
|
fa752eb2b5 | ||
|
85bf5535bc | ||
|
a73099d446 | ||
|
32bb9aeabe | ||
|
04715f4906 | ||
|
2153c5fe21 | ||
|
65a1779e3a | ||
|
e10b893adc | ||
|
fe39056431 | ||
|
12f949fe84 | ||
|
5fc0f915c6 | ||
|
10fa53a6ac | ||
|
0c3e02eeb0 | ||
|
66d9c10a6f | ||
|
736772f954 | ||
|
9afb8e6801 | ||
|
4f712259d7 | ||
|
111af836ea | ||
|
9d9f0add83 | ||
|
844f2005a0 | ||
|
cbfb556f7c | ||
|
a00ea3d4a6 | ||
|
bb6e68a853 | ||
|
740df1bc6c | ||
|
0c8702cd6d | ||
|
ce9ac35640 | ||
|
814103d87f | ||
|
3fea217b9c | ||
|
1a161982c0 | ||
|
06b32b5d80 | ||
|
08d65bdde6 | ||
|
410d0125bc | ||
|
1bff5ccfa2 | ||
|
604ec40c8e | ||
|
e78f8d1f13 | ||
|
d6e72c8e61 | ||
|
2aeb8de459 | ||
|
d469cbfef5 | ||
|
a12c2b9ea0 | ||
|
41e144e47d | ||
|
cb33b0278d | ||
|
aab2fc4519 | ||
|
1aff7fc3ac | ||
|
e2e8df3f09 | ||
|
4537001ff6 | ||
|
37c89dfde6 | ||
|
2a65b8fb7d | ||
|
36c347eb79 | ||
|
93bc843682 | ||
|
be81aeaa7d | ||
|
e580ad6db1 | ||
|
21b8f3af3c | ||
|
5f6ee61c70 | ||
|
a53dc6f1bb | ||
|
1350684c31 | ||
|
fbb739ef17 | ||
|
833ba64c51 | ||
|
3419762830 | ||
|
828557b4d6 | ||
|
d2f392fac8 | ||
|
92968fe52d | ||
|
7c5020e82d | ||
|
19a42792db | ||
|
5346e000f0 | ||
|
45fa9d3273 | ||
|
03ee632cc8 | ||
|
6f527ae5b9 | ||
|
a5fa4457c3 | ||
|
9278008ba0 | ||
|
0c38ecdc85 | ||
|
5be2798c6b | ||
|
4b05e7b9fd | ||
|
ac0d5e4ede | ||
|
b85cd7ff9f | ||
|
b5d5e8da4a | ||
|
2fa1f09827 | ||
|
b12deab153 | ||
|
5926858b58 | ||
|
add9835b56 | ||
|
10bdb370ba | ||
|
90291d7c73 | ||
|
4d4c6555e2 | ||
|
80c3fd1170 | ||
|
4ecf75295a | ||
|
b772d4a773 | ||
|
16ac225013 | ||
|
9b576bf976 | ||
|
7d099012aa | ||
|
9ca454007a | ||
|
0b9652f2d6 | ||
|
6080b45178 | ||
|
23c454ff67 | ||
|
60917fdc77 | ||
|
e3de4dcccf | ||
|
878eaeaaf6 | ||
|
489bb03a3d | ||
|
c8bd967a57 | ||
|
ba0193ca28 | ||
|
2c6b784f70 | ||
|
a509064696 | ||
|
d2849d3826 | ||
|
1759c1ba80 | ||
|
d29e56c7d8 | ||
|
6c14236562 | ||
|
98e7106f3e | ||
|
470c0ab3be | ||
|
096075848b | ||
|
38f1a8509e | ||
|
d8c03f6239 | ||
|
04f68a018f | ||
|
4a61de1e8a | ||
|
fa92e54c22 | ||
|
004ef31917 | ||
|
eed97f357c | ||
|
13f5018ce1 | ||
|
cd493bc4c1 | ||
|
c10a4ca337 | ||
|
663a07068e | ||
|
71f2c61020 | ||
|
23ced26588 | ||
|
8b9508b027 | ||
|
102fec83b3 | ||
|
cbcebe90e1 | ||
|
ec96757707 | ||
|
49e8aa0c7e | ||
|
92b5ad2e05 | ||
|
e53da69db3 | ||
|
f1cd1ae562 | ||
|
288d9ecc90 | ||
|
420b744193 | ||
|
fdea7878f1 | ||
|
a4bbf475f1 | ||
|
c85d80f3c2 | ||
|
b4e746aa71 | ||
|
1afb709404 | ||
|
89d798006b | ||
|
f1b71d1eeb | ||
|
5324c6441f | ||
|
2862ae0b28 | ||
|
b2f995d516 | ||
|
4544b454f8 | ||
|
b2190c1c3f | ||
|
ab2faa85b3 | ||
|
057751381a | ||
|
48d0f2f643 | ||
|
fc78a3cbb3 | ||
|
8f3c06bd14 | ||
|
905064775a | ||
|
7c237c2c63 | ||
|
543154d597 | ||
|
076be762ec | ||
|
bd9a0ceda2 | ||
|
dc557b809a | ||
|
99b6e44a30 | ||
|
490f079c44 | ||
|
182a5a399b | ||
|
ed8fffb6d1 | ||
|
f298121bbc | ||
|
e419100d5f | ||
|
d3938d7c04 | ||
|
68f80751cf | ||
|
b8555b7869 | ||
|
8c22236ad4 | ||
|
4d53a5c9ca | ||
|
eaf58f7d40 | ||
|
ea0964eeef | ||
|
9915c7d644 | ||
|
1e972885f5 | ||
|
c07a4d64a1 | ||
|
58afece033 | ||
|
267ff86f04 | ||
|
527394707d | ||
|
a217eea24e | ||
|
d7021c5688 | ||
|
44c1e2dc6f | ||
|
4596020ecd | ||
|
cb3d36be5d | ||
|
15ccd309fa | ||
|
9daaf1e038 | ||
|
9f583eeb6d | ||
|
d1d074ce28 | ||
|
8b24c7cbf3 | ||
|
8238011bdd | ||
|
b9523ff5b0 | ||
|
0e7f770fb7 | ||
|
506ac99f62 | ||
|
8f1beeb54b | ||
|
6daee968ae | ||
|
aab3b04b08 | ||
|
064f922117 | ||
|
c705488505 | ||
|
de231064b7 | ||
|
7ea4c33d87 | ||
|
7798174b30 | ||
|
6454f30714 | ||
|
f38770c67d | ||
|
b3a3575ecf | ||
|
d9e2101b08 | ||
|
1436ea2b03 | ||
|
1f0739831c | ||
|
f3b5a66614 | ||
|
9b46377fb6 | ||
|
e3aa2f769b | ||
|
c6d507582a | ||
|
071792bdd0 | ||
|
f814651d91 | ||
|
9be2d6a920 | ||
|
1574f306c7 | ||
|
3f71cdd384 | ||
|
6bff4bd10e | ||
|
fedce5dff1 | ||
|
5c902592ae | ||
|
95bb147015 | ||
|
340ae4d286 | ||
|
56e00505e0 | ||
|
c83316da31 | ||
|
b393a4246b | ||
|
a03a46a078 | ||
|
41d0139b39 | ||
|
11c2af3246 | ||
|
6514dc85f3 | ||
|
bd852f4059 | ||
|
3201648c37 | ||
|
660c04368b | ||
|
5c081cb545 | ||
|
18d005e593 | ||
|
a1d559fb93 | ||
|
caed78e11a | ||
|
7ee920a085 | ||
|
a49704b2b8 | ||
|
e557dfd61a | ||
|
303fb29a6c | ||
|
cc8c7a702c | ||
|
ef058d1f9b | ||
|
c75f4a1e96 | ||
|
c7cc200246 | ||
|
364d7f5229 | ||
|
0074fe3f2c | ||
|
e8fa2a13b1 | ||
|
2ae056e021 | ||
|
f1f6ca74f1 | ||
|
fbeb14344f | ||
|
778dbaef73 | ||
|
121ffb403f | ||
|
f301158974 | ||
|
3427d6abe2 | ||
|
b2bdc8a608 | ||
|
5cc612f966 | ||
|
df27164c1c | ||
|
670a68a5b8 | ||
|
61a3be2307 | ||
|
930c2f1a42 | ||
|
68a1370036 | ||
|
f4c3d2423d | ||
|
03293c725b | ||
|
51cd359057 | ||
|
161b7f974b | ||
|
5cc54cd587 | ||
|
d4d264eb33 | ||
|
2152e7ea26 | ||
|
116e7ca3bd | ||
|
59c162d46f | ||
|
ba3adf9bc4 | ||
|
6c3245bdee | ||
|
d66abc3aea | ||
|
25907f61ce | ||
|
e15a49defd | ||
|
d5a0316877 | ||
|
60cc3e5d55 | ||
|
fa4d728230 | ||
|
aa9c137412 | ||
|
001ae1d7b4 | ||
|
970143e59f | ||
|
cf77a6e413 | ||
|
9642c1171c | ||
|
a6c65e026a | ||
|
953fb4c54c | ||
|
cf114a7fab | ||
|
b068514471 | ||
|
0f104cea40 | ||
|
91ee1fb854 | ||
|
2bd3b85bb7 | ||
|
48d155d304 | ||
|
69adf35e84 | ||
|
d2804b5d89 | ||
|
8181c5be48 | ||
|
388be481ea | ||
|
cb740f063e | ||
|
1dbd0248d4 | ||
|
1a10a1fe83 | ||
|
385002fe94 | ||
|
cdfa3cb45f | ||
|
026ba10987 | ||
|
333045fb87 | ||
|
390dff52ae | ||
|
28c3bc6268 | ||
|
e0308a74ff | ||
|
6d6cf6e233 | ||
|
f21e3a2d6d | ||
|
6f21576921 | ||
|
463fb961bc | ||
|
4ff3190935 | ||
|
daf162503c | ||
|
f3806f7c77 | ||
|
797b13b34a | ||
|
2efc4d8561 | ||
|
ed9d7f69a6 | ||
|
ed22e07ef9 | ||
|
3088df8e60 | ||
|
274f8b4e56 | ||
|
be39dc3caf | ||
|
894a5a11a4 | ||
|
8951e90882 | ||
|
6208dae869 | ||
|
2819311d6e | ||
|
68fdef451c | ||
|
b79ff7d8dd | ||
|
bb6c5da7af | ||
|
f2a2a5a0b6 | ||
|
e117c904c7 | ||
|
da590d559e | ||
|
a3b90f7474 | ||
|
97d52d03c8 | ||
|
c89f033457 | ||
|
842e2e810e | ||
|
b51a639277 | ||
|
9ab3840085 | ||
|
dec00fe2f4 | ||
|
19436c66b2 | ||
|
44c523339c | ||
|
04836cfa9f | ||
|
adaab46bf7 | ||
|
3ef2a65275 | ||
|
9dbf231080 | ||
|
d34c0a2777 | ||
|
239e6b4301 | ||
|
eead6ccc44 | ||
|
ed0a8249b1 | ||
|
4b40a1258a | ||
|
816ab917da | ||
|
6a425ee891 | ||
|
87884b6412 | ||
|
2fa89c061b | ||
|
c98f0a88d8 | ||
|
5b6e468952 | ||
|
40796c04f4 | ||
|
e21a27bdc9 | ||
|
90be25c14c | ||
|
b81e94e808 | ||
|
5379fb5e28 | ||
|
65910f2c33 | ||
|
fcbc3ed4ae | ||
|
2e3a9385a3 | ||
|
439a7ce348 | ||
|
ec39514fba | ||
|
c14c28a157 | ||
|
c9d95f5f6f | ||
|
49c6abcfea | ||
|
a63e4746c9 | ||
|
c3b49f7ffb | ||
|
be4501c54b | ||
|
a8cd78faf9 | ||
|
23ca2fe5d2 | ||
|
804a0049f6 | ||
|
9780f36e37 | ||
|
c12bad295f | ||
|
22fdd59ca4 | ||
|
7e03133c8f | ||
|
cae202c17b | ||
|
f9640ae0b4 | ||
|
470b3e4923 | ||
|
c9f34b6684 | ||
|
cef8650c3e | ||
|
1d7617f783 | ||
|
e2962dc547 | ||
|
056ae31ef9 | ||
|
99204bb695 | ||
|
381d08a810 | ||
|
58fe997e29 | ||
|
c66f2228b5 | ||
|
471e369d23 | ||
|
e1eacd456e | ||
|
b26547ae77 | ||
|
938531e2b2 | ||
|
29bedf2ccc | ||
|
0a3e5b27bf | ||
|
d92390b80b | ||
|
65078ef9cf | ||
|
5dd6b1b0e7 | ||
|
32ca02efd6 | ||
|
c0bac63f4d | ||
|
6cf912f555 | ||
|
482181f52a | ||
|
085a8ef7c7 | ||
|
81cb0952ca | ||
|
a831d65c40 | ||
|
4d117d17f8 | ||
|
910f816be4 | ||
|
5c5708afe3 | ||
|
543c184e0d | ||
|
f7a9c77626 | ||
|
e246aef5f6 | ||
|
8c48dfb93a | ||
|
0350b715dd | ||
|
5acfb90b23 | ||
|
afffb27f94 | ||
|
9a3d04c305 | ||
|
a21c8a555d | ||
|
18bd37ff2c | ||
|
9193a6902e | ||
|
87c485f2d0 | ||
|
ecbb43ab86 | ||
|
69a6046bf8 | ||
|
01026858f7 | ||
|
68098bec37 | ||
|
af631f560e | ||
|
bfc1cebbc4 | ||
|
079454c502 | ||
|
65bc6969e2 | ||
|
65b75e7049 | ||
|
8e89168ad3 | ||
|
3e64238fca | ||
|
72081a3a35 | ||
|
9ab3dc89b2 | ||
|
b2554ce663 | ||
|
2a8d7f8843 | ||
|
bef573d222 | ||
|
d586368515 | ||
|
4fb28979cf | ||
|
c337018294 | ||
|
6e6e250cec | ||
|
1c3637c48f | ||
|
f7197ddbcc | ||
|
a54b68d08b | ||
|
81b71b9ed3 | ||
|
d3dd7aa7ce | ||
|
3a59e8f266 | ||
|
0cdf996e6e | ||
|
700106facf | ||
|
3601d6d1a8 | ||
|
1db1abbb82 | ||
|
3ddfd4d172 | ||
|
247a1a71ba | ||
|
8c237b78c2 | ||
|
118444a311 | ||
|
58a6affd65 | ||
|
a2c42aee3c | ||
|
63c0bbc5c9 | ||
|
77ee882c21 | ||
|
3130d837c0 | ||
|
f27f3ce4ba | ||
|
68351fa5a8 | ||
|
dc410b58b5 | ||
|
e549c732a4 | ||
|
7b880af0ad | ||
|
d037d61521 | ||
|
6ce88630ec | ||
|
ad120f2608 | ||
|
c784dab4aa | ||
|
834445e7d0 | ||
|
feb87077b3 | ||
|
ccbd246edb | ||
|
b396e8273f | ||
|
dce44410b4 | ||
|
55058f1590 | ||
|
be0c1e1964 | ||
|
24c32831a1 | ||
|
68a1051f14 | ||
|
a2c3274585 | ||
|
29c339d659 | ||
|
ebe25a3717 | ||
|
75430a95b9 | ||
|
059449e140 | ||
|
7028399403 | ||
|
bab2a125d4 | ||
|
21952955ad | ||
|
c293a1e147 | ||
|
532b6b1fbc | ||
|
460a8dea0e | ||
|
610c4ec596 | ||
|
2bc719a33a | ||
|
63327e7d88 | ||
|
180a5b94a9 | ||
|
3861be3e08 | ||
|
39c3f42f77 | ||
|
e5dc69cd41 | ||
|
fbf189d9c7 | ||
|
daf3628594 | ||
|
20005adb98 | ||
|
c5fcab2aa5 | ||
|
f6d07c1651 | ||
|
0f4b6b1947 | ||
|
98f40f6138 | ||
|
01607602c6 | ||
|
dcc1750df2 | ||
|
0168388492 | ||
|
b5efb67ff1 | ||
|
621f1e2247 | ||
|
d4b7293acb | ||
|
f6121f0887 | ||
|
2f5d6d7dcd | ||
|
0ee2189d9c | ||
|
0f508e2228 | ||
|
d1763fca7e | ||
|
2aacefd9cd | ||
|
9784961568 | ||
|
258d716a71 | ||
|
d0974d5c6a | ||
|
be9a63c42f | ||
|
1575ed28ea | ||
|
2b21e13aad | ||
|
5dd2b3947d | ||
|
76cf4a7540 | ||
|
7bdaa3bd5b | ||
|
77673e1eac | ||
|
fcd4c231c4 | ||
|
27b24a489f | ||
|
d23a2f4548 | ||
|
5533bf7ca3 | ||
|
c32bdce46d | ||
|
73b8928013 | ||
|
ba57e6ee60 | ||
|
5c1d04eb00 | ||
|
08fbdec494 | ||
|
919e4d748e | ||
|
35e72df99f | ||
|
fe026bb588 | ||
|
2f40a45708 | ||
|
d4f8e32a80 | ||
|
a9415eb0e5 | ||
|
cd80d6ef5a | ||
|
7343daba24 | ||
|
e06e9165fc | ||
|
8cc912c5aa | ||
|
ec67eac1cc | ||
|
d5e6639a6d | ||
|
9fd8e6319f | ||
|
9a1674f3bf | ||
|
b53e4a84d3 | ||
|
3a6653fbc2 | ||
|
0a44b2fd07 | ||
|
5ef630d6d4 | ||
|
99699b85aa | ||
|
a155674269 | ||
|
9b2f0349de | ||
|
e753891e27 | ||
|
b9fd755a05 | ||
|
4f4fd845d9 | ||
|
fdda8fe491 | ||
|
22a6291c8a | ||
|
26cac77639 | ||
|
94d16d8c32 | ||
|
dfef1837ea | ||
|
14784847d4 | ||
|
1a8f0bb0f9 | ||
|
320d2a8ae2 | ||
|
c02d801988 | ||
|
321d471125 | ||
|
5a2c4f5e0f | ||
|
06635a3917 | ||
|
61e76d5824 | ||
|
026e49e259 | ||
|
835460ce8f | ||
|
110fe7647c | ||
|
ec56ee75e2 | ||
|
d8fcaf95f7 | ||
|
36f57b26b8 | ||
|
99b68ca96b | ||
|
7cb9e9dc20 | ||
|
ddba26edd7 | ||
|
a5d302d036 | ||
|
aff6ac906c | ||
|
43d8d02cb1 | ||
|
9764acbc4d | ||
|
13ed3f2b1d | ||
|
caf11f55d4 | ||
|
9fcd71f831 | ||
|
4b90822115 | ||
|
ae5ee3d856 | ||
|
306f7e69b0 | ||
|
05a666fb3b | ||
|
55035487de | ||
|
79e97dc845 | ||
|
23e53219eb | ||
|
69af0a6a65 | ||
|
263a990489 | ||
|
5ee285cc24 | ||
|
b10d275745 | ||
|
b09641a708 | ||
|
94b1159829 | ||
|
f0dc15bdc6 | ||
|
d081a4eda8 | ||
|
ee21663c8b | ||
|
a1214525f5 | ||
|
08b9d275a8 | ||
|
5cd1c22e2d | ||
|
9041ccccc4 | ||
|
cd805a1e6d | ||
|
1647f7612a | ||
|
63a3b6ccaa | ||
|
fa3d94bae3 | ||
|
d3a2ceb8ce | ||
|
2ae8710934 | ||
|
c93207addb | ||
|
40e2874676 | ||
|
345308464f | ||
|
069ee8d3df | ||
|
a52561da6d | ||
|
f43d3584c5 | ||
|
a49bf3d74e | ||
|
8b1fb287d3 | ||
|
4ea1916a87 | ||
|
8bc65a8be5 | ||
|
d9ab899920 | ||
|
9e020c2782 | ||
|
3162fea60d | ||
|
857aee05c1 | ||
|
a57bc13e30 | ||
|
d496eeb090 | ||
|
8d41a004c3 | ||
|
ad23efd323 | ||
|
e00776a413 | ||
|
c3cf09a2f7 | ||
|
b5cfeaa6ca | ||
|
9533fca96c | ||
|
42cdf047cc | ||
|
5a498f80b6 | ||
|
c650f04d0b | ||
|
dc7d3b36ab | ||
|
7feadb14ba | ||
|
02beb35e67 | ||
|
e3fe67be53 | ||
|
95fc0fa4ab | ||
|
56932f9067 | ||
|
7a8fa87172 | ||
|
5693bf9925 | ||
|
9af0cca9eb | ||
|
8418b68fb0 | ||
|
6c708549c8 | ||
|
40df2034a8 | ||
|
4f011fbd45 | ||
|
59dd4dbcd8 | ||
|
4edf567bd4 | ||
|
d3e701e10f | ||
|
2c76313382 | ||
|
a9b275f13b | ||
|
5d241d316a | ||
|
aa5487d544 | ||
|
afaa0e025e | ||
|
6fbf1e8f59 | ||
|
f6f84de1fb | ||
|
b25d7e7153 | ||
|
dc19363a5c | ||
|
b93e0fd4ab | ||
|
9306ee6357 | ||
|
ce218288db | ||
|
baca306edf | ||
|
46f77403df | ||
|
345432ac90 | ||
|
63bd9de744 | ||
|
20d035befa | ||
|
e260fe5581 | ||
|
e7c8b2affd | ||
|
61a4c43db0 | ||
|
814bf4484b | ||
|
2d404c88e6 | ||
|
c64f75d1b4 | ||
|
cb4b5cc54d | ||
|
c0c3e2e79d | ||
|
c41c46403a | ||
|
17a5f2841c | ||
|
356b78d440 | ||
|
7f56b978ce | ||
|
721932a573 | ||
|
c338219ef0 | ||
|
ec6206b064 | ||
|
56147f2e4d | ||
|
83428a06bf | ||
|
e043604822 | ||
|
d3cad4795c | ||
|
e66d8dd190 | ||
|
75934f20e5 | ||
|
dd814a5f4d | ||
|
2bd18b1b03 | ||
|
5148adf233 | ||
|
96fc6fb11e | ||
|
80fa723b1d | ||
|
9e52c68c82 | ||
|
6c89f86c53 | ||
|
7bcbe30fd2 | ||
|
deb8623bd1 | ||
|
6203541ac6 | ||
|
b7813d34ac | ||
|
3c1e2fba66 | ||
|
b85d7ae787 | ||
|
13c1c4da69 | ||
|
69d949aaf9 | ||
|
3953914bd1 | ||
|
d4a78a1553 | ||
|
5dc8132fa4 | ||
|
68e121e421 | ||
|
0cdef2d8e3 | ||
|
ea1bcf625b | ||
|
4a7257f550 | ||
|
88dbf8f849 | ||
|
13871b64fb | ||
|
bd00920c62 | ||
|
99ba369901 | ||
|
9a72ea8f6d | ||
|
079cc46458 | ||
|
adaed6345c | ||
|
8e5b197e3f | ||
|
4427769f6a | ||
|
fa3c5aff63 | ||
|
133b6a5cf5 | ||
|
06ba3b8551 | ||
|
a3a415e398 | ||
|
48bd3e6d2d | ||
|
2cd2e0e15f | ||
|
3e7f2dfa9d | ||
|
e6342ede18 | ||
|
a49510a9fb | ||
|
754458823c | ||
|
c5bd99da9e | ||
|
bfd5a39ce6 | ||
|
f4cd25450b | ||
|
59dbd9d35f | ||
|
933c2608cd | ||
|
51a8f7a7fe | ||
|
d067928a4e | ||
|
1345eb87a4 | ||
|
e02ed6c04b | ||
|
18128623b4 | ||
|
8f974ea109 | ||
|
a5273dc798 | ||
|
c38bb4809b | ||
|
1bb1c2ba28 | ||
|
74aa41f8bd | ||
|
6e55cc6419 | ||
|
a7dc4e80a7 | ||
|
2f184f9b23 | ||
|
84fcc0083f | ||
|
ff6c952094 | ||
|
8194dd0cf0 | ||
|
34d3be35dc | ||
|
bb933c83ae | ||
|
fd08efaa03 | ||
|
b5e4ea848a | ||
|
1f4f6ceb08 | ||
|
b46af7acbb | ||
|
02ddcf7387 | ||
|
e58c78fd2f | ||
|
50e9294057 | ||
|
b2b5dd919b | ||
|
4be3c72bd5 | ||
|
730764e2e7 | ||
|
32059c57c0 | ||
|
cc559b12f4 | ||
|
8880b1ea8a | ||
|
446f21d4d7 | ||
|
ac635a0937 | ||
|
4e90f81fc1 | ||
|
c0474b6cca | ||
|
c34817989c | ||
|
d898f00d33 | ||
|
7bf12636a0 | ||
|
c5f878330c | ||
|
d6dc269ef4 | ||
|
160df7f89a | ||
|
a042906717 | ||
|
c118adc705 | ||
|
9a3b0f089e | ||
|
2ac45a783d | ||
|
bfe1068b8e | ||
|
3a68a93532 | ||
|
8d1b4dea17 | ||
|
6fede3f395 | ||
|
f97e9258aa | ||
|
bdf968ff3f | ||
|
444c3a21ca | ||
|
912f81ca16 | ||
|
23e00fd87d | ||
|
2c401a4a5c | ||
|
bf46048cbf | ||
|
10b93796bd | ||
|
1a16d94dda | ||
|
ee1ee0467b | ||
|
6aeab9ee9d | ||
|
223a015898 | ||
|
f9a6c8418f | ||
|
5534fda192 | ||
|
1881bf740d | ||
|
a31be0a3c4 | ||
|
f43b8736b7 | ||
|
d3b9ae79a5 | ||
|
f507b14954 | ||
|
43fda1e9ef | ||
|
d90dba9312 | ||
|
7fd5403495 | ||
|
76f3f909ec | ||
|
b862fc5a50 | ||
|
f0f236ac5e | ||
|
1238aaa2e6 | ||
|
cd747ac065 | ||
|
492cdec719 | ||
|
91b2f6e941 | ||
|
b4765459f3 | ||
|
a72a38b278 | ||
|
3de6d094be | ||
|
c05282be44 | ||
|
1a4e1b211c | ||
|
fc23ca5fdf | ||
|
f37a14fb60 | ||
|
6a682f64fe | ||
|
ef99e28849 | ||
|
761a0ef40e | ||
|
247149d4e1 | ||
|
463088d580 | ||
|
7d0ee41f23 | ||
|
664c5e54a3 | ||
|
22e94caf3c | ||
|
d51550da84 | ||
|
f0dca7687e | ||
|
9861e80c80 | ||
|
5ae58b6d46 | ||
|
ad44ef4695 | ||
|
c32080e7f9 | ||
|
a1b86b93ea | ||
|
a72182c817 | ||
|
11bbbae2ed | ||
|
8323a735e7 | ||
|
00e11b2f51 | ||
|
e1ab519542 | ||
|
8e4b1e278c | ||
|
170cd8f72d | ||
|
74001d5aa5 | ||
|
b67f6369db | ||
|
d3b2fbe387 | ||
|
b4fc97be03 | ||
|
95cfbfb3fe | ||
|
658ef90458 | ||
|
11d5ca8342 | ||
|
9429c40b13 | ||
|
6800501217 | ||
|
469c275670 | ||
|
5b5f9b7460 | ||
|
698805f62b | ||
|
b2a266c3e4 | ||
|
53f93b27c2 | ||
|
f78709aaf9 | ||
|
f0aecf6744 | ||
|
4582e635f6 | ||
|
b62b468ccf | ||
|
a8d7f8a63e | ||
|
7655756a4f | ||
|
e5454a23eb | ||
|
79035ea7e6 | ||
|
97f9cc9abc | ||
|
5cc7dc204f | ||
|
961648819f | ||
|
264c7b1ddf | ||
|
0e0e333982 | ||
|
e2ee4b0cc1 | ||
|
022a4d36f5 | ||
|
5698a9727e | ||
|
9eb85c481c | ||
|
75d71ebbe1 | ||
|
d13f655300 | ||
|
d5512fd6ff | ||
|
b634a18a7f | ||
|
b55f8abd29 | ||
|
9ab9aaf8d9 | ||
|
6e10d8c713 | ||
|
8de7d685d9 | ||
|
c8341e1806 | ||
|
17bb3d2122 | ||
|
5d849cd050 | ||
|
8ef9478879 | ||
|
2113b6475c | ||
|
bb4afea4f1 | ||
|
385f0e950c | ||
|
fcfdb36352 | ||
|
9dce30a78d | ||
|
b43e35f7f2 | ||
|
00f2e24bce | ||
|
eeacb5b9f1 | ||
|
e998bbe436 | ||
|
d3900296af | ||
|
12652ad1fa | ||
|
4e3d87e658 | ||
|
5aadf4ac39 | ||
|
3064a6f0fe | ||
|
d8272bd0fa | ||
|
c80b7c6894 | ||
|
8615c96624 | ||
|
f9498744af | ||
|
5c4d3a5196 | ||
|
b949bfd615 | ||
|
d4effc1610 | ||
|
4da77c5e5d | ||
|
dafeb613c6 | ||
|
9f3408fec0 | ||
|
377fcb6948 | ||
|
d7012c442a | ||
|
0c8eb6c0fb | ||
|
ffcb3684de | ||
|
2bb65408ee | ||
|
65697a7526 | ||
|
01422983cb | ||
|
084994538b | ||
|
a8ff48ce92 | ||
|
91119d7d17 | ||
|
2abf9511c9 | ||
|
29a2b45fb4 | ||
|
3ac7148b38 | ||
|
6daf43f72e | ||
|
770ca9c202 | ||
|
5d14304dd1 | ||
|
bb9cbe88db | ||
|
46ab1cb6f6 | ||
|
e6079e4c85 | ||
|
ffd054d262 | ||
|
a83a34fd40 | ||
|
18b6020ac5 | ||
|
ad32b73918 | ||
|
31d922692f | ||
|
a696eb4f3a | ||
|
4f76425c9f | ||
|
cb271dc5ac | ||
|
f17b5e13ad | ||
|
1bf4bec32c | ||
|
8799d4cd85 | ||
|
09222a87a0 | ||
|
81d84a030b | ||
|
8c381e33aa | ||
|
e705cdd165 | ||
|
b7948f2d65 | ||
|
36e7279a5e | ||
|
5f12c8c9ce | ||
|
ed12a64a84 | ||
|
7516fbd690 | ||
|
3e1a6edecc | ||
|
df400002d8 | ||
|
5b1b6c1c4f | ||
|
96e7ddd3ea | ||
|
ad87ea2d28 | ||
|
5ff2d9f957 | ||
|
2ddaea5387 | ||
|
1ebb9d1773 | ||
|
861a4f5823 | ||
|
f69ca2c5fa | ||
|
6b07dcead3 | ||
|
428af17cb1 | ||
|
4af6a8f438 | ||
|
620cecc409 | ||
|
e5f37016e0 | ||
|
c5c2261489 | ||
|
490514e263 | ||
|
69316d827a | ||
|
4ec8ad5de8 | ||
|
d383e6c5c0 | ||
|
98f7ef739e | ||
|
bb0267bc7c | ||
|
652987f2cc | ||
|
8d224b206b | ||
|
536c6c85b7 | ||
|
282058dafe | ||
|
6230b0ff3e | ||
|
ecbf08c6f8 | ||
|
1a1cfc65ce | ||
|
ea7f98ef4e | ||
|
f1974d3f03 | ||
|
4afd21952c | ||
|
c4baf51b98 | ||
|
0759c7fb48 | ||
|
f5897eccbb | ||
|
aab22b6f9f | ||
|
aa38eb1fd7 | ||
|
c321bca8ef | ||
|
be8605ac91 | ||
|
c1b8fbe4f9 | ||
|
254bfb8ca4 | ||
|
2fd117367e | ||
|
07f5da5d4f | ||
|
e77821987c | ||
|
619e5323d6 | ||
|
1efd4a9873 | ||
|
1c84dc6ea0 | ||
|
41cdf54c05 | ||
|
2ca002d602 | ||
|
6f9f84c4d4 | ||
|
3174f06fd5 | ||
|
19468dda77 | ||
|
0f3bfbbddf | ||
|
d8f685000d | ||
|
2d9dd0e535 | ||
|
62b73944da | ||
|
337d2a999c | ||
|
d909adf26c | ||
|
0c13908864 | ||
|
7e2cd28fb8 | ||
|
4bc126979b | ||
|
0e7ea91d9d | ||
|
003efef2cf | ||
|
182c39669d | ||
|
f898c7ca7d | ||
|
103b0eb590 | ||
|
72f9819779 | ||
|
44d7b31a80 | ||
|
0c9ad96a31 | ||
|
ad104f994a | ||
|
935931b812 | ||
|
8e20f216f1 | ||
|
27375788c2 | ||
|
19c3707aee | ||
|
86bb816417 | ||
|
595845e104 | ||
|
f202d24961 | ||
|
9aed3364a6 | ||
|
223344e04a | ||
|
055b5e431b | ||
|
6ae48e2bf5 | ||
|
d619f14dbf | ||
|
c93772ade8 | ||
|
a813e926dc | ||
|
44c4ef5c41 | ||
|
4a4ef23e3c | ||
|
73a560d63c | ||
|
e6596d818c | ||
|
8670452b6b | ||
|
abb27ac7d7 | ||
|
452da86649 | ||
|
636ac79186 | ||
|
ab30fe0cef | ||
|
60d820d4fd | ||
|
6e8970e648 | ||
|
98a9e02b1b | ||
|
133cb97aa0 | ||
|
f6c4f166bf | ||
|
48979a4979 | ||
|
c61abac137 | ||
|
3ff1d3e21f | ||
|
a3044f4381 | ||
|
44d6f7d708 | ||
|
ca7c78d98c | ||
|
456fbb8812 | ||
|
e9292fc942 | ||
|
385b52041c | ||
|
90954ddf0e | ||
|
8661d294ab | ||
|
75b57ac25a | ||
|
7734ddf57e | ||
|
f80c97e3ec | ||
|
4a1e029c1d | ||
|
9b748f752e | ||
|
ab44f3539e | ||
|
1044fb01f8 | ||
|
33d5440a8a | ||
|
7c81c5aa9c | ||
|
ec03a8685a | ||
|
32a966fc91 | ||
|
8ed44dd153 | ||
|
0fb8049fdd | ||
|
b4a008f86a | ||
|
466a0eb24f | ||
|
37ac0d7bfe | ||
|
1b6688d22c | ||
|
71369c7690 | ||
|
921664c3b5 | ||
|
0c85e549e2 | ||
|
5bffca5037 | ||
|
be4d596c36 | ||
|
1616de1e1e | ||
|
2858b1aa54 | ||
|
bf5196af4a | ||
|
454363fba8 | ||
|
adbc772fd0 | ||
|
826afdc162 | ||
|
284d3a3510 | ||
|
abcc51fb85 | ||
|
e1895bff26 | ||
|
93015d679c | ||
|
96489a1e78 | ||
|
8ef2e7e308 | ||
|
e9a42ad54f | ||
|
f067f38df0 | ||
|
39c688f7ef | ||
|
5138a9b6e6 | ||
|
ac29bf05ee | ||
|
6ec151cde2 | ||
|
0c4bd42f80 | ||
|
91faa0fafe | ||
|
8bf1b09641 | ||
|
b35264cfba | ||
|
aec57dc72b | ||
|
8e5e893e5c | ||
|
f57665f963 | ||
|
d661de3e73 | ||
|
3baa386ed4 | ||
|
2b897ec6ea | ||
|
a2ea84934b | ||
|
439622d576 | ||
|
a5a0f3d69f | ||
|
ae30c07553 | ||
|
d85f00818d | ||
|
f0a8899ea7 | ||
|
a698c7cc0a | ||
|
dc4d502f2a | ||
|
39ce2556c3 | ||
|
c77e78c4f1 | ||
|
09aebd3db9 | ||
|
8b4ad1ea02 | ||
|
6d7fff5d1b | ||
|
b16699ddfc | ||
|
9059852ab5 | ||
|
e016015cf3 | ||
|
6e0f6ee73e | ||
|
5f0675dd66 | ||
|
7ec409a236 | ||
|
f3f0ce0d35 | ||
|
5f7f23ed96 | ||
|
b78878ac69 | ||
|
5688f1cbae | ||
|
af11bebf1b | ||
|
d2ea1dd288 | ||
|
7f815d80f5 | ||
|
195b13d434 | ||
|
9c1418eb0a | ||
|
88bb6ac3d7 | ||
|
a1384b60f4 | ||
|
770a81b76c | ||
|
1af17a5262 | ||
|
0222d4dc6d | ||
|
7a965bcb83 | ||
|
a6b673dbdd | ||
|
0e237db5f6 | ||
|
719ec458f4 | ||
|
f552eae2bf | ||
|
df0304095d | ||
|
10a90f97b4 | ||
|
81e90eb780 | ||
|
f5862deea7 | ||
|
95761c69e1 | ||
|
4b374278e4 | ||
|
fe95221f10 | ||
|
716b2d521a | ||
|
4a96e8e313 | ||
|
bdb1bcb35c | ||
|
4a26b99a24 | ||
|
31fe85038b | ||
|
77aca8a00a | ||
|
cdec198b6d | ||
|
94f519d01c | ||
|
791448cc32 | ||
|
cfda99e6fe | ||
|
b6d9b5632e | ||
|
539736a11e | ||
|
290f8f6540 | ||
|
e9699b7327 | ||
|
a2ace9e05c | ||
|
211bd6eef7 | ||
|
1ab4144dfb | ||
|
fd5d3ababb | ||
|
bd8d814230 | ||
|
ba396ea401 | ||
|
496563aaef | ||
|
ba8ae15eb1 | ||
|
6f00f20b3d | ||
|
7ce5c76673 | ||
|
066855a039 | ||
|
129e654690 | ||
|
660c8bc2b3 | ||
|
359eb444e5 | ||
|
5ff5ffc259 | ||
|
3897671190 | ||
|
fd537e5070 | ||
|
656b081565 | ||
|
9798454cb3 | ||
|
2114369e9c | ||
|
15811107bb | ||
|
5c8694b286 | ||
|
ecc6b0f62a | ||
|
36fff71059 | ||
|
4d93221005 | ||
|
1d97ce5ea1 | ||
|
b4fd280d80 | ||
|
291c509c2f | ||
|
3df5a11a89 | ||
|
02e55b67b3 | ||
|
2599a42a14 | ||
|
fd64bba4dd | ||
|
3efd29e465 | ||
|
da0405a0f5 | ||
|
d982cd585f | ||
|
6e773f8676 | ||
|
abb2ff1584 | ||
|
158d1834a6 | ||
|
8e90a2a32a | ||
|
4fb20198db | ||
|
449b4e4c87 | ||
|
831352b2ea | ||
|
0e18ffcf31 | ||
|
853c27d1be | ||
|
9235844529 | ||
|
ba9df6dc5e | ||
|
219af0c904 | ||
|
d9d9db3e5d | ||
|
df1415b3f5 | ||
|
44ebf5eedc | ||
|
39c073abf9 | ||
|
ba39435bf6 | ||
|
ea751bb119 | ||
|
05e9bbab9c | ||
|
1373fe5178 | ||
|
919a4fc619 | ||
|
1089847cd7 | ||
|
5238e1cc99 | ||
|
8b1d5ab9b2 | ||
|
19a7394974 | ||
|
5432d3fa74 | ||
|
4dc7079bd2 | ||
|
0d8ad9a111 | ||
|
1f313b39ad | ||
|
b85307c683 | ||
|
63338a9689 | ||
|
e80879a4fe | ||
|
ca5aeb3106 | ||
|
63c631a99b | ||
|
72302bd98e | ||
|
afa05021f0 | ||
|
017bc76093 | ||
|
6286e87d3b | ||
|
90918dc34d | ||
|
a3d46f5393 | ||
|
81e935fe0e | ||
|
51cdd94379 | ||
|
b53f2ba9bf | ||
|
11a80ddbda | ||
|
e8f4b7334e | ||
|
793e9f3d06 | ||
|
7d714e49f3 | ||
|
e3e60c75a2 | ||
|
008d80186e | ||
|
0da05a8682 | ||
|
913494d889 | ||
|
ef7c78cd38 | ||
|
ac24cfa1f9 | ||
|
fd8cd62e79 | ||
|
6b34cb98d2 | ||
|
977ad66995 | ||
|
ec7424395d | ||
|
f633c04497 | ||
|
cc48f6522a | ||
|
989b018090 | ||
|
8d50f89e6e | ||
|
654ac5fbaa | ||
|
dc9cfffbb9 | ||
|
f85fabba8f | ||
|
e71f8026fc | ||
|
59ca987233 | ||
|
5fa5fcdd06 | ||
|
4e8e1d4d90 | ||
|
37b7a23505 | ||
|
ff77a9209c | ||
|
1d87fca380 | ||
|
dc1be3eecc | ||
|
ca17987de9 | ||
|
f5eaa7326f | ||
|
b6825f3471 | ||
|
771ed2efcb | ||
|
f828850466 | ||
|
eba5a8bd96 | ||
|
5197fed16a | ||
|
d337c34b2a | ||
|
ad18e21d1a | ||
|
012c749cdb | ||
|
18d4d2ecf8 | ||
|
07e90f0f96 | ||
|
fdc7901127 | ||
|
a3158ec144 | ||
|
320724ed98 | ||
|
13526d1c49 | ||
|
9947c7e0cd | ||
|
dfd0cc947b | ||
|
64b6c8065c | ||
|
0633eb29d3 | ||
|
f116d9384f | ||
|
f5c6d44000 | ||
|
274236a3f3 | ||
|
fedc440159 | ||
|
c4dd0e75ed | ||
|
2221f66ff5 | ||
|
1bd069683c | ||
|
33ebc4e11f | ||
|
d865c94330 | ||
|
add538d7b4 | ||
|
21021f521a | ||
|
e3c8c3b7e0 | ||
|
55c00f6d60 | ||
|
1175551e2c | ||
|
6a26176c23 | ||
|
2bb948fce7 | ||
|
6177cf6f88 | ||
|
ea8f3909ac | ||
|
76ba16d4a6 | ||
|
a5aa03f58c | ||
|
af9f7520f4 | ||
|
5c9d28dc9f | ||
|
9d339e774b | ||
|
a313fa8214 | ||
|
6820ad7c23 | ||
|
f57a4ac5ee | ||
|
fb3d4ca185 | ||
|
170ca88549 | ||
|
1ba64d6e6b | ||
|
224463030b | ||
|
94235b093e | ||
|
6ceadebaa9 | ||
|
76c8941456 | ||
|
9acb9c58c5 | ||
|
293143d8f1 | ||
|
e7d19b70ee | ||
|
23977a90de | ||
|
cdf9916906 | ||
|
03efbfd62b | ||
|
8b1c2433c9 | ||
|
43c9185323 | ||
|
cb6f1dbe17 | ||
|
e1f1231bb1 | ||
|
9a09cf9df3 | ||
|
2f0b9fc616 | ||
|
754a1bb59b | ||
|
3b71b8b457 | ||
|
448cb84305 | ||
|
e1f71d828d | ||
|
663ea07c08 | ||
|
5dc0913ef1 | ||
|
c363b7242a | ||
|
6a68c139c8 | ||
|
14ffefd376 | ||
|
f9f9e6a151 | ||
|
e04fcc4441 | ||
|
09ebeccd4d | ||
|
44cd5ff806 | ||
|
3b6146abc7 | ||
|
cb45400298 | ||
|
854899344c | ||
|
17221a33f3 | ||
|
60803f32f3 | ||
|
9776e3cd9d | ||
|
a9581e2056 | ||
|
30c0f358d9 | ||
|
91f70cbb43 | ||
|
bd35d6bc03 | ||
|
c047fe570b | ||
|
5451453f6f | ||
|
c076de2e9f | ||
|
2d2a3e8083 | ||
|
cd75bc4875 | ||
|
5069eb8b09 | ||
|
77dea0ec92 | ||
|
dd6a68eb7c | ||
|
50c1d46c7e | ||
|
49b530b1f8 | ||
|
eea89a2b78 | ||
|
4ae5a8c18b | ||
|
d978ca3fa8 | ||
|
45bb97ba97 | ||
|
b7c6c80949 | ||
|
8d54c4be1b | ||
|
6cbf093f8e | ||
|
feadeaadb8 | ||
|
ee88779bb6 | ||
|
5cd88a47e6 | ||
|
4b7685daaa | ||
|
2d37fb2fa9 | ||
|
989462bf86 | ||
|
cd1ec8f6ed | ||
|
8ed9aafb78 | ||
|
72b9333abd | ||
|
aa401cc49b | ||
|
9d7f82d91a | ||
|
8ccc24e106 | ||
|
80454a89b2 | ||
|
1623bbe936 | ||
|
83a22bd5ea | ||
|
44cda03261 | ||
|
cd9797de71 | ||
|
249efe1d75 | ||
|
9cf7418cd5 | ||
|
0857d30f82 | ||
|
7ab8ee1443 | ||
|
45964282af | ||
|
32eacf5eec | ||
|
0ff7275349 | ||
|
59ff9f9068 | ||
|
e70fa6c60f | ||
|
e215694a6b | ||
|
d6490ebf6e | ||
|
7e65e60fc6 | ||
|
76c3cd9309 | ||
|
ebedace49d | ||
|
80120d1e53 | ||
|
6e05f8162f | ||
|
0c8b9d268e | ||
|
e3a59fe484 | ||
|
7a8ab8817a | ||
|
4002565f53 | ||
|
4bc181ed4a | ||
|
b4e40a079e | ||
|
27cf6a26b4 | ||
|
7deae5840c | ||
|
6d480f0896 | ||
|
5b0c9715a5 | ||
|
19993ef780 | ||
|
31d6c3d6ef | ||
|
6e28466560 | ||
|
cbba0e4cd7 | ||
|
d5ef2c25ff | ||
|
9976438e31 | ||
|
9b221ec535 | ||
|
95a8aaa0c1 | ||
|
18143a47a1 | ||
|
17070c0887 | ||
|
06c5a94b5b | ||
|
a7a43e413b | ||
|
39326ef54a | ||
|
f3dafc4446 | ||
|
6703e700ba | ||
|
c6f01468c3 | ||
|
7099d39f24 | ||
|
7dd898cbad | ||
|
9f6fe74826 | ||
|
ab0b3aa369 | ||
|
6ed01b8852 | ||
|
be078f9850 | ||
|
38d41ad0be | ||
|
d4fc805857 | ||
|
b79b50b669 | ||
|
9aed6d9656 | ||
|
d30edb8b6a | ||
|
d9a294aee5 | ||
|
fe23e1f65a | ||
|
f8865c35e2 | ||
|
acdac6a2be | ||
|
1991a09a48 | ||
|
0ace77f6c6 | ||
|
f099500663 | ||
|
51f03b32e4 | ||
|
e434cf4876 | ||
|
7f0cd87c4f | ||
|
05c51e4b96 | ||
|
eb3caa4161 | ||
|
cd466ce546 | ||
|
af347fd6a3 | ||
|
5d6fc5c543 | ||
|
179d3cd678 | ||
|
a859efe4a9 | ||
|
8a15ecca80 | ||
|
a73e976bc8 | ||
|
ff3b72a97e | ||
|
e377f015df | ||
|
038a98a2d3 | ||
|
9012c9fd41 | ||
|
076ba206cb | ||
|
26f9fad9a5 | ||
|
0074f79e5d | ||
|
63adcfcc7c | ||
|
e437dd7f7f | ||
|
2c2a1178ae | ||
|
28412ffc8f | ||
|
f46d84ed73 | ||
|
9ecb4c5dc3 | ||
|
b28d842bec | ||
|
c484a26f44 | ||
|
44c4028447 | ||
|
979c0b5e94 | ||
|
db54e3dbfd | ||
|
9d831bc470 | ||
|
050c8da46b | ||
|
ff70fea3ed | ||
|
2688d1c397 | ||
|
ea148b8b1c | ||
|
e3d6cf508d | ||
|
bdf3b118aa | ||
|
923a0c9392 | ||
|
5216793a69 | ||
|
6843a12485 | ||
|
7dc76be687 | ||
|
9b77cea3b7 | ||
|
90ef65f192 | ||
|
e073e0a032 | ||
|
c0e7d8afe5 | ||
|
074879de5c | ||
|
dee9472d0c | ||
|
c4004dd307 | ||
|
972240b972 | ||
|
353600eb98 | ||
|
5a00b479b7 | ||
|
dbf82cce09 | ||
|
3ca7a26682 | ||
|
208fe34cc5 | ||
|
a8a1aa0149 | ||
|
056cf18c10 | ||
|
14771fb5ef | ||
|
3c30592dab | ||
|
4d01eb4785 | ||
|
d77a180f29 | ||
|
dc3dbc2bdf | ||
|
b929b89835 | ||
|
15efef64fa | ||
|
2fca7f7405 | ||
|
dcf4d90e69 | ||
|
c550c16f62 | ||
|
a157c94252 | ||
|
94828b34fb | ||
|
08fc496a36 | ||
|
a82d29ccaa | ||
|
e101067357 | ||
|
9ca05af1e9 | ||
|
bc18a1d4f3 | ||
|
6fc3d4ff79 | ||
|
768ab66ee6 | ||
|
7326b388c3 | ||
|
e9e0a1880c | ||
|
bdaf3a662a | ||
|
f253e2e8bb | ||
|
5beb34227e | ||
|
be8b06d523 | ||
|
7d92748838 | ||
|
8331ae9e0f | ||
|
6ae28edb54 | ||
|
2b5ed26466 | ||
|
116a0df898 | ||
|
74e014707a | ||
|
ec8b64e6cf | ||
|
38bd19ea9a | ||
|
0517ef0782 | ||
|
3e62130a6c | ||
|
7c2e8593c8 | ||
|
26795667ac | ||
|
cbe47772c9 | ||
|
3cc980b49d | ||
|
874853e78b | ||
|
3c979842b4 | ||
|
af2d5a1e58 | ||
|
0670212e14 | ||
|
15c5b80e87 | ||
|
897f97ee59 | ||
|
953ee3addf | ||
|
151e3b134e | ||
|
3b955cc823 | ||
|
379020e5c5 | ||
|
64e5734178 | ||
|
a81ead6181 | ||
|
a9249e3c31 | ||
|
043a799b25 | ||
|
4627931835 | ||
|
3b89e4586b | ||
|
4ae0dc35e8 | ||
|
b45c700be8 | ||
|
1b58d024a3 | ||
|
baa1ce8ee1 | ||
|
203f152239 | ||
|
554310a690 | ||
|
58c08e04eb | ||
|
8aa4e5ce55 | ||
|
0befc4d3f3 | ||
|
2455411091 | ||
|
24641ddfbe | ||
|
e7430529af | ||
|
b71ae43cc5 | ||
|
c0d44a06a6 | ||
|
178e217eca | ||
|
ed1e80c6da | ||
|
b1bd3c7a46 | ||
|
5d33519dbf | ||
|
2ddaea50a9 | ||
|
4b69164797 | ||
|
ac609004b2 | ||
|
13d6ed35ee | ||
|
65c7bdf6de | ||
|
51f5969e49 | ||
|
ed5d17207f | ||
|
007effec74 | ||
|
aa358c1517 | ||
|
bd4eea43d0 | ||
|
3f9479029c | ||
|
6373d57d22 | ||
|
f29a4ca8b1 | ||
|
5c106b05d8 | ||
|
ecc53ba291 | ||
|
66a6d6eafd | ||
|
0a4fdfe767 | ||
|
0a9dffe2a5 | ||
|
a08871a741 | ||
|
7405122981 | ||
|
e2f79e49f6 | ||
|
8a89242100 | ||
|
b623f31621 | ||
|
7b27cd5c8b | ||
|
706184a97f | ||
|
eb1a47579f | ||
|
1a5e874d84 | ||
|
923ea6fa1a | ||
|
4a55747832 | ||
|
6f8c7be932 | ||
|
b2cf9776ea | ||
|
3ce7826b06 | ||
|
34bb3d57fc | ||
|
bbbcc1dff8 | ||
|
7de5f15356 | ||
|
202059224a | ||
|
59c189438a | ||
|
6d0d5e5839 | ||
|
e81d4b8e51 | ||
|
d36d3a32e0 | ||
|
1402ef4414 | ||
|
49eb1f853e | ||
|
a18974c12c | ||
|
2cb4e338f7 | ||
|
b94b37422a | ||
|
6299545b10 | ||
|
c91524b39b | ||
|
fa1fc86b8d | ||
|
15f0b6b682 | ||
|
eb3083c94a | ||
|
cf2469d7fa | ||
|
1c76dd3722 | ||
|
5bfbdb1a47 | ||
|
302a5ab5db | ||
|
b7b45d59c1 | ||
|
5abaa9b226 | ||
|
fb6c4402a2 | ||
|
8ca20d2fa7 | ||
|
9c11eec79a | ||
|
5e0fad1742 | ||
|
0d2d9e55f5 | ||
|
591f65c982 | ||
|
4929567859 | ||
|
d9a840c863 | ||
|
91a9edceb7 | ||
|
9be2c94dc6 | ||
|
43c0267445 | ||
|
3248639f9b | ||
|
8df58b56cb | ||
|
a9c124e7e0 | ||
|
33a2ec8508 | ||
|
67df99833a | ||
|
dcf069a017 | ||
|
6748133646 | ||
|
aef1db89aa | ||
|
a09b885fd8 | ||
|
669fd7e62b | ||
|
f499bc4f8d | ||
|
1116fcc4b0 | ||
|
1c6ac68a59 | ||
|
8733a990ec | ||
|
3260c78f53 | ||
|
d99c9048c4 | ||
|
8e5e261774 | ||
|
333255c35c | ||
|
c60c9b443e | ||
|
a621cdc5c5 | ||
|
cf54262dee | ||
|
0334c92f11 | ||
|
87f914e814 | ||
|
7bf5e3d458 | ||
|
47b32e3c2d | ||
|
ab3bcf6194 | ||
|
33a28f0c0a | ||
|
83aec9363a | ||
|
930f0f2667 | ||
|
cc9bb72726 | ||
|
53301b5313 | ||
|
c28af6c5ea | ||
|
2aeafff418 | ||
|
400ff52477 | ||
|
24a52f5cb6 | ||
|
4b4a2d2f32 | ||
|
076706ead6 | ||
|
06ace11bfd | ||
|
4ca3248d0a | ||
|
adbc4c787c | ||
|
6119a75069 | ||
|
7795c25583 | ||
|
420f9a59aa | ||
|
6c4913c169 | ||
|
149a6a8929 | ||
|
b80ab86a7a | ||
|
db5c76a807 | ||
|
bd41b1c239 | ||
|
5e467d0cb5 | ||
|
ddab468b43 | ||
|
51d96f80e1 | ||
|
00487ea9c8 | ||
|
dcb7376ff2 | ||
|
835188ac2c | ||
|
c4b27ff703 | ||
|
371c782b81 | ||
|
c2d4e9ed34 | ||
|
ec1ecc368d | ||
|
93ad1c3f57 | ||
|
5fafe9c962 | ||
|
ee22bb65ae | ||
|
f074a600de | ||
|
4cb9239814 | ||
|
788105f59b | ||
|
28c0eca953 | ||
|
9e4cc8ad1a | ||
|
5af01719d3 | ||
|
180564aadc | ||
|
0d888052c4 | ||
|
e7779582c3 | ||
|
33d983e8b6 | ||
|
b3e1040798 | ||
|
2c785b0ba4 | ||
|
b428a139f2 | ||
|
9322210a93 | ||
|
c88e50ee5e | ||
|
e6438b5daa | ||
|
ae78aea673 | ||
|
72fdba8821 | ||
|
0a029cbb14 | ||
|
5321a6e321 | ||
|
9f1523cb7e | ||
|
d8e95fffa3 | ||
|
736951231e | ||
|
b8457e64d5 | ||
|
29d033e838 | ||
|
1357a05145 | ||
|
d5aea6b8b2 | ||
|
54737ebb70 | ||
|
c53ff26201 | ||
|
5c70f43906 | ||
|
a578d93a9e | ||
|
85620199c0 | ||
|
b8b7d763f9 | ||
|
666fb84064 | ||
|
38ff3cef53 | ||
|
0b3221eafa | ||
|
c6986f49a8 | ||
|
cc68ffa5d8 | ||
|
cfdfdbbabe | ||
|
885dae0818 | ||
|
6feda08320 | ||
|
9c757202c2 | ||
|
e1d532cfc0 | ||
|
c8beac9215 | ||
|
54d3d6b809 | ||
|
9c64c98c8f | ||
|
6c313fe24a | ||
|
af625b2b9c | ||
|
27d0a9808f | ||
|
fa3f75b16d | ||
|
edf1389443 | ||
|
0390e7d04e | ||
|
4d270948dd | ||
|
823e578ff0 | ||
|
2221c0fb66 | ||
|
c7e5e94eac | ||
|
6612ec39ba | ||
|
7aea97468c | ||
|
3abfe92500 | ||
|
0a7c6f4683 | ||
|
25fa13f329 | ||
|
a83f26efbd | ||
|
55d398b5b4 | ||
|
1f85a11293 | ||
|
9cb130eb8d | ||
|
d98fe83e7a | ||
|
33a542f60b | ||
|
24cb8d04cf | ||
|
5fecaeb9dd | ||
|
1b1a81b2c7 | ||
|
eee5556f50 | ||
|
829f90aa5f | ||
|
36c629a262 | ||
|
99e2ac017e | ||
|
7eff6de180 | ||
|
b04defc8e1 | ||
|
228ea17bdd | ||
|
516bffb56b | ||
|
c6bc2d2de5 | ||
|
cabdee9e92 | ||
|
14e3a3922b | ||
|
8dd1c2aa93 | ||
|
6e819ae0f1 | ||
|
5e4077d3eb | ||
|
d612ffdfb3 | ||
|
7a16c58d83 | ||
|
7c88b058e1 | ||
|
51b7c6d81a | ||
|
a5fdee3d55 | ||
|
d507bed1b2 | ||
|
02539d5d9e | ||
|
20d68be58d | ||
|
ec285d1c32 | ||
|
0487c7aadd | ||
|
1eb9ea938f | ||
|
a300431630 | ||
|
c8e8f436b5 | ||
|
55f643eb7d | ||
|
0e7a34a453 | ||
|
69bf86d8a6 | ||
|
3ca782105d | ||
|
5c6905f6ff | ||
|
e111bec37b | ||
|
1dfca44ec1 | ||
|
c0f3f63202 | ||
|
30b9b6bfa6 | ||
|
defce836a2 | ||
|
804be3f018 | ||
|
01d756a34a | ||
|
f0500348f3 | ||
|
7baf239914 | ||
|
c77150cc11 | ||
|
f3924cd7f9 | ||
|
565938526e | ||
|
ef38a79664 |
19
.gitignore
vendored
Normal file
19
.gitignore
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
*.py[co]
|
||||
*~
|
||||
*.egg-info
|
||||
MANIFEST
|
||||
build/*
|
||||
dist/*
|
||||
doc/_build
|
||||
noarch/*
|
||||
tests/data/repo
|
||||
tests/data/repo-krb5-lookaside
|
||||
tests/_composes
|
||||
htmlcov/
|
||||
.coverage
|
||||
.eggs
|
||||
.idea/
|
||||
.tox
|
||||
.venv
|
||||
.kdev4/
|
||||
pungi.kdev4
|
28
.hgtags
28
.hgtags
@ -1,28 +0,0 @@
|
||||
e80b96291cfe23c4c21b2e668d8d80a8998c7cfc pungi--
|
||||
f755487fdd539c3a68296c0dc7b6c6dc49dccb98 pungi-0.1.0-1%{?dist}
|
||||
d9bda840074f8f5e7b8844007e9951cd55ad9c1d pungi--
|
||||
baa55b9774642535467104c3f6b268671cc35e08 pungi-0.1.0-1
|
||||
902402e675943d6c3186b924a9cff89d539b06f9 pungi-0.1.0-1
|
||||
14a5e625d91034b7dcb1f2b26486827929e87e24 pungi-0.1.0-1
|
||||
b13071d9363851d2766e9efaf80e9e13feec7a0c pungi-0.1.0-1
|
||||
00326e01cc7dd77f527d1a70e97fa907f35ce669 pungi-0.1.0-1
|
||||
591cf30beec90deb8b01aaef07e042d8878f4f09 pungi-0.1.1-1
|
||||
9f954716abd9c8db453b9f1b56f64e0defd8fa1d FC-6
|
||||
f0cbd4fbc9e7915fa94588237827e0a1379ec823 pungi--
|
||||
c5e81c8e1adc642b15e5aac713ae2e58a386c9b9 pungi-0.2.0-1
|
||||
f90b645121cb2f794ceda3c4be050c53d36a7bec pungi-0.2.0-1
|
||||
ebfe0e963db6d7b652f63f99aaec121e7ff19074 pungi-0.2.1-1
|
||||
769a8e08d77a2234295449282b16ca38ff4d846e pungi-0.2.2-1
|
||||
ba049c3454d5dae9326d8050bb0d7d8116982ca4 f7-test1
|
||||
780520383876b76dd06fa013e1a41ddd6bf0901e pungi-0.2.3-1
|
||||
158bd9a170892b43645caed12bddc3602ef3be4d pungi-0.2.3-1
|
||||
6659955ccfdf29ecd6027bd3770f80d815720af0 pungi-0.2.3-1
|
||||
9f7b5877c32c533d00ea6baa67057ce424a40a61 pungi-0.2.3-1
|
||||
7ea08753383766ce36bb49fef6d4fcf44158ad26 pungi-0.2.3-1
|
||||
65596b024b8380bd72c6faec00d37820ada1444d pungi-0.2.4-1
|
||||
5e3332cfa2bb723f438507313836c299fcc99cff pungi-0.2.5-1
|
||||
61146ab008d70cb4ce294d14a8465c05613e91e5 pungi-0.2.6-1
|
||||
6de1d8a07c7b75fc069c72eaa9b3cb4ecaa5ad5a pungi-0.2.7-1
|
||||
c150a9d7a125e6c25384fbbf8080d7532191b587 f7-test2
|
||||
9c5cdf9e045ab0c804d85a50b24107b108aa2da5 pungi-0.2.8-1
|
||||
f1ee949b238b004ee53c6b30915e69352274f583 pungi-0.3.0-1
|
41
1715.patch
Normal file
41
1715.patch
Normal file
@ -0,0 +1,41 @@
|
||||
From 432b0bce0401c4bbcd1a958a89305c475a794f26 Mon Sep 17 00:00:00 2001
|
||||
From: Adam Williamson <awilliam@redhat.com>
|
||||
Date: Jan 19 2024 07:25:09 +0000
|
||||
Subject: checks: don't require "repo" in the "ostree" schema
|
||||
|
||||
|
||||
Per @siosm in https://pagure.io/pungi-fedora/pull-request/1227
|
||||
this option "is deprecated and not needed anymore", so Pungi
|
||||
should not be requiring it.
|
||||
|
||||
Merges: https://pagure.io/pungi/pull-request/1714
|
||||
Signed-off-by: Adam Williamson <awilliam@redhat.com>
|
||||
|
||||
---
|
||||
|
||||
diff --git a/pungi/checks.py b/pungi/checks.py
|
||||
index a340f93..db8b297 100644
|
||||
--- a/pungi/checks.py
|
||||
+++ b/pungi/checks.py
|
||||
@@ -1066,7 +1066,6 @@ def make_schema():
|
||||
"required": [
|
||||
"treefile",
|
||||
"config_url",
|
||||
- "repo",
|
||||
"ostree_repo",
|
||||
],
|
||||
"additionalProperties": False,
|
||||
diff --git a/pungi/phases/ostree.py b/pungi/phases/ostree.py
|
||||
index 90578ae..2649cdb 100644
|
||||
--- a/pungi/phases/ostree.py
|
||||
+++ b/pungi/phases/ostree.py
|
||||
@@ -85,7 +85,7 @@ class OSTreeThread(WorkerThread):
|
||||
comps_repo = compose.paths.work.comps_repo(
|
||||
"$basearch", variant=variant, create_dir=False
|
||||
)
|
||||
- repos = shortcuts.force_list(config["repo"]) + self.repos
|
||||
+ repos = shortcuts.force_list(config.get("repo", [])) + self.repos
|
||||
if compose.has_comps:
|
||||
repos.append(translate_path(compose, comps_repo))
|
||||
repos = get_repo_dicts(repos, logger=self.pool)
|
||||
|
@ -1,6 +1,8 @@
|
||||
Authors:
|
||||
|
||||
Jesse Keating <jkeating at redhat dot com>
|
||||
Dennis Gilmore <dennis at ausil dot us>
|
||||
Daniel Mach <dmach at redhat dot com>
|
||||
|
||||
Contributors:
|
||||
|
||||
@ -9,3 +11,6 @@ Essien Ita Essien <essien at wazobialinux dot com>
|
||||
James Bowes <jbowes at redhat dot com>
|
||||
Tom Callaway <tcallawa at redhat dot com>
|
||||
Joel Andres Granados <jgranado at redhat dot com>
|
||||
<proski at fedoraproject dot org>
|
||||
Mark McLoughlin <markmc at redhat dot com>
|
||||
Jeremy Cline <jcline at redhat dot com>
|
8
COPYING
8
COPYING
@ -1,5 +1,5 @@
|
||||
Pungi - a Fedora release compose tool
|
||||
Copyright (C) 2006 Jesse Keating
|
||||
Pungi - Distribution compose tool
|
||||
Copyright (C) 2006-2015 Red Hat, Inc.
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@ -11,6 +11,4 @@
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
||||
|
||||
along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
185
Changelog
185
Changelog
@ -1,185 +0,0 @@
|
||||
* Tue May 15 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Don't quote ISO label, not running mkisofs in shell
|
||||
- Apply sparc patches (spot)
|
||||
- Fix cached downloads comparing correctly
|
||||
- Shorten 'development' to 'devel' in default config, more space for mkisofs
|
||||
- Handle config file missing better (jgranado)
|
||||
|
||||
* Fri Apr 06 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Fix comments in config file
|
||||
|
||||
* Mon Apr 02 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Remove incompatible fc6 config files
|
||||
- Update default config file with comments / new options
|
||||
- Update comps file
|
||||
|
||||
* Mon Mar 26 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Enable source iso building again.
|
||||
|
||||
* Fri Mar 23 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Don't try a rescue if the script doesn't exist (prarit)
|
||||
|
||||
* Thu Mar 22 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Pass flavor off to buildinstall if it is set (wwoods)
|
||||
|
||||
* Fri Mar 16 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Fix a logic flaw in the depsolving loop
|
||||
|
||||
* Thu Mar 15 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Use yum's built in exclude handling
|
||||
- Use yum's built in conditional handling for things from comps
|
||||
- Do excludes before group handling.
|
||||
- Get all potential matches for deps, let install time figure
|
||||
the best one to use.
|
||||
- Work around false positive 'unmatched' packages (globs are fun)
|
||||
- Change how depsolving is done
|
||||
- Get all potential matches for a dep, instead of our 'best'
|
||||
our 'best' may not be the same as install time best.
|
||||
- Remove anaconda code, use direct yum functions to get deps
|
||||
- Use a True/False flag to depsolve instead of iterating over
|
||||
a dict.
|
||||
- Log what packages are being added for which reasons.
|
||||
|
||||
* Tue Mar 14 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Do things faster/smarter if we've only asked for one disc
|
||||
|
||||
* Tue Mar 13 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- log the rpm2cpio stuff for release notes
|
||||
- correctly capture errors from subprocess
|
||||
|
||||
* Wed Mar 07 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Call createrepo ourselves for the tree, not buildinstall's job
|
||||
|
||||
* Tue Mar 06 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Convert from commands to subprocess for things we call out
|
||||
|
||||
* Fri Mar 02 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Add kickstart %packages syntax support to package manifest
|
||||
- Make the list we hand off to yum to search for as unique as we can
|
||||
|
||||
* Wed Feb 28 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Update Fedora 7 comps file.
|
||||
- Tag for F7 Test2
|
||||
|
||||
* Mon Feb 26 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Fix gathering of srpms (thanks skvidal)
|
||||
|
||||
* Wed Feb 21 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Don't use TMPDIR with buildinstall for now
|
||||
|
||||
* Fri Feb 16 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Make use of anaconda's TMPDIR support
|
||||
- Put yum tempdirs in the workdir
|
||||
- Add a version option to cli arguments
|
||||
- Make cdsize a config option
|
||||
|
||||
* Thu Feb 15 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Various logging tweaks
|
||||
- Use -d flag in createrepo for sqlite blobs
|
||||
- Add pydoc stuff to various functions
|
||||
- Support comments in the package manifest
|
||||
|
||||
* Wed Feb 14 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Add logging patch from jbowes
|
||||
|
||||
* Tue Feb 13 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Fix part of the patch from Essien
|
||||
- Add Contributors to the Authors file
|
||||
- Adjust the Makefile so that srpm doesn't cause a tag
|
||||
- Merged changes from Will Woods
|
||||
- Write out some tree description files
|
||||
- Don't traceback on existing files in download area (not sure this will stay)
|
||||
- Style fixed some stuff from Will
|
||||
|
||||
* Mon Feb 12 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Add new Make targets (Essien Ita Essien)
|
||||
- Add runtime flags for doing specific stages of the compose (Essien Ita Essien)
|
||||
- Add ability to define destdir on the cli to override conf file
|
||||
- Clean up optionparse stuff, print usage if arg list is too small
|
||||
|
||||
* Thu Feb 08 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Add support for globbing in manifest
|
||||
|
||||
* Tue Feb 06 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- yum bestPackagesFromList takes an arch argument. Fixes ppc64 bug
|
||||
- Don't use 'returnSimple' anymore, deprecated in yum api
|
||||
- Speed up depsolving a bit by tracking solved deps
|
||||
|
||||
* Sat Feb 03 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Be able to opt-out of a bugurl since buildinstall supports this
|
||||
- Make isodir an object of pungi (wwoods)
|
||||
|
||||
* Tue Jan 30 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- implantmd5 _then_ sha1sum.
|
||||
|
||||
* Mon Jan 29 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Update the comps file again from F7
|
||||
- Fix the ppc boot flags
|
||||
- Clean up SRPM-disc junk
|
||||
- add bugurl config option for anaconda betanag
|
||||
|
||||
* Thu Jan 25 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Update the comps file from F7
|
||||
|
||||
* Wed Jan 24 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Add a "flavor" option (such as Desktop)
|
||||
- Move packageorder file into workdir
|
||||
- Use some anaconda code to depsolve, gets better (and more common) results
|
||||
|
||||
* Tue Jan 23 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Now use a manifest to determine what to pull in, not comps itself
|
||||
- Add a minimal-manifest for test composes
|
||||
- Add current F7 comps file for test composes
|
||||
- Bump the iso size to what was used in FC6
|
||||
|
||||
* Wed Jan 17 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Move splittree workdirs into work/ at the end of the run
|
||||
|
||||
* Tue Jan 16 2007 Jesse Keating <jkeating@redhat.com>
|
||||
- Remove our splittree for rawhide
|
||||
- Remove old main() sections from pungi.py and gather.py
|
||||
- Require yum 3.0.3 or newer
|
||||
- Add rescueCD support
|
||||
|
||||
* Wed Dec 13 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- Fix a bug in DVD repodata
|
||||
- Add correct ppc boot args
|
||||
- Set ppc arch correctly
|
||||
|
||||
* Mon Dec 11 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- Now able to get release note files from release note packages.
|
||||
- Add a config file for the source run
|
||||
|
||||
* Sat Dec 9 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- Now able to do srpms, lots of changes
|
||||
|
||||
* Fri Nov 17 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- First pass at a config file, lots of changes
|
||||
|
||||
* Wed Nov 8 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- Shuffle things around for dist-utils
|
||||
- Add setup.py and such to do installs
|
||||
|
||||
* Tue Nov 7 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- pungi.py
|
||||
Turn on split repo creation
|
||||
Add iso creation code
|
||||
Add DVD creation code
|
||||
- __init.py__
|
||||
Turn on split repo creation
|
||||
Turn on iso creation
|
||||
|
||||
* Mon Nov 6 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- pungi.py
|
||||
use splittree.py as a module rather than a script
|
||||
use same layout as current fedora trees
|
||||
- gather.py
|
||||
use same layout as current fedora trees
|
||||
|
||||
* Wed Nov 1 2006 Jesse Keating <jkeating@redhat.com>
|
||||
- First changelog entry
|
||||
- pungi.py
|
||||
Code up splittree, use our own for now with patches
|
||||
Code up createSplitRepodata
|
||||
- Now able to create installable tree and create split CD dirs suitable for
|
||||
mkisofs
|
439
GPL
439
GPL
@ -1,232 +1,242 @@
|
||||
|
||||
The GNU General Public License (GPL)
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
|
||||
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your freedom to
|
||||
share and change it. By contrast, the GNU General Public License is
|
||||
intended to guarantee your freedom to share and change free software--to
|
||||
make sure the software is free for all its users. This General Public
|
||||
License applies to most of the Free Software Foundation's software and to
|
||||
any other program whose authors commit to using it. (Some other Free
|
||||
Software Foundation software is covered by the GNU Library General Public
|
||||
License instead.) You can apply it to your programs, too.
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not price.
|
||||
Our General Public Licenses are designed to make sure that you have the
|
||||
freedom to distribute copies of free software (and charge for this service
|
||||
if you wish), that you receive source code or can get it if you want it,
|
||||
that you can change the software or use pieces of it in new free programs;
|
||||
and that you know you can do these things.
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid anyone to
|
||||
deny you these rights or to ask you to surrender the rights. These
|
||||
restrictions translate to certain responsibilities for you if you
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether gratis or
|
||||
for a fee, you must give the recipients all the rights that you have. You
|
||||
must make sure that they, too, receive or can get the source code. And you
|
||||
must show them these terms so they know their rights.
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and (2)
|
||||
offer you this license which gives you legal permission to copy,
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain that
|
||||
everyone understands that there is no warranty for this free software. If
|
||||
the software is modified by someone else and passed on, we want its
|
||||
recipients to know that what they have is not the original, so that any
|
||||
problems introduced by others will not reflect on the original authors'
|
||||
reputations.
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software patents. We
|
||||
wish to avoid the danger that redistributors of a free program will
|
||||
individually obtain patent licenses, in effect making the program
|
||||
proprietary. To prevent this, we have made it clear that any patent must
|
||||
be licensed for everyone's free use or not licensed at all.
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains a
|
||||
notice placed by the copyright holder saying it may be distributed under
|
||||
the terms of this General Public License. The "Program", below, refers to
|
||||
any such program or work, and a "work based on the Program" means either
|
||||
the Program or any derivative work under copyright law: that is to say, a
|
||||
work containing the Program or a portion of it, either verbatim or with
|
||||
modifications and/or translated into another language. (Hereinafter,
|
||||
translation is included without limitation in the term "modification".)
|
||||
Each licensee is addressed as "you".
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of running
|
||||
the Program is not restricted, and the output from the Program is covered
|
||||
only if its contents constitute a work based on the Program (independent
|
||||
of having been made by running the Program). Whether that is true depends
|
||||
on what the Program does.
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's source
|
||||
code as you receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice and
|
||||
disclaimer of warranty; keep intact all the notices that refer to this
|
||||
License and to the absence of any warranty; and give any other recipients
|
||||
of the Program a copy of this License along with the Program.
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and you
|
||||
may at your option offer warranty protection in exchange for a fee.
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion of it,
|
||||
thus forming a work based on the Program, and copy and distribute such
|
||||
modifications or work under the terms of Section 1 above, provided that
|
||||
you also meet all of these conditions:
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices stating
|
||||
that you changed the files and the date of any change.
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in whole
|
||||
or in part contains or is derived from the Program or any part thereof,
|
||||
to be licensed as a whole at no charge to all third parties under the
|
||||
terms of this License.
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively when
|
||||
run, you must cause it, when started running for such interactive use in
|
||||
the most ordinary way, to print or display an announcement including an
|
||||
appropriate copyright notice and a notice that there is no warranty (or
|
||||
else, saying that you provide a warranty) and that users may
|
||||
redistribute the program under these conditions, and telling the user
|
||||
how to view a copy of this License. (Exception: if the Program itself is
|
||||
interactive but does not normally print such an announcement, your work
|
||||
based on the Program is not required to print an announcement.)
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If identifiable
|
||||
sections of that work are not derived from the Program, and can be
|
||||
reasonably considered independent and separate works in themselves, then
|
||||
this License, and its terms, do not apply to those sections when you
|
||||
distribute them as separate works. But when you distribute the same
|
||||
sections as part of a whole which is a work based on the Program, the
|
||||
distribution of the whole must be on the terms of this License, whose
|
||||
permissions for other licensees extend to the entire whole, and thus to
|
||||
each and every part regardless of who wrote it.
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest your
|
||||
rights to work written entirely by you; rather, the intent is to exercise
|
||||
the right to control the distribution of derivative or collective works
|
||||
based on the Program.
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of a
|
||||
storage or distribution medium does not bring the other work under the
|
||||
scope of this License.
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it, under
|
||||
Section 2) in object code or executable form under the terms of Sections 1
|
||||
and 2 above provided that you also do one of the following:
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable source
|
||||
code, which must be distributed under the terms of Sections 1 and 2
|
||||
above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three years, to
|
||||
give any third party, for a charge no more than your cost of physically
|
||||
performing source distribution, a complete machine-readable copy of the
|
||||
corresponding source code, to be distributed under the terms of Sections
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer to
|
||||
distribute corresponding source code. (This alternative is allowed only
|
||||
for noncommercial distribution and only if you received the program in
|
||||
object code or executable form with such an offer, in accord with
|
||||
Subsection b above.)
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
The source code for a work means the preferred form of the work for making
|
||||
modifications to it. For an executable work, complete source code means
|
||||
all the source code for all modules it contains, plus any associated
|
||||
interface definition files, plus the scripts used to control compilation
|
||||
and installation of the executable. However, as a special exception, the
|
||||
source code distributed need not include anything that is normally
|
||||
distributed (in either source or binary form) with the major components
|
||||
(compiler, kernel, and so on) of the operating system on which the
|
||||
executable runs, unless that component itself accompanies the executable.
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
If distribution of executable or object code is made by offering access to
|
||||
copy from a designated place, then offering equivalent access to copy the
|
||||
source code from the same place counts as distribution of the source code,
|
||||
even though third parties are not compelled to copy the source along with
|
||||
the object code.
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program except
|
||||
as expressly provided under this License. Any attempt otherwise to copy,
|
||||
modify, sublicense or distribute the Program is void, and will
|
||||
automatically terminate your rights under this License. However, parties
|
||||
who have received copies, or rights, from you under this License will not
|
||||
have their licenses terminated so long as such parties remain in full
|
||||
compliance.
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
5. You are not required to accept this License, since you have not signed
|
||||
it. However, nothing else grants you permission to modify or distribute
|
||||
the Program or its derivative works. These actions are prohibited by law
|
||||
if you do not accept this License. Therefore, by modifying or distributing
|
||||
the Program (or any work based on the Program), you indicate your
|
||||
acceptance of this License to do so, and all its terms and conditions for
|
||||
copying, distributing or modifying the Program or works based on it.
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the original
|
||||
licensor to copy, distribute or modify the Program subject to these terms
|
||||
and conditions. You may not impose any further restrictions on the
|
||||
recipients' exercise of the rights granted herein. You are not responsible
|
||||
for enforcing compliance by third parties to this License.
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot distribute
|
||||
so as to satisfy simultaneously your obligations under this License and
|
||||
any other pertinent obligations, then as a consequence you may not
|
||||
distribute the Program at all. For example, if a patent license would not
|
||||
permit royalty-free redistribution of the Program by all those who receive
|
||||
copies directly or indirectly through you, then the only way you could
|
||||
satisfy both it and this License would be to refrain entirely from
|
||||
distribution of the Program.
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under any
|
||||
particular circumstance, the balance of the section is intended to apply
|
||||
and the section as a whole is intended to apply in other circumstances.
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any such
|
||||
claims; this section has the sole purpose of protecting the integrity of
|
||||
the free software distribution system, which is implemented by public
|
||||
license practices. Many people have made generous contributions to the
|
||||
wide range of software distributed through that system in reliance on
|
||||
consistent application of that system; it is up to the author/donor to
|
||||
decide if he or she is willing to distribute software through any other
|
||||
system and a licensee cannot impose that choice.
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to be a
|
||||
consequence of the rest of this License.
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in certain
|
||||
countries either by patents or by copyrighted interfaces, the original
|
||||
copyright holder who places the Program under this License may add an
|
||||
explicit geographical distribution limitation excluding those countries,
|
||||
so that distribution is permitted only in or among countries not thus
|
||||
excluded. In such case, this License incorporates the limitation as if
|
||||
written in the body of this License.
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions of
|
||||
the General Public License from time to time. Such new versions will be
|
||||
similar in spirit to the present version, but may differ in detail to
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
@ -234,16 +244,16 @@
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free
|
||||
Software Foundation.
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals of
|
||||
preserving the free status of all derivatives of our free software and of
|
||||
promoting the sharing and reuse of software generally.
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
@ -260,13 +270,70 @@
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
|
||||
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT
|
||||
LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES
|
||||
SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE
|
||||
WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN
|
||||
ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
||||
|
20
MANIFEST.in
20
MANIFEST.in
@ -1,10 +1,16 @@
|
||||
include Authors
|
||||
include Changelog
|
||||
include AUTHORS
|
||||
include COPYING
|
||||
include GPL
|
||||
include PLAN.*
|
||||
include README
|
||||
include TESTING
|
||||
include ToDo
|
||||
include pungi.spec
|
||||
include config/*
|
||||
include setup.cfg
|
||||
include tox.ini
|
||||
include share/*
|
||||
include share/multilib/*
|
||||
include doc/*
|
||||
include doc/_static/*
|
||||
include tests/*
|
||||
include tests/data/*
|
||||
include tests/data/specs/*
|
||||
recursive-include tests/fixtures *
|
||||
global-exclude *.py[co]
|
||||
global-exclude *~ *.sw? \#*\#
|
||||
|
99
Makefile
99
Makefile
@ -1,44 +1,113 @@
|
||||
.PHONY: all clean doc log test
|
||||
|
||||
PKGNAME=pungi
|
||||
VERSION=$(shell rpm -q --qf "%{VERSION}\n" --specfile ${PKGNAME}.spec)
|
||||
RELEASE=$(shell rpm -q --qf "%{RELEASE}\n" --specfile ${PKGNAME}.spec)
|
||||
HGTAG=${PKGNAME}-$(VERSION)-$(RELEASE)
|
||||
VERSION=$(shell rpm -q --qf "%{VERSION}\n" --specfile ${PKGNAME}.spec | head -n1)
|
||||
RELEASE=$(shell rpm -q --qf "%{RELEASE}\n" --specfile ${PKGNAME}.spec | head -n1)
|
||||
GITTAG=${PKGNAME}-$(VERSION)
|
||||
PKGRPMFLAGS=--define "_topdir ${PWD}" --define "_specdir ${PWD}" --define "_sourcedir ${PWD}/dist" --define "_srcrpmdir ${PWD}" --define "_rpmdir ${PWD}" --define "_builddir ${PWD}"
|
||||
|
||||
RPM="noarch/${PKGNAME}-$(VERSION)-$(RELEASE).noarch.rpm"
|
||||
SRPM="${PKGNAME}-$(VERSION)-$(RELEASE).src.rpm"
|
||||
|
||||
PYTEST=pytest
|
||||
|
||||
default: all
|
||||
|
||||
all:
|
||||
@echo "Nothing to do"
|
||||
all: help
|
||||
|
||||
|
||||
help:
|
||||
@echo "Usage: make <target>"
|
||||
@echo
|
||||
@echo "Available targets are:"
|
||||
@echo " help show this text"
|
||||
@echo " clean remove python bytecode and temp files"
|
||||
@echo " doc build documentation"
|
||||
@echo " install install program on current system"
|
||||
@echo " test run tests"
|
||||
@echo " test-coverage run tests and generate a coverage report"
|
||||
@echo " test-compose run a small teest compose (requires test data)"
|
||||
@echo " test-data build test data (requirement for running tests)"
|
||||
@echo
|
||||
@echo "Available rel-eng targets are:"
|
||||
@echo " archive create source tarball"
|
||||
@echo " log display changelog for spec file"
|
||||
@echo " tag create a git tag according to version and release from spec file"
|
||||
@echo " rpm build rpm"
|
||||
@echo " srpm build srpm"
|
||||
@echo " rpminstall build rpm and install it"
|
||||
@echo " release build srpm and create git tag"
|
||||
|
||||
|
||||
tag:
|
||||
@hg tag -m "$(HGTAG)" $(HGTAG)
|
||||
# @hg push
|
||||
@git tag -a -m "Tag as $(GITTAG)" -f $(GITTAG)
|
||||
@echo "Tagged as $(GITTAG)"
|
||||
|
||||
|
||||
Changelog:
|
||||
(GIT_DIR=.git git log > .changelog.tmp && mv .changelog.tmp Changelog; rm -f .changelog.tmp) || (touch Changelog; echo 'git directory not found: installing possibly empty changelog.' >&2)
|
||||
|
||||
|
||||
log:
|
||||
@(LC_ALL=C date +"* %a %b %e %Y `git config --get user.name` <`git config --get user.email`> - VERSION"; git log --pretty="format:- %s (%ae)" | sed -r 's/ \(([^@]+)@[^)]+\)/ (\1)/g' | cat) | less
|
||||
|
||||
|
||||
archive:
|
||||
@rm -f Changelog
|
||||
@rm -f MANIFEST
|
||||
@make Changelog
|
||||
@rm -rf ${PKGNAME}-$(VERSION)/
|
||||
@python setup.py sdist > /dev/null
|
||||
@echo "The archive is in dist/${PKGNAME}-$(VERSION).tar.gz"
|
||||
@python setup.py sdist --formats=bztar > /dev/null
|
||||
@echo "The archive is in dist/${PKGNAME}-$(VERSION).tar.bz2"
|
||||
|
||||
|
||||
srpm: archive
|
||||
@rm -f $(SRPM)
|
||||
@rpmbuild -bs ${PKGRPMFLAGS} ${PKGNAME}.spec
|
||||
@echo "The srpm is in $(SRPM)"
|
||||
|
||||
|
||||
rpm: archive
|
||||
@rpmbuild --clean -bb ${PKGRPMFLAGS} ${PKGNAME}.spec
|
||||
@echo "The rpm is in $(RPM)"
|
||||
|
||||
|
||||
rpminstall: rpm
|
||||
@rpm -ivh --force $(RPM)
|
||||
|
||||
|
||||
release: tag srpm
|
||||
|
||||
|
||||
install:
|
||||
@python setup.py install
|
||||
|
||||
|
||||
clean:
|
||||
@rm -f *.rpm
|
||||
@rm -rf noarch
|
||||
@rm -f *.tar.gz
|
||||
@rm -rf dist
|
||||
@rm -f MANIFEST
|
||||
@python setup.py clean
|
||||
@rm -vf *.rpm
|
||||
@rm -vrf noarch
|
||||
@rm -vf *.tar.gz
|
||||
@rm -vrf dist
|
||||
@rm -vf MANIFEST
|
||||
@rm -vf Changelog
|
||||
@find . -\( -name "*.pyc" -o -name '*.pyo' -o -name "*~" -o -name "__pycache__" -\) -delete
|
||||
@find . -depth -type d -a -name '*.egg-info' -exec rm -rf {} \;
|
||||
|
||||
|
||||
test:
|
||||
$(PYTEST) $(PYTEST_OPTS)
|
||||
|
||||
test-coverage:
|
||||
$(PYTEST) --cov=pungi --cov-report term --cov-report html --cov-config tox.ini $(PYTEST_OPTS)
|
||||
|
||||
test-data:
|
||||
./tests/data/specs/build.sh
|
||||
|
||||
test-compose:
|
||||
cd tests && ./test_compose.sh
|
||||
|
||||
test-multi-compose:
|
||||
PYTHONPATH=$$(pwd) PATH=$$(pwd)/bin:$$PATH pungi-orchestrate --debug start tests/data/multi-compose.conf
|
||||
|
||||
doc:
|
||||
cd doc; make html
|
||||
|
9
README
9
README
@ -1,9 +0,0 @@
|
||||
Pungi
|
||||
An anaconda based installation spin tool
|
||||
|
||||
This project is aimed at making a public / free tool to spin installation
|
||||
trees/isos of Fedora. It will be written in python (for many obvious
|
||||
reasons). Code style I hope will be of a simple "master" process that can
|
||||
call any number of subprocesses depending on a configuration set.
|
||||
|
||||
See http://hosted.fedoraproject.org/projects/pungi for more information.
|
39
README.md
Normal file
39
README.md
Normal file
@ -0,0 +1,39 @@
|
||||
# Pungi
|
||||
|
||||
*Pungi* is a distribution compose tool.
|
||||
|
||||
Composes are release snapshots that contain release deliverables such as:
|
||||
|
||||
- installation trees
|
||||
- RPMs
|
||||
- repodata
|
||||
- comps
|
||||
- (bootable) ISOs
|
||||
- kickstart trees
|
||||
- anaconda images
|
||||
- images for PXE boot
|
||||
|
||||
|
||||
## Tool overview
|
||||
|
||||
*Pungi* consists of multiple separate executables backed by a common library.
|
||||
|
||||
The main entry-point is the `pungi-koji` script. It loads the compose
|
||||
configuration and kicks off the process. Composing itself is done in phases.
|
||||
Each phase is responsible for generating some artifacts on disk and updating
|
||||
the `compose` object that is threaded through all the phases.
|
||||
|
||||
*Pungi* itself does not actually do that much. Most of the actual work is
|
||||
delegated to separate executables. *Pungi* just makes sure that all the
|
||||
commands are invoked in the appropriate order and with correct arguments. It
|
||||
also moves the artifacts to correct locations.
|
||||
|
||||
|
||||
## Links
|
||||
|
||||
- Documentation: https://docs.pagure.org/pungi/
|
||||
- Upstream GIT: https://pagure.io/pungi/
|
||||
- Issue tracker: https://pagure.io/pungi/issues
|
||||
- Questions can be asked in the *#fedora-releng* IRC channel on irc.libera.chat
|
||||
or in the matrix room
|
||||
[`#releng:fedoraproject.org`](https://matrix.to/#/#releng:fedoraproject.org)
|
74
TODO
Normal file
74
TODO
Normal file
@ -0,0 +1,74 @@
|
||||
Random thoughts on what needs to be done before Pungi 4.0 is completed.
|
||||
|
||||
|
||||
Define building blocks and their metadata
|
||||
=========================================
|
||||
|
||||
* rpms in yum repos
|
||||
* comps
|
||||
* kickstart trees
|
||||
* isos
|
||||
* kickstart trees
|
||||
* bootable images
|
||||
* readme files
|
||||
* license(s)
|
||||
|
||||
|
||||
Compose structure
|
||||
=================
|
||||
* topdir
|
||||
* work, logs, etc.
|
||||
* compose
|
||||
* $variant
|
||||
* $arch
|
||||
* $content_type (rpms, isos, kickstart trees, etc.)
|
||||
* actual content
|
||||
|
||||
|
||||
Split Pungi into smaller well-defined tools
|
||||
===========================================
|
||||
|
||||
* process initial packages
|
||||
* comps
|
||||
* json mapping
|
||||
* ???
|
||||
|
||||
* grab initial package set
|
||||
* yum repos
|
||||
* koji instance (basically what mash does today)
|
||||
|
||||
* resolve deps (gather)
|
||||
* self-hosting
|
||||
* fulltree
|
||||
* multilib
|
||||
* langpacks
|
||||
|
||||
* create repos
|
||||
|
||||
* create install images
|
||||
* lorax
|
||||
|
||||
* create isos
|
||||
* isos
|
||||
* bootable
|
||||
* hybrid
|
||||
* implant md5sum
|
||||
* jigdo
|
||||
* checksums
|
||||
|
||||
* run tests
|
||||
* just quick sanity tests
|
||||
|
||||
* notification
|
||||
* email
|
||||
* messagebus
|
||||
|
||||
|
||||
Unsorted
|
||||
========
|
||||
* run any tasks in koji or local host
|
||||
* support for non-rpm content? (java artifacts, etc.)
|
||||
* docs!
|
||||
* unit tests!
|
||||
* use productmd for metadata: https://github.com/release-engineering/productmd/
|
||||
* use next-gen tools: createrepo_c, mergerepo_c, dnf, hawkey, libcomps
|
5
ToDo
5
ToDo
@ -1,5 +0,0 @@
|
||||
Drop release notes files in the tree
|
||||
Make pungi use the Fedora layout, composedir/topdir/os/<arch>/Fedora, composedir/topdir/<arch>/iso/, composedir/topdir/<arch>/debug/
|
||||
Get debuginfo packages?
|
||||
Put working items in composedir/work
|
||||
Create a logging system, log to composedir/logs/
|
13350
config/comps-fc7.xml
13350
config/comps-fc7.xml
File diff suppressed because it is too large
Load Diff
@ -1,12 +0,0 @@
|
||||
kernel
|
||||
xorg-x11-fonts-ISO8859-1-75dpi
|
||||
busybox-anaconda
|
||||
dejavu-lgc-fonts
|
||||
xorg-x11-fonts-base
|
||||
memtest86+
|
||||
xorg-x11-drivers
|
||||
selinux-policy-targeted
|
||||
anaconda-runtime
|
||||
man
|
||||
joe
|
||||
grub
|
@ -1,21 +0,0 @@
|
||||
# Pungi config file
|
||||
#
|
||||
# # or ; can be used at the start of a line, ; only to comment inline.
|
||||
|
||||
[default]
|
||||
product_name = Fedora ; The name used during install
|
||||
product_path = Fedora ; The directory where RPMS go
|
||||
iso_basename = F ; The first part of the iso file name
|
||||
bugurl = http://bugzilla.redhat.com ; Used for betanag
|
||||
comps = /etc/pungi/comps-fc7.xml ; Used to define package groupings and default installs
|
||||
manifest = /etc/pungi/minimal-manifest ; Used to determine what to bring in. Supports Kickstart syntax
|
||||
yumconf = /etc/pungi/yum.conf.x86_64 ; Used to determine where to gather packages from
|
||||
destdir = /srv/pungi/Fedora ; Top level compose directory, must be clean
|
||||
cachedir = /srv/pungi/cache ; Cache used for repeat runs
|
||||
arch = x86_64 ; What arch to compose (must be same arch as system)
|
||||
version = devel ; Used both in install and part of the dest tree
|
||||
flavor = Custom ; Further define a given cut of the package set
|
||||
discs = 1 ; Number of discs needed to fit data.
|
||||
#cdsize = 4608.0 ; Not used if disc count is 1
|
||||
getsource = Yes ; Used to determine if we want source packages or not
|
||||
|
@ -1,30 +0,0 @@
|
||||
[main]
|
||||
#keepcache=0
|
||||
#debuglevel=2
|
||||
pkgpolicy=newest
|
||||
distroverpkg=redhat-release
|
||||
tolerant=1
|
||||
exactarch=1
|
||||
obsoletes=1
|
||||
gpgcheck=1
|
||||
reposdir=./
|
||||
#plugins=1
|
||||
metadata_expire=1800
|
||||
#exclude=\*.i?86
|
||||
|
||||
# PUT YOUR REPOS HERE OR IN separate files named file.repo
|
||||
# in /etc/yum.repos.d
|
||||
[development]
|
||||
name=Fedora Core - Development
|
||||
#baseurl=http://download.fedora.redhat.com/pub/fedora/linux/core/development/$basearch/os/
|
||||
mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=rawhide&arch=i386
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
|
||||
[development-source]
|
||||
name=Fedora Core - Development Source
|
||||
#baseurl=http://download.fedora.redhat.com/pub/fedora/linux/core/development/source/SRPMS
|
||||
mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=rawhide-source&arch=i386
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
|
@ -1,30 +0,0 @@
|
||||
[main]
|
||||
#keepcache=0
|
||||
#debuglevel=2
|
||||
pkgpolicy=newest
|
||||
distroverpkg=redhat-release
|
||||
tolerant=1
|
||||
exactarch=1
|
||||
obsoletes=1
|
||||
gpgcheck=1
|
||||
reposdir=./
|
||||
#plugins=1
|
||||
metadata_expire=1800
|
||||
#exclude=\*.i?86
|
||||
|
||||
# PUT YOUR REPOS HERE OR IN separate files named file.repo
|
||||
# in /etc/yum.repos.d
|
||||
[development]
|
||||
name=Fedora Core - Development
|
||||
#baseurl=http://download.fedora.redhat.com/pub/fedora/linux/core/development/x86_64/os/
|
||||
mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=rawhide&arch=x86_64
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
|
||||
[development-source]
|
||||
name=Fedora Core - Development Source
|
||||
#baseurl=http://download.fedora.redhat.com/pub/fedora/linux/core/development/x86_64/os/
|
||||
mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=rawhide-source&arch=x86_64
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
|
2
contrib/tmpfiles.d/pungi-clean-cache.conf
Normal file
2
contrib/tmpfiles.d/pungi-clean-cache.conf
Normal file
@ -0,0 +1,2 @@
|
||||
# Clean up pungi cache
|
||||
d /var/cache/pungi/createrepo_c/ - - - 30d
|
19
contrib/yum-dnf-compare/README
Normal file
19
contrib/yum-dnf-compare/README
Normal file
@ -0,0 +1,19 @@
|
||||
This directory contains scripts to compare YUM and DNF based gathering code in
|
||||
Pungi.
|
||||
|
||||
There are two scripts to help re-run the depsolving on existing code. As input
|
||||
they need .conf and .log file from an existing compose. They collect correct
|
||||
command line options from them and run the respective tool.
|
||||
|
||||
Run:
|
||||
|
||||
$ run-dnf.sh Server.x86_64.conf
|
||||
$ run-yum.sh Server.x86_64.conf
|
||||
|
||||
The results are stored in a file with .log.dnf or .log.yum extensions. When
|
||||
--interactive is used as second argument of the scripts, the output is printed
|
||||
to terminal (useful for running in debugger).
|
||||
|
||||
To compare the RPM package lists, run:
|
||||
|
||||
$ ./pungi-compare-depsolving Server.x86_64.log.yum Server.x86_64.log.dnf
|
63
contrib/yum-dnf-compare/pungi-compare-depsolving
Executable file
63
contrib/yum-dnf-compare/pungi-compare-depsolving
Executable file
@ -0,0 +1,63 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
here = sys.path[0]
|
||||
if here != '/usr/bin':
|
||||
# Git checkout
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))
|
||||
|
||||
from kobo.rpmlib import parse_nvra, make_nvra
|
||||
from pungi.wrappers.pungi import PungiWrapper
|
||||
|
||||
|
||||
def read_rpms(fn):
|
||||
pw = PungiWrapper()
|
||||
with open(fn, "r") as f:
|
||||
data, _, _ = pw.parse_log(f)
|
||||
result = set()
|
||||
for i in data["rpm"]:
|
||||
nvra = parse_nvra(i["path"])
|
||||
result.add(make_nvra(nvra, add_rpm=True))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('old', metavar='OLD', default='pungi-yum.log')
|
||||
parser.add_argument('new', metavar='NEW', default='pungi-dnf.log')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
yum_rpms = read_rpms(args.old)
|
||||
dnf_rpms = read_rpms(args.new)
|
||||
|
||||
removed = yum_rpms - dnf_rpms
|
||||
added = dnf_rpms - yum_rpms
|
||||
|
||||
|
||||
print("ADDED: %s" % len(added))
|
||||
for i in sorted(added):
|
||||
print(" %s" % i)
|
||||
|
||||
print()
|
||||
|
||||
print("REMOVED: %s" % len(removed))
|
||||
for i in sorted(removed):
|
||||
print(" %s" % i)
|
||||
|
||||
print()
|
||||
|
||||
print("ADDED: %6s" % len(added))
|
||||
print("REMOVED: %6s" % len(removed))
|
||||
print("YUM RPMS: %6s" % len(yum_rpms))
|
||||
print("DNF RPMS: %6s" % len(dnf_rpms))
|
||||
print("ALL RPMS: %6s" % len(yum_rpms | dnf_rpms))
|
||||
|
||||
if added or removed:
|
||||
sys.exit(1)
|
24
contrib/yum-dnf-compare/run-dnf.sh
Executable file
24
contrib/yum-dnf-compare/run-dnf.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -u
|
||||
set -o pipefail
|
||||
|
||||
HERE=$(dirname "$0")
|
||||
PATH=$HERE/../../bin:$PATH
|
||||
PYTHONPATH=$HERE/../../:${PYTHONPATH:-}
|
||||
export PATH PYTHONPATH
|
||||
|
||||
CONF=$1
|
||||
LOG=${CONF%%.conf}.log
|
||||
ARCH=$(head -n1 "$LOG" | tr ' ' '\n' | grep -- '--arch=')
|
||||
|
||||
CMD=(pungi-gather "--config=$CONF" "$ARCH" $(head -n1 "$LOG" | tr ' ' '\n' | grep '^--\(selfhosting\|fulltree\|greedy\|multilib\)'))
|
||||
|
||||
echo "${CMD[@]}"
|
||||
if [ $# -le 1 ] || [ "$2" != "--interactive" ]; then
|
||||
exec >"$LOG.dnf"
|
||||
fi
|
||||
exec 2>&1
|
||||
|
||||
exec "${CMD[@]}"
|
28
contrib/yum-dnf-compare/run-yum.sh
Executable file
28
contrib/yum-dnf-compare/run-yum.sh
Executable file
@ -0,0 +1,28 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
set -o pipefail
|
||||
set -u
|
||||
|
||||
export LANG=C
|
||||
|
||||
HERE=$(dirname "$0")
|
||||
PATH=$HERE/../../bin:$PATH
|
||||
PYTHONPATH=$HERE/../../
|
||||
export PATH PYTHONPATH
|
||||
|
||||
CONF="$1"
|
||||
LOG=${CONF%%.conf}.log
|
||||
|
||||
tempdir=$(mktemp -d)
|
||||
trap 'rm -rf $tempdir' EXIT
|
||||
|
||||
cmd=$(head -n1 "$LOG" | cut -d' ' -f2- | sed "s@--\(destdir\|cachedir\)=\(/[^/ ]*\)*@--\1=$tempdir/\1@g" | sed 's/^pungi3/pungi/' | sed "s@--config=/\([^/]*/\)*work/[^/]*/pungi/\([^ ]*\)@--config=$1@g")
|
||||
|
||||
echo "$cmd"
|
||||
if [ $# -le 1 ] || [ "$2" != "--interactive" ]; then
|
||||
exec >"$LOG.yum"
|
||||
fi
|
||||
exec 2>&1
|
||||
|
||||
$cmd
|
177
doc/Makefile
Normal file
177
doc/Makefile
Normal file
@ -0,0 +1,177 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Pungi.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Pungi.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/Pungi"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Pungi"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
0
pypungi/__init__.py → doc/_static/.keep
vendored
0
pypungi/__init__.py → doc/_static/.keep
vendored
557
doc/_static/phases.svg
vendored
Normal file
557
doc/_static/phases.svg
vendored
Normal file
@ -0,0 +1,557 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="610.46454"
|
||||
height="327.16599"
|
||||
viewBox="0 0 610.46457 327.16599"
|
||||
id="svg2"
|
||||
version="1.1"
|
||||
inkscape:version="1.3.2 (091e20e, 2023-11-25)"
|
||||
sodipodi:docname="phases.svg"
|
||||
inkscape:export-filename="/home/lsedlar/repos/pungi/doc/_static/phases.png"
|
||||
inkscape:export-xdpi="90"
|
||||
inkscape:export-ydpi="90"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="1"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="1.5"
|
||||
inkscape:cx="268"
|
||||
inkscape:cy="260.66667"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
showgrid="false"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1027"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:window-maximized="1"
|
||||
units="px"
|
||||
inkscape:document-rotation="0"
|
||||
showguides="true"
|
||||
inkscape:guide-bbox="true"
|
||||
fit-margin-top="7.4"
|
||||
fit-margin-left="7.4"
|
||||
fit-margin-right="7.4"
|
||||
fit-margin-bottom="7.4"
|
||||
lock-margins="true"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1" />
|
||||
<defs
|
||||
id="defs4">
|
||||
<marker
|
||||
inkscape:isstock="true"
|
||||
style="overflow:visible"
|
||||
id="Arrow1Lend"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lend">
|
||||
<path
|
||||
inkscape:connector-curvature="0"
|
||||
transform="matrix(-0.8,0,0,-0.8,-10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path4451" />
|
||||
</marker>
|
||||
</defs>
|
||||
<metadata
|
||||
id="metadata7">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
transform="matrix(1.066667,0,0,1.066667,-99.07321,-903.45239)"
|
||||
id="layer1"
|
||||
inkscape:groupmode="layer"
|
||||
inkscape:label="Vrstva 1">
|
||||
<g
|
||||
transform="translate(98.243246,-80.817124)"
|
||||
id="g3411">
|
||||
<rect
|
||||
style="fill:#8ae234;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3340"
|
||||
width="26.295755"
|
||||
height="49.214859"
|
||||
x="953.49097"
|
||||
y="49.250374"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="51.554729"
|
||||
y="970.26605"
|
||||
id="text3360"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan3362"
|
||||
x="51.554729"
|
||||
y="970.26605"
|
||||
style="font-size:13.1479px;line-height:1.25">Pkgset</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
transform="translate(56.378954,-80.817124)"
|
||||
id="g3398">
|
||||
<rect
|
||||
y="553.98242"
|
||||
x="953.49097"
|
||||
height="46.01757"
|
||||
width="26.295755"
|
||||
id="rect3400"
|
||||
style="fill:#3465a4;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="557.61566"
|
||||
y="971.33813"
|
||||
id="text3396"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan3398"
|
||||
x="557.61566"
|
||||
y="971.33813"
|
||||
style="font-size:13.1479px;line-height:1.25">Test</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3720"
|
||||
transform="translate(97.49995,-0.34404039)">
|
||||
<rect
|
||||
style="fill:#fce94f;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3336"
|
||||
width="26.295755"
|
||||
height="39.669899"
|
||||
x="873.01788"
|
||||
y="2.3186533"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="6.2600794"
|
||||
y="891.1604"
|
||||
id="text3356"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan3358"
|
||||
x="6.2600794"
|
||||
y="891.1604"
|
||||
style="font-size:13.1479px;line-height:1.25">Init</tspan></text>
|
||||
</g>
|
||||
<path
|
||||
inkscape:connector-curvature="0"
|
||||
id="path3642"
|
||||
d="M 100.90864,859.8891 H 654.22706"
|
||||
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.17467px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)" />
|
||||
<g
|
||||
transform="translate(26.249988)"
|
||||
id="g262">
|
||||
<g
|
||||
id="g234">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="179.38934"
|
||||
x="872.67383"
|
||||
height="162.72726"
|
||||
width="26.295755"
|
||||
id="rect3342"
|
||||
style="fill:#fcaf3e;fill-rule:evenodd;stroke:none;stroke-width:0.838448px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3364"
|
||||
y="890.72327"
|
||||
x="181.69368"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
y="890.72327"
|
||||
x="181.69368"
|
||||
id="tspan3366"
|
||||
sodipodi:role="line">Buildinstall</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3639"
|
||||
transform="translate(75.925692,-0.34404039)">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="103.28194"
|
||||
x="905.2099"
|
||||
height="54.197887"
|
||||
width="26.295755"
|
||||
id="rect3344"
|
||||
style="fill:#729fcf;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3368"
|
||||
y="923.25934"
|
||||
x="106.1384"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
y="923.25934"
|
||||
x="106.1384"
|
||||
id="tspan3370"
|
||||
sodipodi:role="line">Gather</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
transform="translate(15.925722,63.405928)"
|
||||
id="g3647">
|
||||
<g
|
||||
id="g3644">
|
||||
<rect
|
||||
style="fill:#ad7fa8;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3346"
|
||||
width="26.295755"
|
||||
height="72.729973"
|
||||
x="905.2099"
|
||||
y="162.92607"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
</g>
|
||||
<text
|
||||
id="text3372"
|
||||
y="923.25934"
|
||||
x="165.23042"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
y="923.25934"
|
||||
x="165.23042"
|
||||
id="tspan3374"
|
||||
sodipodi:role="line">ExtraFiles</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
transform="translate(-2.824268,-0.34404039)"
|
||||
id="g3658">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="241.10229"
|
||||
x="905.2099"
|
||||
height="78.636055"
|
||||
width="26.295755"
|
||||
id="rect3348"
|
||||
style="fill:#e9b96e;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3376"
|
||||
y="921.86945"
|
||||
x="243.95874"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
y="921.86945"
|
||||
x="243.95874"
|
||||
id="tspan3378"
|
||||
sodipodi:role="line">Createrepo</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3408"
|
||||
transform="translate(-74.638308,113.77258)">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="254.60153"
|
||||
x="823.54675"
|
||||
height="53.653927"
|
||||
width="26.295755"
|
||||
id="rect3350-3"
|
||||
style="fill:#729fcf;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3380-2"
|
||||
y="840.3219"
|
||||
x="256.90588"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
id="tspan3406"
|
||||
sodipodi:role="line"
|
||||
x="256.90588"
|
||||
y="840.3219">OSTree</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
transform="translate(-252.46536,-85.861863)"
|
||||
id="g288">
|
||||
<g
|
||||
transform="translate(0.56706579)"
|
||||
id="g3653">
|
||||
<rect
|
||||
style="fill:#fcaf3e;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3428"
|
||||
width="26.295755"
|
||||
height="101.85102"
|
||||
x="1022.637"
|
||||
y="490.33765"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="492.642"
|
||||
y="1039.4121"
|
||||
id="text3430"><tspan
|
||||
id="tspan283"
|
||||
sodipodi:role="line"
|
||||
x="492.642"
|
||||
y="1039.4121"
|
||||
style="font-size:12px;line-height:0">OSTreeInstaller</tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g
|
||||
id="g2"
|
||||
transform="translate(-1.4062678e-8,9.3749966)">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
style="fill:#e9b96e;fill-rule:evenodd;stroke:none;stroke-width:1.85901px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3338-1"
|
||||
width="103.12497"
|
||||
height="115.80065"
|
||||
x="863.29883"
|
||||
y="486.55563" />
|
||||
<text
|
||||
id="text3384-0"
|
||||
y="921.73846"
|
||||
x="489.56451"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1475px;line-height:1.25"
|
||||
id="tspan3391"
|
||||
sodipodi:role="line"
|
||||
x="489.56451"
|
||||
y="921.73846">ImageChecksum</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
transform="translate(-42.209584,-80.817124)"
|
||||
id="g3458">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
style="fill:#edd400;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3338"
|
||||
width="26.295755"
|
||||
height="102.36562"
|
||||
x="953.49097"
|
||||
y="420.13605" />
|
||||
<text
|
||||
id="text3384"
|
||||
y="971.54041"
|
||||
x="422.99252"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
y="971.54041"
|
||||
x="422.99252"
|
||||
id="tspan3386"
|
||||
sodipodi:role="line"
|
||||
style="font-size:13.1479px;line-height:1.25">Createiso</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3453"
|
||||
transform="translate(-42.466031,-84.525321)">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="420.39337"
|
||||
x="989.65247"
|
||||
height="101.85102"
|
||||
width="26.295755"
|
||||
id="rect3352"
|
||||
style="fill:#73d216;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3388"
|
||||
y="1006.4276"
|
||||
x="422.69772"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
y="1006.4276"
|
||||
x="422.69772"
|
||||
id="tspan3390"
|
||||
sodipodi:role="line"
|
||||
style="font-size:13.1479px;line-height:1.25">LiveImages</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3448"
|
||||
transform="translate(-42.466031,-88.485966)">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
y="420.39337"
|
||||
x="1026.0664"
|
||||
height="101.85102"
|
||||
width="26.295755"
|
||||
id="rect3354"
|
||||
style="fill:#f57900;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<text
|
||||
id="text3392"
|
||||
y="1042.8416"
|
||||
x="422.69772"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
y="1042.8416"
|
||||
x="422.69772"
|
||||
id="tspan3394"
|
||||
sodipodi:role="line"
|
||||
style="font-size:13.1479px;line-height:1.25">ImageBuild</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3443"
|
||||
transform="translate(-43.173123,-92.80219)">
|
||||
<rect
|
||||
style="fill:#edd400;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3422"
|
||||
width="26.295755"
|
||||
height="101.85102"
|
||||
x="1062.8359"
|
||||
y="421.10046"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="423.40482"
|
||||
y="1079.6111"
|
||||
id="text3424"><tspan
|
||||
id="tspan3434"
|
||||
sodipodi:role="line"
|
||||
x="423.40482"
|
||||
y="1079.6111"
|
||||
style="font-size:13.1479px;line-height:1.25">LiveMedia</tspan></text>
|
||||
</g>
|
||||
<rect
|
||||
style="fill:#c17d11;fill-rule:evenodd;stroke:none;stroke-width:1.48416px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect290"
|
||||
width="26.295755"
|
||||
height="224.35098"
|
||||
x="1091.7223"
|
||||
y="378.43698"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="380.74133"
|
||||
y="1106.6223"
|
||||
id="text294"><tspan
|
||||
y="1106.6223"
|
||||
x="380.74133"
|
||||
sodipodi:role="line"
|
||||
id="tspan301"
|
||||
style="font-size:12px;line-height:0">OSBS</tspan></text>
|
||||
<g
|
||||
transform="translate(-70.933542,-51.043149)"
|
||||
id="g3819">
|
||||
<rect
|
||||
style="fill:#73d216;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3801"
|
||||
width="26.295755"
|
||||
height="101.85102"
|
||||
x="1052.2335"
|
||||
y="448.86087"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="451.16522"
|
||||
y="1069.0087"
|
||||
id="text3805"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
sodipodi:role="line"
|
||||
x="451.16522"
|
||||
y="1069.0087"
|
||||
id="tspan3812">ExtraIsos</tspan></text>
|
||||
</g>
|
||||
<rect
|
||||
y="377.92242"
|
||||
x="1122.3463"
|
||||
height="224.24059"
|
||||
width="26.295755"
|
||||
id="rect87"
|
||||
style="fill:#5ed4ec;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1.48006px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;line-height:0%;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="380.7789"
|
||||
y="1140.3958"
|
||||
id="text91"><tspan
|
||||
style="font-size:13.1479px;line-height:1.25"
|
||||
sodipodi:role="line"
|
||||
id="tspan89"
|
||||
x="380.7789"
|
||||
y="1140.3958">Repoclosure</tspan></text>
|
||||
<g
|
||||
id="g206"
|
||||
transform="translate(0,-1.8749994)">
|
||||
<rect
|
||||
style="fill:#fcd9a4;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1.00033px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect290-6"
|
||||
width="26.295755"
|
||||
height="101.91849"
|
||||
x="1032.3469"
|
||||
y="377.92731"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="380.23166"
|
||||
y="1049.1219"
|
||||
id="text294-7"><tspan
|
||||
y="1049.1219"
|
||||
x="380.23166"
|
||||
sodipodi:role="line"
|
||||
id="tspan301-5"
|
||||
style="font-size:12px;line-height:0">KiwiBuild</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g3">
|
||||
<g
|
||||
id="g1">
|
||||
<g
|
||||
id="g4">
|
||||
<rect
|
||||
transform="matrix(0,1,1,0,0,0)"
|
||||
style="fill:#729fcf;fill-rule:evenodd;stroke:none;stroke-width:1.83502px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect3338-1-3"
|
||||
width="103.12497"
|
||||
height="115.80065"
|
||||
x="983.44263"
|
||||
y="486.55563" />
|
||||
<text
|
||||
id="text3384-0-6"
|
||||
y="1038.8422"
|
||||
x="489.56451"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
xml:space="preserve"><tspan
|
||||
style="font-size:13.1475px;line-height:1.25"
|
||||
id="tspan3391-7"
|
||||
sodipodi:role="line"
|
||||
x="489.56451"
|
||||
y="1038.8422">ImageContainer</tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g
|
||||
id="g206-1"
|
||||
transform="translate(-0.04628921,28.701853)">
|
||||
<rect
|
||||
style="fill:#fcaf3e;fill-rule:evenodd;stroke:none;stroke-width:1.00033px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
id="rect290-6-7"
|
||||
width="26.295755"
|
||||
height="101.91849"
|
||||
x="1032.3469"
|
||||
y="377.92731"
|
||||
transform="matrix(0,1,1,0,0,0)" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:0%;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="380.23166"
|
||||
y="1049.1219"
|
||||
id="text294-7-5"><tspan
|
||||
y="1049.1219"
|
||||
x="380.23166"
|
||||
sodipodi:role="line"
|
||||
id="tspan301-5-5"
|
||||
style="font-size:12px;line-height:0">OSBuild</tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 23 KiB |
BIN
doc/_static/pungi_snake-sm-dark.png
vendored
Normal file
BIN
doc/_static/pungi_snake-sm-dark.png
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 13 KiB |
0
doc/_templates/.keep
vendored
Normal file
0
doc/_templates/.keep
vendored
Normal file
70
doc/about.rst
Normal file
70
doc/about.rst
Normal file
@ -0,0 +1,70 @@
|
||||
=============
|
||||
About Pungi
|
||||
=============
|
||||
|
||||
.. figure:: _static/pungi_snake-sm-dark.png
|
||||
:align: right
|
||||
:alt: Pungi Logo
|
||||
|
||||
*Pungi* is a distribution compose tool.
|
||||
|
||||
Composes are release snapshots that contain release deliverables such as:
|
||||
|
||||
- installation trees
|
||||
|
||||
- RPMs
|
||||
- repodata
|
||||
- comps
|
||||
|
||||
- (bootable) ISOs
|
||||
- kickstart trees
|
||||
|
||||
- anaconda images
|
||||
- images for PXE boot
|
||||
|
||||
|
||||
Tool overview
|
||||
=============
|
||||
|
||||
*Pungi* consists of multiple separate executables backed by a common library.
|
||||
|
||||
The main entry-point is the ``pungi-koji`` script. It loads the compose
|
||||
configuration and kicks off the process. Composing itself is done in phases.
|
||||
Each phase is responsible for generating some artifacts on disk and updating
|
||||
the ``compose`` object that is threaded through all the phases.
|
||||
|
||||
*Pungi* itself does not actually do that much. Most of the actual work is
|
||||
delegated to separate executables. *Pungi* just makes sure that all the
|
||||
commands are invoked in the appropriate order and with correct arguments. It
|
||||
also moves the artifacts to correct locations.
|
||||
|
||||
The executable name ``pungi-koji`` comes from the fact that most of those
|
||||
separate executables submit tasks to Koji that does the actual work in an
|
||||
auditable way.
|
||||
|
||||
However unlike doing everything manually in Koji, Pungi will make sure you are
|
||||
building all images from the same package set, and will produce even
|
||||
deliverables that Koji can not create like YUM repos and installer ISOs.
|
||||
|
||||
|
||||
Links
|
||||
=====
|
||||
- Upstream GIT: https://pagure.io/pungi/
|
||||
- Issue tracker: https://pagure.io/pungi/issues
|
||||
- Questions can be asked on *#fedora-releng* IRC channel on FreeNode
|
||||
|
||||
|
||||
Origin of name
|
||||
==============
|
||||
|
||||
The name *Pungi* comes from the instrument used to charm snakes. *Anaconda*
|
||||
being the software Pungi was manipulating, and anaconda being a snake, led to
|
||||
the referential naming.
|
||||
|
||||
The first name, which was suggested by Seth Vidal, was *FIST*, *Fedora
|
||||
Installation <Something> Tool*. That name was quickly discarded and replaced
|
||||
with Pungi.
|
||||
|
||||
There was also a bit of an inside joke that when said aloud, it could sound
|
||||
like punji, which is `a sharpened stick at the bottom of a
|
||||
trap <https://en.wikipedia.org/wiki/Punji_stick>`_. Kind of like software…
|
27
doc/comps.rst
Normal file
27
doc/comps.rst
Normal file
@ -0,0 +1,27 @@
|
||||
.. _comps:
|
||||
|
||||
Processing comps files
|
||||
======================
|
||||
|
||||
The comps file that Pungi takes as input is not really pure comps as used by
|
||||
tools like DNF. There are extensions used to customize how the file is processed.
|
||||
|
||||
The first step of Pungi processing is to retrieve the actual file. This can use
|
||||
anything that :ref:`scm_support` supports.
|
||||
|
||||
Pungi extensions are ``arch`` attribute on ``packageref``, ``group`` and
|
||||
``environment`` tags. The value of this attribute is a comma separated list of
|
||||
architectures.
|
||||
|
||||
Second step Pungi performs is creating a file for each architecture. This is
|
||||
done by removing all elements with incompatible ``arch`` attribute. No
|
||||
additional clean up is performed on this file. The resulting file is only used
|
||||
internally for the rest of the compose process.
|
||||
|
||||
Third and final step is to create comps file for each Variant.Arch combination.
|
||||
This is the actual file that will be included in the compose. The start file is
|
||||
the original input file, from which all elements with incompatible architecture
|
||||
are removed. Then clean up is performed by removing all empty groups, removing
|
||||
non-existing groups from environments and categories and finally removing empty
|
||||
environments and categories. As a last step groups not listed in the variants
|
||||
file are removed.
|
258
doc/conf.py
Normal file
258
doc/conf.py
Normal file
@ -0,0 +1,258 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pungi documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Jul 2 08:11:04 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = ".rst"
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = "index"
|
||||
|
||||
# General information about the project.
|
||||
project = "Pungi"
|
||||
copyright = "2016, Red Hat, Inc."
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = "4.7"
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = "4.7.0"
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ["_build"]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = "sphinx"
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = "default"
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ["_static"]
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
# html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
# html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = "Pungidoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
("index", "Pungi.tex", "Pungi Documentation", "Daniel Mach", "manual"),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(
|
||||
"index",
|
||||
"Pungi",
|
||||
"Pungi Documentation",
|
||||
"Daniel Mach",
|
||||
"Pungi",
|
||||
"One line description of project.",
|
||||
"Miscellaneous",
|
||||
),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
# texinfo_no_detailmenu = False
|
2278
doc/configuration.rst
Normal file
2278
doc/configuration.rst
Normal file
File diff suppressed because it is too large
Load Diff
166
doc/contributing.rst
Normal file
166
doc/contributing.rst
Normal file
@ -0,0 +1,166 @@
|
||||
=====================
|
||||
Contributing to Pungi
|
||||
=====================
|
||||
|
||||
|
||||
Set up development environment
|
||||
==============================
|
||||
|
||||
In order to work on *Pungi*, you should install recent version of *Fedora*.
|
||||
|
||||
Python2
|
||||
-------
|
||||
|
||||
Fedora 29 is recommended because some packages are not available in newer Fedora release, e.g. python2-libcomps.
|
||||
|
||||
Install required packages ::
|
||||
|
||||
$ sudo dnf install -y krb5-devel gcc make libcurl-devel python2-devel python2-createrepo_c kobo-rpmlib yum python2-libcomps python2-libselinx
|
||||
|
||||
Python3
|
||||
-------
|
||||
|
||||
Install required packages ::
|
||||
|
||||
$ sudo dnf install -y krb5-devel gcc make libcurl-devel python3-devel python3-createrepo_c python3-libcomps
|
||||
|
||||
Developing
|
||||
==========
|
||||
|
||||
Currently the development workflow for Pungi is on master branch:
|
||||
|
||||
- Make your own fork at https://pagure.io/pungi
|
||||
- Clone your fork locally (replacing $USERNAME with your own)::
|
||||
|
||||
git clone git@pagure.io:forks/$USERNAME/pungi.git
|
||||
|
||||
- cd into your local clone and add the remote upstream for rebasing::
|
||||
|
||||
cd pungi
|
||||
git remote add upstream git@pagure.io:pungi.git
|
||||
|
||||
.. note::
|
||||
This workflow assumes that you never ``git commit`` directly to the master
|
||||
branch of your fork. This will make more sense when we cover rebasing
|
||||
below.
|
||||
|
||||
- create a topic branch based on master::
|
||||
|
||||
git branch my_topic_branch master
|
||||
git checkout my_topic_branch
|
||||
|
||||
|
||||
- Make edits, changes, add new features, etc. and then make sure to pull
|
||||
from upstream master and rebase before submitting a pull request::
|
||||
|
||||
# lets just say you edited setup.py for sake of argument
|
||||
git checkout my_topic_branch
|
||||
|
||||
# make changes to setup.py
|
||||
black setup.py
|
||||
tox
|
||||
git add setup.py
|
||||
git commit -s -m "added awesome feature to setup.py"
|
||||
|
||||
# now we rebase
|
||||
git checkout master
|
||||
git pull --rebase upstream master
|
||||
git push origin master
|
||||
git push origin --tags
|
||||
git checkout my_topic_branch
|
||||
git rebase master
|
||||
|
||||
# resolve merge conflicts if any as a result of your development in
|
||||
# your topic branch
|
||||
git push origin my_topic_branch
|
||||
|
||||
.. note::
|
||||
In order for your commit to be merged:
|
||||
|
||||
- you must sign-off on it. Use ``-s`` option when running ``git commit``.
|
||||
|
||||
- The code must be well formatted via ``black`` and pass ``flake8`` checking. Run ``tox -e black,flake8`` to do the check.
|
||||
|
||||
- Create pull request in the pagure.io web UI
|
||||
|
||||
- For convenience, here is a bash shell function that can be placed in your
|
||||
~/.bashrc and called such as ``pullupstream pungi-4-devel`` that will
|
||||
automate a large portion of the rebase steps from above::
|
||||
|
||||
pullupstream () {
|
||||
if [[ -z "$1" ]]; then
|
||||
printf "Error: must specify a branch name (e.g. - master, devel)\n"
|
||||
else
|
||||
pullup_startbranch=$(git describe --contains --all HEAD)
|
||||
git checkout $1
|
||||
git pull --rebase upstream master
|
||||
git push origin $1
|
||||
git push origin --tags
|
||||
git checkout ${pullup_startbranch}
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
You must write unit tests for any new code (except for trivial changes). Any
|
||||
code without sufficient test coverage may not be merged.
|
||||
|
||||
To run all existing tests, suggested method is to use *tox*. ::
|
||||
|
||||
$ sudo dnf install python3-tox -y
|
||||
|
||||
$ tox -e py3
|
||||
$ tox -e py27
|
||||
|
||||
Alternatively you could create a virtualenv, install deps and run tests
|
||||
manually if you don't want to use tox. ::
|
||||
|
||||
$ sudo dnf install python3-virtualenvwrapper -y
|
||||
$ mkvirtualenv --system-site-packages py3
|
||||
$ workon py3
|
||||
$ pip install -r requirements.txt -r test-requirements.txt
|
||||
$ make test
|
||||
|
||||
# or with coverage
|
||||
$ make test-coverage
|
||||
|
||||
If you need to run specified tests, *pytest* is recommended. ::
|
||||
|
||||
# Activate virtualenv first
|
||||
|
||||
# Run tests
|
||||
$ pytest tests/test_config.py
|
||||
$ pytest tests/test_config.py -k test_pkgset_mismatch_repos
|
||||
|
||||
In the ``tests/`` directory there is a shell script ``test_compose.sh`` that
|
||||
you can use to try and create a miniature compose on dummy data. The actual
|
||||
data will be created by running ``make test-data`` in project root. ::
|
||||
|
||||
$ sudo dnf -y install rpm-build createrepo_c isomd5sum genisoimage syslinux
|
||||
|
||||
# Activate virtualenv (the one created by tox could be used)
|
||||
$ source .tox/py3/bin/activate
|
||||
|
||||
$ python setup.py develop
|
||||
$ make test-data
|
||||
$ make test-compose
|
||||
|
||||
This testing compose does not actually use all phases that are available, and
|
||||
there is no checking that the result is correct. It only tells you whether it
|
||||
crashed or not.
|
||||
|
||||
.. note::
|
||||
Even when it finishes successfully, it may print errors about
|
||||
``repoclosure`` on *Server-Gluster.x86_64* in *test* phase. This is not a
|
||||
bug.
|
||||
|
||||
|
||||
Documenting
|
||||
===========
|
||||
|
||||
You must write documentation for any new features and functional changes.
|
||||
Any code without sufficient documentation may not be merged.
|
||||
|
||||
To generate the documentation, run ``make doc`` in project root.
|
480
doc/examples.rst
Normal file
480
doc/examples.rst
Normal file
@ -0,0 +1,480 @@
|
||||
.. _examples:
|
||||
|
||||
Big picture examples
|
||||
====================
|
||||
|
||||
Actual Pungi configuration files can get very large. This pages brings two
|
||||
examples of (almost) full configuration for two different composes.
|
||||
|
||||
Fedora Rawhide compose
|
||||
----------------------
|
||||
|
||||
This is a shortened configuration for a Fedora Rawhide compose as of 2019-10-14.
|
||||
|
||||
::
|
||||
|
||||
release_name = 'Fedora'
|
||||
release_short = 'Fedora'
|
||||
release_version = 'Rawhide'
|
||||
release_is_layered = False
|
||||
|
||||
bootable = True
|
||||
comps_file = {
|
||||
'scm': 'git',
|
||||
'repo': 'https://pagure.io/fedora-comps.git',
|
||||
'branch': 'master',
|
||||
'file': 'comps-rawhide.xml',
|
||||
# Merge translations by running make. This command will generate the file.
|
||||
'command': 'make comps-rawhide.xml'
|
||||
}
|
||||
module_defaults_dir = {
|
||||
'scm': 'git',
|
||||
'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
|
||||
'branch': 'main',
|
||||
'dir': '.'
|
||||
}
|
||||
# Optional module obsoletes configuration which is merged
|
||||
# into the module index and gets resolved
|
||||
module_obsoletes_dir = {
|
||||
'scm': 'git',
|
||||
'repo': 'https://pagure.io/releng/fedora-module-defaults.git',
|
||||
'branch': 'main',
|
||||
'dir': 'obsoletes'
|
||||
}
|
||||
|
||||
variants_file='variants-fedora.xml'
|
||||
sigkeys = ['12C944D0']
|
||||
|
||||
# Put packages into subdirectories hashed by their initial letter.
|
||||
hashed_directories = True
|
||||
|
||||
# There is a special profile for use with compose. It makes Pungi
|
||||
# authenticate automatically as rel-eng user.
|
||||
koji_profile = 'compose_koji'
|
||||
|
||||
# RUNROOT settings
|
||||
runroot = True
|
||||
runroot_channel = 'compose'
|
||||
runroot_tag = 'f32-build'
|
||||
|
||||
# PKGSET
|
||||
pkgset_source = 'koji'
|
||||
pkgset_koji_tag = 'f32'
|
||||
pkgset_koji_inherit = False
|
||||
|
||||
filter_system_release_packages = False
|
||||
|
||||
# GATHER
|
||||
gather_method = {
|
||||
'^.*': { # For all variants
|
||||
'comps': 'deps', # resolve dependencies for packages from comps file
|
||||
'module': 'nodeps', # but not for packages from modules
|
||||
}
|
||||
}
|
||||
gather_backend = 'dnf'
|
||||
gather_profiler = True
|
||||
check_deps = False
|
||||
greedy_method = 'build'
|
||||
|
||||
repoclosure_backend = 'dnf'
|
||||
|
||||
# CREATEREPO
|
||||
createrepo_deltas = False
|
||||
createrepo_database = True
|
||||
createrepo_use_xz = True
|
||||
createrepo_extra_args = ['--zck', '--zck-dict-dir=/usr/share/fedora-repo-zdicts/rawhide']
|
||||
|
||||
# CHECKSUMS
|
||||
media_checksums = ['sha256']
|
||||
media_checksum_one_file = True
|
||||
media_checksum_base_filename = '%(release_short)s-%(variant)s-%(version)s-%(arch)s-%(date)s%(type_suffix)s.%(respin)s'
|
||||
|
||||
# CREATEISO
|
||||
iso_hfs_ppc64le_compatible = False
|
||||
|
||||
# BUILDINSTALL
|
||||
buildinstall_method = 'lorax'
|
||||
buildinstall_skip = [
|
||||
# No installer for Modular variant
|
||||
('^Modular$', {'*': True}),
|
||||
# No 32 bit installer for Everything.
|
||||
('^Everything$', {'i386': True}),
|
||||
]
|
||||
|
||||
# Enables macboot on x86_64 for all variants and disables upgrade image building
|
||||
# everywhere.
|
||||
lorax_options = [
|
||||
('^.*$', {
|
||||
'x86_64': {
|
||||
'nomacboot': False
|
||||
},
|
||||
'ppc64le': {
|
||||
# Use 3GB image size for ppc64le.
|
||||
'rootfs_size': 3
|
||||
},
|
||||
'*': {
|
||||
'noupgrade': True
|
||||
}
|
||||
})
|
||||
]
|
||||
|
||||
additional_packages = [
|
||||
('^(Server|Everything)$', {
|
||||
'*': [
|
||||
# Add all architectures of dracut package.
|
||||
'dracut.*',
|
||||
# Add all packages matching this pattern
|
||||
'autocorr-*',
|
||||
],
|
||||
}),
|
||||
|
||||
('^Everything$', {
|
||||
# Everything should include all packages from the tag. This only
|
||||
# applies to the native arch. Multilib will still be pulled in
|
||||
# according to multilib rules.
|
||||
'*': ['*'],
|
||||
}),
|
||||
]
|
||||
|
||||
filter_packages = [
|
||||
("^.*$", {"*": ["glibc32", "libgcc32"]}),
|
||||
('(Server)$', {
|
||||
'*': [
|
||||
'kernel*debug*',
|
||||
'kernel-kdump*',
|
||||
]
|
||||
}),
|
||||
]
|
||||
|
||||
multilib = [
|
||||
('^Everything$', {
|
||||
'x86_64': ['devel', 'runtime'],
|
||||
})
|
||||
]
|
||||
|
||||
# These packages should never be multilib on any arch.
|
||||
multilib_blacklist = {
|
||||
'*': [
|
||||
'kernel', 'kernel-PAE*', 'kernel*debug*', 'java-*', 'php*', 'mod_*', 'ghc-*'
|
||||
],
|
||||
}
|
||||
|
||||
# These should be multilib even if they don't match the rules defined above.
|
||||
multilib_whitelist = {
|
||||
'*': ['wine', '*-static'],
|
||||
}
|
||||
|
||||
createiso_skip = [
|
||||
# Keep binary ISOs for Server, but not source ones.
|
||||
('^Server$', {'src': True}),
|
||||
|
||||
# Remove all other ISOs.
|
||||
('^Everything$', {'*': True, 'src': True}),
|
||||
('^Modular$', {'*': True, 'src': True}),
|
||||
]
|
||||
|
||||
# Image name respecting Fedora's image naming policy
|
||||
image_name_format = '%(release_short)s-%(variant)s-%(disc_type)s-%(arch)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s.iso'
|
||||
# Use the same format for volume id
|
||||
image_volid_formats = [
|
||||
'%(release_short)s-%(variant)s-%(disc_type)s-%(arch)s-%(version)s'
|
||||
]
|
||||
# Used by Pungi to replace 'Cloud' with 'C' (etc.) in ISO volume IDs.
|
||||
# There is a hard 32-character limit on ISO volume IDs, so we use
|
||||
# these to try and produce short enough but legible IDs. Note this is
|
||||
# duplicated in Koji for live images, as livemedia-creator does not
|
||||
# allow Pungi to tell it what volume ID to use. Note:
|
||||
# https://fedoraproject.org/wiki/User:Adamwill/Draft_fedora_image_naming_policy
|
||||
volume_id_substitutions = {
|
||||
'Beta': 'B',
|
||||
'Rawhide': 'rawh',
|
||||
'Silverblue': 'SB',
|
||||
'Cinnamon': 'Cinn',
|
||||
'Cloud': 'C',
|
||||
'Design_suite': 'Dsgn',
|
||||
'Electronic_Lab': 'Elec',
|
||||
'Everything': 'E',
|
||||
'Scientific_KDE': 'SciK',
|
||||
'Security': 'Sec',
|
||||
'Server': 'S',
|
||||
'Workstation': 'WS',
|
||||
}
|
||||
|
||||
disc_types = {
|
||||
'boot': 'netinst',
|
||||
'live': 'Live',
|
||||
}
|
||||
|
||||
translate_paths = [
|
||||
('/mnt/koji/compose/', 'https://kojipkgs.fedoraproject.org/compose/'),
|
||||
]
|
||||
|
||||
# These will be inherited by live_media, live_images and image_build
|
||||
global_ksurl = 'git+https://pagure.io/fedora-kickstarts.git?#HEAD'
|
||||
global_release = '!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN'
|
||||
global_version = 'Rawhide'
|
||||
# live_images ignores this in favor of live_target
|
||||
global_target = 'f32'
|
||||
|
||||
image_build = {
|
||||
'^Container$': [
|
||||
{
|
||||
'image-build': {
|
||||
'format': [('docker', 'tar.xz')],
|
||||
'name': 'Fedora-Container-Base',
|
||||
'kickstart': 'fedora-container-base.ks',
|
||||
'distro': 'Fedora-22',
|
||||
'disk_size': 5,
|
||||
'arches': ['armhfp', 'aarch64', 'ppc64le', 's390x', 'x86_64'],
|
||||
'repo': 'Everything',
|
||||
'install_tree_from': 'Everything',
|
||||
'subvariant': 'Container_Base',
|
||||
'failable': ['*'],
|
||||
},
|
||||
'factory-parameters': {
|
||||
'dockerversion': "1.10.1",
|
||||
'docker_cmd': '[ "/bin/bash" ]',
|
||||
'docker_env': '[ "DISTTAG=f32container", "FGC=f32", "container=oci" ]',
|
||||
'docker_label': '{ "name": "fedora", "license": "MIT", "vendor": "Fedora Project", "version": "32"}',
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
live_media = {
|
||||
'^Workstation$': [
|
||||
{
|
||||
'name': 'Fedora-Workstation-Live',
|
||||
'kickstart': 'fedora-live-workstation.ks',
|
||||
# Variants.xml also contains aarch64 and armhfp, but there
|
||||
# should be no live media for those arches.
|
||||
'arches': ['x86_64', 'ppc64le'],
|
||||
'failable': ['ppc64le'],
|
||||
# Take packages and install tree from Everything repo.
|
||||
'repo': 'Everything',
|
||||
'install_tree_from': 'Everything',
|
||||
}
|
||||
],
|
||||
'^Spins': [
|
||||
# There are multiple media for Spins variant. They use subvariant
|
||||
# field so that they can be identified in the metadata.
|
||||
{
|
||||
'name': 'Fedora-KDE-Live',
|
||||
'kickstart': 'fedora-live-kde.ks',
|
||||
'arches': ['x86_64'],
|
||||
'repo': 'Everything',
|
||||
'install_tree_from': 'Everything',
|
||||
'subvariant': 'KDE'
|
||||
|
||||
},
|
||||
{
|
||||
'name': 'Fedora-Xfce-Live',
|
||||
'kickstart': 'fedora-live-xfce.ks',
|
||||
'arches': ['x86_64'],
|
||||
'failable': ['*'],
|
||||
'repo': 'Everything',
|
||||
'install_tree_from': 'Everything',
|
||||
'subvariant': 'Xfce'
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
failable_deliverables = [
|
||||
# Installer and ISOs for server failing do not abort the compose.
|
||||
('^Server$', {
|
||||
'*': ['buildinstall', 'iso'],
|
||||
}),
|
||||
('^.*$', {
|
||||
# Buildinstall is not blocking
|
||||
'src': ['buildinstall'],
|
||||
# Nothing on i386, ppc64le blocks the compose
|
||||
'i386': ['buildinstall', 'iso'],
|
||||
'ppc64le': ['buildinstall', 'iso'],
|
||||
's390x': ['buildinstall', 'iso'],
|
||||
})
|
||||
]
|
||||
|
||||
ostree = {
|
||||
"^Silverblue$": {
|
||||
"version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
|
||||
# To get config, clone master branch from this repo and take
|
||||
# treefile from there.
|
||||
"treefile": "fedora-silverblue.yaml",
|
||||
"config_url": "https://pagure.io/workstation-ostree-config.git",
|
||||
"config_branch": "master",
|
||||
# Consume packages from Everything
|
||||
"repo": "Everything",
|
||||
# Don't create a reference in the ostree repo (signing automation does that).
|
||||
"tag_ref": False,
|
||||
# Don't use change detection in ostree.
|
||||
"force_new_commit": True,
|
||||
# Use unified core mode for rpm-ostree composes
|
||||
"unified_core": True,
|
||||
# This is the location for the repo where new commit will be
|
||||
# created. Note that this is outside of the compose dir.
|
||||
"ostree_repo": "/mnt/koji/compose/ostree/repo/",
|
||||
"ostree_ref": "fedora/rawhide/${basearch}/silverblue",
|
||||
"arches": ["x86_64", "ppc64le", "aarch64"],
|
||||
"failable": ['*'],
|
||||
}
|
||||
}
|
||||
|
||||
ostree_container = {
|
||||
"^Sagano$": {
|
||||
"treefile": "fedora-tier-0-38.yaml",
|
||||
"config_url": "https://gitlab.com/CentOS/cloud/sagano.git",
|
||||
"config_branch": "main",
|
||||
# Consume packages from Everything
|
||||
"repo": "Everything",
|
||||
# Automatically generate a reasonable version
|
||||
"version": "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN",
|
||||
# Only run this for x86_64 even if Sagano has more arches
|
||||
"arches": ["x86_64"],
|
||||
}
|
||||
}
|
||||
|
||||
ostree_installer = [
|
||||
("^Silverblue$", {
|
||||
"x86_64": {
|
||||
"repo": "Everything",
|
||||
"release": None,
|
||||
"rootfs_size": "8",
|
||||
# Take templates from this repository.
|
||||
'template_repo': 'https://pagure.io/fedora-lorax-templates.git',
|
||||
'template_branch': 'master',
|
||||
# Use following templates.
|
||||
"add_template": ["ostree-based-installer/lorax-configure-repo.tmpl",
|
||||
"ostree-based-installer/lorax-embed-repo.tmpl",
|
||||
"ostree-based-installer/lorax-embed-flatpaks.tmpl"],
|
||||
# And add these variables for the templates.
|
||||
"add_template_var": [
|
||||
"ostree_install_repo=https://kojipkgs.fedoraproject.org/compose/ostree/repo/",
|
||||
"ostree_update_repo=https://ostree.fedoraproject.org",
|
||||
"ostree_osname=fedora",
|
||||
"ostree_oskey=fedora-32-primary",
|
||||
"ostree_contenturl=mirrorlist=https://ostree.fedoraproject.org/mirrorlist",
|
||||
"ostree_install_ref=fedora/rawhide/x86_64/silverblue",
|
||||
"ostree_update_ref=fedora/rawhide/x86_64/silverblue",
|
||||
"flatpak_remote_name=fedora",
|
||||
"flatpak_remote_url=oci+https://registry.fedoraproject.org",
|
||||
"flatpak_remote_refs=runtime/org.fedoraproject.Platform/x86_64/f30 app/org.gnome.Baobab/x86_64/stable",
|
||||
],
|
||||
'failable': ['*'],
|
||||
},
|
||||
})
|
||||
]
|
||||
|
||||
|
||||
RCM Tools compose
|
||||
-----------------
|
||||
|
||||
This is a small compose used to deliver packages to Red Hat internal users. The
|
||||
configuration is split into two files.
|
||||
|
||||
::
|
||||
|
||||
# rcmtools-common.conf
|
||||
|
||||
release_name = "RCM Tools"
|
||||
release_short = "RCMTOOLS"
|
||||
release_version = "2.0"
|
||||
release_type = "updates"
|
||||
release_is_layered = True
|
||||
createrepo_c = True
|
||||
createrepo_checksum = "sha256"
|
||||
|
||||
# PKGSET
|
||||
pkgset_source = "koji"
|
||||
koji_profile = "brew"
|
||||
pkgset_koji_inherit = True
|
||||
|
||||
|
||||
# GENERAL SETTINGS
|
||||
bootable = False
|
||||
comps_file = "rcmtools-comps.xml"
|
||||
variants_file = "rcmtools-variants.xml"
|
||||
sigkeys = ["3A3A33A3"]
|
||||
|
||||
|
||||
# RUNROOT settings
|
||||
runroot = False
|
||||
|
||||
|
||||
# GATHER
|
||||
gather_method = "deps"
|
||||
check_deps = True
|
||||
|
||||
additional_packages = [
|
||||
('.*', {
|
||||
'*': ['puddle', 'rcm-nexus'],
|
||||
}
|
||||
),
|
||||
]
|
||||
|
||||
# Set repoclosure_strictness to fatal to avoid installation dependency
|
||||
# issues in production composes
|
||||
repoclosure_strictness = [
|
||||
("^.*$", {
|
||||
"*": "fatal"
|
||||
})
|
||||
]
|
||||
|
||||
|
||||
Configuration specific for different base products is split into separate files.
|
||||
|
||||
::
|
||||
|
||||
# rcmtools-rhel-7.conf
|
||||
from rcmtools-common import *
|
||||
|
||||
# BASE PRODUCT
|
||||
base_product_name = "Red Hat Enterprise Linux"
|
||||
base_product_short = "RHEL"
|
||||
base_product_version = "7"
|
||||
|
||||
# PKGSET
|
||||
pkgset_koji_tag = "rcmtools-rhel-7-compose"
|
||||
|
||||
# remove i386 arch on rhel7
|
||||
tree_arches = ["aarch64", "ppc64le", "s390x", "x86_64"]
|
||||
|
||||
check_deps = False
|
||||
|
||||
# Packages in these repos are available to satisfy dependencies inside the
|
||||
# compose, but will not be pulled in.
|
||||
gather_lookaside_repos = [
|
||||
("^Client|Client-optional$", {
|
||||
"x86_64": [
|
||||
"http://example.redhat.com/rhel/7/Client/x86_64/os/",
|
||||
"http://example.redhat.com/rhel/7/Client/x86_64/optional/os/",
|
||||
],
|
||||
}),
|
||||
("^Workstation|Workstation-optional$", {
|
||||
"x86_64": [
|
||||
"http://example.redhat.com/rhel/7/Workstation/x86_64/os/",
|
||||
"http://example.redhat.com/rhel/7/Workstation/x86_64/optional/os/",
|
||||
],
|
||||
}),
|
||||
("^Server|Server-optional$", {
|
||||
"aarch64": [
|
||||
"http://example.redhat.com/rhel/7/Server/aarch64/os/",
|
||||
"http://example.redhat.com/rhel/7/Server/aarch64/optional/os/",
|
||||
],
|
||||
"ppc64": [
|
||||
"http://example.redhat.com/rhel/7/Server/ppc64/os/",
|
||||
"http://example.redhat.com/rhel/7/Server/ppc64/optional/os/",
|
||||
],
|
||||
"ppc64le": [
|
||||
"http://example.redhat.com/rhel/7/Server/ppc64le/os/",
|
||||
"http://example.redhat.com/rhel/7/Server/ppc64le/optional/os/",
|
||||
],
|
||||
"s390x": [
|
||||
"http://example.redhat.com/rhel/7/Server/s390x/os/",
|
||||
"http://example.redhat.com/rhel/7/Server/s390x/optional/os/",
|
||||
],
|
||||
"x86_64": [
|
||||
"http://example.redhat.com/rhel/7/Server/x86_64/os/",
|
||||
"http://example.redhat.com/rhel/7/Server/x86_64/optional/os/",
|
||||
],
|
||||
})
|
||||
]
|
90
doc/format.rst
Normal file
90
doc/format.rst
Normal file
@ -0,0 +1,90 @@
|
||||
==================
|
||||
Config file format
|
||||
==================
|
||||
|
||||
The configuration file parser is provided by `kobo
|
||||
<https://github.com/release-engineering/kobo>`_
|
||||
|
||||
The file follows a Python-like format. It consists of a sequence of variables
|
||||
that have a value assigned to them. ::
|
||||
|
||||
variable = value
|
||||
|
||||
The variable names must follow the same convention as Python code: start with a
|
||||
letter and consist of letters, digits and underscores only.
|
||||
|
||||
The values can be either an integer, float, boolean (``True`` or ``False``), a
|
||||
string or ``None``. Strings must be enclosed in either single or double quotes.
|
||||
|
||||
Complex types are supported as well.
|
||||
|
||||
A list is enclosed in square brackets and items are separated with commas.
|
||||
There can be a comma after the last item as well. ::
|
||||
|
||||
a_list = [1,
|
||||
2,
|
||||
3,
|
||||
]
|
||||
|
||||
A tuple works like a list, but is enclosed in parenthesis. ::
|
||||
|
||||
a_tuple = (1, "one")
|
||||
|
||||
A dictionary is wrapped in brackets, and consists of ``key: value`` pairs
|
||||
separated by commas. The keys can only be formed from basic types (int, float,
|
||||
string). ::
|
||||
|
||||
a_dict = {
|
||||
'foo': 'bar',
|
||||
1: None
|
||||
}
|
||||
|
||||
The value assigned to a variable can also be taken from another variable. ::
|
||||
|
||||
one = 1
|
||||
another = one
|
||||
|
||||
Anything on a line after a ``#`` symbol is ignored and functions as a comment.
|
||||
|
||||
|
||||
Importing other files
|
||||
=====================
|
||||
|
||||
It is possible to include another configuration file. The files are looked up
|
||||
relative to the currently processed file.
|
||||
|
||||
The general structure of import is: ::
|
||||
|
||||
from FILENAME import WHAT
|
||||
|
||||
The ``FILENAME`` should be just the base name of the file without extension
|
||||
(which must be ``.conf``). ``WHAT`` can either be a comma separated list of
|
||||
variables or ``*``. ::
|
||||
|
||||
# Opens constants.conf and brings PI and E into current scope.
|
||||
from constants import PI, E
|
||||
|
||||
# Opens common.conf and brings everything defined in that file into current
|
||||
# file as well.
|
||||
from common import *
|
||||
|
||||
.. note::
|
||||
Pungi will copy the configuration file given on command line into the
|
||||
``logs/`` directory. Only this single file will be copied, not any included
|
||||
ones. (Copying included files requires a fix in kobo library.)
|
||||
|
||||
The JSON-formatted dump of configuration is correct though.
|
||||
|
||||
Formatting strings
|
||||
==================
|
||||
|
||||
String interpolation is available as well. It uses a ``%``-encoded format. See
|
||||
Python documentation for more details. ::
|
||||
|
||||
joined = "%s %s" % (var_a, var_b)
|
||||
|
||||
a_dict = {
|
||||
"fst": 1,
|
||||
"snd": 2,
|
||||
}
|
||||
another = "%(fst)s %(snd)s" % a_dict
|
102
doc/gathering.rst
Normal file
102
doc/gathering.rst
Normal file
@ -0,0 +1,102 @@
|
||||
==================
|
||||
Gathering packages
|
||||
==================
|
||||
|
||||
A compose created by Pungi consists of one or more variants. A variant contains
|
||||
a subset of the content targeted at a particular use case.
|
||||
|
||||
There are different types of variants. The type affects how packages are
|
||||
gathered into the variant.
|
||||
|
||||
The inputs for gathering are defined by various gather sources. Packages from
|
||||
all sources are collected to create a big list of package names, comps groups
|
||||
names and a list of packages that should be filtered out.
|
||||
|
||||
.. note::
|
||||
The inputs for both explicit package list and comps file are interpreted as
|
||||
RPM names, not any arbitrary provides nor source package name.
|
||||
|
||||
Next, ``gather_method`` defines how the list is processed. For ``nodeps``, the
|
||||
results from source are used pretty much as is [#]_. For ``deps`` method, a
|
||||
process will be launched to figure out what dependencies are needed and those
|
||||
will be pulled in.
|
||||
|
||||
.. [#] The lists are filtered based on what packages are available in the
|
||||
package set, but nothing else will be pulled in.
|
||||
|
||||
|
||||
Variant types
|
||||
=============
|
||||
|
||||
*Variant*
|
||||
is a base type that has no special behaviour.
|
||||
|
||||
*Addon*
|
||||
is built on top of a regular variant. Any packages that should go to both
|
||||
the addon and its parent will be removed from addon. Packages that are only
|
||||
in addon but pulled in because of ``gather_fulltree`` option will be moved
|
||||
to parent.
|
||||
|
||||
*Integrated Layered Product*
|
||||
works similarly to *addon*. Additionally, all packages from addons on the
|
||||
same parent variant are removed from integrated layered products.
|
||||
|
||||
The main difference between an *addon* and *integrated layered product* is
|
||||
that *integrated layered product* has its own identity in the metadata
|
||||
(defined with product name and version).
|
||||
|
||||
.. note::
|
||||
There's also *Layered Product* as a term, but this is not related to
|
||||
variants. It's used to describe a product that is not a standalone
|
||||
operating system and is instead meant to be used on some other base
|
||||
system.
|
||||
|
||||
*Optional*
|
||||
contains packages that complete the base variants' package set. It always
|
||||
has ``fulltree`` and ``selfhosting`` enabled, so it contains build
|
||||
dependencies and packages which were not specifically requested for base
|
||||
variant.
|
||||
|
||||
|
||||
Some configuration options are overridden for particular variant types.
|
||||
|
||||
.. table:: Depsolving configuration
|
||||
|
||||
+-----------+--------------+--------------+
|
||||
| Variant | Fulltree | Selfhosting |
|
||||
+===========+==============+==============+
|
||||
| base | configurable | configurable |
|
||||
+-----------+--------------+--------------+
|
||||
| addon/ILP | enabled | disabled |
|
||||
+-----------+--------------+--------------+
|
||||
| optional | enabled | enabled |
|
||||
+-----------+--------------+--------------+
|
||||
|
||||
|
||||
Profiling
|
||||
=========
|
||||
|
||||
Profiling data on the ``pungi-gather`` tool can be enabled by setting the
|
||||
``gather_profiler`` configuration option to ``True``.
|
||||
|
||||
|
||||
Modular compose
|
||||
===============
|
||||
|
||||
A compose with ``gather_source`` set to ``module`` is called *modular*. The
|
||||
package list is determined by a list of modules.
|
||||
|
||||
The list of modules that will be put into a variant is defined in the
|
||||
``variants.xml`` file. The file can contain either *Name:Stream* or
|
||||
*Name:Stream:Version* references. See `Module Naming Policy
|
||||
<https://pagure.io/modularity/blob/master/f/source/development/building-modules/naming-policy.rst>`_
|
||||
for details. When *Version* is missing from the specification, Pungi will ask
|
||||
PDC for the latest one.
|
||||
|
||||
The module metadata in PDC contains a list of RPMs in the module as well as
|
||||
Koji tag from which the packages can be retrieved.
|
||||
|
||||
Restrictions
|
||||
------------
|
||||
|
||||
* A modular compose must always use Koji as a package set source.
|
25
doc/index.rst
Normal file
25
doc/index.rst
Normal file
@ -0,0 +1,25 @@
|
||||
.. Pungi documentation master file, created by
|
||||
sphinx-quickstart on Thu Jul 2 08:11:04 2015.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to Pungi's documentation!
|
||||
=================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
about
|
||||
phases
|
||||
format
|
||||
configuration
|
||||
examples
|
||||
scm_support
|
||||
messaging
|
||||
gathering
|
||||
koji
|
||||
comps
|
||||
contributing
|
||||
testing
|
105
doc/koji.rst
Normal file
105
doc/koji.rst
Normal file
@ -0,0 +1,105 @@
|
||||
======================
|
||||
Getting data from koji
|
||||
======================
|
||||
|
||||
When Pungi is configured to get packages from a Koji tag, it somehow needs to
|
||||
access the actual RPM files.
|
||||
|
||||
Historically, this required the storage used by Koji to be directly available
|
||||
on the host where Pungi was running. This was usually achieved by using NFS for
|
||||
the Koji volume, and mounting it on the compose host.
|
||||
|
||||
The compose could be created directly on the same volume. In such case the
|
||||
packages would be hardlinked, significantly reducing space consumption.
|
||||
|
||||
The compose could also be created on a different storage, in which case the
|
||||
packages would either need to be copied over or symlinked. Using symlinks
|
||||
requires that anything that accesses the compose (e.g. a download server) would
|
||||
also need to mount the Koji volume in the same location.
|
||||
|
||||
There is also a risk with symlinks that the package in Koji can change (due to
|
||||
being resigned for example), which would invalidate composes linking to it.
|
||||
|
||||
|
||||
Using Koji without direct mount
|
||||
===============================
|
||||
|
||||
It is possible now to run a compose from a Koji tag without direct access to
|
||||
Koji storage.
|
||||
|
||||
Pungi can download the packages over HTTP protocol, store them in a local
|
||||
cache, and consume them from there.
|
||||
|
||||
The local cache has similar structure to what is on the Koji volume.
|
||||
|
||||
When Pungi needs some package, it has a path on Koji volume. It will replace
|
||||
the ``topdir`` with the cache location. If such file exists, it will be used.
|
||||
If it doesn't exist, it will be downloaded from Koji (by replacing the
|
||||
``topdir`` with ``topurl``).
|
||||
|
||||
::
|
||||
|
||||
Koji path /mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
|
||||
Koji URL https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
|
||||
Local path /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
|
||||
|
||||
The packages can be hardlinked from this cache directory.
|
||||
|
||||
|
||||
Cleanup
|
||||
-------
|
||||
|
||||
While the approach above allows each RPM to be downloaded only once, it will
|
||||
eventually result in the Koji volume being mirrored locally. Most of the
|
||||
packages will however no longer be needed.
|
||||
|
||||
There is a script ``pungi-cache-cleanup`` that can help with that. It can find
|
||||
and remove files from the cache that are no longer needed.
|
||||
|
||||
A file is no longer needed if it has a single link (meaning it is only in the
|
||||
cache, not in any compose), and it has mtime older than a given threshold.
|
||||
|
||||
It doesn't make sense to delete files that are hardlinked in an existing
|
||||
compose as it would not save any space anyway.
|
||||
|
||||
The mtime check is meant to preserve files that are downloaded but not actually
|
||||
used in a compose, like a subpackage that is not included in any variant. Every
|
||||
time its existence in the local cache is checked, the mtime is updated.
|
||||
|
||||
|
||||
Race conditions?
|
||||
----------------
|
||||
|
||||
It should be safe to have multiple compose hosts share the same storage volume
|
||||
for generated composes and local cache.
|
||||
|
||||
If a cache file is accessed and it exists, there's no risk of race condition.
|
||||
|
||||
If two composes need the same file at the same time and it is not present yet,
|
||||
one of them will take a lock on it and start downloading. The other will wait
|
||||
until the download is finished.
|
||||
|
||||
The lock is only valid for a set amount of time (5 minutes) to avoid issues
|
||||
where the downloading process is killed in a way that blocks it from releasing
|
||||
the lock.
|
||||
|
||||
If the file is large and the network slow, the limit may not be enough to finish
|
||||
downloading. In that case the second process will steal the lock while the
|
||||
first process is still downloading. This will result in the same file being
|
||||
downloaded twice.
|
||||
|
||||
When the first process finishes the download, it will put the file into the
|
||||
local cache location. When the second process finishes, it will atomically
|
||||
replace it, but since it is the same file, the content will not change.
|
||||
|
||||
If the first compose already managed to hardlink the file before it gets
|
||||
replaced, there will be two copies of the file present locally.
|
||||
|
||||
|
||||
Integrity checking
|
||||
------------------
|
||||
|
||||
There is minimal integrity checking. RPM packages belonging to real builds will
|
||||
be checked to match the checksum provided by the Koji hub.
|
||||
|
||||
There is no checking for scratch builds or any images.
|
45
doc/messaging.rst
Normal file
45
doc/messaging.rst
Normal file
@ -0,0 +1,45 @@
|
||||
.. _messaging:
|
||||
|
||||
Progress notification
|
||||
=====================
|
||||
|
||||
*Pungi* has the ability to emit notification messages about progress and
|
||||
general status of the compose. These can be used to e.g. send messages to
|
||||
*fedmsg*. This is implemented by actually calling a separate script.
|
||||
|
||||
The script will be called with one argument describing action that just
|
||||
happened. A JSON-encoded object will be passed to standard input to provide
|
||||
more information about the event. At the very least, the object will contain a
|
||||
``compose_id`` key.
|
||||
|
||||
The notification script inherits working directory from the parent process and it
|
||||
can be called from the same directory ``pungi-koji`` is called from. The working directory
|
||||
is listed at the start of main log.
|
||||
|
||||
Currently these messages are sent:
|
||||
|
||||
* ``status-change`` -- when composing starts, finishes or fails; a ``status``
|
||||
key is provided to indicate details
|
||||
* ``phase-start`` -- on start of a phase
|
||||
* ``phase-stop`` -- when phase is finished
|
||||
* ``createiso-targets`` -- with a list of images to be created
|
||||
* ``createiso-imagedone`` -- when any single image is finished
|
||||
* ``createiso-imagefail`` -- when any single image fails to create
|
||||
* ``fail-to-start`` -- when there are incorrect CLI options or errors in
|
||||
configuration file; this message does not contain ``compose_id`` nor is it
|
||||
started in the compose directory (which does not exist yet)
|
||||
* ``ostree`` -- when a new commit is created, this message will announce its
|
||||
hash and the name of ref it is meant for.
|
||||
|
||||
For phase related messages ``phase_name`` key is provided as well.
|
||||
|
||||
A ``pungi-fedmsg-notification`` script is provided and understands this
|
||||
interface.
|
||||
|
||||
Setting it up
|
||||
-------------
|
||||
|
||||
The script should be provided as a command line argument
|
||||
``--notification-script``. ::
|
||||
|
||||
--notification-script=pungi-fedmsg-notification
|
175
doc/phases.rst
Normal file
175
doc/phases.rst
Normal file
@ -0,0 +1,175 @@
|
||||
.. _phases:
|
||||
|
||||
Phases
|
||||
======
|
||||
|
||||
Each invocation of ``pungi-koji`` consists of a set of phases.
|
||||
|
||||
.. image:: _static/phases.svg
|
||||
:alt: phase diagram
|
||||
|
||||
Most of the phases run sequentially (left-to-right in the diagram), but there
|
||||
are use cases where multiple phases run in parallel. This happens for phases
|
||||
whose main point is to wait for a Koji task to finish.
|
||||
|
||||
Init
|
||||
----
|
||||
|
||||
The first phase to ever run. Can not be skipped. It prepares the comps files
|
||||
for variants (by filtering out groups and packages that should not be there).
|
||||
See :doc:`comps` for details about how this is done.
|
||||
|
||||
Pkgset
|
||||
------
|
||||
|
||||
This phase loads a set of packages that should be composed. It has two separate
|
||||
results: it prepares repos with packages in ``work/`` directory (one per arch)
|
||||
for further processing, and it returns a data structure with mapping of
|
||||
packages to architectures.
|
||||
|
||||
Buildinstall
|
||||
------------
|
||||
|
||||
Spawns a bunch of threads, each of which runs the ``lorax`` command. The
|
||||
commands create ``boot.iso`` and other boot configuration files. The image is
|
||||
finally linked into the ``compose/`` directory as netinstall media.
|
||||
|
||||
The created images are also needed for creating live media or other images in
|
||||
later phases.
|
||||
|
||||
With ``lorax`` this phase runs one task per variant.arch combination.
|
||||
|
||||
Gather
|
||||
------
|
||||
|
||||
This phase uses data collected by ``pkgset`` phase and figures out what
|
||||
packages should be in each variant. The basic mapping can come from comps file,
|
||||
a JSON mapping or ``additional_packages`` config option. These inputs can then
|
||||
be enriched by adding all dependencies. See :doc:`gathering` for details.
|
||||
|
||||
Once the mapping is finalized, the packages are linked to appropriate places
|
||||
and the ``rpms.json`` manifest is created.
|
||||
|
||||
ExtraFiles
|
||||
----------
|
||||
|
||||
This phase collects extra files from the configuration and copies them to the
|
||||
compose directory. The files are described by a JSON file in the compose
|
||||
subtree where the files are copied. This metadata is meant to be distributed
|
||||
with the data (on ISO images).
|
||||
|
||||
Createrepo
|
||||
----------
|
||||
|
||||
This phase creates RPM repositories for each variant.arch tree. It is actually
|
||||
reading the ``rpms.json`` manifest to figure out which packages should be
|
||||
included.
|
||||
|
||||
OSTree
|
||||
------
|
||||
|
||||
Updates an ostree repository with a new commit with packages from the compose.
|
||||
The repository lives outside of the compose and is updated immediately. If the
|
||||
compose fails in a later stage, the commit will not be reverted.
|
||||
|
||||
Implementation wise, this phase runs ``rpm-ostree`` command in Koji runroot (to
|
||||
allow running on different arches).
|
||||
|
||||
Createiso
|
||||
---------
|
||||
|
||||
Generates ISO files and accumulates enough metadata to be able to create
|
||||
``image.json`` manifest. The file is however not created in this phase, instead
|
||||
it is dumped in the ``pungi-koji`` script itself.
|
||||
|
||||
The files include a repository with all RPMs from the variant. There will be
|
||||
multiple images if the packages do not fit on a single image.
|
||||
|
||||
The image will be bootable if ``buildinstall`` phase is enabled and the
|
||||
packages fit on a single image.
|
||||
|
||||
There can also be images with source repositories. These are never bootable.
|
||||
|
||||
ExtraIsos
|
||||
---------
|
||||
|
||||
This phase is very similar to ``createiso``, except it combines content from
|
||||
multiple variants onto a single image. Packages, repodata and extra files from
|
||||
each configured variant are put into a subdirectory. Additional extra files can
|
||||
be put into top level of the image. The image will be bootable if the main
|
||||
variant is bootable.
|
||||
|
||||
LiveImages, LiveMedia
|
||||
---------------------
|
||||
|
||||
Creates media in Koji with ``koji spin-livecd``, ``koji spin-appliance`` or
|
||||
``koji spin-livemedia`` command. When the media are finished, the images are
|
||||
copied into the ``compose/`` directory and metadata for images is updated.
|
||||
|
||||
ImageBuild
|
||||
----------
|
||||
|
||||
This phase wraps up ``koji image-build``. It also updates the metadata
|
||||
ultimately responsible for ``images.json`` manifest.
|
||||
|
||||
KiwiBuild
|
||||
---------
|
||||
|
||||
Similarly to image build, this phase creates a koji `kiwiBuild` task. In the
|
||||
background it uses Kiwi to create images.
|
||||
|
||||
OSBuild
|
||||
-------
|
||||
|
||||
Similarly to image build, this phase creates a koji `osbuild` task. In the
|
||||
background it uses OSBuild Composer to create images.
|
||||
|
||||
OSBS
|
||||
----
|
||||
|
||||
This phase builds container base images in `OSBS
|
||||
<http://osbs.readthedocs.io/en/latest/index.html>`_.
|
||||
|
||||
The finished images are available in registry provided by OSBS, but not
|
||||
downloaded directly into the compose. There is metadata about the created image
|
||||
in ``compose/metadata/osbs.json``.
|
||||
|
||||
ImageContainer
|
||||
--------------
|
||||
|
||||
This phase builds a container image in OSBS, and stores the metadata in the
|
||||
same file as OSBS phase. The container produced here wraps a different image,
|
||||
created in the ImageBuild or OSBuild phase. It can be useful to deliver a VM image
|
||||
to containerized environments.
|
||||
|
||||
OSTreeInstaller
|
||||
---------------
|
||||
|
||||
Creates bootable media that carry an ostree repository as a payload. These
|
||||
images are created by running ``lorax`` with special templates. Again it runs
|
||||
in Koji runroot.
|
||||
|
||||
Repoclosure
|
||||
-----------
|
||||
|
||||
Run ``repoclosure`` on each repository. By default errors are only reported
|
||||
in the log, the compose will still be considered a success. The actual error
|
||||
has to be looked up in the compose logs directory. Configuration allows customizing this.
|
||||
|
||||
ImageChecksum
|
||||
-------------
|
||||
|
||||
Responsible for generating checksums for the images. The checksums are stored
|
||||
in image manifest as well as files on disk. The list of images to be processed
|
||||
is obtained from the image manifest. This way all images will get the same
|
||||
checksums irrespective of the phase that created them.
|
||||
|
||||
Test
|
||||
----
|
||||
|
||||
This phase is supposed to run some sanity checks on the finished compose.
|
||||
|
||||
The only test is to check all images listed in the metadata and verify that they
|
||||
look sane. For ISO files headers are checked to verify the format is correct,
|
||||
and for bootable media a check is run to verify they have properties that allow
|
||||
booting.
|
100
doc/scm_support.rst
Normal file
100
doc/scm_support.rst
Normal file
@ -0,0 +1,100 @@
|
||||
.. _scm_support:
|
||||
|
||||
Exporting files from SCM
|
||||
========================
|
||||
|
||||
Multiple places in Pungi can use files from external storage. The configuration
|
||||
is similar independently of the backend that is used, although some features
|
||||
may be different.
|
||||
|
||||
The so-called ``scm_dict`` is always put into configuration as a dictionary,
|
||||
which can contain following keys.
|
||||
|
||||
* ``scm`` -- indicates which SCM system is used. This is always required.
|
||||
Allowed values are:
|
||||
|
||||
* ``file`` -- copies files from local filesystem
|
||||
* ``git`` -- copies files from a Git repository
|
||||
* ``cvs`` -- copies files from a CVS repository
|
||||
* ``rpm`` -- copies files from a package in the compose
|
||||
* ``koji`` -- downloads archives from a given build in Koji build system
|
||||
|
||||
* ``repo``
|
||||
|
||||
* for Git and CVS backends this should be URL to the repository
|
||||
* for RPM backend this should be a shell style glob matching package names
|
||||
(or a list of such globs)
|
||||
* for file backend this should be empty
|
||||
* for Koji backend this should be an NVR or package name
|
||||
|
||||
* ``branch``
|
||||
|
||||
* branch name for Git and CVS backends, with ``master`` and ``HEAD`` as defaults
|
||||
* Koji tag for koji backend if only package name is given
|
||||
* otherwise should not be specified
|
||||
|
||||
* ``file`` -- a list of files that should be exported.
|
||||
|
||||
* ``dir`` -- a directory that should be exported. All its contents will be
|
||||
exported. This option is mutually exclusive with ``file``.
|
||||
|
||||
* ``command`` -- defines a shell command to run after Git clone to generate the
|
||||
needed file (for example to run ``make``). Only supported in Git backend.
|
||||
|
||||
* ``options`` -- a dictionary of additional configuration options. These are
|
||||
specific to different backends.
|
||||
|
||||
Currently supported values for Git:
|
||||
|
||||
* ``credential_helper`` -- path to a credential helper used to supply
|
||||
username/password for remotes that require authentication.
|
||||
|
||||
|
||||
Koji examples
|
||||
-------------
|
||||
|
||||
There are two different ways how to configure the Koji backend. ::
|
||||
|
||||
{
|
||||
# Download all *.tar files from build my-image-1.0-1.
|
||||
"scm": "koji",
|
||||
"repo": "my-image-1.0-1",
|
||||
"file": "*.tar",
|
||||
}
|
||||
|
||||
{
|
||||
# Find latest build of my-image in tag my-tag and take files from
|
||||
# there.
|
||||
"scm": "koji",
|
||||
"repo": "my-image",
|
||||
"branch": "my-tag",
|
||||
"file": "*.tar",
|
||||
}
|
||||
|
||||
Using both tag name and exact NVR will result in error: the NVR would be
|
||||
interpreted as a package name, and would not match anything.
|
||||
|
||||
|
||||
``file`` vs. ``dir``
|
||||
--------------------
|
||||
|
||||
Exactly one of these two options has to be specified. Documentation for each
|
||||
configuration option should specify whether it expects a file or a directory.
|
||||
|
||||
For ``extra_files`` phase either key is valid and should be chosen depending on
|
||||
what the actual use case is.
|
||||
|
||||
|
||||
Caveats
|
||||
-------
|
||||
|
||||
The ``rpm`` backend can only be used in phases that would extract the files
|
||||
after ``pkgset`` phase finished. You can't get comps file from a package.
|
||||
|
||||
Depending on Git repository URL configuration Pungi can only export the
|
||||
requested content using ``git archive``. When a command should run this is not
|
||||
possible and a clone is always needed.
|
||||
|
||||
When using ``koji`` backend, it is required to provide configuration for Koji
|
||||
profile to be used (``koji_profile``). It is not possible to contact multiple
|
||||
different Koji instances.
|
42
doc/testing.rst
Normal file
42
doc/testing.rst
Normal file
@ -0,0 +1,42 @@
|
||||
===============
|
||||
Testing Pungi
|
||||
===============
|
||||
|
||||
|
||||
Test Data
|
||||
=========
|
||||
Tests require test data and not all of it is available in git.
|
||||
You must create test repositories before running the tests::
|
||||
|
||||
make test-data
|
||||
|
||||
Requirements: createrepo_c, rpmbuild
|
||||
|
||||
|
||||
Unit Tests
|
||||
==========
|
||||
Unit tests cover functionality of Pungi python modules.
|
||||
You can run all of them at once::
|
||||
|
||||
make test
|
||||
|
||||
which is shortcut to::
|
||||
|
||||
python2 setup.py test
|
||||
python3 setup.py test
|
||||
|
||||
You can alternatively run individual tests::
|
||||
|
||||
cd tests
|
||||
./<test>.py [<class>[.<test>]]
|
||||
|
||||
|
||||
Functional Tests
|
||||
================
|
||||
Because compose is quite complex process and not everything is covered with
|
||||
unit tests yet, the easiest way how to test if your changes did not break
|
||||
anything badly is to start a compose on a relatively small and well defined
|
||||
package set::
|
||||
|
||||
cd tests
|
||||
./test_compose.sh
|
40
doc/update-docs.sh
Executable file
40
doc/update-docs.sh
Executable file
@ -0,0 +1,40 @@
|
||||
#!/bin/bash

# Copyright (C) 2015 Red Hat, Inc.
# SPDX-License-Identifier: GPL-2.0

# Build the Sphinx HTML docs from the master branch and from the 4.0.x
# branch, then publish both into the docs repository on Pagure.
# Usage: ./update-docs.sh <release_version>
# The release version is only used in the publish commit message.

# Remove the temporary checkouts on any exit (success, failure, or signal).
trap cleanup EXIT

function cleanup() {
    printf "Run cleanup\\n"
    rm -rf "$dir_pungi" "$dir_pungi_doc"
}

if [ -z "$1" ]; then
    printf "Usage:\\n"
    printf "\\t%s release_version\\n" "$0"
    exit 1
fi

set -e
# Fresh clone of the sources; build the HTML docs from master.
dir_pungi=$(mktemp -d /tmp/pungi.XXX) || { echo "Failed to create temp directory"; exit 1; }
git clone https://pagure.io/pungi.git "$dir_pungi"
pushd "$dir_pungi"/doc
make html
popd

# Clone the published-docs repository and replace its content wholesale.
dir_pungi_doc=$(mktemp -d /tmp/pungi-doc.XXX) || { echo "Failed to create temp directory"; exit 1; }
git clone ssh://git@pagure.io/docs/pungi.git "$dir_pungi_doc"
pushd "$dir_pungi_doc"
git rm -fr ./*
cp -r "$dir_pungi"/doc/_build/html/* ./
# Rebuild from the 4.0.x branch and publish it under the 4.0/ subdirectory.
pushd "$dir_pungi"/doc
git checkout 4.0.x
make html
popd
mkdir 4.0
cp -r "$dir_pungi"/doc/_build/html/* ./4.0/
git add .
git commit -s -m "update rendered pungi docs for release $1"
git push origin master
popd
|
105
git-changelog
Executable file
105
git-changelog
Executable file
@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# git-changelog - Output a rpm changelog
|
||||
#
|
||||
# Copyright (C) 2009-2010 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published
|
||||
# by the Free Software Foundation; either version 2.1 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Author: David Cantrell <dcantrell@redhat.com>
|
||||
# Author: Brian C. Lane <bcl@redhat.com>
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import subprocess
|
||||
import textwrap
|
||||
from argparse import ArgumentParser
|
||||
|
||||
|
||||
class ChangeLog:
    """Build an RPM %changelog-style listing from ``git log`` output.

    ``name`` is the package name used in release tags (may be empty) and
    ``version`` is the last released version; the log covers all commits
    made after the tag ``<name>-<version>`` (or ``<version>``).
    """

    def __init__(self, name, version):
        self.name = name
        self.version = version

    def _getCommitDetail(self, commit, field):
        """Return the ``--pretty=format`` *field* of *commit*.

        Returns a single string for one-line values (an author e-mail is
        shortened to its local part), or a list of non-empty lines for
        multi-line values.
        """
        proc = subprocess.Popen(
            ["git", "log", "-1", "--pretty=format:%s" % field, commit],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        ).communicate()

        ret = proc[0].strip('\n').split('\n')

        if field == '%aE' and len(ret) == 1 and ret[0].find('@') != -1:
            # Author e-mail: keep only the part before the '@'.
            ret = ret[0].split('@')[0]
        elif len(ret) == 1:
            ret = ret[0]
        else:
            # Materialize a real list: on Python 3 `filter(...)` would
            # return a lazy iterator, which callers cannot index or reuse.
            ret = [x for x in ret if x != '']

        return ret

    def getLog(self):
        """Return a list of ``"summary (author)"`` strings for every
        non-merge commit made since the last release tag."""
        if not self.name:
            rev_range = "%s.." % (self.version)
        else:
            rev_range = "%s-%s.." % (self.name, self.version)
        proc = subprocess.Popen(
            ["git", "log", "--pretty=oneline", "--no-merges", rev_range],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        ).communicate()
        # Skip translation commits: in --pretty=oneline output the summary
        # starts at column 41 (40-char hash + one space), so finding
        # "l10n: " at index 41 means the summary begins with it.
        lines = [
            x for x in proc[0].strip('\n').split('\n')
            if x.find('l10n: ') != 41
        ]

        log = []
        for line in lines:
            # First whitespace-separated field of the oneline format is
            # the commit hash.
            commit = line.split(' ')[0]

            summary = self._getCommitDetail(commit, "%s")
            author = self._getCommitDetail(commit, "%aE")

            log.append("%s (%s)" % (summary.strip(), author))

        return log

    def formatLog(self):
        """Render getLog() entries as changelog bullets wrapped at 77
        columns, with continuation lines indented by two spaces."""
        s = ""
        for msg in self.getLog():
            sublines = textwrap.wrap(msg, 77)
            s = s + "- %s\n" % sublines[0]

            if len(sublines) > 1:
                for subline in sublines[1:]:
                    s = s + "  %s\n" % subline

        return s
|
||||
|
||||
|
||||
def main():
    """Parse command line options and print the generated changelog."""
    parser = ArgumentParser()
    parser.add_argument(
        "-n", "--name", help="Name of package used in tags"
    )
    parser.add_argument(
        "-v", "--version",
        help="Last version, changelog is commits after this tag",
    )
    options = parser.parse_args()

    changelog = ChangeLog(options.name, options.version)
    print(changelog.formatLog())


if __name__ == "__main__":
    main()
|
187
pungi
187
pungi
@ -1,187 +0,0 @@
|
||||
#!/usr/bin/python -tt
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
|
||||
import os
|
||||
import pypungi.gather
|
||||
import pypungi.pungi
|
||||
import yum
|
||||
|
||||
from ConfigParser import SafeConfigParser
|
||||
|
||||
def main():
    """Run a full (or partial) legacy pungi compose.

    Reads an INI config file (``default`` section), fills in defaults for
    any missing options, then runs the requested stages: gather,
    buildinstall, packageorder, splittree and createiso.

    NOTE(review): Python 2 only code (``except OSError, e``,
    ``print >>``). Relies on ``sys`` being imported as a module global by
    the ``__main__`` block below — would NameError if imported elsewhere.
    """
    # Set some default variables, can be overridden in config file

    # Turn this into a dict someday, to iterate over when setting defaults
    flavor = ""
    osdir = "os"
    sourcedir = "source"
    debugdir = "debug"
    isodir = "iso"
    # CD size in MB, used when splitting trees into media-sized chunks.
    cdsize = "685.0"
    relnotefilere = "eula.txt fedora.css GPL README-BURNING-ISOS-en_US.txt RELEASE-NOTES-en_US.html ^RPM-GPG"
    relnotedirre = "images stylesheet-images"
    relnotepkgs = "fedora-release fedora-release-notes"

    (opts, args) = get_arguments()

    config = SafeConfigParser()
    config.read(opts.config)

    # A missing file and a file without [default] look the same here.
    if "default" not in config.sections():
        print ("Check that the file %s exists and that it has a 'default' section" % opts.config)
        sys.exit(1)

    # Fill in each default only when the config file did not set it.
    if not config.has_option('default', 'flavor'):
        config.set('default', 'flavor', flavor)

    if not config.has_option('default', 'osdir'):
        config.set('default', 'osdir', osdir)

    if not config.has_option('default', 'sourcedir'):
        config.set('default', 'sourcedir', sourcedir)

    if not config.has_option('default', 'debugdir'):
        config.set('default', 'debugdir', debugdir)

    if not config.has_option('default', 'isodir'):
        config.set('default', 'isodir', isodir)

    if not config.has_option('default', 'cdsize'):
        config.set('default', 'cdsize', cdsize)

    if not config.has_option('default', 'relnotefilere'):
        config.set('default', 'relnotefilere', relnotefilere)

    if not config.has_option('default', 'relnotedirre'):
        config.set('default', 'relnotedirre', relnotedirre)

    if not config.has_option('default', 'relnotepkgs'):
        config.set('default', 'relnotepkgs', relnotepkgs)

    # set some other defaults (derived from product_name, which is required)
    if not config.has_option('default', 'product_path'):
        config.set('default', 'product_path', config.get('default', 'product_name'))

    if not config.has_option('default', 'iso_basename'):
        config.set('default', 'iso_basename', config.get('default', 'product_name'))

    pkglist = get_packagelist(config.get('default', 'manifest'))

    # "*CONFFILE*" is the CLI sentinel meaning "take destdir from config".
    if not opts.destdir == "*CONFFILE*":
        config.set('default', 'destdir', opts.destdir)

    destdir = config.get('default', 'destdir')

    if not os.path.exists(destdir):
        try:
            os.makedirs(destdir)
        except OSError, e:
            print >> sys.stderr, "Error: Cannot create destination dir %s" % destdir
            sys.exit(1)

    cachedir = config.get('default', 'cachedir')

    if not os.path.exists(cachedir):
        try:
            os.makedirs(cachedir)
        except OSError, e:
            print >> sys.stderr, "Error: Cannot create cache dir %s" % cachedir
            sys.exit(1)

    # Actually do work.
    if not config.get('default', 'arch') == 'source':
        if opts.do_all or opts.do_gather:
            mygather = pypungi.gather.Gather(config, pkglist)
            mygather.getPackageObjects()
            mygather.downloadPackages()
            if config.getboolean('default', 'getsource'):
                mygather.getSRPMList()
                mygather.downloadSRPMs()

        mypungi = pypungi.pungi.Pungi(config)

        if opts.do_all or opts.do_buildinstall:
            mypungi.doBuildinstall()
            mypungi.doGetRelnotes()

        if opts.do_all or opts.do_packageorder:
            mypungi.doPackageorder()

        if opts.do_all or opts.do_splittree:
            mypungi.doSplittree()

        if opts.do_all or opts.do_createiso:
            mypungi.doCreateSplitrepo()
            mypungi.doCreateIsos()

    # Do things slightly different for src.
    if config.get('default', 'arch') == 'source':
        # we already have all the content gathered
        mypungi = pypungi.pungi.Pungi(config)
        mypungi.topdir = os.path.join(config.get('default', 'destdir'),
                                      config.get('default', 'version'),
                                      config.get('default', 'flavor'),
                                      'source', 'SRPM')
        if opts.do_all or opts.do_splittree:
            mypungi.doSplitSRPMs()

        if opts.do_all or opts.do_createiso:
            mypungi.doCreateIsos()
|
||||
|
||||
if __name__ == '__main__':
    # Script-only imports: `sys` becomes a module global used by main().
    from optparse import OptionParser
    import sys

    def get_arguments():
        """Parse CLI options; return (opts, args).

        ``do_all`` defaults to True and is switched off as soon as any
        individual stage flag (-G/-B/-P/-S/-I) is given, so explicit flags
        select exactly those stages.
        """
        parser = OptionParser(version="%prog 0.3.2")
        parser.add_option("--destdir", default="*CONFFILE*", dest="destdir",
                          help='destination directory (defaults to current directory)')
        parser.add_option("-c", "--conf", default='/etc/pungi/pungi.conf', dest="config",
                          help='Config file to use')
        parser.add_option("--all-stages", action="store_true", default=True, dest="do_all",
                          help="Enable ALL stages")
        parser.add_option("-G", action="store_true", default=False, dest="do_gather",
                          help="Flag to enable processing the Gather stage")
        parser.add_option("-B", action="store_true", default=False, dest="do_buildinstall",
                          help="Flag to enable processing the BuildInstall stage")
        parser.add_option("-P", action="store_true", default=False, dest="do_packageorder",
                          help="Flag to enable processing the Package Order stage")
        parser.add_option("-S", action="store_true", default=False, dest="do_splittree",
                          help="Flag to enable processing the SplitTree stage")
        parser.add_option("-I", action="store_true", default=False, dest="do_createiso",
                          help="Flag to enable processing the CreateISO stage")


        (opts, args) = parser.parse_args()
        if opts.do_gather or opts.do_buildinstall or opts.do_packageorder or opts.do_splittree or opts.do_createiso:
            opts.do_all = False
        # No arguments at all: show help instead of running with defaults.
        if len(sys.argv) < 2:
            parser.print_help()
            sys.exit(0)
        return (opts, args)

    def get_packagelist(manifest):
        # Get the list of packages from the manifest file
        # (one entry per line; lines are returned with trailing newlines).
        try:
            manifestfile = open(manifest, 'r')

        except IOError:
            print >> sys.stderr, "pungi: No such file:\'%s\'" % manifest
            sys.exit(1)

        pkglist = manifestfile.readlines()
        manifestfile.close()
        return pkglist

    main()
|
2623
pungi.spec
2623
pungi.spec
File diff suppressed because it is too large
Load Diff
33
pungi/__init__.py
Normal file
33
pungi/__init__.py
Normal file
@ -0,0 +1,33 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def get_full_version():
    """
    Find full version of Pungi: if running from git, this will return cleaned
    output of `git describe`, otherwise it will look for installed version.

    Returns ``"unknown"`` when the installed version can not be queried.
    """
    import subprocess

    location = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
    if os.path.isdir(os.path.join(location, ".git")):
        # Running from a git checkout: describe the current commit, then
        # strip the "pungi-" tag prefix and any "-1.fcNN" release suffix.
        proc = subprocess.Popen(
            ["git", "--git-dir=%s/.git" % location, "describe", "--tags"],
            stdout=subprocess.PIPE,
            universal_newlines=True,
        )
        output, _ = proc.communicate()
        return re.sub(r"-1.fc\d\d?", "", output.strip().replace("pungi-", ""))
    else:
        # Installed system-wide: ask RPM for the package version.
        proc = subprocess.Popen(
            ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, universal_newlines=True
        )
        output, _ = proc.communicate()
        # BUGFIX: the original tested the stderr value from communicate(),
        # which is always None because stderr was never piped — so a failed
        # query leaked rpm's error text as the "version". Use the exit code.
        if proc.returncode == 0:
            return output.rstrip()
        return "unknown"
|
115
pungi/arch.py
Normal file
115
pungi/arch.py
Normal file
@ -0,0 +1,115 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
from .arch_utils import arches as ALL_ARCHES
|
||||
from .arch_utils import getBaseArch, getMultiArchInfo, getArchList
|
||||
|
||||
TREE_ARCH_YUM_ARCH_MAP = {
    "i386": "i686",
    "sparc": "sparc64v",
    "arm": "armv7l",
    "armhfp": "armv7hnl",
}


def tree_arch_to_yum_arch(tree_arch):
    """Translate a compose tree arch to the corresponding yum arch.

    Inverse of pungi.arch_utils.getBaseArch(); arches without a special
    mapping are returned unchanged.
    """
    return TREE_ARCH_YUM_ARCH_MAP.get(tree_arch, tree_arch)
|
||||
|
||||
|
||||
def get_multilib_arch(yum_arch):
    """Return the multilib compat arch for *yum_arch*, or None when the
    arch has no multilib information."""
    info = getMultiArchInfo(yum_arch)
    return None if info is None else info[0]
|
||||
|
||||
|
||||
def get_valid_multilib_arches(tree_arch):
    """List all multilib-compatible arches for a tree arch, excluding the
    pseudo arches "noarch" and "src". Empty list when not multilib."""
    multilib = get_multilib_arch(tree_arch_to_yum_arch(tree_arch))
    if not multilib:
        return []
    return [a for a in getArchList(multilib) if a not in ("noarch", "src")]
|
||||
|
||||
|
||||
def get_valid_arches(tree_arch, multilib=True, add_noarch=True, add_src=False):
    """Return the list of arches whose packages are installable on
    *tree_arch*, in compatibility order.

    ``multilib=False`` drops the multilib compat arches; ``add_noarch`` and
    ``add_src`` control whether the pseudo arches are appended.
    """
    # Deduplicate while keeping the order returned by getArchList().
    result = []
    for candidate in getArchList(tree_arch_to_yum_arch(tree_arch)):
        if candidate not in result:
            result.append(candidate)

    if not multilib:
        for unwanted in get_valid_multilib_arches(tree_arch):
            while unwanted in result:
                result.remove(unwanted)

    for pseudo, wanted in (("noarch", add_noarch), ("src", add_src)):
        if wanted and pseudo not in result:
            result.append(pseudo)

    return result
|
||||
|
||||
|
||||
def get_compatible_arches(arch, multilib=False):
    """Return all arches compatible with *arch*, resolved through its
    base (tree) architecture."""
    return get_valid_arches(getBaseArch(arch), multilib=multilib)
|
||||
|
||||
|
||||
def is_valid_arch(arch):
    """True when *arch* is a known hardware arch or one of the pseudo
    arches noarch/src/nosrc."""
    return arch in ("noarch", "src", "nosrc") or arch in ALL_ARCHES
|
||||
|
||||
|
||||
def split_name_arch(name_arch):
    """Split "name.arch" into (name, arch).

    The suffix after the last dot is only treated as an arch when it is a
    valid one; otherwise the whole string is the name and arch is None.
    """
    name, arch = name_arch, None
    if "." in name_arch:
        maybe_name, maybe_arch = name_arch.rsplit(".", 1)
        if is_valid_arch(maybe_arch):
            name, arch = maybe_name, maybe_arch
    return name, arch
|
||||
|
||||
|
||||
def is_excluded(package, arches, logger=None):
    """Check if package is excluded from given architectures.

    *package* must provide ``excludearch``, ``exclusivearch`` and
    ``file_name`` attributes (an RPM package object — presumably from the
    pkgset; verify against callers). Returns True when the package must
    not be included for the given *arches*.

    NOTE(review): this mutates ``package.exclusivearch`` in place by
    appending ``x86_64_v2`` — callers observe the modified list.
    """
    # Treat x86_64_v2 as implicitly covered wherever plain x86_64 is
    # exclusive, so ExclusiveArch: x86_64 packages are not dropped there.
    if any(
        getBaseArch(exc_arch) == 'x86_64' for exc_arch in package.exclusivearch
    ) and 'x86_64_v2' not in package.exclusivearch:
        package.exclusivearch.append('x86_64_v2')

    # ExcludeArch: excluded if any target arch is explicitly listed.
    if package.excludearch and set(package.excludearch) & set(arches):
        if logger:
            logger.debug(
                "Excluding (EXCLUDEARCH: %s): %s"
                % (sorted(set(package.excludearch)), package.file_name)
            )
        return True
    # ExclusiveArch: excluded if no target arch is in the exclusive set.
    if package.exclusivearch and not (set(package.exclusivearch) & set(arches)):
        if logger:
            logger.debug(
                "Excluding (EXCLUSIVEARCH: %s): %s"
                % (sorted(set(package.exclusivearch)), package.file_name)
            )
        return True
    return False
|
368
pungi/arch_utils.py
Normal file
368
pungi/arch_utils.py
Normal file
@ -0,0 +1,368 @@
|
||||
# A copy of some necessary parts from yum.rpmUtils.arch, with slightly changes:
|
||||
# 1. _ppc64_native_is_best changed to True
|
||||
# 2. code style fixes for flake8 reported errors
|
||||
|
||||
import os
|
||||
import rpm
|
||||
import ctypes
|
||||
import struct
|
||||
|
||||
# _ppc64_native_is_best is False in yum's source code, but patched with a
# separate patch when built from source rpm, so we set it to True here.
_ppc64_native_is_best = True

# dict mapping arch -> ( multicompat, best personality, biarch personality )
multilibArches = {
    "x86_64": ("athlon", "x86_64", "athlon"),
    "sparc64v": ("sparcv9v", "sparcv9v", "sparc64v"),
    "sparc64": ("sparcv9", "sparcv9", "sparc64"),
    "ppc64": ("ppc", "ppc", "ppc64"),
    "s390x": ("s390", "s390x", "s390"),
}
if _ppc64_native_is_best:
    # With native-is-best, ppc64 prefers the 64-bit personality.
    multilibArches["ppc64"] = ("ppc", "ppc64", "ppc64")
|
||||
|
||||
# Maps each known arch to the next-most-generic arch it is compatible
# with; every compatibility chain eventually terminates at "noarch".
arches = {
    # ia32
    "athlon": "i686",
    "i686": "i586",
    "geode": "i586",
    "i586": "i486",
    "i486": "i386",
    "i386": "noarch",
    # amd64
    "x86_64": "athlon",
    "amd64": "x86_64",
    "ia32e": "x86_64",
    # x86-64-v2
    "x86_64_v2": "noarch",
    # ppc64le
    "ppc64le": "noarch",
    # ppc
    "ppc64p7": "ppc64",
    "ppc64pseries": "ppc64",
    "ppc64iseries": "ppc64",
    "ppc64": "ppc",
    "ppc": "noarch",
    # s390{,x}
    "s390x": "s390",
    "s390": "noarch",
    # sparc
    "sparc64v": "sparcv9v",
    "sparc64": "sparcv9",
    "sparcv9v": "sparcv9",
    "sparcv9": "sparcv8",
    "sparcv8": "sparc",
    "sparc": "noarch",
    # alpha
    "alphaev7": "alphaev68",
    "alphaev68": "alphaev67",
    "alphaev67": "alphaev6",
    "alphaev6": "alphapca56",
    "alphapca56": "alphaev56",
    "alphaev56": "alphaev5",
    "alphaev5": "alphaev45",
    "alphaev45": "alphaev4",
    "alphaev4": "alpha",
    "alpha": "noarch",
    # arm
    "armv7l": "armv6l",
    "armv6l": "armv5tejl",
    "armv5tejl": "armv5tel",
    "armv5tel": "noarch",
    # arm hardware floating point
    "armv7hnl": "armv7hl",
    "armv7hl": "armv6hl",
    "armv6hl": "noarch",
    # arm64
    "arm64": "noarch",
    # aarch64
    "aarch64": "noarch",
    # super-h
    "sh4a": "sh4",
    "sh4": "noarch",
    "sh3": "noarch",
    # itanium
    "ia64": "noarch",
}
|
||||
|
||||
# Will contain information parsed from /proc/self/auxv via _parse_auxv().
# Should move into rpm really.
# "platform" holds the AT_PLATFORM string and "hwcap" the AT_HWCAP bitmask;
# both keep these defaults if /proc/self/auxv cannot be read.
_aux_vector = {
    "platform": "",
    "hwcap": 0,
}
|
||||
|
||||
|
||||
def isMultiLibArch(arch=None):  # pragma: no cover
    """returns true if arch is a multilib arch, false if not"""
    if arch is None:
        arch = canonArch

    if arch not in arches:  # or we could check if it is noarch
        return 0

    # Multilib if the arch itself, or its next-compatible arch, is biarch.
    if arch in multilibArches or arches[arch] in multilibArches:
        return 1

    return 0
|
||||
|
||||
|
||||
def getArchList(thisarch=None):  # pragma: no cover
    """Return the arches compatible with *thisarch* (canonArch by default),
    most specific first."""
    thisarch = thisarch or canonArch

    archlist = [thisarch]
    walk = thisarch
    while walk in arches:
        walk = arches[walk]
        archlist.append(walk)

    # hack hack hack
    # sparc64v is also sparc64 compat
    if archlist[0] == "sparc64v":
        archlist.insert(1, "sparc64")

    # Unknown arch: the chain above never advanced, so append noarch.
    if len(archlist) == 1 and archlist[0] == walk:
        archlist.append("noarch")
    return archlist
|
||||
|
||||
|
||||
def _try_read_cpuinfo(): # pragma: no cover
|
||||
"""Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
|
||||
mounted)."""
|
||||
try:
|
||||
with open("/proc/cpuinfo", "r") as f:
|
||||
return f.readlines()
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
|
||||
def _parse_auxv():  # pragma: no cover
    """Read /proc/self/auxv and parse it into global dict for easier access
    later on, very similar to what rpm does."""
    # In case we can't open and read /proc/self/auxv, just return
    try:
        with open("/proc/self/auxv", "rb") as f:
            data = f.read()
    except Exception:
        return

    # Define values from /usr/include/elf.h
    AT_PLATFORM = 15
    AT_HWCAP = 16
    # Each auxv entry is a pair of native unsigned longs: (type, value).
    fmtlen = struct.calcsize("LL")
    offset = 0
    platform = ctypes.c_char_p()

    # Parse the data and fill in _aux_vector dict
    while offset <= len(data) - fmtlen:
        at_type, at_val = struct.unpack_from("LL", data, offset)
        if at_type == AT_PLATFORM:
            # NOTE(review): at_val is the in-process address of the platform
            # string; assigning it to c_char_p.value appears intended to
            # dereference it into a string — confirm on the target Python.
            platform.value = at_val
            _aux_vector["platform"] = platform.value
        if at_type == AT_HWCAP:
            _aux_vector["hwcap"] = at_val
        offset = offset + fmtlen
|
||||
|
||||
|
||||
def getCanonX86Arch(arch):  # pragma: no cover
    """Refine a generic ia32 arch using CPU data from /proc/cpuinfo."""
    if arch == "i586":
        # Geode CPUs identify themselves in the model name.
        for line in _try_read_cpuinfo():
            if line.startswith("model name"):
                if "Geode(TM)" in line:
                    return "geode"
                break
        return arch
    # only athlon vs i686 isn't handled with uname currently
    if arch != "i686":
        return arch

    # An i686 from AMD (or Hygon) should be reported as athlon.
    for line in _try_read_cpuinfo():
        if line.startswith("vendor"):
            if "AuthenticAMD" in line or "HygonGenuine" in line:
                return "athlon"
        elif line.startswith("flags"):
            # i686 doesn't guarantee cmov, but we depend on it
            if "cmov" not in line:
                return "i586"
            break

    return arch
|
||||
|
||||
|
||||
def getCanonARMArch(arch):  # pragma: no cover
    """Prefer rpm's hard-float ARM target cpu over the uname arch."""
    # the %{_target_arch} macro in rpm will let us know the abi we are using
    target = rpm.expandMacro("%{_target_cpu}")
    if target.startswith(("armv6h", "armv7h")):
        return target
    return arch
|
||||
|
||||
|
||||
def getCanonPPCArch(arch):  # pragma: no cover
    """Refine ppc64 into its p7/pseries/iseries variants."""
    # FIXME: should I do better handling for mac, etc?
    if arch != "ppc64":
        return arch

    machine = None
    for line in _try_read_cpuinfo():
        if "machine" in line:
            machine = line.split(":")[1]
            break

    platform = _aux_vector["platform"]
    if machine is None and not platform:
        return arch

    # POWER7 and newer hardware gets its own arch.
    try:
        if platform.startswith("power") and int(platform[5:].rstrip("+")) >= 7:
            return "ppc64p7"
    except Exception:
        pass

    if machine is None:
        return arch

    if "CHRP IBM" in machine:
        return "ppc64pseries"
    if "iSeries" in machine:
        return "ppc64iseries"
    return arch
|
||||
|
||||
|
||||
def getCanonSPARCArch(arch):  # pragma: no cover
    """Refine a sparc arch based on the machine type (sun4v/sun4u/sun4m)."""
    SPARCtype = None
    for line in _try_read_cpuinfo():
        if line.startswith("type"):
            SPARCtype = line.split(":")[1]
            break
    if SPARCtype is None:
        return arch

    if "sun4v" in SPARCtype:
        return "sparc64v" if arch.startswith("sparc64") else "sparcv9v"
    if "sun4u" in SPARCtype:
        return "sparc64" if arch.startswith("sparc64") else "sparcv9"
    if "sun4m" in SPARCtype:
        return "sparcv8"
    return arch
|
||||
|
||||
|
||||
def getCanonX86_64Arch(arch):  # pragma: no cover
    """Refine x86_64 into amd64/ia32e based on the CPU vendor string."""
    if arch != "x86_64":
        return arch

    vendor = None
    for line in _try_read_cpuinfo():
        if line.startswith("vendor_id"):
            vendor = line.split(":")[1]
            break
    if vendor is None:
        return arch

    # AMD and Hygon CPUs are amd64; Intel ones are ia32e.
    if "Authentic AMD" in vendor or "AuthenticAMD" in vendor:
        return "amd64"
    if "HygonGenuine" in vendor:
        return "amd64"
    if "GenuineIntel" in vendor:
        return "ia32e"
    return arch
|
||||
|
||||
|
||||
def getCanonArch(skipRpmPlatform=0):  # pragma: no cover
    """Return the canonical arch of the running machine.

    If /etc/rpm/platform is readable (and *skipRpmPlatform* is false), the
    arch recorded there wins.  Otherwise the uname machine value is refined
    with the per-family detection helpers (ia32, arm, ppc, sparc, x86_64).

    :param skipRpmPlatform: truthy to ignore /etc/rpm/platform.
    :return: canonical arch string.
    """
    if not skipRpmPlatform and os.access("/etc/rpm/platform", os.R_OK):
        try:
            # Use a context manager so the descriptor is closed even when
            # readline() or the split below raises (the original leaked the
            # open file on such errors).
            with open("/etc/rpm/platform", "r") as f:
                line = f.readline()
            (arch, vendor, opersys) = line.split("-", 2)
            return arch
        except Exception:
            # Fall back to uname-based detection on any parse/read error.
            pass

    arch = os.uname()[4]

    # Populate _aux_vector (AT_PLATFORM / AT_HWCAP) for the ppc helper.
    _parse_auxv()

    if len(arch) == 4 and arch[0] == "i" and arch[2:4] == "86":
        return getCanonX86Arch(arch)

    if arch.startswith("arm"):
        return getCanonARMArch(arch)
    if arch.startswith("ppc"):
        return getCanonPPCArch(arch)
    if arch.startswith("sparc"):
        return getCanonSPARCArch(arch)
    if arch == "x86_64":
        return getCanonX86_64Arch(arch)

    return arch
|
||||
|
||||
|
||||
# Canonical arch of the build host, computed once at import time.
canonArch = getCanonArch()
|
||||
|
||||
|
||||
# this gets you the "compat" arch of a biarch pair
def getMultiArchInfo(arch=canonArch):  # pragma: no cover
    """Return the (multicompat, best, biarch) tuple for *arch*.

    Walks up the compatibility chain until a multilib arch is found;
    returns None when the chain reaches noarch without one.
    """
    walk = arch
    while True:
        if walk in multilibArches:
            return multilibArches[walk]
        if walk in arches and arches[walk] != "noarch":
            walk = arches[walk]
            continue
        return None
|
||||
|
||||
|
||||
def getBaseArch(myarch=None):  # pragma: no cover
    """returns 'base' arch for myarch, if specified, or canonArch if not.

    base arch is the arch before noarch in the arches dict if myarch is not
    a key in the multilibArches."""

    myarch = myarch or canonArch

    if myarch not in arches:  # this is dumb, but <shrug>
        return myarch

    # Families with fixed base arches come first.
    if myarch.startswith("sparc64"):
        return "sparc"
    if myarch == "ppc64le":
        return "ppc64le"
    if myarch.startswith("ppc64") and not _ppc64_native_is_best:
        return "ppc"
    if myarch.startswith("arm64"):
        return "arm64"
    if myarch.startswith(("armv6h", "armv7h")):
        return "armhfp"
    if myarch.startswith("arm"):
        return "arm"

    if isMultiLibArch(arch=myarch):
        # A multilib arch is its own base; otherwise its compat arch is.
        return myarch if myarch in multilibArches else arches[myarch]

    # Walk the chain down to the entry just before "noarch".
    basearch = myarch
    while arches[basearch] != "noarch":
        basearch = arches[basearch]
    return basearch
|
1622
pungi/checks.py
Normal file
1622
pungi/checks.py
Normal file
File diff suppressed because it is too large
Load Diff
34
pungi/common.py
Normal file
34
pungi/common.py
Normal file
@ -0,0 +1,34 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
class OptionsBase(object):
    """Base class for option containers.

    Subclasses define their default attributes in ``__init__`` and call
    ``self.merge_options(**kwargs)`` at the end to apply user overrides.
    """

    def __init__(self, **kwargs):
        """
        inherit and initialize attributes
        call self.merge_options(**kwargs) at the end
        """
        pass

    def merge_options(self, **kwargs):
        """
        override defaults with user defined values
        """
        for name, user_value in kwargs.items():
            # Only attributes declared by the subclass may be overridden.
            if not hasattr(self, name):
                raise ValueError(
                    "Invalid option in %s: %s" % (self.__class__.__name__, name)
                )
            setattr(self, name, user_value)
|
779
pungi/compose.py
Normal file
779
pungi/compose.py
Normal file
@ -0,0 +1,779 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
__all__ = ("Compose",)
|
||||
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import tempfile
|
||||
import shutil
|
||||
import json
|
||||
import socket
|
||||
|
||||
import kobo.log
|
||||
import kobo.tback
|
||||
import requests
|
||||
from requests.exceptions import RequestException
|
||||
from productmd.composeinfo import ComposeInfo
|
||||
from productmd.images import Images
|
||||
from dogpile.cache import make_region
|
||||
|
||||
|
||||
from pungi.graph import SimpleAcyclicOrientedGraph
|
||||
from pungi.wrappers.variants import VariantsXmlParser
|
||||
from pungi.paths import Paths
|
||||
from pungi.wrappers.kojiwrapper import KojiDownloadProxy
|
||||
from pungi.wrappers.scm import get_file_from_scm
|
||||
from pungi.util import (
|
||||
makedirs,
|
||||
get_arch_variant_data,
|
||||
get_format_substs,
|
||||
get_variant_data,
|
||||
retry,
|
||||
translate_path_raw,
|
||||
)
|
||||
from pungi.metadata import compose_to_composeinfo
|
||||
|
||||
try:
|
||||
# This is available since productmd >= 1.18
|
||||
# TODO: remove this once the version is distributed widely enough
|
||||
from productmd.composeinfo import SUPPORTED_MILESTONES
|
||||
except ImportError:
|
||||
SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
|
||||
|
||||
|
||||
def is_status_fatal(status_code):
    """Check if status code returned from CTS reports an error that is unlikely
    to be fixed by retrying. Generally client errors (4XX) are fatal, with the
    exception of 401 Unauthorized which could be caused by transient network
    issue between compose host and KDC.
    """
    return status_code != 401 and 400 <= status_code < 500
|
||||
|
||||
|
||||
@retry(wait_on=RequestException)
def retry_request(method, url, data=None, json_data=None, auth=None):
    """
    Issue an HTTP request, retrying on connection-level failures.

    :param str method: Request method.
    :param str url: Target URL.
    :param dict data: form-urlencoded data to send in the body of the request.
    :param dict json_data: json data to send in the body of the request.
    """
    response = getattr(requests, method)(url, data=data, json=json_data, auth=auth)
    if is_status_fatal(response.status_code):
        # A fatal client error will not improve with retries; surface the
        # server-provided error body (JSON when possible, raw text otherwise).
        try:
            error = response.json()
        except ValueError:
            error = response.text
        raise RuntimeError(
            "%s responded with %d: %s" % (url, response.status_code, error)
        )
    response.raise_for_status()
    return response
|
||||
|
||||
|
||||
class BearerAuth(requests.auth.AuthBase):
    """requests auth helper that attaches a bearer token to each request."""

    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        # requests invokes this hook for every prepared request.
        request.headers["authorization"] = "Bearer " + self.token
        return request
|
||||
|
||||
|
||||
@contextlib.contextmanager
def cts_auth(pungi_conf):
    """
    Yield an authentication object for talking to CTS: Kerberos auth when a
    keytab is configured, an OIDC bearer token when a token URL is
    configured, or None otherwise.

    :param dict pungi_conf: dict obj of pungi.json config.
    """
    auth = None
    token = None
    cts_keytab = pungi_conf.get("cts_keytab")
    # Environment variable takes precedence over the config file value.
    cts_oidc_token_url = os.environ.get("CTS_OIDC_TOKEN_URL", "") or pungi_conf.get(
        "cts_oidc_token_url"
    )

    try:
        if cts_keytab:
            # requests-kerberos cannot accept custom keytab, we need to use
            # environment variable for this. But we need to change environment
            # only temporarily just for this single requests.post.
            # So at first backup the current environment and revert to it
            # after the requests call.
            from requests_kerberos import HTTPKerberosAuth

            auth = HTTPKerberosAuth()
            environ_copy = dict(os.environ)
            if "$HOSTNAME" in cts_keytab:
                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
            # Use a throwaway credential cache directory for this session.
            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
        elif cts_oidc_token_url:
            cts_oidc_client_id = os.environ.get(
                "CTS_OIDC_CLIENT_ID", ""
            ) or pungi_conf.get("cts_oidc_client_id", "")
            token = retry_request(
                "post",
                cts_oidc_token_url,
                data={
                    "grant_type": "client_credentials",
                    "client_id": cts_oidc_client_id,
                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
                },
            ).json()["access_token"]
            auth = BearerAuth(token)
            # Drop the local reference so the token lives only inside the
            # auth object (keeps it out of locals in tracebacks).
            del token

        yield auth
    except Exception as e:
        # Avoid leaking client secret in trackback
        e.show_locals = False
        raise e
    finally:
        if cts_keytab:
            # Remove the temporary credential cache and restore the
            # environment captured above.
            shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
            os.environ.clear()
            os.environ.update(environ_copy)
|
||||
|
||||
|
||||
def get_compose_info(
    conf,
    compose_type="production",
    compose_date=None,
    compose_respin=None,
    compose_label=None,
    parent_compose_ids=None,
    respin_of=None,
):
    """
    Create an (incomplete) ComposeInfo, primarily to generate the compose
    ID.  When ``cts_url`` is configured, the compose is registered in CTS
    and the ID/respin reserved there are copied into the returned object.

    :param dict conf: pungi configuration.
    :param str compose_type: compose type string.
    :param str compose_date: compose date (YYYYMMDD); today's date when None.
    :param int compose_respin: respin number; defaults to 0 when None.
    :param str compose_label: optional compose label.
    :param parent_compose_ids: forwarded to CTS when it is used.
    :param respin_of: forwarded to CTS when it is used.
    :return: ComposeInfo instance.
    """
    ci = ComposeInfo()
    ci.release.name = conf["release_name"]
    ci.release.short = conf["release_short"]
    ci.release.version = conf["release_version"]
    # A configured base product makes this a layered release.
    ci.release.is_layered = True if conf.get("base_product_name", "") else False
    ci.release.type = conf.get("release_type", "ga").lower()
    ci.release.internal = bool(conf.get("release_internal", False))
    if ci.release.is_layered:
        ci.base_product.name = conf["base_product_name"]
        ci.base_product.short = conf["base_product_short"]
        ci.base_product.version = conf["base_product_version"]
        ci.base_product.type = conf.get("base_product_type", "ga").lower()

    ci.compose.label = compose_label
    ci.compose.type = compose_type
    ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
    ci.compose.respin = compose_respin or 0
    ci.compose.id = ci.create_compose_id()

    cts_url = conf.get("cts_url")
    if cts_url:
        # Create compose in CTS and get the reserved compose ID.
        url = os.path.join(cts_url, "api/1/composes/")
        data = {
            "compose_info": json.loads(ci.dumps()),
            "parent_compose_ids": parent_compose_ids,
            "respin_of": respin_of,
        }
        with cts_auth(conf) as authentication:
            rv = retry_request("post", url, json_data=data, auth=authentication)

        # Update local ComposeInfo with received ComposeInfo.
        cts_ci = ComposeInfo()
        cts_ci.loads(rv.text)
        ci.compose.respin = cts_ci.compose.respin
        ci.compose.id = cts_ci.compose.id

    return ci
|
||||
|
||||
|
||||
def write_compose_info(compose_dir, ci):
    """
    Write ComposeInfo `ci` to `compose_dir` subdirectories.
    """
    makedirs(compose_dir)
    # Top-level COMPOSE_ID marker file.
    id_file = os.path.join(compose_dir, "COMPOSE_ID")
    with open(id_file, "w") as f:
        f.write(ci.compose.id)
    # The full metadata goes into the global work directory.
    work_dir = os.path.join(compose_dir, "work", "global")
    makedirs(work_dir)
    ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
|
||||
|
||||
|
||||
def update_compose_url(compose_id, compose_dir, conf):
    """Report the compose location URL to CTS, when CTS is configured.

    Returns the response of the PATCH request, or None when CTS is not
    configured or no URL could be derived via ``translate_paths``.
    """
    cts_url = conf.get("cts_url", None)
    if not cts_url:
        return
    url = os.path.join(cts_url, "api/1/composes", compose_id)
    compose_url = translate_path_raw(conf.get("translate_paths", None), compose_dir)
    if compose_url == compose_dir:
        # We do not have a URL, do not attempt the update.
        return
    data = {
        "action": "set_url",
        "compose_url": compose_url,
    }
    with cts_auth(conf) as authentication:
        return retry_request("patch", url, json_data=data, auth=authentication)
|
||||
|
||||
|
||||
def get_compose_dir(
    topdir,
    conf,
    compose_type="production",
    compose_date=None,
    compose_respin=None,
    compose_label=None,
    already_exists_callbacks=None,
    parent_compose_ids=None,
    respin_of=None,
):
    """Create and return a unique compose directory under *topdir*.

    When CTS is configured the compose ID was reserved by CTS and is used
    directly; otherwise the respin is bumped locally until a directory name
    that does not already exist is found.

    :raises OSError: on directory creation failures other than EEXIST.
    """
    already_exists_callbacks = already_exists_callbacks or []

    ci = get_compose_info(
        conf,
        compose_type,
        compose_date,
        compose_respin,
        compose_label,
        parent_compose_ids,
        respin_of,
    )

    cts_url = conf.get("cts_url", None)
    if cts_url:
        # The ID was reserved by CTS, so it is assumed unique.
        # Create compose directory.
        compose_dir = os.path.join(topdir, ci.compose.id)
        os.makedirs(compose_dir)
    else:
        while 1:
            ci.compose.id = ci.create_compose_id()

            compose_dir = os.path.join(topdir, ci.compose.id)

            exists = False
            # TODO: callbacks to determine if a composeid was already used
            # for callback in already_exists_callbacks:
            #     if callback(data):
            #         exists = True
            #         break

            # already_exists_callbacks fallback: does target compose_dir exist?
            try:
                os.makedirs(compose_dir)
            except OSError as ex:
                if ex.errno == errno.EEXIST:
                    exists = True
                else:
                    raise

            if exists:
                # Bump the respin and regenerate the ComposeInfo.  Pass the
                # parent/respin information through so the retried call is
                # consistent with the initial one (the original dropped
                # these arguments here).
                ci = get_compose_info(
                    conf,
                    compose_type,
                    compose_date,
                    ci.compose.respin + 1,
                    compose_label,
                    parent_compose_ids,
                    respin_of,
                )
                continue
            break

    write_compose_info(compose_dir, ci)
    return compose_dir
|
||||
|
||||
|
||||
class Compose(kobo.log.LoggingBase):
|
||||
def __init__(
|
||||
self,
|
||||
conf,
|
||||
topdir,
|
||||
skip_phases=None,
|
||||
just_phases=None,
|
||||
old_composes=None,
|
||||
koji_event=None,
|
||||
supported=False,
|
||||
logger=None,
|
||||
notifier=None,
|
||||
):
|
||||
kobo.log.LoggingBase.__init__(self, logger)
|
||||
# TODO: check if minimal conf values are set
|
||||
self.conf = conf
|
||||
# This is a dict mapping UID to Variant objects. It only contains top
|
||||
# level variants.
|
||||
self.variants = {}
|
||||
# This is a similar mapping, but contains even nested variants.
|
||||
self.all_variants = {}
|
||||
self.topdir = os.path.abspath(topdir)
|
||||
self.skip_phases = skip_phases or []
|
||||
self.just_phases = just_phases or []
|
||||
self.old_composes = old_composes or []
|
||||
self.koji_event = koji_event or conf.get("koji_event")
|
||||
self.notifier = notifier
|
||||
|
||||
self._old_config = None
|
||||
|
||||
# path definitions
|
||||
self.paths = Paths(self)
|
||||
|
||||
# Set up logging to file
|
||||
if logger:
|
||||
kobo.log.add_file_logger(
|
||||
logger, self.paths.log.log_file("global", "pungi.log")
|
||||
)
|
||||
kobo.log.add_file_logger(
|
||||
logger, self.paths.log.log_file("global", "excluding-arch.log")
|
||||
)
|
||||
|
||||
class PungiLogFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
return (
|
||||
False
|
||||
if record.funcName and record.funcName == "is_excluded"
|
||||
else True
|
||||
)
|
||||
|
||||
class ExcludingArchLogFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
message = record.getMessage()
|
||||
if "Populating package set for arch:" in message or (
|
||||
record.funcName and record.funcName == "is_excluded"
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
for handler in logger.handlers:
|
||||
if isinstance(handler, logging.FileHandler):
|
||||
log_file_name = os.path.basename(handler.stream.name)
|
||||
if log_file_name == "pungi.global.log":
|
||||
handler.addFilter(PungiLogFilter())
|
||||
elif log_file_name == "excluding-arch.global.log":
|
||||
handler.addFilter(ExcludingArchLogFilter())
|
||||
|
||||
# to provide compose_id, compose_date and compose_respin
|
||||
self.ci_base = ComposeInfo()
|
||||
self.ci_base.load(
|
||||
os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
|
||||
)
|
||||
|
||||
self.supported = supported
|
||||
if (
|
||||
self.compose_label
|
||||
and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES
|
||||
):
|
||||
self.log_info(
|
||||
"Automatically setting 'supported' flag due to label: %s."
|
||||
% self.compose_label
|
||||
)
|
||||
self.supported = True
|
||||
|
||||
self.im = Images()
|
||||
self.im.compose.id = self.compose_id
|
||||
self.im.compose.type = self.compose_type
|
||||
self.im.compose.date = self.compose_date
|
||||
self.im.compose.respin = self.compose_respin
|
||||
self.im.metadata_path = self.paths.compose.metadata()
|
||||
|
||||
self.containers_metadata = {}
|
||||
|
||||
# Stores list of deliverables that failed, but did not abort the
|
||||
# compose.
|
||||
# {deliverable: [(Variant.uid, arch, subvariant)]}
|
||||
self.failed_deliverables = {}
|
||||
self.attempted_deliverables = {}
|
||||
self.required_deliverables = {}
|
||||
|
||||
if self.conf.get("dogpile_cache_backend", None):
|
||||
self.cache_region = make_region().configure(
|
||||
self.conf.get("dogpile_cache_backend"),
|
||||
expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
|
||||
arguments=self.conf.get("dogpile_cache_arguments", {}),
|
||||
)
|
||||
else:
|
||||
self.cache_region = make_region().configure("dogpile.cache.null")
|
||||
|
||||
self.koji_downloader = KojiDownloadProxy.from_config(self.conf, self._logger)
|
||||
|
||||
get_compose_info = staticmethod(get_compose_info)
|
||||
write_compose_info = staticmethod(write_compose_info)
|
||||
get_compose_dir = staticmethod(get_compose_dir)
|
||||
update_compose_url = staticmethod(update_compose_url)
|
||||
|
||||
def __getitem__(self, name):
|
||||
return self.variants[name]
|
||||
|
||||
@property
|
||||
def compose_id(self):
|
||||
return self.ci_base.compose.id
|
||||
|
||||
@property
|
||||
def compose_date(self):
|
||||
return self.ci_base.compose.date
|
||||
|
||||
@property
|
||||
def compose_respin(self):
|
||||
return self.ci_base.compose.respin
|
||||
|
||||
@property
|
||||
def compose_type(self):
|
||||
return self.ci_base.compose.type
|
||||
|
||||
@property
|
||||
def compose_type_suffix(self):
|
||||
return self.ci_base.compose.type_suffix
|
||||
|
||||
@property
|
||||
def compose_label(self):
|
||||
return self.ci_base.compose.label
|
||||
|
||||
@property
|
||||
def compose_label_major_version(self):
|
||||
return self.ci_base.compose.label_major_version
|
||||
|
||||
@property
|
||||
def has_comps(self):
|
||||
return bool(self.conf.get("comps_file", False))
|
||||
|
||||
@property
|
||||
def has_module_defaults(self):
|
||||
return bool(self.conf.get("module_defaults_dir", False))
|
||||
|
||||
@property
|
||||
def has_module_obsoletes(self):
|
||||
return bool(self.conf.get("module_obsoletes_dir", False))
|
||||
|
||||
@property
|
||||
def config_dir(self):
|
||||
return os.path.dirname(self.conf._open_file or "")
|
||||
|
||||
@property
|
||||
def should_create_yum_database(self):
|
||||
"""Explicit configuration trumps all. Otherwise check gather backend
|
||||
and only create it for Yum.
|
||||
"""
|
||||
config = self.conf.get("createrepo_database")
|
||||
if config is not None:
|
||||
return config
|
||||
return self.conf["gather_backend"] == "yum"
|
||||
|
||||
def read_variants(self):
|
||||
# TODO: move to phases/init ?
|
||||
variants_file = self.paths.work.variants_file(arch="global")
|
||||
|
||||
scm_dict = self.conf["variants_file"]
|
||||
if isinstance(scm_dict, dict):
|
||||
file_name = os.path.basename(scm_dict["file"])
|
||||
if scm_dict["scm"] == "file":
|
||||
scm_dict["file"] = os.path.join(
|
||||
self.config_dir, os.path.basename(scm_dict["file"])
|
||||
)
|
||||
else:
|
||||
file_name = os.path.basename(scm_dict)
|
||||
scm_dict = os.path.join(self.config_dir, scm_dict)
|
||||
|
||||
self.log_debug("Writing variants file: %s", variants_file)
|
||||
tmp_dir = self.mkdtemp(prefix="variants_file_")
|
||||
get_file_from_scm(scm_dict, tmp_dir, compose=self)
|
||||
shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
|
||||
shutil.rmtree(tmp_dir)
|
||||
|
||||
tree_arches = self.conf.get("tree_arches", None)
|
||||
tree_variants = self.conf.get("tree_variants", None)
|
||||
with open(variants_file, "r") as file_obj:
|
||||
parser = VariantsXmlParser(
|
||||
file_obj, tree_arches, tree_variants, logger=self._logger
|
||||
)
|
||||
self.variants = parser.parse()
|
||||
|
||||
self.all_variants = {}
|
||||
for variant in self.get_variants():
|
||||
self.all_variants[variant.uid] = variant
|
||||
|
||||
# populate ci_base with variants - needed for layered-products (compose_id)
|
||||
# FIXME - compose_to_composeinfo is no longer needed and has been
|
||||
# removed, but I'm not entirely sure what this is needed for
|
||||
# or if it is at all
|
||||
self.ci_base = compose_to_composeinfo(self)
|
||||
|
||||
def get_variants(self, types=None, arch=None):
|
||||
result = []
|
||||
for i in self.variants.values():
|
||||
if (not types or i.type in types) and (not arch or arch in i.arches):
|
||||
result.append(i)
|
||||
result.extend(i.get_variants(types=types, arch=arch))
|
||||
return sorted(set(result))
|
||||
|
||||
def get_arches(self):
|
||||
result = set()
|
||||
for variant in self.get_variants():
|
||||
for arch in variant.arches:
|
||||
result.add(arch)
|
||||
return sorted(result)
|
||||
|
||||
@property
|
||||
def status_file(self):
|
||||
"""Path to file where the compose status will be stored."""
|
||||
if not hasattr(self, "_status_file"):
|
||||
self._status_file = os.path.join(self.topdir, "STATUS")
|
||||
return self._status_file
|
||||
|
||||
def _log_failed_deliverables(self):
|
||||
for kind, data in self.failed_deliverables.items():
|
||||
for variant, arch, subvariant in data:
|
||||
self.log_info(
|
||||
"Failed %s on variant <%s>, arch <%s>, subvariant <%s>."
|
||||
% (kind, variant, arch, subvariant)
|
||||
)
|
||||
log = os.path.join(self.paths.log.topdir("global"), "deliverables.json")
|
||||
with open(log, "w") as f:
|
||||
json.dump(
|
||||
{
|
||||
"required": self.required_deliverables,
|
||||
"failed": self.failed_deliverables,
|
||||
"attempted": self.attempted_deliverables,
|
||||
},
|
||||
f,
|
||||
indent=4,
|
||||
)
|
||||
|
||||
def write_status(self, stat_msg):
|
||||
if stat_msg not in ("STARTED", "FINISHED", "DOOMED", "TERMINATED"):
|
||||
self.log_warning("Writing nonstandard compose status: %s" % stat_msg)
|
||||
old_status = self.get_status()
|
||||
if stat_msg == old_status:
|
||||
return
|
||||
if old_status == "FINISHED":
|
||||
msg = "Could not modify a FINISHED compose: %s" % self.topdir
|
||||
self.log_error(msg)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
if stat_msg == "FINISHED" and self.failed_deliverables:
|
||||
stat_msg = "FINISHED_INCOMPLETE"
|
||||
|
||||
self._log_failed_deliverables()
|
||||
|
||||
with open(self.status_file, "w") as f:
|
||||
f.write(stat_msg + "\n")
|
||||
|
||||
if self.notifier:
|
||||
self.notifier.send("status-change", status=stat_msg)
|
||||
|
||||
def get_status(self):
|
||||
if not os.path.isfile(self.status_file):
|
||||
return
|
||||
return open(self.status_file, "r").read().strip()
|
||||
|
||||
def get_image_name(
    self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", format=None
):
    """Create a filename for image with given parameters.

    :param arch: image architecture; "src" is normalized to "source"
    :param variant: variant object the image belongs to
    :param disc_type: media type embedded in the name (e.g. "dvd")
    :param disc_num: disc number; a falsy value leaves it out of the name
    :param suffix: file extension including the leading dot
    :param format: explicit format string (or per-variant dict) that
        overrides the ``image_name_format`` configuration option
    :raises RuntimeError: when unknown ``disc_type`` is given
    """
    default_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"
    # Explicit argument wins over the configuration option.
    format = format or self.conf.get("image_name_format", default_format)

    if isinstance(format, dict):
        # The option can map variants to format strings; pick the entry
        # matching this variant, falling back to the default format.
        conf = get_variant_data(self.conf, "image_name_format", variant)
        format = conf[0] if conf else default_format

    if arch == "src":
        arch = "source"

    if disc_num:
        disc_num = int(disc_num)
    else:
        disc_num = ""

    kwargs = {
        "arch": arch,
        "disc_type": disc_type,
        "disc_num": disc_num,
        "suffix": suffix,
    }
    if variant.type == "layered-product":
        # Layered products use their parent variant's uid and the compose id
        # of the base product they are layered on.
        variant_uid = variant.parent.uid
        kwargs["compose_id"] = self.ci_base[variant.uid].compose_id
    else:
        variant_uid = variant.uid
    args = get_format_substs(self, variant=variant_uid, **kwargs)
    try:
        # Support both %-style and {}-style placeholders in the format.
        return (format % args).format(**args)
    except KeyError as err:
        raise RuntimeError(
            "Failed to create image name: unknown format element: %s" % err
        )
|
||||
|
||||
def can_fail(self, variant, arch, deliverable):
    """Figure out if deliverable can fail on variant.arch.

    Variant can be None.
    """
    return deliverable in get_arch_variant_data(
        self.conf, "failable_deliverables", arch, variant
    )
|
||||
|
||||
def attempt_deliverable(self, variant, arch, kind, subvariant=None):
    """Record that a deliverable of the given kind was attempted."""
    record = (variant.uid if variant else "", arch, subvariant)
    self.attempted_deliverables.setdefault(kind, []).append(record)
|
||||
|
||||
def require_deliverable(self, variant, arch, kind, subvariant=None):
    """Log information about required deliverable.

    (The docstring previously said "attempted" — a copy-paste error from
    ``attempt_deliverable``.)
    """
    variant_uid = variant.uid if variant else ""
    self.required_deliverables.setdefault(kind, []).append(
        (variant_uid, arch, subvariant)
    )
|
||||
|
||||
def fail_deliverable(self, variant, arch, kind, subvariant=None):
    """Log information about failed deliverable."""
    record = (variant.uid if variant else "", arch, subvariant)
    self.failed_deliverables.setdefault(kind, []).append(record)
|
||||
|
||||
@property
def image_release(self):
    """Generate a value to pass to Koji as image release.

    If this compose has a label, the version from it will be used,
    otherwise we will create a string with date, compose type and respin.
    """
    if self.compose_label:
        # NOTE(review): assumes the label has exactly one "-" separating
        # milestone from release; a release containing "-" would make this
        # unpacking raise ValueError — confirm labels are validated upstream.
        milestone, release = self.compose_label.split("-")
        return release

    return "%s%s.%s" % (
        self.compose_date,
        self.ci_base.compose.type_suffix,
        self.compose_respin,
    )
|
||||
|
||||
@property
def image_version(self):
    """Generate a value to pass to Koji as image version.

    The value is based on release version. If compose has a label, the
    milestone from it is appended to the version (unless it is RC).
    """
    version = self.ci_base.release.version
    if self.compose_label and not self.compose_label.startswith("RC-"):
        # NOTE(review): like image_release, this assumes a single "-" in
        # the label — confirm labels are validated upstream.
        milestone, release = self.compose_label.split("-")
        return "%s_%s" % (version, milestone)

    return version
|
||||
|
||||
def mkdtemp(self, arch=None, variant=None, suffix="", prefix="tmp"):
    """
    Create and return a unique temporary directory under dir of
    <compose_topdir>/work/{global,<arch>}/tmp[-<variant>]/
    """
    # The single-argument os.path.join() wrapper was redundant; use the
    # work-directory helper's result directly.
    path = self.paths.work.tmp_dir(arch=arch, variant=variant)
    tmpdir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=path)
    # mkdtemp creates the directory 0700; open it up so other tooling
    # can read it.
    os.chmod(tmpdir, 0o755)
    return tmpdir
|
||||
|
||||
def dump_containers_metadata(self):
    """Create a file with container metadata if there are any containers."""
    if not self.containers_metadata:
        return
    metadata_path = self.paths.compose.metadata("osbs.json")
    with open(metadata_path, "w") as f:
        json.dump(
            self.containers_metadata,
            f,
            indent=4,
            sort_keys=True,
            separators=(",", ": "),
        )
|
||||
|
||||
def traceback(self, detail=None, show_locals=True):
    """Store an extended traceback. This method should only be called when
    handling an exception.

    :param str detail: Extra information appended to the filename
    :param bool show_locals: include local variables in the dumped traceback
    """
    basename = "traceback"
    if detail:
        basename += "-" + detail
    tb_path = self.paths.log.log_file("global", basename)
    self.log_error("Extended traceback in: %s", tb_path)
    tback = kobo.tback.Traceback(show_locals=show_locals).get_traceback()
    # Kobo 0.36.0 returns traceback as str, older versions return bytes
    with open(tb_path, "wb" if isinstance(tback, bytes) else "w") as f:
        f.write(tback)
|
||||
|
||||
def load_old_compose_config(self):
    """
    Helper method to load Pungi config dump from old compose.

    Returns the parsed configuration dict, or None when no old compose (or
    its config dump) could be located. A successful load is cached on the
    instance, so subsequent calls do not hit the filesystem again.
    """
    if not self._old_config:
        config_dump_full = self.paths.log.log_file("global", "config-dump")
        # Translate this compose's path into the corresponding path inside
        # the old compose; None means there is nothing to load from.
        config_dump_full = self.paths.old_compose_path(config_dump_full)
        if not config_dump_full:
            return None

        self.log_info("Loading old config file: %s", config_dump_full)
        with open(config_dump_full, "r") as f:
            self._old_config = json.load(f)

    return self._old_config
|
||||
|
||||
|
||||
def get_ordered_variant_uids(compose):
    """Return all variant UIDs in the order they should be processed.

    Variants constrained by the ``variant_as_lookaside`` configuration are
    ordered so that lookaside variants come before their dependents; all
    remaining variants have no ordering constraints and are processed first.
    The result is cached on the compose object.
    """
    if not hasattr(compose, "_ordered_variant_uids"):
        ordered_variant_uids = _prepare_variant_as_lookaside(compose)
        # Some variants were not mentioned in configuration value
        # 'variant_as_lookaside' and their run order is not crucial (that
        # means there are no dependencies inside this group). They will be
        # processed first. A-Z sorting is for reproducibility.
        unordered_variant_uids = sorted(
            set(compose.all_variants.keys()) - set(ordered_variant_uids)
        )
        # Plain attribute assignment; setattr/getattr with literal names
        # was needlessly indirect.
        compose._ordered_variant_uids = (
            unordered_variant_uids + ordered_variant_uids
        )
    return compose._ordered_variant_uids
|
||||
|
||||
|
||||
def _prepare_variant_as_lookaside(compose):
    """
    Configuration value 'variant_as_lookaside' contains variant pairs <variant,
    its lookaside>. In that pair lookaside variant have to be processed first.
    Structure can be represented as a oriented graph. Its spanning line shows
    order how to process this set of variants.

    :raises ValueError: when the configuration contains a self-edge or a
        dependency cycle
    """
    variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
    graph = SimpleAcyclicOrientedGraph()
    for variant, lookaside_variant in variant_as_lookaside:
        try:
            graph.add_edge(variant, lookaside_variant)
        except ValueError as e:
            # Re-raise with context so the user knows which option is broken.
            raise ValueError(
                "There is a bad configuration in 'variant_as_lookaside': %s" % e
            )

    # Reverse the spanning line so lookaside variants end up before the
    # variants that depend on them.
    variant_processing_order = reversed(graph.prune_graph())
    return list(variant_processing_order)
|
0
pungi/compose_metadata/__init__.py
Normal file
0
pungi/compose_metadata/__init__.py
Normal file
93
pungi/compose_metadata/discinfo.py
Normal file
93
pungi/compose_metadata/discinfo.py
Normal file
@ -0,0 +1,93 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
The .discinfo file contains metadata about media.
|
||||
Following fields are part of the .discinfo file,
|
||||
one record per line:
|
||||
- timestamp
|
||||
- release
|
||||
- architecture
|
||||
- disc number (optional)
|
||||
"""
|
||||
|
||||
|
||||
__all__ = (
|
||||
"read_discinfo",
|
||||
"write_discinfo",
|
||||
"write_media_repo",
|
||||
)
|
||||
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
|
||||
def write_discinfo(file_path, description, arch, disc_numbers=None, timestamp=None):
    """
    Write a .discinfo file:

    :param file_path: where to write the file
    :param description: release description (second line of the file)
    :param arch: architecture (third line of the file)
    :param disc_numbers: list of disc numbers; defaults to ["ALL"]
    :param timestamp: value for the first line; defaults to $SOURCE_DATE_EPOCH
        or the current time
    :returns: the timestamp that was written
    :raises TypeError: when disc_numbers is not a list
    """
    disc_numbers = disc_numbers or ["ALL"]
    if not isinstance(disc_numbers, list):
        raise TypeError(
            "Invalid type: disc_numbers type is %s; expected: <list>"
            % type(disc_numbers)
        )
    if not timestamp:
        # Honor SOURCE_DATE_EPOCH for reproducible builds.
        timestamp = os.environ.get("SOURCE_DATE_EPOCH", "%f" % time.time())
    with open(file_path, "w") as f:
        f.write("%s\n" % timestamp)
        f.write("%s\n" % description)
        f.write("%s\n" % arch)
        # disc_numbers is always non-empty here (it defaults to ["ALL"]
        # above), so the former `if disc_numbers:` guard was dead code.
        f.write("%s\n" % ",".join(str(i) for i in disc_numbers))
    return timestamp
|
||||
|
||||
|
||||
def read_discinfo(file_path):
    """Parse a .discinfo file into a dict.

    Keys: timestamp, description, arch, and disc_numbers (None when the
    fourth line is missing, ["ALL"], or a list of ints).
    """
    with open(file_path, "r") as f:
        timestamp, description, arch, disc_numbers = (
            f.readline().strip() for _ in range(4)
        )
    result = {
        "timestamp": timestamp,
        "description": description,
        "arch": arch,
    }
    if not disc_numbers:
        result["disc_numbers"] = None
    elif disc_numbers == "ALL":
        result["disc_numbers"] = ["ALL"]
    else:
        result["disc_numbers"] = [int(i) for i in disc_numbers.split(",")]
    return result
|
||||
|
||||
|
||||
def write_media_repo(file_path, description, timestamp):
    """
    Write media.repo file for the disc to be used on installed system.
    PackageKit uses this.
    """
    lines = (
        "[InstallMedia]",
        "name=%s" % description,
        "mediaid=%s" % timestamp,
        "metadata_expire=-1",
        "gpgcheck=0",
        "cost=500",
        "",
    )
    with open(file_path, "w") as repo_file:
        repo_file.write("\n".join(lines))
    return timestamp
|
79
pungi/config.py
Normal file
79
pungi/config.py
Normal file
@ -0,0 +1,79 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from ConfigParser import SafeConfigParser
|
||||
|
||||
from .arch_utils import getBaseArch
|
||||
|
||||
# In development, `here` will point to the bin/ directory with scripts.
here = sys.path[0]
# Pick the in-tree multilib configuration when running from a checkout,
# otherwise the installed system-wide location.
MULTILIBCONF = (
    os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
    if here != "/usr/bin"
    else "/usr/share/pungi/multilib"
)
|
||||
|
||||
|
||||
class Config(SafeConfigParser):
    """Configuration for the legacy pungi gather/createiso tooling.

    Pre-populates the [pungi] and [lorax] sections with default values and
    optionally overlays values from a user-supplied pungirc file.
    """

    def __init__(self, pungirc=None):
        SafeConfigParser.__init__(self)

        self.add_section("pungi")
        self.add_section("lorax")

        # Directory layout inside the output tree.
        self.set("pungi", "osdir", "os")
        self.set("pungi", "sourcedir", "source")
        self.set("pungi", "debugdir", "debug")
        self.set("pungi", "isodir", "iso")
        self.set("pungi", "multilibconf", MULTILIBCONF)
        # Release-notes related file/package patterns.
        self.set(
            "pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
        )
        self.set("pungi", "relnotedirre", "")
        self.set(
            "pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
        )
        self.set("pungi", "product_path", "Packages")
        self.set("pungi", "cachedir", "/var/cache/pungi")
        self.set("pungi", "compress_type", "xz")
        # Default to the base arch of the host machine.
        self.set("pungi", "arch", getBaseArch())
        self.set("pungi", "family", "Fedora")
        self.set("pungi", "iso_basename", "Fedora")
        # Default version is today's date in YYYYMMDD form.
        self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
        self.set("pungi", "variant", "")
        self.set("pungi", "destdir", os.getcwd())
        self.set("pungi", "workdirbase", "/work")
        self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
        self.set("pungi", "cdsize", "695.0")
        # Boolean-like options are stored as strings, per ConfigParser.
        self.set("pungi", "debuginfo", "True")
        self.set("pungi", "alldeps", "True")
        self.set("pungi", "isfinal", "False")
        self.set("pungi", "nohash", "False")
        self.set("pungi", "full_archlist", "False")
        self.set("pungi", "multilib", "")
        self.set("pungi", "lookaside_repos", "")
        self.set("pungi", "resolve_deps", "True")
        self.set("pungi", "no_dvd", "False")
        self.set("pungi", "nomacboot", "False")
        self.set("pungi", "rootfs_size", "False")

        # if missing, self.read() is a noop, else change 'defaults'
        if pungirc:
            self.read(os.path.expanduser(pungirc))
|
201
pungi/createiso.py
Normal file
201
pungi/createiso.py
Normal file
@ -0,0 +1,201 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import six
|
||||
from collections import namedtuple
|
||||
from kobo.shortcuts import run
|
||||
from six.moves import shlex_quote
|
||||
|
||||
from .wrappers import iso
|
||||
from .wrappers.jigdo import JigdoWrapper
|
||||
|
||||
from .phases.buildinstall import BOOT_CONFIGS, BOOT_IMAGES
|
||||
|
||||
|
||||
# Option bundle consumed by the script-generating functions below when
# building an ISO. Every field defaults to None.
CreateIsoOpts = namedtuple(
    "CreateIsoOpts",
    [
        "buildinstall_method",
        "boot_iso",
        "arch",
        "output_dir",
        "jigdo_dir",
        "iso_name",
        "volid",
        "graft_points",
        "supported",
        "os_tree",
        "hfs_compat",
        "use_xorrisofs",
        "iso_level",
        "script_dir",
    ],
)
CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)
|
||||
|
||||
|
||||
def quote(str):
    """Quote an argument for shell, but make sure $TEMPLATE variable will be
    expanded.
    """
    prefix = "$TEMPLATE"
    if not str.startswith(prefix):
        return shlex_quote(str)
    return prefix + shlex_quote(str.replace(prefix, "", 1))
|
||||
|
||||
|
||||
def emit(f, cmd):
    """Print line of shell code into the stream."""
    if not isinstance(cmd, six.string_types):
        cmd = " ".join(quote(x) for x in cmd)
    print(cmd, file=f)
|
||||
|
||||
|
||||
FIND_TEMPLATE_SNIPPET = """if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then TEMPLATE=/usr/share/lorax; fi""" # noqa: E501
|
||||
|
||||
|
||||
def make_image(f, opts):
    """Emit shell commands into *f* that create the ISO image.

    For lorax-based buildinstall, boot options are added so the image is
    bootable; ppc arches additionally disable the UTF-8 input charset.
    """
    mkisofs_kwargs = {}

    if opts.buildinstall_method:
        if opts.buildinstall_method == "lorax":
            # Locate the lorax templates with the interpreter lorax itself
            # runs under, falling back to the default path.
            emit(f, FIND_TEMPLATE_SNIPPET)
            mkisofs_kwargs["boot_args"] = iso.get_boot_options(
                opts.arch,
                os.path.join("$TEMPLATE", "config_files/ppc"),
                hfs_compat=opts.hfs_compat,
            )

    # ppc(64) doesn't seem to support utf-8
    if opts.arch in ("ppc", "ppc64", "ppc64le"):
        mkisofs_kwargs["input_charset"] = None

    cmd = iso.get_mkisofs_cmd(
        opts.iso_name,
        None,
        volid=opts.volid,
        exclude=["./lost+found"],
        graft_points=opts.graft_points,
        use_xorrisofs=opts.use_xorrisofs,
        iso_level=opts.iso_level,
        **mkisofs_kwargs
    )
    emit(f, cmd)
|
||||
|
||||
|
||||
def implant_md5(f, opts):
    """Emit the command that implants an MD5 checksum into the ISO."""
    emit(f, iso.get_implantisomd5_cmd(opts.iso_name, opts.supported))
|
||||
|
||||
|
||||
def run_isohybrid(f, opts):
    """If the image is bootable, it should include an MBR or GPT so that it can
    be booted when written to USB disk. This is done by running isohybrid on
    the image.
    """
    if not opts.buildinstall_method or opts.arch not in ("x86_64", "i386"):
        return
    emit(f, iso.get_isohybrid_cmd(opts.iso_name, opts.arch))
|
||||
|
||||
|
||||
def make_manifest(f, opts):
    """Emit the command that generates a content manifest for the ISO."""
    cmd = iso.get_manifest_cmd(opts.iso_name, opts.use_xorrisofs)
    emit(f, cmd)
|
||||
|
||||
|
||||
def make_jigdo(f, opts):
    """Emit the command that creates jigdo files for the ISO."""
    wrapper = JigdoWrapper()
    file_specs = [{"path": opts.os_tree, "label": None, "uri": None}]
    cmd = wrapper.get_jigdo_cmd(
        os.path.join(opts.output_dir, opts.iso_name),
        file_specs,
        output_dir=opts.jigdo_dir,
        no_servers=True,
        report="noprogress",
    )
    emit(f, cmd)
|
||||
|
||||
|
||||
def _get_perms(fs_path):
|
||||
"""Compute proper permissions for a file.
|
||||
|
||||
This mimicks what -rational-rock option of genisoimage does. All read bits
|
||||
are set, so that files and directories are globally readable. If any
|
||||
execute bit is set for a file, set them all. No writes are allowed and
|
||||
special bits are erased too.
|
||||
"""
|
||||
statinfo = os.stat(fs_path)
|
||||
perms = 0o444
|
||||
if statinfo.st_mode & 0o111:
|
||||
perms |= 0o111
|
||||
return perms
|
||||
|
||||
|
||||
def write_xorriso_commands(opts):
    """Write a xorriso command file that builds the ISO from boot.iso.

    Instead of creating the image from scratch, the bootable boot.iso is
    taken as a base and only files Pungi may have modified are updated;
    everything else from the OS tree is grafted in. Returns the path to the
    generated command file.
    """
    # Create manifest for the boot.iso listing all contents
    boot_iso_manifest = "%s.manifest" % os.path.join(
        opts.script_dir, os.path.basename(opts.boot_iso)
    )
    run(
        iso.get_manifest_cmd(
            opts.boot_iso, opts.use_xorrisofs, output_file=boot_iso_manifest
        )
    )
    # Find which files may have been updated by pungi. This only includes a few
    # files from tweaking buildinstall and .discinfo metadata. There's no good
    # way to detect whether the boot config files actually changed, so we may
    # be updating files in the ISO with the same data.
    UPDATEABLE_FILES = set(BOOT_IMAGES + BOOT_CONFIGS + [".discinfo"])
    updated_files = set()
    excluded_files = set()
    with open(boot_iso_manifest) as f:
        for line in f:
            path = line.lstrip("/").rstrip("\n")
            if path in UPDATEABLE_FILES:
                updated_files.add(path)
            else:
                excluded_files.add(path)

    script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
    with open(script, "w") as f:
        for cmd in iso.xorriso_commands(
            opts.arch, opts.boot_iso, os.path.join(opts.output_dir, opts.iso_name)
        ):
            emit(f, " ".join(cmd))
        emit(f, "-volid %s" % opts.volid)

        with open(opts.graft_points) as gp:
            for line in gp:
                iso_path, fs_path = line.strip().split("=", 1)
                # Files already present in boot.iso must not be grafted
                # again unless they are the ones we may have changed.
                if iso_path in excluded_files:
                    continue
                cmd = "-update" if iso_path in updated_files else "-map"
                emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
                emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))

        # Normalize ownership the same way -rational-rock would.
        emit(f, "-chown_r 0 /")
        emit(f, "-chgrp_r 0 /")
        emit(f, "-end")
    return script
|
||||
|
||||
|
||||
def write_script(opts, f):
    """Write the complete ISO-building shell script for *opts* into *f*.

    :raises RuntimeError: when only one of jigdo_dir/os_tree is set
    """
    if bool(opts.jigdo_dir) != bool(opts.os_tree):
        raise RuntimeError("jigdo_dir must be used together with os_tree")

    for line in ("#!/bin/bash", "set -ex", "cd %s" % opts.output_dir):
        emit(f, line)

    if opts.use_xorrisofs and opts.buildinstall_method:
        emit(f, "xorriso -dialog on <%s" % write_xorriso_commands(opts))
    else:
        make_image(f, opts)
        run_isohybrid(f, opts)

    implant_md5(f, opts)
    make_manifest(f, opts)
    if opts.jigdo_dir:
        make_jigdo(f, opts)
|
155
pungi/dnf_wrapper.py
Normal file
155
pungi/dnf_wrapper.py
Normal file
@ -0,0 +1,155 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
# TODO: remove all DNF hacks, possibly this whole file
|
||||
|
||||
|
||||
import dnf
|
||||
|
||||
import dnf.conf
|
||||
import dnf.repo
|
||||
import dnf.sack
|
||||
|
||||
import pungi.arch
|
||||
|
||||
try:
|
||||
import dnf.rpm as dnf_arch
|
||||
except ImportError:
|
||||
import dnf.arch as dnf_arch
|
||||
|
||||
|
||||
class Conf(dnf.conf.Conf):
    # This is only modified to get our custom Substitutions class in.
    def __init__(self, arch, *args, **kwargs):
        super(Conf, self).__init__(*args, **kwargs)
        # Replace DNF's host-detecting substitutions with ones pinned to
        # the requested architecture.
        self.substitutions = Substitutions(arch)
|
||||
|
||||
|
||||
class Substitutions(dict):
    # DNF version of Substitutions detects host arch. We don't want that.
    def __init__(self, arch):
        super(Substitutions, self).__init__()
        self["arch"] = arch
        # Base architecture derived from the requested arch, not the host.
        self["basearch"] = dnf_arch.basearch(arch)
|
||||
|
||||
|
||||
class DnfWrapper(dnf.Base):
    """dnf.Base subclass exposing arch and comps helpers for gathering."""

    def __init__(self, *args, **kwargs):
        super(DnfWrapper, self).__init__(*args, **kwargs)
        self.arch_wrapper = ArchWrapper(self.conf.substitutions["arch"])
        self.comps_wrapper = CompsWrapper(self)

    def add_repo(
        self, repoid, baseurl=None, enablegroups=True, lookaside=False, **kwargs
    ):
        """Register a repository; lookaside repos get higher priority (10 < 20)."""
        self.repos.add_new_repo(
            repoid,
            self.conf,
            baseurl=[baseurl],
            # NOTE(review): "enabledgroups" looks like a typo of DNF's
            # "enablegroups" repo option — confirm which spelling dnf's
            # add_new_repo actually accepts.
            enabledgroups=enablegroups,
            priority=10 if lookaside else 20,
            module_hotfixes=True,
            **kwargs
        )
|
||||
|
||||
|
||||
class CompsWrapper(object):
    """Helper around DNF's comps data exposing group and package lookups."""

    def __init__(self, dnf_obj):
        self.dnf = dnf_obj

    def __getitem__(self, name):
        return self.groups[name]

    @property
    def comps(self):
        return self.dnf.comps

    @property
    def groups(self):
        """Mapping of group id -> comps group object."""
        result = {}
        for i in self.comps.groups:
            result[i.id] = i
        return result

    def get_packages_from_group(
        self,
        group_id,
        include_default=True,
        include_optional=True,
        include_conditional=True,
    ):
        """Return (packages, conditional) for one comps group.

        :param group_id: id of the group to inspect
        :param include_default: also include default packages
        :param include_optional: also include optional packages
        :param include_conditional: also include conditional packages
        :returns: tuple of a list of package names and a list of
            {"name": <requirement>, "install": <package>} records
        """
        packages = []
        conditional = []

        group = self.groups[group_id]

        # add mandatory packages
        packages.extend([i.name for i in group.mandatory_packages])

        # add default packages
        if include_default:
            packages.extend([i.name for i in group.default_packages])

        # add optional packages
        if include_optional:
            packages.extend([i.name for i in group.optional_packages])

        # Bug fix: honor include_conditional; the original accepted the flag
        # but collected conditional packages unconditionally.
        if include_conditional:
            for package in group.conditional_packages:
                conditional.append(
                    {"name": package.requires, "install": package.name}
                )

        return packages, conditional

    def get_comps_packages(self, groups, exclude_groups):
        """Resolve a group specification into packages.

        :param groups: list of group ids (all treated as level 1), or a
            mapping of group id -> include level (1 = mandatory + default,
            2 = also optional)
        :param exclude_groups: group ids to skip entirely
        :returns: (list of unique package names, list of conditional records)
        """
        packages = set()
        conditional = []

        if isinstance(groups, list):
            groups = dict([(i, 1) for i in groups])

        for group_id, group_include in sorted(groups.items()):
            if group_id in exclude_groups:
                continue

            include_default = group_include in (1, 2)
            include_optional = group_include in (2,)
            include_conditional = True
            pkgs, cond = self.get_packages_from_group(
                group_id, include_default, include_optional, include_conditional
            )
            packages.update(pkgs)
            # Deduplicate conditional records while preserving order.
            for i in cond:
                if i not in conditional:
                    conditional.append(i)
        return list(packages), conditional

    def get_langpacks(self):
        """Return [{"name": ..., "install": ...}] for all comps langpacks."""
        result = []
        for name, install in self.comps._i.langpacks.items():
            result.append({"name": name, "install": install})
        return result
|
||||
|
||||
|
||||
class ArchWrapper(object):
    """Precomputed architecture lists (multilib, native, source) for an arch."""

    def __init__(self, arch):
        self.base_arch = dnf_arch.basearch(arch)
        # All arches valid for the base arch, including multilib and noarch.
        self.all_arches = pungi.arch.get_valid_arches(
            self.base_arch, multilib=True, add_noarch=True
        )
        # Same, but without the multilib arches.
        self.native_arches = pungi.arch.get_valid_arches(
            self.base_arch, multilib=False, add_noarch=True
        )
        self.multilib_arches = pungi.arch.get_valid_multilib_arches(self.base_arch)
        self.source_arches = ["src", "nosrc"]
|
20
pungi/errors.py
Normal file
20
pungi/errors.py
Normal file
@ -0,0 +1,20 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
class UnsignedPackagesError(RuntimeError):
    """Raised when package set fails to find a properly signed copy of an
    RPM."""
|
2297
pungi/gather.py
Normal file
2297
pungi/gather.py
Normal file
File diff suppressed because it is too large
Load Diff
1099
pungi/gather_dnf.py
Normal file
1099
pungi/gather_dnf.py
Normal file
File diff suppressed because it is too large
Load Diff
105
pungi/graph.py
Executable file
105
pungi/graph.py
Executable file
@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
class SimpleAcyclicOrientedGraph(object):
    """
    Stores a graph data structure and allows operation with it.
    Example data: {'P1': ['P2'], 'P3': ['P4', 'P5'], 'P2': 'P3'}
    Graph is constructed by adding oriented edges one by one. It can not contain cycles.
    Main result is spanning line, it determines ordering of the nodes.
    """

    def __init__(self):
        # Adjacency mapping: node -> list of nodes its edges point at.
        self._graph = {}
        # Every node ever mentioned in an edge (as start or end).
        self._all_nodes = set()

    def add_edge(self, start, end):
        """
        Add one edge from node 'start' to node 'end'.
        This operation must not create a cycle in the graph.

        :raises ValueError: on a self-edge, or when the new edge creates a cycle
        """
        if start == end:
            raise ValueError(
                "Can not add this kind of edge into graph: %s-%s" % (start, end)
            )
        self._graph.setdefault(start, [])
        if end not in self._graph[start]:
            self._graph[start].append(end)
        self._all_nodes.add(start)
        self._all_nodes.add(end)
        # try to find opposite direction path (from end to start)
        # to detect newly created cycle
        path = SimpleAcyclicOrientedGraph.find_path(self._graph, end, start)
        if path:
            raise ValueError("There is a cycle in the graph: %s" % path)

    def get_active_nodes(self):
        """
        nodes connected to any edge
        """
        active_nodes = set()
        for start, ends in self._graph.items():
            active_nodes.add(start)
            active_nodes.update(ends)
        return active_nodes

    def is_final_endpoint(self, node):
        """
        edge(s) ends in this node; no other edge starts in this node

        :raises ValueError: when the node is not part of the graph
        """
        if node not in self._all_nodes:
            # Bug fix: the original *returned* the ValueError instance
            # instead of raising it, which made the result truthy for
            # unknown nodes.
            raise ValueError("This node is not found in the graph: %s" % node)
        if node not in self.get_active_nodes():
            return False
        return node not in self._graph

    def remove_final_endpoint(self, node):
        """Remove *node* from the graph, dropping edges that pointed at it
        and any start nodes left without outgoing edges."""
        remove_start_points = []
        for start, ends in self._graph.items():
            if node in ends:
                ends.remove(node)
                if not ends:
                    remove_start_points.append(start)
        for start in remove_start_points:
            del self._graph[start]

    @staticmethod
    def find_path(graph, start, end, path=None):
        """
        find path among nodes 'start' and 'end' recursively; None when there
        is no such path
        """
        # Avoid the mutable-default-argument pitfall (the original used
        # path=[]); a fresh list is built on every call anyway.
        path = (path or []) + [start]
        if start == end:
            return path
        if start not in graph:
            return None
        for node in graph[start]:
            if node not in path:
                newpath = SimpleAcyclicOrientedGraph.find_path(graph, node, end, path)
                if newpath:
                    return newpath
        return None

    def prune_graph(self):
        """
        Construct spanning_line by pruning the graph.
        Looking for endpoints and remove them one by one until graph is empty.
        """
        spanning_line = []
        while self._graph:
            for node in sorted(self._all_nodes):
                if self.is_final_endpoint(node):
                    self.remove_final_endpoint(node)
                    spanning_line.insert(0, node)
                # orphan node = no edge is connected with this node
                orphans = self._all_nodes - self.get_active_nodes()
                if orphans:
                    # restart iteration so the size of self._all_nodes does
                    # not change during iteration
                    break
            for orphan in orphans:
                if orphan not in spanning_line:
                    spanning_line.insert(0, orphan)
                self._all_nodes.remove(orphan)
        return spanning_line
|
223
pungi/ks.py
Normal file
223
pungi/ks.py
Normal file
@ -0,0 +1,223 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
Pungi adds several new sections to kickstarts.
|
||||
|
||||
|
||||
FULLTREE EXCLUDES
|
||||
-----------------
|
||||
Fulltree excludes allow us to define SRPM names
|
||||
we don't want to be part of fulltree processing.
|
||||
|
||||
Syntax:
|
||||
%fulltree-excludes
|
||||
<srpm_name>
|
||||
<srpm_name>
|
||||
...
|
||||
%end
|
||||
|
||||
|
||||
MULTILIB BLACKLIST
|
||||
------------------
|
||||
List of RPMs which are prevented from becoming multilib.
|
||||
|
||||
Syntax:
|
||||
%multilib-blacklist
|
||||
<rpm_name>
|
||||
<rpm_name>
|
||||
...
|
||||
%end
|
||||
|
||||
|
||||
MULTILIB WHITELIST
|
||||
------------------
|
||||
List of RPMs which will become multilib (but only if native package is pulled in).
|
||||
|
||||
Syntax:
|
||||
%multilib-whitelist
|
||||
<rpm_name>
|
||||
<rpm_name>
|
||||
...
|
||||
%end
|
||||
|
||||
|
||||
PREPOPULATE
|
||||
-----------
|
||||
To make sure no package is left behind between 2 composes,
|
||||
we can explicitly add <name>.<arch> records to the %prepopulate section.
|
||||
These will be added to the input list and marked with 'prepopulate' flag.
|
||||
|
||||
Syntax:
|
||||
%prepopulate
|
||||
<rpm_name>.<rpm_arch>
|
||||
<rpm_name>.<rpm_arch>
|
||||
...
|
||||
%end
|
||||
"""
|
||||
|
||||
|
||||
import pykickstart.parser
import pykickstart.sections
import pykickstart.version
from pykickstart.constants import GROUP_ALL, GROUP_DEFAULT, GROUP_REQUIRED
|
||||
|
||||
|
||||
class FulltreeExcludesSection(pykickstart.sections.Section):
    """Parse the %fulltree-excludes kickstart section.

    Each non-empty line names an SRPM that must not take part in
    fulltree processing; the names are collected into
    ``handler.fulltree_excludes``.
    """

    sectionOpen = "%fulltree-excludes"

    def handleLine(self, line):
        if not self.handler:
            return

        # Drop trailing "#" comments and trailing whitespace.
        line = line.partition("#")[0].rstrip()
        if not line:
            # BUGFIX: blank and comment-only lines used to add "" to the set.
            return

        self.handler.fulltree_excludes.add(line)
|
||||
|
||||
|
||||
class MultilibBlacklistSection(pykickstart.sections.Section):
    """Parse the %multilib-blacklist kickstart section.

    Each non-empty line names an RPM that is prevented from becoming
    multilib; names are collected into ``handler.multilib_blacklist``.
    """

    sectionOpen = "%multilib-blacklist"

    def handleLine(self, line):
        if not self.handler:
            return

        # Drop trailing "#" comments and trailing whitespace.
        line = line.partition("#")[0].rstrip()
        if not line:
            # BUGFIX: blank and comment-only lines used to add "" to the set.
            return

        self.handler.multilib_blacklist.add(line)
|
||||
|
||||
|
||||
class MultilibWhitelistSection(pykickstart.sections.Section):
    """Parse the %multilib-whitelist kickstart section.

    Each non-empty line names an RPM that may become multilib (only if
    the native package is pulled in); names are collected into
    ``handler.multilib_whitelist``.
    """

    sectionOpen = "%multilib-whitelist"

    def handleLine(self, line):
        if not self.handler:
            return

        # Drop trailing "#" comments and trailing whitespace.
        line = line.partition("#")[0].rstrip()
        if not line:
            # BUGFIX: blank and comment-only lines used to add "" to the set.
            return

        self.handler.multilib_whitelist.add(line)
|
||||
|
||||
|
||||
class PrepopulateSection(pykickstart.sections.Section):
    """Parse the %prepopulate kickstart section.

    Each non-empty line is a ``<name>.<arch>`` record to be added to the
    input list and marked with the 'prepopulate' flag; records are
    collected into ``handler.prepopulate``.
    """

    sectionOpen = "%prepopulate"

    def handleLine(self, line):
        if not self.handler:
            return

        # Drop trailing "#" comments and trailing whitespace.
        line = line.partition("#")[0].rstrip()
        if not line:
            # BUGFIX: blank and comment-only lines used to add "" to the set.
            return

        self.handler.prepopulate.add(line)
|
||||
|
||||
|
||||
class KickstartParser(pykickstart.parser.KickstartParser):
    """Kickstart parser that understands the extra Pungi sections."""

    def setupSections(self):
        pykickstart.parser.KickstartParser.setupSections(self)
        self.registerSection(FulltreeExcludesSection(self.handler))
        self.registerSection(MultilibBlacklistSection(self.handler))
        self.registerSection(MultilibWhitelistSection(self.handler))
        self.registerSection(PrepopulateSection(self.handler))

    def get_packages(self, dnf_obj):
        """Resolve the %packages section against comps.

        Returns a tuple ``(packages, conditional_packages)`` where
        *packages* is a set of package names and *conditional_packages*
        is an ordered, de-duplicated list of conditional entries.
        """
        packages = set()
        conditional_packages = []

        packages.update(self.handler.packages.packageList)

        for ks_group in self.handler.packages.groupList:
            group_id = ks_group.name

            if ks_group.include == GROUP_REQUIRED:
                include_default = False
                include_optional = False
            elif ks_group.include == GROUP_DEFAULT:
                include_default = True
                include_optional = False
            else:
                # GROUP_ALL: pull in default and optional packages.
                include_default = True
                include_optional = True

            (
                group_packages,
                group_conditional_packages,
            ) = dnf_obj.comps_wrapper.get_packages_from_group(
                group_id,
                include_default=include_default,
                include_optional=include_optional,
                include_conditional=True,
            )
            packages.update(group_packages)
            # Preserve order while de-duplicating conditional packages.
            for cond in group_conditional_packages:
                if cond not in conditional_packages:
                    conditional_packages.append(cond)

        return packages, conditional_packages

    def get_excluded_packages(self, dnf_obj):
        """Resolve excluded packages and excluded groups to a set of names."""
        excluded = set()
        excluded.update(self.handler.packages.excludedList)

        for ks_group in self.handler.packages.excludedGroupList:
            group_id = ks_group.name
            # Use the named pykickstart constants instead of the magic
            # numbers 1 and 2 used previously (GROUP_DEFAULT == 1,
            # GROUP_ALL == 2), mirroring get_packages() above.
            include_default = ks_group.include in (GROUP_DEFAULT, GROUP_ALL)
            include_optional = ks_group.include == GROUP_ALL

            (
                group_packages,
                _group_conditional_packages,
            ) = dnf_obj.comps_wrapper.get_packages_from_group(
                group_id,
                include_default=include_default,
                include_optional=include_optional,
                include_conditional=False,
            )
            excluded.update(group_packages)

        return excluded
|
||||
|
||||
|
||||
# Kickstart handler base class matching the pykickstart version available
# at runtime; PungiHandler below extends it with the Pungi sections' state.
HandlerClass = pykickstart.version.returnClassForVersion()
|
||||
|
||||
|
||||
class PungiHandler(HandlerClass):
    """Kickstart handler extended with storage for the Pungi sections."""

    def __init__(self, *args, **kwargs):
        HandlerClass.__init__(self, *args, **kwargs)
        # One accumulator set per Pungi-specific kickstart section.
        for section_attr in (
            "fulltree_excludes",
            "multilib_blacklist",
            "multilib_whitelist",
            "prepopulate",
        ):
            setattr(self, section_attr, set())
|
||||
|
||||
|
||||
def get_ksparser(ks_path=None):
    """Create a KickstartParser backed by a fresh PungiHandler.

    When *ks_path* is given (and non-empty), the kickstart file is read
    immediately before the parser is returned.
    """
    parser = KickstartParser(PungiHandler())
    if ks_path:
        parser.readKickstart(ks_path)
    return parser
|
247
pungi/linker.py
Normal file
247
pungi/linker.py
Normal file
@ -0,0 +1,247 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import kobo.log
|
||||
from kobo.shortcuts import relative_path
|
||||
from kobo.threads import WorkerThread, ThreadPool
|
||||
|
||||
from pungi.util import makedirs
|
||||
|
||||
|
||||
class LinkerPool(ThreadPool):
    """Thread pool whose workers link files via a shared Linker instance."""

    def __init__(self, link_type="hardlink-or-copy", logger=None):
        ThreadPool.__init__(self, logger)
        self.link_type = link_type
        self.linker = Linker()

    @classmethod
    def with_workers(cls, num_workers, *args, **kwargs):
        """Alternate constructor: a pool pre-populated with *num_workers* threads."""
        pool = cls(*args, **kwargs)
        for _worker in range(num_workers):
            pool.add(LinkerThread(pool))
        return pool
|
||||
|
||||
|
||||
@contextlib.contextmanager
def linker_pool(link_type="hardlink-or-copy", num_workers=10):
    """Create a linker and make sure it is stopped no matter what."""
    pool = LinkerPool.with_workers(num_workers=num_workers, link_type=link_type)
    pool.start()
    try:
        yield pool
    finally:
        # Stop the workers even if the body raised.
        pool.stop()
|
||||
|
||||
|
||||
class LinkerThread(WorkerThread):
    """Worker that links one (src, dst) pair per queue item."""

    def process(self, item, num):
        src, dst = item

        # Report progress every 100 items and for the very last one.
        if num % 100 == 0 or num == self.pool.queue_total:
            self.pool.log_debug(
                "Linked %s out of %s packages" % (num, self.pool.queue_total)
            )

        makedirs(os.path.dirname(dst))
        self.pool.linker.link(src, dst, link_type=self.pool.link_type)
|
||||
|
||||
|
||||
class Linker(kobo.log.LoggingBase):
    """Create hardlinks, symlinks or copies of files and whole trees.

    With ``test=True`` every operation becomes a dry run that only logs
    what it would have done.  ``always_copy`` is stored but not consulted
    here; it is kept for API compatibility with existing callers.
    """

    def __init__(self, always_copy=None, test=False, logger=None):
        kobo.log.LoggingBase.__init__(self, logger=logger)
        self.always_copy = always_copy or []
        self.test = test
        # Maps (st_dev, st_ino) of already-copied sources to the path they
        # were copied to, so hardlink groups are recreated on the
        # destination side instead of being duplicated.
        self._inode_map = {}

    def _is_same_type(self, path1, path2):
        """Return True if both paths are the same kind of entry (link/dir/file)."""
        if os.path.islink(path1) != os.path.islink(path2):
            return False
        if os.path.isdir(path1) != os.path.isdir(path2):
            return False
        if os.path.isfile(path1) != os.path.isfile(path2):
            return False
        return True

    def _is_same(self, path1, path2):
        """Heuristic equality: identical path, or equal size and whole-second mtime.

        A broken symlink at *path2* is treated as "same" so a dangling
        link is not reported as a conflict.
        """
        if path1 == path2:
            return True
        if os.path.islink(path2) and not os.path.exists(path2):
            # Broken symlink
            return True
        if os.path.getsize(path1) != os.path.getsize(path2):
            return False
        if int(os.path.getmtime(path1)) != int(os.path.getmtime(path2)):
            return False
        return True

    def symlink(self, src, dst, relative=True):
        """Symlink *dst* -> *src*, relatively unless *relative* is False."""
        if src == dst:
            return

        # Always hardlink or copy scratch builds
        if "/work/tasks/" in src:
            self._link_file(src, dst, "hardlink-or-copy")
            # BUGFIX: must stop here -- previously execution fell through
            # and also attempted to symlink over the fresh hardlink/copy.
            return

        old_src = src
        if relative:
            src = relative_path(src, dst)

        msg = "Symlinking %s -> %s" % (dst, src)
        if self.test:
            self.log_info("TEST: %s" % msg)
            return
        self.log_info(msg)

        try:
            os.symlink(src, dst)
        except OSError as ex:
            if ex.errno != errno.EEXIST:
                raise
            # An entry already exists: tolerate it only when it is an
            # identical symlink pointing at the same target.
            if os.path.islink(dst) and self._is_same(old_src, dst):
                if os.readlink(dst) != src:
                    raise
                self.log_debug(
                    "The same file already exists, skipping symlink %s -> %s"
                    % (dst, src)
                )
            else:
                raise

    def hardlink(self, src, dst):
        """Hardlink *src* to *dst*; tolerate an existing identical file."""
        if src == dst:
            return

        msg = "Hardlinking %s to %s" % (src, dst)
        if self.test:
            self.log_info("TEST: %s" % msg)
            return
        self.log_info(msg)

        try:
            os.link(src, dst)
        except OSError as ex:
            if ex.errno != errno.EEXIST:
                raise
            if self._is_same(src, dst):
                if not self._is_same_type(src, dst):
                    self.log_error(
                        "File %s already exists but has different type than %s"
                        % (dst, src)
                    )
                    raise
                self.log_debug(
                    "The same file already exists, skipping hardlink %s to %s"
                    % (src, dst)
                )
            else:
                raise

    def copy(self, src, dst):
        """Copy *src* to *dst*, preserving symlinks and hardlink groups."""
        if src == dst:
            return True

        if os.path.islink(src):
            msg = "Copying symlink %s to %s" % (src, dst)
        else:
            msg = "Copying file %s to %s" % (src, dst)

        if self.test:
            self.log_info("TEST: %s" % msg)
            return
        self.log_info(msg)

        if os.path.exists(dst):
            if self._is_same(src, dst):
                if not self._is_same_type(src, dst):
                    self.log_error(
                        "File %s already exists but has different type than %s"
                        % (dst, src)
                    )
                    raise OSError(errno.EEXIST, "File exists")
                self.log_debug(
                    "The same file already exists, skipping copy %s to %s" % (src, dst)
                )
                return
            else:
                raise OSError(errno.EEXIST, "File exists")

        if os.path.islink(src):
            # Recreate the symlink with the same target rather than
            # copying the file it points to.
            if not os.path.islink(dst):
                os.symlink(os.readlink(src), dst)
                return
            return

        src_stat = os.stat(src)
        src_key = (src_stat.st_dev, src_stat.st_ino)
        if src_key in self._inode_map:
            # (st_dev, st_ino) found in the mapping
            self.log_debug(
                "Hardlink detected, hardlinking in destination %s to %s"
                % (self._inode_map[src_key], dst)
            )
            os.link(self._inode_map[src_key], dst)
            return

        # BEWARE: shutil.copy2 automatically *rewrites* existing files
        shutil.copy2(src, dst)
        self._inode_map[src_key] = dst

    def _link_file(self, src, dst, link_type):
        """Dispatch a single-file link operation according to *link_type*."""
        if link_type == "hardlink":
            self.hardlink(src, dst)
        elif link_type == "copy":
            self.copy(src, dst)
        elif link_type in ("symlink", "abspath-symlink"):
            if os.path.islink(src):
                self.copy(src, dst)
            else:
                relative = link_type != "abspath-symlink"
                self.symlink(src, dst, relative)
        elif link_type == "hardlink-or-copy":
            try:
                self.hardlink(src, dst)
            except OSError as ex:
                # EXDEV: cross-device link not possible -- fall back to copy.
                if ex.errno == errno.EXDEV:
                    self.copy(src, dst)
                else:
                    raise
        else:
            raise ValueError("Unknown link_type: %s" % link_type)

    def link(self, src, dst, link_type="hardlink-or-copy"):
        """Link directories recursively."""
        if os.path.isfile(src) or os.path.islink(src):
            self._link_file(src, dst, link_type)
            return

        if os.path.isfile(dst):
            raise OSError(errno.EEXIST, "File exists")

        if not self.test:
            if not os.path.exists(dst):
                makedirs(dst)
            shutil.copystat(src, dst)

        for entry in os.listdir(src):
            self.link(os.path.join(src, entry), os.path.join(dst, entry), link_type)
|
127
pungi/media_split.py
Normal file
127
pungi/media_split.py
Normal file
@ -0,0 +1,127 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
# Suffix -> multiplier for media size specifications.
SIZE_UNITS = {
    "b": 1,
    "k": 1024,
    "M": 1024**2,
    "G": 1024**3,
}


def convert_media_size(size):
    """Convert a media size specification to bytes.

    *size* may be an int (bytes) or a string with an optional one-letter
    suffix from SIZE_UNITS, e.g. "640k" or "4G"; a plain numeric string
    is taken as bytes.  Raises ValueError for non-positive results.
    """
    if isinstance(size, str):
        suffix = size[-1]
        if suffix in SIZE_UNITS:
            result = int(size[:-1]) * SIZE_UNITS[suffix]
        else:
            result = int(size)
    else:
        result = int(size)

    if result <= 0:
        raise ValueError("Media size must be a positive number: %s" % size)

    return result
|
||||
|
||||
|
||||
def convert_file_size(size, block_size=2048):
    """Round *size* up to the nearest multiple of *block_size*.

    BUGFIX: the original used float division (``int(size / block_size)``),
    which loses precision for sizes at or above 2**53 bytes and could
    round the block count the wrong way.  Integer ceiling division is
    exact for any non-negative integer size.
    """
    size = int(size)
    return ((size + block_size - 1) // block_size) * block_size
|
||||
|
||||
|
||||
class MediaSplitter(object):
    """
    MediaSplitter splits files so that they fit on a media of given size.

    Each file added to the splitter has a size in bytes that will be rounded
    to the nearest multiple of block size.  If the file is sticky, it will be
    included on each disk.  The files will be on disks in the same order they
    are added; there is no re-ordering.  The number of disks is thus not the
    possible minimum.
    """

    def __init__(self, media_size, compose=None, logger=None):
        # media_size may be falsy (None/0), meaning "unlimited" -> one disk.
        self.media_size = media_size
        self.files = []  # to preserve order
        self.file_sizes = {}
        self.sticky_files = set()
        self.compose = compose
        self.logger = logger
        if not self.logger and self.compose:
            self.logger = self.compose._logger

    def add_file(self, name, size, sticky=False):
        """Register *name* with *size* bytes; sticky files go on every disk.

        Raises ValueError if the same name is re-added with a different
        size, or if the file alone exceeds the media size.
        """
        name = os.path.normpath(name)
        size = int(size)
        old_size = self.file_sizes.get(name)

        if old_size is not None and old_size != size:
            raise ValueError(
                "File size mismatch; file: %s; sizes: %s vs %s" % (name, old_size, size)
            )
        if self.media_size and size > self.media_size:
            raise ValueError("File is larger than media size: %s" % name)

        self.files.append(name)
        self.file_sizes[name] = size
        if sticky:
            self.sticky_files.add(name)

    @property
    def total_size(self):
        """Sum of raw file sizes in bytes (each distinct name counted once)."""
        return sum(self.file_sizes.values())

    @property
    def total_size_in_blocks(self):
        """Sum of file sizes rounded up to whole blocks."""
        return sum(convert_file_size(size) for size in self.file_sizes.values())

    def split(self, first_disk=0, all_disks=0):
        """Distribute files over disks.

        Returns a list of ``{"size": int, "files": [name, ...]}`` dicts.
        Every disk starts with all sticky files already on it.
        """
        regular_files = []
        sticky_files = []
        sticky_files_size = 0

        for name in self.files:
            if name in self.sticky_files:
                sticky_files.append(name)
                sticky_files_size += convert_file_size(self.file_sizes[name])
            else:
                regular_files.append(name)

        disks = []
        disk = {}
        # NOTE: the original carried an unused ``total_size_single``
        # accumulator here; it has been removed (dead code).
        for name in regular_files:
            size = convert_file_size(self.file_sizes[name])

            # Open a new disk when there is none yet or this file would
            # overflow the current one (unlimited media never overflows).
            if not disks or (self.media_size and disk["size"] + size > self.media_size):
                disk = {"size": sticky_files_size, "files": sticky_files[:]}
                disks.append(disk)

            disk["files"].append(name)
            disk["size"] += size
        return disks
|
537
pungi/metadata.py
Normal file
537
pungi/metadata.py
Normal file
@ -0,0 +1,537 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import copy
|
||||
import os
|
||||
import time
|
||||
|
||||
import productmd.composeinfo
|
||||
import productmd.treeinfo
|
||||
from productmd.common import get_major_version
|
||||
from kobo.shortcuts import relative_path, compute_file_checksums
|
||||
|
||||
from pungi.compose_metadata.discinfo import write_discinfo as create_discinfo
|
||||
from pungi.compose_metadata.discinfo import write_media_repo as create_media_repo
|
||||
|
||||
|
||||
def get_description(compose, variant, arch):
    """Build the human-readable description used in .discinfo / media.repo.

    An explicit 'release_discinfo_description' config option wins;
    layered products get a combined "<product> for <base>" description.
    The result may contain %(variant_name)s / %(arch)s placeholders,
    which are expanded before returning.
    """
    conf = compose.conf
    if "release_discinfo_description" in conf:
        result = conf["release_discinfo_description"]
    elif variant.type == "layered-product":
        # we need to make sure the layered product behaves as it was composed separately
        result = "%s %s for %s %s" % (
            variant.release_name,
            variant.release_version,
            conf["release_name"],
            get_major_version(conf["release_version"]),
        )
    else:
        result = "%s %s" % (conf["release_name"], conf["release_version"])
        if conf.get("base_product_name", ""):
            result += " for %s %s" % (
                conf["base_product_name"],
                conf["base_product_version"],
            )

    return result % {"variant_name": variant.name, "arch": arch}
|
||||
|
||||
|
||||
def write_discinfo(compose, arch, variant):
    """Write a .discinfo file into the variant's os tree (skipped for addons)."""
    if variant.type == "addon":
        return
    tree_dir = compose.paths.compose.os_tree(arch, variant)
    description = get_description(compose, variant, arch)
    return create_discinfo(os.path.join(tree_dir, ".discinfo"), description, arch)
|
||||
|
||||
|
||||
def write_media_repo(compose, arch, variant, timestamp=None):
    """Write a media.repo file into the variant's os tree (skipped for addons)."""
    if variant.type == "addon":
        return
    tree_dir = compose.paths.compose.os_tree(arch, variant)
    description = get_description(compose, variant, arch)
    return create_media_repo(os.path.join(tree_dir, "media.repo"), description, timestamp)
|
||||
|
||||
|
||||
def compose_to_composeinfo(compose):
    """Build a productmd ComposeInfo object from a Compose.

    Fills in compose identity, release/base-product data, and -- per
    variant and architecture -- paths relative to the compose top dir.
    Paths are only recorded for directories that exist on disk (isos,
    images, jigdos).
    """
    ci = productmd.composeinfo.ComposeInfo()

    # compose
    ci.compose.id = compose.compose_id
    ci.compose.type = compose.compose_type
    ci.compose.date = compose.compose_date
    ci.compose.respin = compose.compose_respin
    ci.compose.label = compose.compose_label
    ci.compose.final = compose.supported

    # product
    ci.release.name = compose.conf["release_name"]
    ci.release.version = compose.conf["release_version"]
    ci.release.short = compose.conf["release_short"]
    ci.release.is_layered = True if compose.conf.get("base_product_name", "") else False
    ci.release.type = compose.conf["release_type"].lower()
    ci.release.internal = bool(compose.conf["release_internal"])

    # base product
    if ci.release.is_layered:
        ci.base_product.name = compose.conf["base_product_name"]
        ci.base_product.version = compose.conf["base_product_version"]
        ci.base_product.short = compose.conf["base_product_short"]
        ci.base_product.type = compose.conf["base_product_type"].lower()

    def dump_variant(variant, parent=None):
        # Returns a populated productmd Variant, or None when the variant
        # has no arch in common with the configured tree_arches filter.
        var = productmd.composeinfo.Variant(ci)

        tree_arches = compose.conf.get("tree_arches")
        if tree_arches and not (set(variant.arches) & set(tree_arches)):
            return None

        # variant details
        # remove dashes from variant ID, rely on productmd verification
        var.id = variant.id.replace("-", "")
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type
        var.arches = set(variant.arches)

        if var.type == "layered-product":
            var.release.name = variant.release_name
            var.release.short = variant.release_short
            var.release.version = variant.release_version
            var.release.is_layered = True
            var.release.type = ci.release.type

        for arch in variant.arches:
            # paths: binaries
            # Each path is made relative to the compose top dir; the
            # trailing-slash juggling keeps kobo's relative_path happy.
            var.paths.os_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            iso_dir = (
                compose.paths.compose.iso_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            # Optional dirs are only recorded if they exist on disk.
            if iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), iso_dir)
            ):
                var.paths.isos[arch] = relative_path(
                    iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            image_dir = compose.paths.compose.image_dir(variant=variant) or ""
            if image_dir:
                # image_dir is a template containing an %(arch)s placeholder.
                image_dir = image_dir % {"arch": arch}
                if os.path.isdir(image_dir):
                    var.paths.images[arch] = relative_path(
                        image_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                    ).rstrip("/")
            jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            if jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), jigdo_dir)
            ):
                var.paths.jigdos[arch] = relative_path(
                    jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: sources
            var.paths.source_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            source_iso_dir = (
                compose.paths.compose.iso_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_iso_dir)
            ):
                var.paths.source_isos[arch] = relative_path(
                    source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            source_jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)
            ):
                var.paths.source_jigdos[arch] = relative_path(
                    source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: debug
            var.paths.debug_tree[arch] = relative_path(
                compose.paths.compose.debug_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_repository[arch] = relative_path(
                compose.paths.compose.debug_repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_packages[arch] = relative_path(
                compose.paths.compose.debug_packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            """
            # XXX: not supported (yet?)
            debug_iso_dir = (
                compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
            )
            if debug_iso_dir:
                var.debug_iso_dir[arch] = relative_path(
                    debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            debug_jigdo_dir = (
                compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
            )
            if debug_jigdo_dir:
                var.debug_jigdo_dir[arch] = relative_path(
                    debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            """

        # Recurse into child variants (addons etc.).
        for v in variant.get_variants(recursive=False):
            x = dump_variant(v, parent=variant)
            if x is not None:
                var.add(x)
        return var

    for variant_id in sorted(compose.variants):
        variant = compose.variants[variant_id]
        v = dump_variant(variant)
        if v is not None:
            ci.variants.add(v)
    return ci
|
||||
|
||||
|
||||
def write_compose_info(compose):
    """Dump composeinfo.json, omitting variant paths that do not exist on disk."""
    ci = compose_to_composeinfo(compose)

    msg = "Writing composeinfo"
    compose.log_info("[BEGIN] %s" % msg)

    path = compose.paths.compose.metadata("composeinfo.json")
    # Work on a deep copy: any variant path that is missing or not a
    # directory is set to None so it is not dumped into the metadata.
    ci_copy = copy.deepcopy(ci)
    for variant in ci_copy.variants.variants.values():
        for field in variant.paths._fields:
            field_paths = getattr(variant.paths, field)
            for arch, dirpath in field_paths.items():
                full_path = os.path.join(compose.paths.compose.topdir(), dirpath)
                if not os.path.isdir(full_path):
                    field_paths[arch] = None
    ci_copy.dump(path)

    compose.log_info("[DONE ] %s" % msg)
|
||||
|
||||
|
||||
def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
|
||||
if variant.type in ("addon",) or variant.is_empty:
|
||||
return
|
||||
|
||||
if not timestamp:
|
||||
timestamp = int(time.time())
|
||||
else:
|
||||
timestamp = int(timestamp)
|
||||
|
||||
os_tree = (
|
||||
compose.paths.compose.os_tree(arch=arch, variant=variant).rstrip("/") + "/"
|
||||
)
|
||||
|
||||
ti = productmd.treeinfo.TreeInfo()
|
||||
# load from buildinstall .treeinfo
|
||||
|
||||
if variant.type == "layered-product":
|
||||
# we need to make sure the layered product behaves as it was composed separately
|
||||
|
||||
# release
|
||||
# TODO: read from variants.xml
|
||||
ti.release.name = variant.release_name
|
||||
ti.release.version = variant.release_version
|
||||
ti.release.short = variant.release_short
|
||||
ti.release.is_layered = True
|
||||
ti.release.type = compose.conf["release_type"].lower()
|
||||
|
||||
# base product
|
||||
ti.base_product.name = compose.conf["release_name"]
|
||||
if "." in compose.conf["release_version"]:
|
||||
# remove minor version if present
|
||||
ti.base_product.version = get_major_version(compose.conf["release_version"])
|
||||
else:
|
||||
ti.base_product.version = compose.conf["release_version"]
|
||||
ti.base_product.short = compose.conf["release_short"]
|
||||
else:
|
||||
# release
|
||||
ti.release.name = compose.conf["release_name"]
|
||||
ti.release.version = compose.conf.get(
|
||||
"treeinfo_version", compose.conf["release_version"]
|
||||
)
|
||||
ti.release.short = compose.conf["release_short"]
|
||||
ti.release.is_layered = (
|
||||
True if compose.conf.get("base_product_name", "") else False
|
||||
)
|
||||
ti.release.type = compose.conf["release_type"].lower()
|
||||
|
||||
# base product
|
||||
if ti.release.is_layered:
|
||||
ti.base_product.name = compose.conf["base_product_name"]
|
||||
ti.base_product.version = compose.conf["base_product_version"]
|
||||
ti.base_product.short = compose.conf["base_product_short"]
|
||||
|
||||
# tree
|
||||
ti.tree.arch = arch
|
||||
ti.tree.build_timestamp = timestamp
|
||||
# ti.platforms
|
||||
|
||||
# main variant
|
||||
var = productmd.treeinfo.Variant(ti)
|
||||
if variant.type == "layered-product":
|
||||
var.id = variant.parent.id
|
||||
var.uid = variant.parent.uid
|
||||
var.name = variant.parent.name
|
||||
var.type = "variant"
|
||||
else:
|
||||
# remove dashes from variant ID, rely on productmd verification
|
||||
var.id = variant.id.replace("-", "")
|
||||
var.uid = variant.uid
|
||||
var.name = variant.name
|
||||
var.type = variant.type
|
||||
|
||||
var.paths.packages = (
|
||||
relative_path(
|
||||
compose.paths.compose.packages(
|
||||
arch=arch, variant=variant, create_dir=False
|
||||
).rstrip("/")
|
||||
+ "/",
|
||||
os_tree,
|
||||
).rstrip("/")
|
||||
or "."
|
||||
)
|
||||
var.paths.repository = (
|
||||
relative_path(
|
||||
compose.paths.compose.repository(
|
||||
arch=arch, variant=variant, create_dir=False
|
||||
).rstrip("/")
|
||||
+ "/",
|
||||
os_tree,
|
||||
).rstrip("/")
|
||||
or "."
|
||||
)
|
||||
|
||||
ti.variants.add(var)
|
||||
|
||||
repomd_path = os.path.join(var.paths.repository, "repodata", "repomd.xml")
|
||||
createrepo_checksum = compose.conf["createrepo_checksum"]
|
||||
if os.path.isfile(repomd_path):
|
||||
ti.checksums.add(repomd_path, createrepo_checksum, root_dir=os_tree)
|
||||
|
||||
for i in variant.get_variants(types=["addon"], arch=arch):
|
||||
addon = productmd.treeinfo.Variant(ti)
|
||||
addon.id = i.id
|
||||
addon.uid = i.uid
|
||||
addon.name = i.name
|
||||
addon.type = i.type
|
||||
compose.log_debug(
|
||||
"variant '%s' inserting addon uid '%s' type '%s'"
|
||||
% (variant, addon.uid, addon.type)
|
||||
)
|
||||
|
||||
os_tree = compose.paths.compose.os_tree(arch=arch, variant=i).rstrip("/") + "/"
|
||||
addon.paths.packages = (
|
||||
relative_path(
|
||||
compose.paths.compose.packages(
|
||||
arch=arch, variant=i, create_dir=False
|
||||
).rstrip("/")
|
||||
+ "/",
|
||||
os_tree,
|
||||
).rstrip("/")
|
||||
or "."
|
||||
)
|
||||
addon.paths.repository = (
|
||||
relative_path(
|
||||
compose.paths.compose.repository(
|
||||
arch=arch, variant=i, create_dir=False
|
||||
).rstrip("/")
|
||||
+ "/",
|
||||
os_tree,
|
||||
).rstrip("/")
|
||||
or "."
|
||||
)
|
||||
var.add(addon)
|
||||
|
||||
repomd_path = os.path.join(addon.paths.repository, "repodata", "repomd.xml")
|
||||
if os.path.isfile(repomd_path):
|
||||
ti.checksums.add(repomd_path, createrepo_checksum, root_dir=os_tree)
|
||||
|
||||
class LoraxProduct(productmd.treeinfo.Release):
|
||||
def _validate_short(self):
|
||||
# HACK: set self.short so .treeinfo produced by lorax can be read
|
||||
if not self.short:
|
||||
self.short = compose.conf["release_short"]
|
||||
|
||||
class LoraxTreeInfo(productmd.treeinfo.TreeInfo):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(LoraxTreeInfo, self).__init__(*args, **kwargs)
|
||||
self.release = LoraxProduct(self)
|
||||
|
||||
# images
|
||||
if variant.type == "variant" and bi.succeeded(variant, arch):
|
||||
os_tree = compose.paths.compose.os_tree(arch, variant)
|
||||
|
||||
# clone all but 'general' sections from buildinstall .treeinfo
|
||||
|
||||
bi_dir = compose.paths.work.buildinstall_dir(arch)
|
||||
if compose.conf.get("buildinstall_method") == "lorax":
|
||||
# The .treeinfo file produced by lorax is nested in variant
|
||||
# subdirectory. Legacy buildinstall runs once per arch, so there is
|
||||
# only one file.
|
||||
bi_dir = os.path.join(bi_dir, variant.uid)
|
||||
bi_treeinfo = os.path.join(bi_dir, ".treeinfo")
|
||||
|
||||
if os.path.exists(bi_treeinfo):
|
||||
bi_ti = LoraxTreeInfo()
|
||||
bi_ti.load(bi_treeinfo)
|
||||
|
||||
# stage2 - mainimage
|
||||
if bi_ti.stage2.mainimage:
|
||||
ti.stage2.mainimage = bi_ti.stage2.mainimage
|
||||
ti.checksums.add(
|
||||
ti.stage2.mainimage, createrepo_checksum, root_dir=os_tree
|
||||
)
|
||||
|
||||
# stage2 - instimage
|
||||
if bi_ti.stage2.instimage:
|
||||
ti.stage2.instimage = bi_ti.stage2.instimage
|
||||
ti.checksums.add(
|
||||
ti.stage2.instimage, createrepo_checksum, root_dir=os_tree
|
||||
)
|
||||
|
||||
# images
|
||||
for platform in bi_ti.images.images:
|
||||
ti.images.images[platform] = {}
|
||||
ti.tree.platforms.add(platform)
|
||||
for image, path in bi_ti.images.images[platform].items():
|
||||
if not path:
|
||||
# The .treeinfo file contains an image without a path.
|
||||
# We can't add that.
|
||||
continue
|
||||
ti.images.images[platform][image] = path
|
||||
ti.checksums.add(path, createrepo_checksum, root_dir=os_tree)
|
||||
|
||||
path = os.path.join(
|
||||
compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo"
|
||||
)
|
||||
compose.log_info("Writing treeinfo: %s" % path)
|
||||
ti.dump(path)
|
||||
|
||||
|
||||
def populate_extra_files_metadata(
    metadata, variant, arch, topdir, files, checksum_types, relative_root=None
):
    """
    :param metadata: an instance of productmd.extra_files.ExtraFiles to
        populate with the current files
    :param Variant variant: under which variant should the files be listed
    :param str arch: under which arch should the files be listed
    :param topdir: directory where files are located
    :param files: list of file paths relative to topdir
    :param checksum_types: list of checksums to compute
    :param relative_root: ancestor directory of topdir, this will be removed
        from paths written to local metadata file
    :raises RuntimeError: when a file cannot be read for checksumming
    """
    for rel_path in files:
        full_path = os.path.join(topdir, rel_path)
        size = os.path.getsize(full_path)
        try:
            checksums = compute_file_checksums(full_path, checksum_types)
        except IOError as exc:
            raise RuntimeError(
                "Failed to calculate checksum for %s: %s" % (full_path, exc)
            )

        # Global metadata lists the path relative to relative_root when given,
        # otherwise relative to topdir (as passed in).
        if relative_root:
            rel_path = os.path.relpath(full_path, relative_root)
        metadata.add(variant.uid, arch, rel_path, size, checksums)

    # The per-tree metadata file strips the topdir prefix so its paths stay
    # local to the tree.
    strip_prefix = ""
    if relative_root:
        strip_prefix = os.path.relpath(topdir, relative_root) + "/"
    with open(os.path.join(topdir, "extra_files.json"), "w") as f:
        metadata.dump_for_tree(f, variant.uid, arch, strip_prefix)
|
118
pungi/module_util.py
Normal file
118
pungi/module_util.py
Normal file
@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import glob
|
||||
import os
|
||||
|
||||
try:
|
||||
import gi
|
||||
|
||||
gi.require_version("Modulemd", "2.0") # noqa
|
||||
from gi.repository import Modulemd
|
||||
except (ImportError, ValueError):
|
||||
Modulemd = None
|
||||
|
||||
|
||||
def iter_module_defaults(path):
    """Given a path to a directory with yaml files, yield each module default
    in there as a pair (module_name, ModuleDefaults instance).
    """
    # It is really tempting to merge all the module indexes into a single one
    # and work with it. However that does not allow for detecting conflicting
    # defaults. That should not happen in practice, but better safe than sorry.
    # Once libmodulemd can report the error, this code can be simplifed by a
    # lot. It was implemented in
    # https://github.com/fedora-modularity/libmodulemd/commit/3087e4a5c38a331041fec9b6b8f1a372f9ffe64d
    # and released in 2.6.0, but 2.8.0 added the need to merge overrides and
    # that breaks this use case again.
    # NOTE: loop variable renamed from `file`, which shadowed the builtin.
    for yaml_file in glob.glob(os.path.join(path, "*.yaml")):
        index = Modulemd.ModuleIndex()
        index.update_from_file(yaml_file, strict=False)
        for module_name in index.get_module_names():
            yield module_name, index.get_module(module_name).get_defaults()
|
||||
|
||||
|
||||
def get_module_obsoletes_idx(path, mod_list):
    """Given a path to a directory with yaml files, return Index with
    merged all obsoletes.

    :param path: directory scanned for ``*.yaml`` files
    :param mod_list: module names to include; a falsy/empty list means all
    """
    merger = Modulemd.ModuleIndexMerger.new()
    # associate_index does NOT copy its argument (nor increases a
    # reference counter on the object). It only stores a pointer, so every
    # associated index must be kept alive here until resolve() runs.
    md_idxs = []

    for yaml_file in glob.glob(os.path.join(path, "*.yaml")):
        index = Modulemd.ModuleIndex()
        index.update_from_file(yaml_file, strict=False)
        names = index.get_module_names()
        # Guard against documents that define no modules; the original
        # indexed [0] unconditionally and raised IndexError on such files.
        mod_name = names[0] if names else None

        if mod_name and (mod_name in mod_list or not mod_list):
            md_idxs.append(index)
            merger.associate_index(md_idxs[-1], 0)

    merged_idx = merger.resolve()

    return merged_idx
|
||||
|
||||
|
||||
def collect_module_defaults(
    defaults_dir, modules_to_load=None, mod_index=None, overrides_dir=None
):
    """Load module defaults into index.

    If `modules_to_load` is passed in, it should be a set of module names. Only
    defaults for these modules will be loaded.

    If `mod_index` is passed in, it will be updated and returned. If it was
    not, a new ModuleIndex will be created and returned
    """
    if not mod_index:
        mod_index = Modulemd.ModuleIndex()

    # Read every defaults document (plus overrides) into a scratch index,
    # then copy over only the defaults of the requested modules.
    scratch_index = Modulemd.ModuleIndex.new()
    scratch_index.update_from_defaults_directory(
        defaults_dir, overrides_path=overrides_dir, strict=False
    )

    for module_name in scratch_index.get_module_names():
        if modules_to_load and module_name not in modules_to_load:
            continue
        defaults = scratch_index.get_module(module_name).get_defaults()
        mod_index.add_defaults(defaults)

    return mod_index
|
||||
|
||||
|
||||
def collect_module_obsoletes(obsoletes_dir, modules_to_load, mod_index=None):
    """Load module obsoletes into index.

    This works in a similar fashion as collect_module_defaults except it
    merges indexes together instead of adding them during iteration.

    Additionally if modules_to_load is not empty returned Index will include
    only obsoletes for those modules.
    """
    obsoletes_index = get_module_obsoletes_idx(obsoletes_dir, modules_to_load)

    if not mod_index:
        return obsoletes_index

    # Merge the collected obsoletes into the caller-provided module index.
    merger = Modulemd.ModuleIndexMerger.new()
    merger.associate_index(mod_index, 0)
    merger.associate_index(obsoletes_index, 0)
    return merger.resolve()
|
67
pungi/multilib_dnf.py
Normal file
67
pungi/multilib_dnf.py
Normal file
@ -0,0 +1,67 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
from multilib import multilib
|
||||
|
||||
|
||||
class Multilib(object):
    """This class decides whether a package should be multilib.

    To use it, create an instance and call the ``is_multilib`` method on it.
    The blacklist and whitelist in constructor should be sets of package names.

    It may be more convenient to create the instance with the ``from_globs``
    method that accepts a DNF sack and an iterable of globs that will be used
    to find package names.
    """

    def __init__(self, methods, blacklist, whitelist):
        """
        :param methods: iterable of method names to enable; each must be a
            key of ``all_methods`` (an unknown name raises KeyError)
        :param blacklist: set of package names never considered multilib
        :param whitelist: set of package names always considered multilib
        """
        self.methods = {}
        self.blacklist = blacklist
        self.whitelist = whitelist

        # All known multilib methods; `methods` picks the active subset.
        self.all_methods = {
            "none": multilib.NoMultilibMethod(None),
            "all": multilib.AllMultilibMethod(None),
            "devel": multilib.DevelMultilibMethod(None),
            "runtime": multilib.RuntimeMultilibMethod(None),
        }

        for method in methods:
            self.methods[method] = self.all_methods[method]

    @classmethod
    def from_globs(cls, sack, methods, blacklist=None, whitelist=None):
        """Create a Multilib instance with expanded blacklist and whitelist."""
        return cls(
            methods,
            _expand_list(sack, blacklist or []),
            _expand_list(sack, whitelist or []),
        )

    def is_multilib(self, pkg):
        """Return a truthy reason why ``pkg`` is multilib, or False.

        The blacklist wins over the whitelist; otherwise the first enabled
        method that selects the package names the reason.
        """
        if pkg.name in self.blacklist:
            return False
        if pkg.name in self.whitelist:
            return "whitelist"
        # Renamed loop variable (was `cls`, which shadowed the conventional
        # classmethod argument name).
        for method, method_obj in self.methods.items():
            if method_obj.select(pkg):
                return method
        return False
|
||||
|
||||
|
||||
def _expand_list(sack, patterns):
|
||||
"""Find all package names that match any of the provided patterns."""
|
||||
return set(pkg.name for pkg in sack.query().filter(name__glob=list(patterns)))
|
295
pungi/multilib_yum.py
Executable file
295
pungi/multilib_yum.py
Executable file
@ -0,0 +1,295 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import re
|
||||
import fnmatch
|
||||
|
||||
import pungi.pathmatch
|
||||
import pungi.gather
|
||||
import pungi.util
|
||||
|
||||
|
||||
# Splits a config line into its content and an optional trailing comment.
# NOTE(review): "(:?" in these patterns was probably meant to be the
# non-capturing group "(?:"; as written it matches an optional literal
# colon. Backtracking makes the named groups come out right for normal
# input, but confirm before changing.
LINE_PATTERN_RE = re.compile(r"^\s*(?P<line>[^#]+)(:?\s+(?P<comment>#.*))?$")
# Splits a "<path> <pattern>" pair from runtime-patterns.conf.
RUNTIME_PATTERN_SPLIT_RE = re.compile(
    r"^\s*(?P<path>[^\s]+)\s+(?P<pattern>[^\s]+)(:?\s+(?P<comment>#.*))?$"
)
# Extracts the "libfoo.so.N" soname prefix from a Provides name.
SONAME_PATTERN_RE = re.compile(r"^(.+\.so\.[a-zA-Z0-9_\.]+).*$")
|
||||
|
||||
|
||||
def read_lines(lines):
    """Return the content part of each meaningful line.

    Blank lines and full-line comments are dropped; trailing comments are
    stripped via LINE_PATTERN_RE. Raises ValueError on unparseable input.
    """
    result = []
    for raw in lines:
        stripped = raw.strip()

        # Blank lines and comment-only lines carry no content.
        if not stripped or stripped.startswith("#"):
            continue

        match = LINE_PATTERN_RE.match(stripped)
        if match is None:
            raise ValueError("Couldn't parse line: %s" % stripped)
        result.append(match.groupdict()["line"])
    return result
|
||||
|
||||
|
||||
def read_lines_from_file(path):
    """Read ``path`` and return its filtered lines (see ``read_lines``).

    Uses a context manager so the file handle is closed deterministically;
    the original left closing to garbage collection.
    """
    with open(path, "r") as f:
        lines = f.readlines()
    return read_lines(lines)
|
||||
|
||||
|
||||
def read_runtime_patterns(lines):
    """Parse ``(path, pattern)`` pairs out of filtered config lines.

    Raises ValueError for any line that does not split into two fields.
    """
    pairs = []
    for entry in read_lines(lines):
        match = RUNTIME_PATTERN_SPLIT_RE.match(entry)
        if match is None:
            raise ValueError("Couldn't parse pattern: %s" % entry)
        groups = match.groupdict()
        pairs.append((groups["path"], groups["pattern"]))
    return pairs
|
||||
|
||||
|
||||
def read_runtime_patterns_from_file(path):
    """Read ``path`` and return its parsed runtime patterns.

    Uses a context manager so the file handle is closed deterministically;
    the original left closing to garbage collection.
    """
    with open(path, "r") as f:
        lines = f.readlines()
    return read_runtime_patterns(lines)
|
||||
|
||||
|
||||
def expand_runtime_patterns(patterns):
    """Expand ``(path, pattern)`` pairs into a PathMatch.

    Each path is duplicated for Software Collections roots, and $LIBDIR is
    substituted with every known library directory.
    """
    pm = pungi.pathmatch.PathMatch()
    lib_dirs = ("/lib", "/lib64", "/usr/lib", "/usr/lib64")
    for path, pattern in patterns:
        # include Software Collections: /opt/<vendor>/<scl_name>/root/...
        for root in ("", "/opt/*/*/root"):
            if "$LIBDIR" in path:
                expanded = [path.replace("$LIBDIR", d) for d in lib_dirs]
            else:
                expanded = [path]
            for candidate in expanded:
                path_pattern = "%s/%s" % (root, candidate.lstrip("/"))
                pm[path_pattern] = (path_pattern, pattern)
    return pm
|
||||
|
||||
|
||||
class MultilibMethodBase(object):
    """a base class for multilib methods"""

    name = "base"

    def __init__(self, config_path):
        # Directory holding the method's *.conf files (used by subclasses).
        self.config_path = config_path

    def select(self, po):
        """Return a truthy value if ``po`` should be multilib."""
        raise NotImplementedError

    def skip(self, po):
        """Return True for packages that can never be multilib
        (noarch, source, and debuginfo packages)."""
        if (
            pungi.gather.is_noarch(po)
            or pungi.gather.is_source(po)
            or pungi.util.pkg_is_debug(po)
        ):
            return True
        return False

    def _provides_any(self, po, names):
        """Return True if the package Provides any of ``names``.

        Consolidates the three near-identical scan loops the original had.
        """
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name in names:
                return True
        return False

    def is_kernel(self, po):
        return self._provides_any(po, ("kernel",))

    def is_kernel_devel(self, po):
        return self._provides_any(po, ("kernel-devel",))

    def is_kernel_or_kernel_devel(self, po):
        return self._provides_any(po, ("kernel", "kernel-devel"))
|
||||
|
||||
|
||||
class NoneMultilibMethod(MultilibMethodBase):
    """multilib disabled"""

    name = "none"

    def select(self, po):
        # Never mark anything as multilib.
        return False
|
||||
|
||||
|
||||
class AllMultilibMethod(MultilibMethodBase):
    """all packages are multilib"""

    name = "all"

    def select(self, po):
        # Everything is multilib except packages skip() rules out.
        return not self.skip(po)
|
||||
|
||||
|
||||
class RuntimeMultilibMethod(MultilibMethodBase):
    """pre-defined paths to libs"""

    name = "runtime"

    def __init__(self, *args, **kwargs):
        super(RuntimeMultilibMethod, self).__init__(*args, **kwargs)
        # Per-package overrides and path patterns read from config_path.
        self.blacklist = read_lines_from_file(
            self.config_path + "runtime-blacklist.conf"
        )
        self.whitelist = read_lines_from_file(
            self.config_path + "runtime-whitelist.conf"
        )
        # PathMatch mapping directories to (dir_pattern, file_pattern) pairs.
        self.patterns = expand_runtime_patterns(
            read_runtime_patterns_from_file(self.config_path + "runtime-patterns.conf")
        )

    def select(self, po):
        """Return True if any of the package's files matches a runtime
        library pattern. Blacklist beats whitelist; kernel is excluded."""
        if self.skip(po):
            return False
        if po.name in self.blacklist:
            return False
        if po.name in self.whitelist:
            return True
        if self.is_kernel(po):
            return False

        # gather all *.so.* provides from the RPM header
        provides = set()
        for i in po.provides:
            match = SONAME_PATTERN_RE.match(i[0])
            if match is not None:
                provides.add(match.group(1))

        # Check every regular and ghost file the package owns.
        for path in po.returnFileEntries() + po.returnFileEntries("ghost"):
            dirname, filename = path.rsplit("/", 1)
            dirname = dirname.rstrip("/")

            patterns = self.patterns[dirname]
            if not patterns:
                continue
            for dir_pattern, file_pattern in patterns:
                # "-" is a wildcard: any file in this directory qualifies.
                if file_pattern == "-":
                    return True
                if fnmatch.fnmatch(filename, file_pattern):
                    if ".so.*" in file_pattern:
                        if filename in provides:
                            # return only if the lib is provided in RPM header
                            # (some libs may be private, hence not exposed in Provides)
                            return True
                    else:
                        return True
        return False
|
||||
|
||||
|
||||
class KernelMultilibMethod(MultilibMethodBase):
    """kernel and kernel-devel"""

    name = "kernel"

    def select(self, po):
        # Multilib exactly when the package provides kernel or kernel-devel.
        # (The original also had an __init__ that only delegated to super;
        # the inherited one is identical.)
        return self.is_kernel_or_kernel_devel(po)
|
||||
|
||||
|
||||
class YabootMultilibMethod(MultilibMethodBase):
    """yaboot on ppc"""

    name = "yaboot"

    def select(self, po):
        # Only yaboot bootloader packages built for 32-bit ppc qualify.
        if po.arch == "ppc":
            return po.name.startswith("yaboot")
        return False
|
||||
|
||||
|
||||
class DevelMultilibMethod(MultilibMethodBase):
    """all -devel and -static packages"""

    name = "devel"

    def __init__(self, *args, **kwargs):
        super(DevelMultilibMethod, self).__init__(*args, **kwargs)
        # Explicit per-package overrides read from the config directory.
        self.blacklist = read_lines_from_file(self.config_path + "devel-blacklist.conf")
        self.whitelist = read_lines_from_file(self.config_path + "devel-whitelist.conf")

    def select(self, po):
        """Return True for -devel/-static packages (by name or Provides)."""
        if self.skip(po):
            return False
        if po.name in self.blacklist:
            return False
        if po.name in self.whitelist:
            return True
        if self.is_kernel_devel(po):
            return False
        # HACK: exclude ghc*
        if po.name.startswith("ghc-"):
            return False
        if po.name.endswith(("-devel", "-static")):
            return True
        # A package may also *provide* a -devel/-static capability.
        for p_name, p_flag, (p_e, p_v, p_r) in po.provides:
            if p_name.endswith(("-devel", "-static")):
                return True
        return False
|
||||
|
||||
|
||||
# Methods applied when the caller does not request any explicitly.
DEFAULT_METHODS = ["devel", "runtime"]
# Populated by init(): maps method name -> method instance.
METHOD_MAP = {}
|
||||
|
||||
|
||||
def init(config_path="/usr/share/pungi/multilib/"):
    """Instantiate every multilib method and register it in METHOD_MAP.

    :param config_path: directory holding the methods' *.conf files
    """
    global METHOD_MAP

    # The method classes concatenate config_path with file names, so it
    # must end with a slash.
    if not config_path.endswith("/"):
        config_path += "/"

    method_classes = (
        AllMultilibMethod,
        DevelMultilibMethod,
        KernelMultilibMethod,
        NoneMultilibMethod,
        RuntimeMultilibMethod,
        YabootMultilibMethod,
    )
    for method_cls in method_classes:
        instance = method_cls(config_path)
        METHOD_MAP[instance.name] = instance
|
||||
|
||||
|
||||
def po_is_multilib(po, methods):
    """Return the name of the first method in ``methods`` that selects
    ``po``, or None. Falsy entries in ``methods`` are ignored.
    init() must have populated METHOD_MAP first.
    """
    for method_name in methods:
        if method_name and METHOD_MAP[method_name].select(po):
            return method_name
    return None
|
112
pungi/notifier.py
Normal file
112
pungi/notifier.py
Normal file
@ -0,0 +1,112 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
from datetime import datetime
|
||||
import json
|
||||
import os
|
||||
import threading
|
||||
|
||||
import pungi.util
|
||||
|
||||
from kobo import shortcuts
|
||||
|
||||
|
||||
class PungiNotifier(object):
    """Wrapper around an external script for sending messages.

    If no script is configured, the messages are just silently ignored. If the
    script fails, a warning will be logged, but the compose process will not be
    interrupted.
    """

    def __init__(self, cmds):
        # List of notification script commands to run for each message.
        self.cmds = cmds
        # Serializes script invocations across threads.
        self.lock = threading.Lock()
        # Compose instance; expected to be assigned by the owner before
        # send() is called (otherwise no compose metadata is attached).
        self.compose = None

    def _update_args(self, data):
        """Add compose related information to the data."""
        if not self.compose:
            return
        # setdefault throughout: explicit caller-supplied values win.
        data.setdefault("compose_id", self.compose.compose_id)

        # Publish where in the world this compose will end up living
        location = pungi.util.translate_path(
            self.compose, self.compose.paths.compose.topdir()
        )
        data.setdefault("location", location)

        # Add information about the compose itself.
        data.setdefault("compose_date", self.compose.compose_date)
        data.setdefault("compose_type", self.compose.compose_type)
        data.setdefault("compose_respin", self.compose.compose_respin)
        data.setdefault("compose_label", self.compose.compose_label)
        data.setdefault("compose_path", self.compose.topdir)
        data.setdefault("release_short", self.compose.conf["release_short"])
        data.setdefault("release_name", self.compose.conf["release_name"])
        data.setdefault("release_version", self.compose.conf["release_version"])
        data.setdefault("release_type", self.compose.conf["release_type"].lower())
        data.setdefault("release_is_layered", False)

        # A configured base product marks a layered release; attach its
        # identification as well.
        if self.compose.conf.get("base_product_name", ""):
            data["release_is_layered"] = True
            data["base_product_name"] = self.compose.conf["base_product_name"]
            data["base_product_version"] = self.compose.conf["base_product_version"]
            data["base_product_short"] = self.compose.conf["base_product_short"]
            data["base_product_type"] = self.compose.conf["base_product_type"].lower()

    def send(self, msg, workdir=None, **kwargs):
        """Send a message.

        The actual meaning of ``msg`` depends on what the notification script
        will be doing. The keyword arguments will be JSON-encoded and passed on
        to standard input of the notification process.

        Unless you specify it manually, a ``compose_id`` key with appropriate
        value will be automatically added.
        """
        # No scripts configured: drop the message silently.
        if not self.cmds:
            return

        self._update_args(kwargs)

        # Run the scripts one at a time, even when called concurrently.
        with self.lock:
            for cmd in self.cmds:
                self._run_script(cmd, msg, workdir, kwargs)

    def _run_script(self, cmd, msg, workdir, kwargs):
        """Run a single notification script with proper logging."""
        logfile = None
        if self.compose:
            self.compose.log_debug("Notification: %r %r, %r" % (cmd, msg, kwargs))
            # Each invocation gets its own timestamped log file.
            logfile = os.path.join(
                self.compose.paths.log.topdir(),
                "notifications",
                "notification-%s.log" % datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S"),
            )
            pungi.util.makedirs(os.path.dirname(logfile))

        # can_fail=True: a failing script must not break the compose.
        ret, _ = shortcuts.run(
            (cmd, msg),
            stdin_data=json.dumps(kwargs),
            can_fail=True,
            workdir=workdir,
            return_stdout=False,
            show_cmd=True,
            universal_newlines=True,
            logfile=logfile,
        )
        if ret != 0:
            if self.compose:
                self.compose.log_warning("Failed to invoke notification script.")
|
202
pungi/ostree/__init__.py
Normal file
202
pungi/ostree/__init__.py
Normal file
@ -0,0 +1,202 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from .tree import Tree
|
||||
from .installer import Installer
|
||||
from .container import Container
|
||||
|
||||
|
||||
def main(args=None):
    """Parse command line arguments and run the chosen ostree sub-command.

    :param args: list of argument strings; None means sys.argv[1:]
    """
    parser = argparse.ArgumentParser()
    # On Python 3 sub-parsers are optional by default, so a missing
    # sub-command used to crash later with AttributeError on args._class.
    # Setting dest and marking the group required yields a proper argparse
    # error instead. (Python 2 already treated sub-commands as required.)
    subparser = parser.add_subparsers(dest="command", help="Sub commands")
    subparser.required = True

    # "tree": compose an OSTree repository with rpm-ostree.
    treep = subparser.add_parser("tree", help="Compose OSTree repository")
    treep.set_defaults(_class=Tree, func="run")
    treep.add_argument(
        "--repo",
        metavar="PATH",
        required=True,
        help="where to put the OSTree repo (required)",
    )
    treep.add_argument(
        "--treefile",
        metavar="FILE",
        required=True,
        help="treefile for rpm-ostree (required)",
    )
    treep.add_argument(
        "--log-dir",
        metavar="DIR",
        required=True,
        help="where to log output and commitid (required). \
            Note: commitid file will be written to this dir",
    )
    treep.add_argument(
        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
    )
    treep.add_argument(
        "--version",
        metavar="VERSION",
        help="version string to be added as versioning metadata",
    )
    treep.add_argument(
        "--update-summary", action="store_true", help="update summary metadata"
    )
    treep.add_argument(
        "--ostree-ref", metavar="PATH", help="override ref value from treefile"
    )
    treep.add_argument(
        "--force-new-commit",
        action="store_true",
        help="do not use rpm-ostree's built-in change detection",
    )
    treep.add_argument(
        "--unified-core",
        action="store_true",
        help="use unified core mode in rpm-ostree",
    )

    # "container": compose an OSTree native container (OCI archive).
    container = subparser.add_parser(
        "container", help="Compose OSTree native container"
    )
    container.set_defaults(_class=Container, func="run")
    container.add_argument(
        "--name",
        required=True,
        # typo fix: was "the name of the the OCI archive"
        help="the name of the OCI archive (required)",
    )
    container.add_argument(
        "--path",
        required=True,
        help="where to output the OCI archive (required)",
    )
    container.add_argument(
        "--treefile",
        metavar="FILE",
        required=True,
        help="treefile for rpm-ostree (required)",
    )
    container.add_argument(
        "--log-dir",
        metavar="DIR",
        required=True,
        help="where to log output (required).",
    )
    container.add_argument(
        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
    )
    container.add_argument(
        "-v",
        "--version",
        metavar="VERSION",
        required=True,
        help="version identifier (required)",
    )

    # "installer": build an installer image for an OSTree with lorax.
    installerp = subparser.add_parser(
        "installer", help="Create an OSTree installer image"
    )
    installerp.set_defaults(_class=Installer, func="run")
    installerp.add_argument(
        "-p",
        "--product",
        metavar="PRODUCT",
        required=True,
        help="product name (required)",
    )
    installerp.add_argument(
        "-v",
        "--version",
        metavar="VERSION",
        required=True,
        help="version identifier (required)",
    )
    installerp.add_argument(
        "-r",
        "--release",
        metavar="RELEASE",
        required=True,
        help="release information (required)",
    )
    installerp.add_argument(
        "-s",
        "--source",
        metavar="REPOSITORY",
        required=True,
        action="append",
        help="source repository (required)",
    )
    installerp.add_argument(
        "-o",
        "--output",
        metavar="DIR",
        required=True,
        help="path to image output directory (required)",
    )
    installerp.add_argument("--log-dir", metavar="DIR", help="path to log directory")
    installerp.add_argument("--volid", metavar="VOLID", help="volume id")
    installerp.add_argument("--variant", metavar="VARIANT", help="variant name")
    installerp.add_argument("--rootfs-size", metavar="SIZE")
    installerp.add_argument("--nomacboot", action="store_true", default=False)
    installerp.add_argument("--noupgrade", action="store_true", default=False)
    installerp.add_argument("--isfinal", action="store_true", default=False)

    installerp.add_argument(
        "--installpkgs",
        metavar="PACKAGE",
        action="append",
        help="package glob to install before runtime-install.tmpl",
    )
    installerp.add_argument(
        "--add-template",
        metavar="FILE",
        action="append",
        help="Additional template for runtime image",
    )
    installerp.add_argument(
        "--add-template-var",
        metavar="ADD_TEMPLATE_VARS",
        action="append",
        help="Set variable for runtime image template",
    )
    installerp.add_argument(
        "--add-arch-template",
        metavar="FILE",
        action="append",
        help="Additional template for architecture-specific image",
    )
    installerp.add_argument(
        "--add-arch-template-var",
        metavar="ADD_ARCH_TEMPLATE_VARS",
        action="append",
        help="Set variable for architecture-specific image",
    )

    installerp.add_argument(
        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
    )

    args = parser.parse_args(args)

    logging.basicConfig(format="%(message)s", level=logging.DEBUG)

    # Instantiate the sub-command class chosen via set_defaults and run it.
    _class = args._class()
    _class.set_args(args)
    func = getattr(_class, args.func)
    func()
|
19
pungi/ostree/base.py
Normal file
19
pungi/ostree/base.py
Normal file
@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
class OSTree(object):
    """Base class for ostree sub-commands; holds parsed CLI arguments."""

    def set_args(self, args):
        # Store the argparse namespace for subclasses to read in run().
        self.args = args
|
86
pungi/ostree/container.py
Normal file
86
pungi/ostree/container.py
Normal file
@ -0,0 +1,86 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import json
|
||||
import six
|
||||
from six.moves import shlex_quote
|
||||
|
||||
|
||||
from .base import OSTree
|
||||
from .utils import tweak_treeconf
|
||||
|
||||
|
||||
def emit(cmd):
    """Print line of shell code into the stream."""
    if isinstance(cmd, six.string_types):
        # already a single shell line
        print(cmd)
    else:
        # a command as a list: quote each argument for the shell
        print(" ".join(shlex_quote(part) for part in cmd))
|
||||
|
||||
|
||||
class Container(OSTree):
    """Compose an OSTree Native Container image via ``rpm-ostree compose image``."""

    def _make_container(self):
        """Compose OSTree Container Native image"""
        stamp_file = os.path.join(self.logdir, "%s.stamp" % self.name)
        cmd = [
            "rpm-ostree",
            "compose",
            "image",
            # Always initialize for now
            "--initialize",
            # Touch the file if a new commit was created. This can help us tell
            # if the commitid file is missing because no commit was created or
            # because something went wrong.
            "--touch-if-changed=%s" % stamp_file,
            self.treefile,
        ]
        fullpath = os.path.join(self.path, "%s.ociarchive" % self.name)
        cmd.append(fullpath)

        # Set the umask to be more permissive so directories get group write
        # permissions. See https://pagure.io/releng/issue/8811#comment-629051
        emit("umask 0002")
        emit(cmd)

    def run(self):
        """Entry point: apply extra config to the treefile and emit the compose commands."""
        self.name = self.args.name
        self.path = self.args.path
        self.treefile = self.args.treefile
        self.logdir = self.args.log_dir
        self.extra_config = self.args.extra_config

        if self.extra_config:
            # Use a context manager so the config file is closed promptly
            # instead of leaking the handle until garbage collection.
            with open(self.extra_config, "r") as f:
                self.extra_config = json.load(f)
            repos = self.extra_config.get("repo", [])
            keep_original_sources = self.extra_config.get(
                "keep_original_sources", False
            )
        else:
            # missing extra_config mustn't affect tweak_treeconf call
            repos = []
            keep_original_sources = True

        update_dict = {"automatic-version-prefix": self.args.version}

        self.treefile = tweak_treeconf(
            self.treefile,
            source_repos=repos,
            keep_original_sources=keep_original_sources,
            update_dict=update_dict,
        )

        self._make_container()
|
77
pungi/ostree/installer.py
Normal file
77
pungi/ostree/installer.py
Normal file
@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import json
|
||||
from kobo import shortcuts
|
||||
|
||||
from .base import OSTree
|
||||
from ..wrappers import lorax
|
||||
|
||||
|
||||
class Installer(OSTree):
    """Build an OSTree installer image by driving lorax."""

    def _merge_config(self, config):
        """Merge the list options from an extra-config dict into the CLI lists."""
        self.installpkgs.extend(config.get("installpkgs", []))
        self.add_template.extend(config.get("add_template", []))
        # Default to [] here as well: without it a config lacking
        # "add_template_var" made list.extend(None) raise TypeError.
        self.add_template_var.extend(config.get("add_template_var", []))
        self.add_arch_template.extend(config.get("add_arch_template", []))
        self.add_arch_template_var.extend(config.get("add_arch_template_var", []))

    def run(self):
        """Entry point: collect arguments, merge extra config and run lorax."""
        self.product = self.args.product
        self.version = self.args.version
        self.release = self.args.release
        self.sources = self.args.source
        self.output = self.args.output

        self.logdir = self.args.log_dir
        self.volid = self.args.volid
        self.variant = self.args.variant
        self.rootfs_size = self.args.rootfs_size
        self.nomacboot = self.args.nomacboot
        self.noupgrade = self.args.noupgrade
        self.isfinal = self.args.isfinal

        self.installpkgs = self.args.installpkgs or []
        self.add_template = self.args.add_template or []
        self.add_template_var = self.args.add_template_var or []
        self.add_arch_template = self.args.add_arch_template or []
        self.add_arch_template_var = self.args.add_arch_template_var or []

        self.extra_config = self.args.extra_config
        if self.extra_config:
            # Use a context manager so the config file is closed promptly
            # instead of leaking the handle until garbage collection.
            with open(self.extra_config, "r") as f:
                self.extra_config = json.load(f)
            self._merge_config(self.extra_config)

        lorax_wrapper = lorax.LoraxWrapper()
        cmd = lorax_wrapper.get_lorax_cmd(
            self.product,
            self.version,
            self.release,
            self.sources,
            self.output,
            variant=self.variant,
            nomacboot=self.nomacboot,
            volid=self.volid,
            buildinstallpackages=self.installpkgs,
            add_template=self.add_template,
            add_template_var=self.add_template_var,
            add_arch_template=self.add_arch_template,
            add_arch_template_var=self.add_arch_template_var,
            rootfs_size=self.rootfs_size,
            is_final=self.isfinal,
            log_dir=self.logdir,
        )
        shortcuts.run(cmd)
|
158
pungi/ostree/tree.py
Normal file
158
pungi/ostree/tree.py
Normal file
@ -0,0 +1,158 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import json
|
||||
from kobo import shortcuts
|
||||
|
||||
from pungi.util import makedirs
|
||||
from .base import OSTree
|
||||
from .utils import (
|
||||
make_log_file,
|
||||
tweak_treeconf,
|
||||
get_ref_from_treefile,
|
||||
get_commitid_from_commitid_file,
|
||||
)
|
||||
|
||||
|
||||
class Tree(OSTree):
    """Compose an OSTree repository tree with rpm-ostree and update its ref."""

    def _make_tree(self):
        """Compose OSTree tree"""
        log_file = make_log_file(self.logdir, "create-ostree-repo")
        cmd = [
            "rpm-ostree",
            "compose",
            "tree",
            "--repo=%s" % self.repo,
            "--write-commitid-to=%s" % self.commitid_file,
            # Touch the file if a new commit was created. This can help us tell
            # if the commitid file is missing because no commit was created or
            # because something went wrong.
            "--touch-if-changed=%s.stamp" % self.commitid_file,
        ]
        if self.unified_core:
            # See https://github.com/coreos/rpm-ostree/issues/729
            cmd.append("--unified-core")
        if self.version:
            # Add versioning metadata
            cmd.append("--add-metadata-string=version=%s" % self.version)
        # Note renamed from rpm-ostree --force-nocache since it's a better
        # name; more clearly describes what we're doing here.
        if self.force_new_commit:
            cmd.append("--force-nocache")
        cmd.append(self.treefile)

        # Set the umask to be more permissive so directories get group write
        # permissions. See https://pagure.io/releng/issue/8811#comment-629051
        oldumask = os.umask(0o0002)
        try:
            shortcuts.run(
                cmd,
                show_cmd=True,
                stdout=True,
                logfile=log_file,
                universal_newlines=True,
            )
        finally:
            os.umask(oldumask)

    def _update_summary(self):
        """Update summary metadata"""
        log_file = make_log_file(self.logdir, "ostree-summary")
        shortcuts.run(
            ["ostree", "summary", "-u", "--repo=%s" % self.repo],
            show_cmd=True,
            stdout=True,
            logfile=log_file,
            universal_newlines=True,
        )

    def _update_ref(self):
        """
        Update the ref.

        '--write-commitid-to' is specified when composing the tree, so we need
        to update the ref by ourselves. The ref is retrieved from the treefile
        and the commit id is retrieved from the commitid file.
        """
        tag_ref = True
        if self.extra_config:
            tag_ref = self.extra_config.get("tag_ref", True)
        if not tag_ref:
            print("Not updating ref as configured")
            return
        ref = get_ref_from_treefile(self.treefile)
        commitid = get_commitid_from_commitid_file(self.commitid_file)
        print("Ref: %r, Commit ID: %r" % (ref, commitid))
        if ref and commitid:
            print("Updating ref")
            # Let's write the tag out ourselves
            heads_dir = os.path.join(self.repo, "refs", "heads")
            if not os.path.exists(heads_dir):
                raise RuntimeError("Refs/heads did not exist in ostree repo")

            ref_path = os.path.join(heads_dir, ref)
            # Set the umask to be more permissive so directories get group write
            # permissions. See https://pagure.io/releng/issue/8811#comment-629051
            oldumask = os.umask(0o0002)
            try:
                makedirs(os.path.dirname(ref_path))
            finally:
                os.umask(oldumask)
            with open(ref_path, "w") as f:
                f.write(commitid + "\n")

    def run(self):
        """Entry point: tweak the treefile, compose the tree, then update ref/summary."""
        self.repo = self.args.repo
        self.treefile = self.args.treefile
        self.version = self.args.version
        self.logdir = self.args.log_dir
        self.update_summary = self.args.update_summary
        self.extra_config = self.args.extra_config
        self.ostree_ref = self.args.ostree_ref
        self.force_new_commit = self.args.force_new_commit
        self.unified_core = self.args.unified_core

        if self.extra_config or self.ostree_ref:
            if self.extra_config:
                # Use a context manager so the config file is closed promptly
                # instead of leaking the handle until garbage collection.
                with open(self.extra_config, "r") as f:
                    self.extra_config = json.load(f)
                repos = self.extra_config.get("repo", [])
                keep_original_sources = self.extra_config.get(
                    "keep_original_sources", False
                )
            else:
                # missing extra_config mustn't affect tweak_treeconf call
                repos = []
                keep_original_sources = True

            update_dict = {}
            if self.ostree_ref:
                # override ref value in treefile
                update_dict["ref"] = self.ostree_ref

            self.treefile = tweak_treeconf(
                self.treefile,
                source_repos=repos,
                keep_original_sources=keep_original_sources,
                update_dict=update_dict,
            )

        self.commitid_file = make_log_file(self.logdir, "commitid")

        self._make_tree()
        self._update_ref()
        if self.update_summary:
            self._update_summary()
|
126
pungi/ostree/utils.py
Normal file
126
pungi/ostree/utils.py
Normal file
@ -0,0 +1,126 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import yaml
|
||||
|
||||
from pungi.arch_utils import getBaseArch
|
||||
from pungi.util import makedirs
|
||||
|
||||
|
||||
def make_log_file(log_dir, filename):
    """Return path to log file with given name, if log_dir is set."""
    if log_dir:
        makedirs(log_dir)
        return os.path.join(log_dir, "%s.log" % filename)
    # no log directory configured -> no log file
    return None
|
||||
|
||||
|
||||
def get_ref_from_treefile(treefile, arch=None, logger=None):
    """
    Return ref name by parsing the tree config file. Replacing ${basearch} with
    the basearch of the architecture we are running on or of the passed in arch.
    """
    log = logger if logger is not None else logging.getLogger(__name__)
    if not os.path.isfile(treefile):
        log.error("Unable to open treefile")
        return None
    with open(treefile, "r") as f:
        try:
            # rpm-ostree now supports YAML
            # https://github.com/projectatomic/rpm-ostree/pull/1377
            if treefile.endswith(".yaml"):
                parsed = yaml.safe_load(f)
            else:
                parsed = json.load(f)
            return parsed["ref"].replace("${basearch}", getBaseArch(arch))
        except Exception as exc:
            # best effort: log and fall through to returning None
            log.error("Unable to get ref from treefile: %s" % exc)
    return None
|
||||
|
||||
|
||||
def get_commitid_from_commitid_file(commitid_file):
    """Return commit id which is read from the commitid file"""
    stamp = commitid_file + ".stamp"
    if not os.path.exists(stamp):
        # The stamp does not exist, so no new commit.
        return None
    with open(commitid_file, "r") as f:
        return f.read().replace("\n", "")
|
||||
|
||||
|
||||
def tweak_treeconf(
    treeconf, source_repos=None, keep_original_sources=False, update_dict=None
):
    """
    Update tree config file by adding new repos, and remove existing repos
    from the tree config file if 'keep_original_sources' is not enabled.
    Additionally, other values can be passed to method by 'update_dict' parameter to
    update treefile content.

    Returns the path of the rewritten config file. Note that a ``.yaml``
    input is rewritten as a sibling ``.json`` file, so the returned path may
    differ from the ``treeconf`` argument.
    """

    # backup the old tree config
    shutil.copy2(treeconf, "{0}.bak".format(treeconf))

    treeconf_dir = os.path.dirname(treeconf)
    with open(treeconf, "r") as f:
        # rpm-ostree now supports YAML, but we'll end up converting it to JSON.
        # https://github.com/projectatomic/rpm-ostree/pull/1377
        if treeconf.endswith(".yaml"):
            treeconf_content = yaml.safe_load(f)
            treeconf = treeconf.replace(".yaml", ".json")
        else:
            treeconf_content = json.load(f)

    repos = []
    if source_repos:
        # Sort to ensure reliable ordering
        source_repos = sorted(source_repos, key=lambda x: x["name"])
        # Now, since pungi includes timestamps in the repo names which
        # currently defeats rpm-ostree's change detection, let's just
        # use repos named 'repo-<number>'.
        # https://pagure.io/pungi/issue/811
        with open("{0}/pungi.repo".format(treeconf_dir), "w") as f:
            for i, repo in enumerate(source_repos):
                name = "repo-{0}".format(i)
                f.write("[%s]\n" % name)
                f.write("name=%s\n" % name)
                f.write("baseurl=%s\n" % repo["baseurl"])
                exclude = repo.get("exclude", None)
                if exclude:
                    f.write("exclude=%s\n" % exclude)
                gpgcheck = "1" if repo.get("gpgcheck", False) else "0"
                f.write("gpgcheck=%s\n" % gpgcheck)

                repos.append(name)

    original_repos = treeconf_content.get("repos", [])
    if keep_original_sources:
        treeconf_content["repos"] = original_repos + repos
    else:
        # replace the original repo list with only the generated repo names
        treeconf_content["repos"] = repos

    # update content with config values from dictionary (for example 'ref')
    if isinstance(update_dict, dict):
        treeconf_content.update(update_dict)

    # update tree config to add new repos
    with open(treeconf, "w") as f:
        json.dump(treeconf_content, f, indent=4)
    return treeconf
|
73
pungi/pathmatch.py
Normal file
73
pungi/pathmatch.py
Normal file
@ -0,0 +1,73 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import fnmatch
|
||||
|
||||
|
||||
def head_tail_split(name):
    """Split a path pattern into its first component and the remainder.

    Returns (head, tail) where tail is None for a single-component name.
    Leading/trailing slashes are stripped from both parts.
    """
    parts = name.strip("/").split("/", 1)
    if len(parts) == 1:
        return parts[0], None
    return parts[0], parts[1].strip("/")
|
||||
|
||||
|
||||
class PathMatch(object):
    """Trie-like matcher mapping glob path patterns to lists of values."""

    def __init__(self, parent=None, desc=None):
        # parent/desc are accepted by the recursive constructor calls but are
        # not stored; only the two pattern tries and the value list are kept.
        self._patterns = {}
        self._final_patterns = {}
        self._values = []

    def __setitem__(self, name, value):
        head, tail = head_tail_split(name)
        if tail is None:
            # terminal component: record the value on a leaf node
            if head not in self._final_patterns:
                self._final_patterns[head] = PathMatch(parent=self, desc=head)
            leaf = self._final_patterns[head]
            if value not in leaf._values:
                leaf._values.append(value)
        else:
            # descend into (or create) the child trie for the first component
            if head not in self._patterns:
                self._patterns[head] = PathMatch(parent=self, desc=head)
            self._patterns[head][tail] = value

    def __getitem__(self, name):
        matched = []
        head, tail = head_tail_split(name)

        # component-wise matches through the child tries
        for pattern, child in self._patterns.items():
            if fnmatch.fnmatch(head, pattern):
                values = child._values if tail is None else child[tail]
                for value in values:
                    if value not in matched:
                        matched.append(value)

        # final patterns match against the remaining path as a whole
        full = head if tail is None else "%s/%s" % (head, tail)
        for pattern, leaf in self._final_patterns.items():
            if fnmatch.fnmatch(full, pattern):
                for value in leaf._values:
                    if value not in matched:
                        matched.append(value)
        return matched
|
841
pungi/paths.py
Normal file
841
pungi/paths.py
Normal file
@ -0,0 +1,841 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
__all__ = ("Paths",)
|
||||
|
||||
|
||||
import errno
|
||||
import os
|
||||
|
||||
from kobo.shortcuts import relative_path
|
||||
from pungi.util import makedirs, find_old_compose
|
||||
|
||||
|
||||
class Paths(object):
    # Aggregates the three path resolvers (compose/log/work) for a compose.

    def __init__(self, compose):
        self._compose = compose
        # "paths_module" optionally names an importable module that provides
        # custom LogPaths/WorkPaths/ComposePaths implementations.
        paths_module_name = compose.conf.get("paths_module")
        if paths_module_name:
            # custom paths
            paths_module = __import__(
                paths_module_name,
                globals(),
                locals(),
                ["LogPaths", "WorkPaths", "ComposePaths"],
            )
            self.compose = paths_module.ComposePaths(compose)
            self.log = paths_module.LogPaths(compose)
            self.work = paths_module.WorkPaths(compose)
        else:
            # default paths
            self.compose = ComposePaths(compose)
            self.log = LogPaths(compose)
            self.work = WorkPaths(compose)
        # self.metadata ?

    def get_old_compose_topdir(self, **kwargs):
        """
        Finds old compose using the `find_old_compose` function and returns
        the path to it. The `kwargs` are passed to `find_old_compose`.
        """
        is_layered = self._compose.ci_base.release.is_layered
        return find_old_compose(
            self._compose.old_composes,
            self._compose.ci_base.release.short,
            self._compose.ci_base.release.version,
            self._compose.ci_base.release.type_suffix,
            # base-product fields only apply to layered releases
            self._compose.ci_base.base_product.short if is_layered else None,
            self._compose.ci_base.base_product.version if is_layered else None,
            **kwargs
        )

    def old_compose_path(self, path, **kwargs):
        """
        Translates `path` to the topdir of old compose.

        :param str path: Path to translate.
        :param kwargs: The kwargs passed to `find_old_compose` function.
        :return: None if old compose cannot be used or if `path` does not exist
            in the old compose topdir. Otherwise path translated to old_compose
            topdir.

        Example:
            old_repo_dir = compose.old_compose_path(
                compose.paths.work.pkgset_repo(pkgset.name, arch="global"))
        """
        old_compose_topdir = self.get_old_compose_topdir(**kwargs)
        if not old_compose_topdir:
            return None

        # re-root the current-compose-relative path under the old topdir
        rel_path = relative_path(path, self._compose.topdir.rstrip("/") + "/")
        old_path = os.path.join(old_compose_topdir, rel_path)
        if not os.path.exists(old_path):
            return None
        return old_path
|
||||
|
||||
|
||||
class LogPaths(object):
    """Resolves locations under the compose's logs/ directory."""

    def __init__(self, compose):
        self.compose = compose

    def topdir(self, arch=None, create_dir=True):
        """
        Examples:
            log/global
            log/x86_64
        """
        path = os.path.join(self.compose.topdir, "logs", arch or "global")
        if create_dir:
            makedirs(path)
        return path

    def koji_tasks_dir(self, create_dir=True):
        """
        Examples:
            logs/global/koji-tasks
        """
        path = os.path.join(self.topdir(create_dir=create_dir), "koji-tasks")
        if create_dir:
            makedirs(path)
        return path

    def log_file(self, arch, log_name, create_dir=True, ext=None):
        """Return path of the "<log_name>.<arch>.<ext>" file for the given arch."""
        ext = ext or "log"
        arch = arch or "global"
        # avoid a doubled suffix like "foo.log.x86_64.log"
        if log_name.endswith(".log"):
            log_name = log_name[:-4]
        file_name = "%s.%s.%s" % (log_name, arch, ext)
        return os.path.join(self.topdir(arch, create_dir=create_dir), file_name)
|
||||
|
||||
|
||||
class WorkPaths(object):
|
||||
    def __init__(self, compose):
        # Keep a reference to the compose; methods derive paths from its topdir.
        self.compose = compose
|
||||
|
||||
def topdir(self, arch=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/global
|
||||
work/x86_64
|
||||
"""
|
||||
arch = arch or "global"
|
||||
path = os.path.join(self.compose.topdir, "work", arch)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def variants_file(self, arch=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/global/variants.xml
|
||||
"""
|
||||
arch = "global"
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "variants.xml")
|
||||
return path
|
||||
|
||||
def comps(self, arch=None, variant=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/comps/comps-86_64.xml
|
||||
work/x86_64/comps/comps-Server.x86_64.xml
|
||||
"""
|
||||
arch = arch or "global"
|
||||
if variant is None:
|
||||
file_name = "comps-%s.xml" % arch
|
||||
else:
|
||||
file_name = "comps-%s.%s.xml" % (variant.uid, arch)
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "comps")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
path = os.path.join(path, file_name)
|
||||
return path
|
||||
|
||||
def gather_result(self, arch=None, variant=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/gather_result/x86_64.result
|
||||
work/x86_64/gather_result/Server.x86_64.result
|
||||
"""
|
||||
arch = arch or "global"
|
||||
file_name = ""
|
||||
if variant:
|
||||
file_name += variant.uid + "."
|
||||
file_name += arch + "."
|
||||
file_name += "result"
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "gather_result")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
path = os.path.join(path, file_name)
|
||||
return path
|
||||
|
||||
def pungi_conf(self, arch=None, variant=None, create_dir=True, source_name=None):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/pungi/x86_64.conf
|
||||
work/x86_64/pungi/Server.x86_64.conf
|
||||
"""
|
||||
arch = arch or "global"
|
||||
file_name = ""
|
||||
if variant:
|
||||
file_name += variant.uid + "."
|
||||
file_name += arch + "."
|
||||
if source_name:
|
||||
file_name += source_name + "."
|
||||
file_name += "conf"
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
path = os.path.join(path, file_name)
|
||||
return path
|
||||
|
||||
def fus_conf(self, arch, variant, iteration, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/fus/Server-solvables.x86_64.conf
|
||||
"""
|
||||
file_name = "%s-solvables-%d.%s.conf" % (variant.uid, iteration, arch)
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "fus")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return os.path.join(path, file_name)
|
||||
|
||||
def pungi_log(self, arch=None, variant=None, create_dir=True, source_name=None):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/pungi/x86_64.log
|
||||
work/x86_64/pungi/Server.x86_64.log
|
||||
"""
|
||||
path = self.pungi_conf(arch, variant, create_dir=create_dir)
|
||||
path = path[:-5]
|
||||
if source_name:
|
||||
path += "." + source_name
|
||||
return path + ".log"
|
||||
|
||||
def pungi_cache_dir(self, arch, variant=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/global/pungi-cache
|
||||
"""
|
||||
# WARNING: Using the same cache dir with repos of the same names
|
||||
# may lead to a race condition.
|
||||
# We should use per arch variant cache dirs to workaround this.
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi-cache")
|
||||
if variant:
|
||||
path = os.path.join(path, variant.uid)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def _repo(self, type, arch=None, variant=None, create_dir=True):
|
||||
arch = arch or "global"
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "%s_repo" % type)
|
||||
if variant:
|
||||
path += "_" + variant.uid
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def comps_repo(self, arch=None, variant=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/comps_repo_Server
|
||||
work/global/comps_repo
|
||||
"""
|
||||
return self._repo("comps", arch, variant, create_dir=create_dir)
|
||||
|
||||
def pkgset_repo(self, pkgset_name, arch=None, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/repo/f30-compose
|
||||
work/global/repo/f30-compose
|
||||
"""
|
||||
arch = arch or "global"
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir), "repo", pkgset_name
|
||||
)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def lookaside_repo(self, arch, variant, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/Server/lookaside_repo
|
||||
"""
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir), variant.uid, "lookaside_repo"
|
||||
)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def package_list(
|
||||
self, arch=None, variant=None, pkgset=None, pkg_type=None, create_dir=True
|
||||
):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/package_list/x86_64.conf
|
||||
work/x86_64/package_list/Server.x86_64.conf
|
||||
work/x86_64/package_list/Server.x86_64.rpm.conf
|
||||
"""
|
||||
arch = arch or "global"
|
||||
if variant is not None:
|
||||
file_name = "%s.%s" % (variant, arch)
|
||||
else:
|
||||
file_name = "%s" % arch
|
||||
if pkgset:
|
||||
file_name += "." + pkgset.name
|
||||
if pkg_type is not None:
|
||||
file_name += ".%s" % pkg_type
|
||||
file_name += ".conf"
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "package_list")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
path = os.path.join(path, file_name)
|
||||
return path
|
||||
|
||||
def lookaside_package_list(self, arch, variant, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/package_list/Server.x86_64.lookaside.conf
|
||||
"""
|
||||
return self.package_list(
|
||||
arch, variant, pkg_type="lookaside", create_dir=create_dir
|
||||
)
|
||||
|
||||
def pungi_download_dir(self, arch, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/pungi_download
|
||||
"""
|
||||
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi_download")
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def buildinstall_dir(
|
||||
self, arch, create_dir=True, allow_topdir_override=False, variant=None
|
||||
):
|
||||
"""
|
||||
:param bool allow_topdir_override: When True, the
|
||||
"buildinstall_topdir" will be used (if set) instead of real
|
||||
"topdir".
|
||||
Examples:
|
||||
work/x86_64/buildinstall
|
||||
"""
|
||||
if arch == "global":
|
||||
raise RuntimeError("Global buildinstall dir makes no sense.")
|
||||
|
||||
buildinstall_topdir = self.compose.conf.get("buildinstall_topdir", "")
|
||||
if allow_topdir_override and buildinstall_topdir:
|
||||
topdir_basename = os.path.basename(self.compose.topdir)
|
||||
path = os.path.join(
|
||||
buildinstall_topdir, "buildinstall-%s" % topdir_basename, arch
|
||||
)
|
||||
else:
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir), "buildinstall"
|
||||
)
|
||||
|
||||
if variant:
|
||||
path = os.path.join(path, variant.uid)
|
||||
return path
|
||||
|
||||
def extra_files_dir(self, arch, variant, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/Server/extra-files
|
||||
"""
|
||||
if arch == "global":
|
||||
raise RuntimeError("Global extra files dir makes no sense.")
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir), variant.uid, "extra-files"
|
||||
)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def extra_iso_extra_files_dir(self, arch, variant, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/Server/extra-iso-extra-files
|
||||
"""
|
||||
if arch == "global":
|
||||
raise RuntimeError("Global extra files dir makes no sense.")
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir),
|
||||
variant.uid,
|
||||
"extra-iso-extra-files",
|
||||
)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def iso_staging_dir(self, arch, variant, filename, create_dir=True):
|
||||
"""
|
||||
Examples:
|
||||
work/x86_64/Server/iso-staging-dir/file.iso/
|
||||
"""
|
||||
path = os.path.join(
|
||||
self.topdir(arch, create_dir=create_dir),
|
||||
variant.uid,
|
||||
"iso-staging-dir",
|
||||
filename,
|
||||
)
|
||||
if create_dir:
|
||||
makedirs(path)
|
||||
return path
|
||||
|
||||
def repo_package_list(self, arch, variant, pkg_type=None, create_dir=True):
    """Return path of the package-list config for a variant/arch repo.

    Examples:
        work/x86_64/repo_package_list/Server.x86_64.rpm.conf
    """
    # Optional package type becomes an extra dotted component of the name.
    suffix = "" if pkg_type is None else ".%s" % pkg_type
    file_name = "%s.%s%s.conf" % (variant.uid, arch, suffix)
    path = os.path.join(
        self.topdir(arch, create_dir=create_dir), "repo_package_list"
    )
    if create_dir:
        makedirs(path)
    return os.path.join(path, file_name)
|
||||
|
||||
def iso_dir(self, arch, filename, create_dir=True):
    """Return the work directory in which the given ISO is built.

    Examples:
        work/x86_64/iso/Project-1.0-20151203.0-Client-x86_64-dvd1.iso
    """
    base = self.topdir(arch, create_dir=create_dir)
    path = os.path.join(base, "iso", filename)
    if create_dir:
        makedirs(path)
    return path
|
||||
|
||||
def tmp_dir(self, arch=None, variant=None, create_dir=True):
    """Return a scratch directory, optionally scoped to a variant.

    Examples:
        work/global/tmp
        work/x86_64/tmp
        work/x86_64/tmp-Server
    """
    suffix = "-%s" % variant.uid if variant else ""
    path = os.path.join(
        self.topdir(arch=arch, create_dir=create_dir), "tmp" + suffix
    )
    if create_dir:
        makedirs(path)
    return path
|
||||
|
||||
def product_id(self, arch, variant, create_dir=True):
    """Return path to the product certificate for a variant/arch.

    Examples:
        work/x86_64/product_id/productid-Server.x86_64.pem/productid
    """
    # file_name = "%s.%s.pem" % (variant, arch)
    # HACK: modifyrepo doesn't handle renames -> $dir/productid
    file_name = "productid"
    pem_dir = "%s.%s.pem" % (variant, arch)
    path = os.path.join(
        self.topdir(arch, create_dir=create_dir), "product_id", pem_dir
    )
    if create_dir:
        makedirs(path)
    return os.path.join(path, file_name)
|
||||
|
||||
def image_build_dir(self, variant, create_dir=True):
    """Return the work directory for image-build tasks of a variant.

    @param variant
    @param create_dir=True

    Examples:
        work/image-build/Server
    """
    base = self.topdir("image-build", create_dir=create_dir)
    path = os.path.join(base, variant.uid)
    if create_dir:
        makedirs(path)
    return path
|
||||
|
||||
def image_build_conf(
    self, variant, image_name, image_type, arches=None, create_dir=True
):
    """Return path of an image-build config file for a variant.

    @param variant
    @param image-name
    @param image-type (e.g docker)
    @param arches
    @param create_dir=True

    Examples:
        work/image-build/Server/docker_rhel-server-docker.cfg
        work/image-build/Server/docker_rhel-server-docker_x86_64.cfg
        work/image-build/Server/docker_rhel-server-docker_x86_64-ppc64le.cfg
    """
    name = "%s_%s" % (image_type, image_name)
    if arches is not None:
        # Arch list is joined with dashes into a single filename component.
        name = "%s_%s" % (name, "-".join(list(arches)))
    return os.path.join(self.image_build_dir(variant), "%s.cfg" % name)
|
||||
|
||||
def module_defaults_dir(self, create_dir=True):
    """Return the global directory holding module defaults data.

    Example:
        work/global/module_defaults
    """
    base = self.topdir(create_dir=create_dir)
    path = os.path.join(base, "module_defaults")
    if create_dir:
        makedirs(path)
    return path
|
||||
|
||||
def module_obsoletes_dir(self, create_dir=True):
    """Return the global directory holding module obsoletes data.

    Example:
        work/global/module_obsoletes
    """
    base = self.topdir(create_dir=create_dir)
    path = os.path.join(base, "module_obsoletes")
    if create_dir:
        makedirs(path)
    return path
|
||||
|
||||
def pkgset_file_cache(self, pkgset_name):
    """Return the path to the pickled PackageSetBase.file_cache.

    Example:
        work/global/pkgset_f33-compose_file_cache.pickle
    """
    return os.path.join(
        self.topdir(arch="global"), "pkgset_%s_file_cache.pickle" % pkgset_name
    )
|
||||
|
||||
def pkgset_reuse_file(self, pkgset_name):
    """Return the path to the reuse-metadata pickle for a package set.

    Example:
        work/global/pkgset_f30-compose_reuse.pickle
    """
    name = "pkgset_%s_reuse.pickle" % pkgset_name
    return os.path.join(self.topdir(arch="global", create_dir=False), name)
|
||||
|
||||
|
||||
class ComposePaths(object):
    """Paths inside the final "compose" directory: trees, repos, ISOs,
    jigdo files and metadata. Addon variants never get their own tree;
    their paths resolve inside the parent variant's tree."""

    def __init__(self, compose):
        self.compose = compose
        # TODO: TREES?

    def topdir(self, arch=None, variant=None, create_dir=True, relative=False):
        """
        Return the compose top directory, optionally narrowed to one
        variant/arch combination. Arch and variant must be passed together
        or not at all.

        Examples:
            compose
            compose/Server/x86_64
        """
        if bool(arch) != bool(variant):
            raise TypeError("topdir(): either none or 2 arguments are expected")

        path = ""
        if not relative:
            path = os.path.join(self.compose.topdir, "compose")

        if arch or variant:
            # Addons live inside the parent variant's tree; recurse there.
            if variant.type == "addon":
                return self.topdir(
                    arch, variant.parent, create_dir=create_dir, relative=relative
                )
            path = os.path.join(path, variant.uid, arch)
        if create_dir and not relative:
            makedirs(path)
        return path

    def tree_dir(self, arch, variant, create_dir=True, relative=False):
        """
        Return the installable tree directory for a variant/arch.

        Examples:
            compose/Server/x86_64/os
            compose/Server-optional/x86_64/os
        """
        if arch == "src":
            arch = "source"

        if arch == "source":
            tree_dir = "tree"
        else:
            # use 'os' dir due to historical reasons
            tree_dir = "os"

        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            tree_dir,
        )
        if create_dir and not relative:
            makedirs(path)
        return path

    def os_tree(self, arch, variant, create_dir=True, relative=False):
        # Alias kept for backwards compatibility; identical to tree_dir().
        return self.tree_dir(arch, variant, create_dir=create_dir, relative=relative)

    def repository(self, arch, variant, create_dir=True, relative=False):
        """
        Return the directory that holds the yum/dnf repository.

        Examples:
            compose/Server/x86_64/os
            compose/Server/x86_64/addons/LoadBalancer
        """
        if variant.type == "addon":
            # Addon repos live in the parent tree's addons/<id> directory.
            path = self.packages(
                arch, variant, create_dir=create_dir, relative=relative
            )
        else:
            path = self.tree_dir(
                arch, variant, create_dir=create_dir, relative=relative
            )
        if create_dir and not relative:
            makedirs(path)
        return path

    def packages(self, arch, variant, create_dir=True, relative=False):
        """
        Return the directory containing RPM packages.

        Examples:
            compose/Server/x86_64/os/Packages
            compose/Server/x86_64/os/addons/LoadBalancer
            compose/Server-optional/x86_64/os/Packages
        """
        if variant.type == "addon":
            path = os.path.join(
                self.tree_dir(arch, variant, create_dir=create_dir, relative=relative),
                "addons",
                variant.id,
            )
        else:
            path = os.path.join(
                self.tree_dir(arch, variant, create_dir=create_dir, relative=relative),
                "Packages",
            )
        if create_dir and not relative:
            makedirs(path)
        return path

    def debug_topdir(self, arch, variant, create_dir=True, relative=False):
        """
        Return the top directory for debuginfo content.

        Examples:
            compose/Server/x86_64/debug
            compose/Server-optional/x86_64/debug
        """
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            "debug",
        )
        if create_dir and not relative:
            makedirs(path)
        return path

    def debug_tree(self, arch, variant, create_dir=True, relative=False):
        """
        Return the debuginfo tree directory.

        Examples:
            compose/Server/x86_64/debug/tree
            compose/Server-optional/x86_64/debug/tree
        """
        path = os.path.join(
            self.debug_topdir(arch, variant, create_dir=create_dir, relative=relative),
            "tree",
        )
        if create_dir and not relative:
            makedirs(path)
        return path

    def debug_packages(self, arch, variant, create_dir=True, relative=False):
        """
        Return the directory with debuginfo packages, or None for source
        arches (sources have no debuginfo).

        Examples:
            compose/Server/x86_64/debug/tree/Packages
            compose/Server/x86_64/debug/tree/addons/LoadBalancer
            compose/Server-optional/x86_64/debug/tree/Packages
        """
        if arch in ("source", "src"):
            return None
        if variant.type == "addon":
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "addons",
                variant.id,
            )
        else:
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "Packages",
            )
        if create_dir and not relative:
            makedirs(path)
        return path

    def debug_repository(self, arch, variant, create_dir=True, relative=False):
        """
        Return the debuginfo repository directory, or None for source arches.

        Examples:
            compose/Server/x86_64/debug/tree
            compose/Server/x86_64/debug/tree/addons/LoadBalancer
            compose/Server-optional/x86_64/debug/tree
        """
        if arch in ("source", "src"):
            return None
        if variant.type == "addon":
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "addons",
                variant.id,
            )
        else:
            path = self.debug_tree(
                arch, variant, create_dir=create_dir, relative=relative
            )
        if create_dir and not relative:
            makedirs(path)
        return path

    def iso_dir(self, arch, variant, symlink_to=None, create_dir=True, relative=False):
        """
        Return the directory for final ISOs, or None when the variant gets
        no ISOs (addons always; optional variants unless configured).

        Examples:
            compose/Server/x86_64/iso
            None
        """
        if variant.type == "addon":
            return None
        if variant.type == "optional":
            if not self.compose.conf.get("create_optional_isos", False):
                return None
        if arch == "src":
            arch = "source"
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative), "iso"
        )

        if symlink_to:
            # TODO: create_dir
            # Recreate the compose-relative path under symlink_to and point
            # the in-tree "iso" directory at it.
            topdir = self.compose.topdir.rstrip("/") + "/"
            relative_dir = path[len(topdir) :]
            target_dir = os.path.join(symlink_to, self.compose.compose_id, relative_dir)
            if create_dir and not relative:
                makedirs(target_dir)
            try:
                os.symlink(target_dir, path)
            except OSError as ex:
                if ex.errno != errno.EEXIST:
                    raise
                # Something already exists at `path`; accept it only if it is
                # a symlink to the expected target.
                msg = "Symlink pointing to '%s' expected: %s" % (target_dir, path)
                if not os.path.islink(path):
                    raise RuntimeError(msg)
                if os.path.abspath(os.readlink(path)) != target_dir:
                    raise RuntimeError(msg)
        else:
            if create_dir and not relative:
                makedirs(path)
        return path

    def iso_path(
        self, arch, variant, filename, symlink_to=None, create_dir=True, relative=False
    ):
        """
        Return the full path of one ISO, or None when the variant gets no
        ISOs (see iso_dir).

        Examples:
            compose/Server/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso
            None
        """
        path = self.iso_dir(
            arch,
            variant,
            symlink_to=symlink_to,
            create_dir=create_dir,
            relative=relative,
        )
        if path is None:
            return None

        return os.path.join(path, filename)

    def image_dir(self, variant, symlink_to=None, relative=False):
        """
        The arch is listed as literal '%(arch)s'
        Examples:
            compose/Server/%(arch)s/images
            None
        @param variant
        @param symlink_to=None
        @param relative=False
        """
        # NOTE: the returned path contains a literal "%(arch)s" placeholder
        # that the caller substitutes per-arch.
        path = os.path.join(
            self.topdir("%(arch)s", variant, create_dir=False, relative=relative),
            "images",
        )
        if symlink_to:
            topdir = self.compose.topdir.rstrip("/") + "/"
            relative_dir = path[len(topdir) :]
            target_dir = os.path.join(symlink_to, self.compose.compose_id, relative_dir)
            try:
                os.symlink(target_dir, path)
            except OSError as ex:
                if ex.errno != errno.EEXIST:
                    raise
                msg = "Symlink pointing to '%s' expected: %s" % (target_dir, path)
                if not os.path.islink(path):
                    raise RuntimeError(msg)
                if os.path.abspath(os.readlink(path)) != target_dir:
                    raise RuntimeError(msg)
        return path

    def jigdo_dir(self, arch, variant, create_dir=True, relative=False):
        """
        Return the directory for jigdo files, or None when the variant gets
        no ISOs (same rules as iso_dir).

        Examples:
            compose/Server/x86_64/jigdo
            None
        """
        if variant.type == "addon":
            return None
        if variant.type == "optional":
            if not self.compose.conf.get("create_optional_isos", False):
                return None
        if arch == "src":
            arch = "source"
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            "jigdo",
        )

        if create_dir and not relative:
            makedirs(path)
        return path

    def metadata(self, file_name=None, create_dir=True, relative=False):
        """
        Return the compose metadata directory, or a file inside it when
        file_name is given.

        Examples:
            compose/metadata
            compose/metadata/rpms.json
        """
        path = os.path.join(
            self.topdir(create_dir=create_dir, relative=relative), "metadata"
        )
        if create_dir and not relative:
            makedirs(path)
        if file_name:
            path = os.path.join(path, file_name)
        return path
|
44
pungi/phases/__init__.py
Normal file
44
pungi/phases/__init__.py
Normal file
@ -0,0 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
|
||||
# phases in runtime order
|
||||
from .init import InitPhase # noqa
|
||||
from .weaver import WeaverPhase # noqa
|
||||
from .pkgset import PkgsetPhase # noqa
|
||||
from .gather import GatherPhase # noqa
|
||||
from .createrepo import CreaterepoPhase # noqa
|
||||
from .buildinstall import BuildinstallPhase # noqa
|
||||
from .extra_files import ExtraFilesPhase # noqa
|
||||
from .createiso import CreateisoPhase # noqa
|
||||
from .extra_isos import ExtraIsosPhase # noqa
|
||||
from .image_build import ImageBuildPhase # noqa
|
||||
from .image_container import ImageContainerPhase # noqa
|
||||
from .kiwibuild import KiwiBuildPhase # noqa
|
||||
from .osbuild import OSBuildPhase # noqa
|
||||
from .repoclosure import RepoclosurePhase # noqa
|
||||
from .test import TestPhase # noqa
|
||||
from .image_checksum import ImageChecksumPhase # noqa
|
||||
from .livemedia_phase import LiveMediaPhase # noqa
|
||||
from .ostree import OSTreePhase # noqa
|
||||
from .ostree_installer import OstreeInstallerPhase # noqa
|
||||
from .ostree_container import OSTreeContainerPhase # noqa
|
||||
from .osbs import OSBSPhase # noqa
|
||||
from .phases_metadata import gather_phases_metadata # noqa
|
||||
|
||||
|
||||
# Collect metadata (names) from all phase classes imported above, so the
# compose driver can validate phase names given on the command line.
this_module = sys.modules[__name__]
PHASES_NAMES = gather_phases_metadata(this_module)
|
224
pungi/phases/base.py
Normal file
224
pungi/phases/base.py
Normal file
@ -0,0 +1,224 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import math
|
||||
import time
|
||||
|
||||
from pungi import util
|
||||
|
||||
|
||||
class PhaseBase(object):
    """Common behaviour for all compose phases: skip logic, timing, logging
    and tracking of used config patterns.

    Subclasses must provide a ``name`` class attribute and implement
    :meth:`run`.
    """

    def __init__(self, compose):
        self.compose = compose
        self.msg = "---------- PHASE: %s ----------" % self.name.upper()
        self.finished = False
        self._skipped = False

        # A set of config patterns that were actually used. Starts as None, and
        # when config is queried the variable turns into a set of patterns.
        self.used_patterns = None

    def validate(self):
        # Subclasses may override this to validate their configuration.
        pass

    def conf_assert_str(self, name):
        """Check that config option ``name`` exists and is a string.

        :returns: tuple ``(missing, invalid)``; ``missing`` lists absent
            option names, ``invalid`` lists ``(name, actual_type,
            expected_type)`` triples.
        """
        missing = []
        invalid = []
        if name not in self.compose.conf:
            missing.append(name)
        elif not isinstance(self.compose.conf[name], str):
            # Fix: list.append() takes exactly one argument; the original
            # passed three, which raised TypeError. Record one tuple instead.
            invalid.append((name, type(self.compose.conf[name]), str))
        return missing, invalid

    def skip(self):
        """Return True if this phase should not run (already skipped, not in
        --just-phase selection, or listed in skip_phases)."""
        if self._skipped:
            return True
        if self.compose.just_phases and self.name not in self.compose.just_phases:
            return True
        if self.name in self.compose.skip_phases:
            return True
        if self.name in self.compose.conf["skip_phases"]:
            return True
        return False

    def start(self):
        """Run the phase unless it is skipped, recording start time and
        sending notifications."""
        self._skipped = self.skip()
        if self._skipped:
            self.compose.log_warning("[SKIP ] %s" % self.msg)
            self.finished = True
            return
        self._start_time = time.time()
        self.compose.log_info("[BEGIN] %s" % self.msg)
        self.compose.notifier.send("phase-start", phase_name=self.name)
        self.run()

    def get_config_block(self, variant, arch=None):
        """In config for current phase, find a block corresponding to given
        variant and arch. The arch should be given if and only if the config
        uses variant/arch mapping.
        """
        self.used_patterns = self.used_patterns or set()
        if arch is not None:
            return util.get_arch_variant_data(
                self.compose.conf, self.name, arch, variant, keys=self.used_patterns
            )
        else:
            return util.get_variant_data(
                self.compose.conf, self.name, variant, keys=self.used_patterns
            )

    def get_all_patterns(self):
        """Get all variant patterns from config file for this phase."""
        if isinstance(self.compose.conf.get(self.name), dict):
            return set(self.compose.conf.get(self.name, {}).keys())
        else:
            return set(x[0] for x in self.compose.conf.get(self.name, []))

    def report_unused_patterns(self):
        """Log warning about unused parts of the config.

        This is not technically an error, but can help debug when something
        expected is missing.
        """
        all_patterns = self.get_all_patterns()
        unused_patterns = all_patterns - self.used_patterns
        if unused_patterns:
            self.compose.log_warning(
                "[%s] Patterns in config do not match any variant: %s"
                % (self.name.upper(), ", ".join(sorted(unused_patterns)))
            )
            self.compose.log_info(
                "Note that variants can be excluded in configuration file"
            )

    def stop(self):
        """Finish the phase: stop worker pool (if any), log duration and
        report unused config patterns."""
        if self.finished:
            return
        if hasattr(self, "pool"):
            self.pool.stop()
        self.finished = True
        self.compose.log_info("[DONE ] %s" % self.msg)

        if hasattr(self, "_start_time"):
            self.compose.log_info(
                "PHASE %s took %d seconds"
                % (self.name.upper(), math.ceil(time.time() - self._start_time))
            )

        if self.used_patterns is not None:
            # We only want to report this if the config was actually queried.
            self.report_unused_patterns()
        self.compose.notifier.send("phase-stop", phase_name=self.name)

    def run(self):
        raise NotImplementedError
||||
|
||||
|
||||
class ConfigGuardedPhase(PhaseBase):
    """A phase that is skipped unless config option is set."""

    def skip(self):
        # Honour the generic skip conditions first.
        if super(ConfigGuardedPhase, self).skip():
            return True
        # Run only when this phase's own config section is present.
        if self.compose.conf.get(self.name):
            return False
        self.compose.log_info(
            "Config section '%s' was not found. Skipping." % self.name
        )
        return True
|
||||
|
||||
|
||||
class ImageConfigMixin(object):
    """
    A mixin for phase that needs to access image related settings: ksurl,
    version, target and release.

    First, it checks config object given as argument, then it checks
    phase-level configuration and finally falls back to global configuration.
    """

    def __init__(self, *args, **kwargs):
        super(ImageConfigMixin, self).__init__(*args, **kwargs)

    def get_config(self, cfg, opt):
        """Look up `opt` in `cfg`, then in phase-level config, then in the
        global config."""
        fallback = self.compose.conf.get(
            "%s_%s" % (self.name, opt), self.compose.conf.get("global_%s" % opt)
        )
        return cfg.get(opt, fallback)

    def get_version(self, cfg):
        """
        Get version from configuration hierarchy or fall back to release
        version.
        """
        configured = self.get_config(cfg, "version")
        generated = util.version_generator(self.compose, configured)
        return generated or configured or self.compose.image_version

    def get_release(self, cfg):
        """
        If release is set to a magic string (or explicitly to None -
        deprecated), replace it with a generated value. Uses configuration
        passed as argument, phase specific settings and global settings.
        """
        sources = (
            ("release", cfg),
            ("%s_release" % self.name, self.compose.conf),
            ("global_release", self.compose.conf),
        )
        for key, conf in sources:
            if key in conf:
                generated = util.version_generator(self.compose, conf[key])
                return generated or self.compose.image_release
        return None

    def get_ksurl(self, cfg):
        """
        Get ksurl from `cfg`. If not present, fall back to phase defined one or
        global one.
        """
        return (
            cfg.get("ksurl")
            or self.compose.conf.get("%s_ksurl" % self.name)
            or self.compose.conf.get("global_ksurl")
        )
|
||||
|
||||
|
||||
class PhaseLoggerMixin(object):
    """
    A mixin that can extend a phase with a new logging logger that copy
    handlers from compose, but with different formatter that includes phase name.
    """

    def __init__(self, *args, **kwargs):
        super(PhaseLoggerMixin, self).__init__(*args, **kwargs)
        # Hoisted from the loop below; keeping the import at the top of the
        # method avoids re-executing the statement per handler.
        import copy

        self.logger = None
        if self.compose._logger and self.compose._logger.handlers:
            self.logger = logging.getLogger(self.name.upper())
            self.logger.setLevel(logging.DEBUG)
            # Renamed from `format` to avoid shadowing the builtin.
            log_format = "%(asctime)s [%(name)-16s] [%(levelname)-8s] %(message)s"

            for handler in self.compose._logger.handlers:
                hl = copy.copy(handler)
                hl.setFormatter(
                    logging.Formatter(log_format, datefmt="%Y-%m-%d %H:%M:%S")
                )
                hl.setLevel(logging.DEBUG)
                self.logger.addHandler(hl)
|
950
pungi/phases/buildinstall.py
Normal file
950
pungi/phases/buildinstall.py
Normal file
@ -0,0 +1,950 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import errno
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import re
|
||||
from six.moves import cPickle as pickle
|
||||
from copy import copy
|
||||
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
from kobo.shortcuts import run, force_list
|
||||
import kobo.rpmlib
|
||||
from productmd.images import Image
|
||||
from six.moves import shlex_quote
|
||||
|
||||
from pungi.arch import get_valid_arches
|
||||
from pungi.util import get_volid, get_arch_variant_data
|
||||
from pungi.util import get_file_size, get_mtime, failable, makedirs
|
||||
from pungi.util import copy_all, translate_path
|
||||
from pungi.wrappers.lorax import LoraxWrapper
|
||||
from pungi.wrappers import iso
|
||||
from pungi.wrappers.scm import get_file
|
||||
from pungi.wrappers.scm import get_file_from_scm
|
||||
from pungi.wrappers import kojiwrapper
|
||||
from pungi.phases.base import PhaseBase
|
||||
from pungi.runroot import Runroot, download_and_extract_archive
|
||||
|
||||
|
||||
class BuildinstallPhase(PhaseBase):
|
||||
name = "buildinstall"
|
||||
|
||||
def __init__(self, compose, pkgset_phase=None):
    """Set up the worker pool and read buildinstall-related config."""
    PhaseBase.__init__(self, compose)
    self.pool = ThreadPool(logger=self.compose._logger)
    # A set of (variant_uid, arch) pairs that completed successfully. This
    # is needed to skip copying files for failed tasks.
    self.pool.finished_tasks = set()
    # A set of (variant_uid, arch) pairs that were reused from previous
    # compose.
    self.pool.reused_tasks = set()
    conf = self.compose.conf
    self.buildinstall_method = conf.get("buildinstall_method")
    self.lorax_use_koji_plugin = conf.get("lorax_use_koji_plugin")
    self.used_lorax = self.buildinstall_method == "lorax"
    self.pkgset_phase = pkgset_phase

    self.warned_skipped = False
|
||||
|
||||
def skip(self):
    """Skip unless a buildinstall method is configured; warn only once."""
    if PhaseBase.skip(self):
        return True
    if self.compose.conf.get("buildinstall_method"):
        return False
    if not self.warned_skipped:
        msg = "Not a bootable product. Skipping buildinstall."
        self.compose.log_debug(msg)
        self.warned_skipped = True
    return True
|
||||
|
||||
def _get_lorax_cmd(
    self,
    repo_baseurl,
    output_dir,
    variant,
    arch,
    buildarch,
    volid,
    final_output_dir,
):
    """Build the lorax invocation for one variant/arch.

    Returns a dict of options when the Koji lorax plugin is used, otherwise
    a shell command string that wipes the output dir and runs lorax.
    """
    # Defaults; each entry may be overridden/extended by lorax_options
    # blocks matching this arch/variant (later blocks win for scalars).
    noupgrade = True
    bugurl = None
    nomacboot = True
    add_template = []
    add_arch_template = []
    add_template_var = []
    add_arch_template_var = []
    dracut_args = []
    rootfs_size = None
    skip_branding = False
    squashfs_only = False
    configuration_file = None
    configuration_file_source = None
    version = self.compose.conf.get(
        "treeinfo_version", self.compose.conf["release_version"]
    )
    for data in get_arch_variant_data(
        self.compose.conf, "lorax_options", arch, variant
    ):
        if not data.get("noupgrade", True):
            noupgrade = False
        if data.get("bugurl"):
            bugurl = data.get("bugurl")
        if not data.get("nomacboot", True):
            nomacboot = False
        if "rootfs_size" in data:
            rootfs_size = data.get("rootfs_size")
        add_template.extend(data.get("add_template", []))
        add_arch_template.extend(data.get("add_arch_template", []))
        add_template_var.extend(data.get("add_template_var", []))
        add_arch_template_var.extend(data.get("add_arch_template_var", []))
        dracut_args.extend(data.get("dracut_args", []))
        skip_branding = data.get("skip_branding", False)
        configuration_file_source = data.get("configuration_file")
        squashfs_only = data.get("squashfs_only", False)
        if "version" in data:
            version = data["version"]
    output_dir = os.path.join(output_dir, variant.uid)
    output_topdir = output_dir

    # The paths module will modify the filename (by inserting arch). But we
    # only care about the directory anyway.
    log_dir = _get_log_dir(self.compose, variant, arch)
    # Place the lorax.conf as specified by
    # the configuration_file parameter of lorax_options to the log directory.
    if configuration_file_source:
        configuration_file_destination = os.path.join(log_dir, "lorax.conf")
        # Obtain lorax.conf for the buildInstall phase
        get_file(
            configuration_file_source,
            configuration_file_destination,
            compose=self.compose,
        )
        configuration_file = configuration_file_destination

    # Copy so that extra sources do not leak into the caller's list.
    repos = repo_baseurl[:]
    repos.extend(
        get_arch_variant_data(
            self.compose.conf, "lorax_extra_sources", arch, variant
        )
    )
    if self.compose.has_comps:
        comps_repo = self.compose.paths.work.comps_repo(arch, variant)
        # Translate to a remotely reachable URL when lorax runs outside
        # this host's filesystem (Koji runroot or the Koji plugin).
        if final_output_dir != output_dir or self.lorax_use_koji_plugin:
            comps_repo = translate_path(self.compose, comps_repo)
        repos.append(comps_repo)

    if self.lorax_use_koji_plugin:
        # The Koji lorax plugin receives structured options, not a command.
        return {
            "product": self.compose.conf["release_name"],
            "version": version,
            "release": version,
            "sources": force_list(repos),
            "variant": variant.uid,
            "installpkgs": variant.buildinstallpackages,
            "isfinal": self.compose.supported,
            "buildarch": buildarch,
            "volid": volid,
            "nomacboot": nomacboot,
            "bugurl": bugurl,
            "add-template": add_template,
            "add-arch-template": add_arch_template,
            "add-template-var": add_template_var,
            "add-arch-template-var": add_arch_template_var,
            "noupgrade": noupgrade,
            "rootfs-size": rootfs_size,
            "dracut-args": dracut_args,
            "skip_branding": skip_branding,
            "squashfs_only": squashfs_only,
            "configuration_file": configuration_file,
        }
    else:
        # If the buildinstall_topdir is set, it means Koji is used for
        # buildinstall phase and the filesystem with Koji is read-only.
        # In that case, we have to write logs to buildinstall_topdir and
        # later copy them back to our local log directory.
        if self.compose.conf.get("buildinstall_topdir", None):
            output_dir = os.path.join(output_dir, "results")

        lorax = LoraxWrapper()
        lorax_cmd = lorax.get_lorax_cmd(
            self.compose.conf["release_name"],
            version,
            version,
            repos,
            output_dir,
            variant=variant.uid,
            buildinstallpackages=variant.buildinstallpackages,
            is_final=self.compose.supported,
            buildarch=buildarch,
            volid=volid,
            nomacboot=nomacboot,
            bugurl=bugurl,
            add_template=add_template,
            add_arch_template=add_arch_template,
            add_template_var=add_template_var,
            add_arch_template_var=add_arch_template_var,
            noupgrade=noupgrade,
            rootfs_size=rootfs_size,
            log_dir=log_dir,
            dracut_args=dracut_args,
            skip_branding=skip_branding,
            squashfs_only=squashfs_only,
            configuration_file=configuration_file,
        )
        # Lorax refuses to run into an existing directory, so wipe it first.
        return "rm -rf %s && %s" % (
            shlex_quote(output_topdir),
            " ".join([shlex_quote(x) for x in lorax_cmd]),
        )
|
||||
|
||||
def get_repos(self, arch):
    """Return per-arch repo paths from all package sets."""
    return [pkgset.paths[arch] for pkgset in self.pkgset_phase.package_sets]
|
||||
|
||||
def run(self):
    """Queue one buildinstall (lorax) task per non-empty variant of every
    arch and start the worker pool.

    Only the "lorax" buildinstall method is supported here; any other
    configured method raises ValueError.
    """
    disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")

    # Prepare kickstart file for final images.
    self.pool.kickstart_file = get_kickstart_file(self.compose)

    for arch in self.compose.get_arches():
        commands = []

        # `output_dir` may be redirected under buildinstall_topdir;
        # `final_output_dir` is always the location inside the compose.
        output_dir = self.compose.paths.work.buildinstall_dir(
            arch, allow_topdir_override=True
        )
        final_output_dir = self.compose.paths.work.buildinstall_dir(
            arch, allow_topdir_override=False
        )
        makedirs(final_output_dir)
        repo_baseurls = self.get_repos(arch)
        if final_output_dir != output_dir or self.lorax_use_koji_plugin:
            # Local repo paths must be translated (presumably into URLs
            # reachable from the runroot host) when the task does not run
            # directly against the compose filesystem.
            repo_baseurls = [translate_path(self.compose, r) for r in repo_baseurls]

        if self.buildinstall_method == "lorax":
            buildarch = get_valid_arches(arch)[0]
            for variant in self.compose.get_variants(arch=arch, types=["variant"]):
                if variant.is_empty:
                    continue

                # Per-variant opt-out via the buildinstall_skip option.
                skip = get_arch_variant_data(
                    self.compose.conf, "buildinstall_skip", arch, variant
                )
                if skip == [True]:
                    self.compose.log_info(
                        "Skipping buildinstall for %s.%s due to config option"
                        % (variant, arch)
                    )
                    continue

                volid = get_volid(
                    self.compose, arch, variant=variant, disc_type=disc_type
                )
                commands.append(
                    (
                        variant,
                        self._get_lorax_cmd(
                            repo_baseurls,
                            output_dir,
                            variant,
                            arch,
                            buildarch,
                            volid,
                            final_output_dir,
                        ),
                    )
                )
        else:
            raise ValueError(
                "Unsupported buildinstall method: %s" % self.buildinstall_method
            )

        # One worker thread per queued command.
        for variant, cmd in commands:
            self.pool.add(BuildinstallThread(self.pool))
            self.pool.queue_put(
                (self.compose, arch, variant, cmd, self.pkgset_phase)
            )

    self.pool.start()
|
||||
|
||||
def succeeded(self, variant, arch):
    """Tell whether buildinstall finished successfully for (variant, arch).

    A skipped phase counts as successful: either there will be no output,
    or it's a debug run of compose where anything can happen.
    """
    if super(BuildinstallPhase, self).skip():
        return True
    task_key = (variant.uid if self.used_lorax else None, arch)
    return task_key in self.pool.finished_tasks
|
||||
|
||||
def reused(self, variant, arch):
    """
    Check if buildinstall phase reused previous results for given variant
    and arch. If the phase is skipped, the results will be considered
    reused as well.
    """
    if super(BuildinstallPhase, self).skip():
        return True
    task_key = (variant.uid if self.used_lorax else None, arch)
    return task_key in self.pool.reused_tasks
|
||||
|
||||
|
||||
def get_kickstart_file(compose):
    """Fetch the kickstart configured as ``buildinstall_kickstart``.

    The file ends up as ``ks.cfg`` in the global work topdir and its path
    is returned.  Returns ``None`` when no kickstart is configured; when
    the file already exists (e.g. a restarted compose) fetching is skipped
    and the existing path is returned.
    """
    scm_dict = compose.conf.get("buildinstall_kickstart")
    if not scm_dict:
        compose.log_debug("Path to ks.cfg (buildinstall_kickstart) not specified.")
        return

    msg = "Getting ks.cfg"
    kickstart_path = os.path.join(compose.paths.work.topdir(arch="global"), "ks.cfg")
    if os.path.exists(kickstart_path):
        # Already present from an earlier run -- reuse it as-is.
        compose.log_warning("[SKIP ] %s" % msg)
        return kickstart_path

    compose.log_info("[BEGIN] %s" % msg)
    if isinstance(scm_dict, dict):
        kickstart_name = os.path.basename(scm_dict["file"])
        if scm_dict["scm"] == "file":
            # Relative "file" paths are resolved against the config directory.
            scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
    else:
        # A plain string is treated as a path relative to the config directory.
        kickstart_name = os.path.basename(scm_dict)
        scm_dict = os.path.join(compose.config_dir, scm_dict)

    tmp_dir = compose.mkdtemp(prefix="buildinstall_kickstart_")
    get_file_from_scm(scm_dict, tmp_dir, compose=compose)
    src = os.path.join(tmp_dir, kickstart_name)
    shutil.copy2(src, kickstart_path)
    compose.log_info("[DONE ] %s" % msg)
    return kickstart_path
|
||||
|
||||
|
||||
# Boot loader configuration files that may reference the volume id.
BOOT_CONFIGS = [
    "isolinux/isolinux.cfg",
    "etc/yaboot.conf",
    "ppc/ppc64/yaboot.conf",
    "EFI/BOOT/BOOTX64.conf",
    "EFI/BOOT/grub.cfg",
]
# Boot images that embed copies of the configs above.
BOOT_IMAGES = [
    "images/efiboot.img",
]


def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
    """
    Put escaped volume ID and possibly kickstart file into the boot
    configuration files.
    :returns: list of paths to modified config files
    """
    escaped_volid = volid.replace(" ", r"\x20").replace("\\", "\\\\")
    double_escaped_volid = escaped_volid.replace("\\", "\\\\")
    modified_configs = []
    for config in configs:
        config_path = os.path.join(path, config)
        if not os.path.exists(config_path):
            continue

        with open(config_path, "r") as f:
            original_data = f.read()
        # Break the hardlink: remove the file and write a fresh one so
        # other trees sharing the same inode are not affected.
        os.unlink(config_path)

        # double-escape volid in yaboot.conf
        new_volid = double_escaped_volid if "yaboot" in config else escaped_volid

        ks = ""
        if ks_file:
            ks = " inst.ks=hd:LABEL=%s:/ks.cfg" % new_volid

        data = original_data
        # pre-f18
        data = re.sub(r":CDLABEL=[^ \n]*", r":CDLABEL=%s%s" % (new_volid, ks), data)
        # f18+
        data = re.sub(r":LABEL=[^ \n]*", r":LABEL=%s%s" % (new_volid, ks), data)
        data = re.sub(r"(search .* -l) '[^'\n]*'", r"\1 '%s'" % volid, data)

        with open(config_path, "w") as f:
            f.write(data)

        if data != original_data:
            modified_configs.append(config)
            if logger:
                # Generally lorax should create file with correct volume id
                # already. If we don't have a kickstart, this function should
                # be a no-op.
                logger.info("Boot config %s changed" % config_path)

    return modified_configs
|
||||
|
||||
|
||||
# HACK: this is a hack!
# * it's quite trivial to replace volids
# * it's not easy to replace menu titles
# * we probably need to get this into lorax
def tweak_buildinstall(
    compose, src, dst, arch, variant, label, volid, kickstart_file=None
):
    """Copy buildinstall output from *src* to *dst*, rewriting boot configs
    (volume id, optional kickstart) on the way via ``tweak_configs`` and
    patching any changed configs inside the EFI boot images.

    NOTE(review): the *variant* and *label* parameters are not used in this
    function body.
    """
    tmp_dir = compose.mkdtemp(prefix="tweak_buildinstall_")

    # verify src
    if not os.path.isdir(src):
        raise OSError(errno.ENOENT, "Directory does not exist: %s" % src)

    # create dst (tolerate it already existing)
    try:
        os.makedirs(dst)
    except OSError as ex:
        if ex.errno != errno.EEXIST:
            raise

    # copy src to temp
    # TODO: place temp on the same device as buildinstall dir so we can hardlink
    cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
        shlex_quote(src),
        shlex_quote(tmp_dir),
    )
    run(cmd)

    found_configs = tweak_configs(
        tmp_dir, volid, kickstart_file, logger=compose._logger
    )
    if kickstart_file and found_configs:
        # Ship the kickstart next to the boot configs that reference it.
        shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))

    images = [os.path.join(tmp_dir, img) for img in BOOT_IMAGES]
    if found_configs:
        for image in images:
            if not os.path.isfile(image):
                continue

            with iso.mount(
                image,
                logger=compose._logger,
                use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
            ) as mount_tmp_dir:
                for config in found_configs:
                    # Put each modified config file into the image (overwriting the
                    # original).
                    config_path = os.path.join(tmp_dir, config)
                    config_in_image = os.path.join(mount_tmp_dir, config)

                    if os.path.isfile(config_in_image):
                        cmd = [
                            "cp",
                            "-v",
                            "--remove-destination",
                            config_path,
                            config_in_image,
                        ]
                        run(cmd)

    # HACK: make buildinstall files world readable
    run("chmod -R a+rX %s" % shlex_quote(tmp_dir))

    # copy temp to dst
    cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
        shlex_quote(tmp_dir),
        shlex_quote(dst),
    )
    run(cmd)

    shutil.rmtree(tmp_dir)
|
||||
|
||||
|
||||
def link_boot_iso(compose, arch, variant, can_fail):
    """Link (or copy) lorax's boot.iso into the compose iso directory,
    create its manifest, and — for netinstall variants only — register the
    image in the compose image manifest.
    """
    if arch == "src":
        return

    disc_type = compose.conf["disc_types"].get("boot", "boot")

    symlink_isos_to = compose.conf.get("symlink_isos_to")
    os_tree = compose.paths.compose.os_tree(arch, variant)
    # TODO: find in treeinfo?
    boot_iso_path = os.path.join(os_tree, "images", "boot.iso")
    if not os.path.isfile(boot_iso_path):
        # Nothing to link; lorax did not produce a boot.iso for this tree.
        return

    msg = "Linking boot.iso (arch: %s, variant: %s)" % (arch, variant)
    filename = compose.get_image_name(
        arch, variant, disc_type=disc_type, disc_num=None, suffix=".iso"
    )
    new_boot_iso_path = compose.paths.compose.iso_path(
        arch, variant, filename, symlink_to=symlink_isos_to
    )
    new_boot_iso_relative_path = compose.paths.compose.iso_path(
        arch, variant, filename, relative=True
    )
    if os.path.exists(new_boot_iso_path):
        # TODO: log
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info("[BEGIN] %s" % msg)
    # Try to hardlink, and copy if that fails (e.g. across filesystems).
    try:
        os.link(boot_iso_path, new_boot_iso_path)
    except OSError:
        shutil.copy2(boot_iso_path, new_boot_iso_path)

    implant_md5 = iso.get_implanted_md5(new_boot_iso_path)
    iso_name = os.path.basename(new_boot_iso_path)
    iso_dir = os.path.dirname(new_boot_iso_path)

    # create iso manifest
    run(iso.get_manifest_cmd(iso_name), workdir=iso_dir)

    # Fill in productmd metadata describing the image.
    img = Image(compose.im)
    img.path = new_boot_iso_relative_path
    img.mtime = get_mtime(new_boot_iso_path)
    img.size = get_file_size(new_boot_iso_path)
    img.arch = arch
    img.type = "boot"
    img.format = "iso"
    img.disc_number = 1
    img.disc_count = 1
    img.bootable = True
    img.subvariant = variant.uid
    img.implant_md5 = implant_md5
    setattr(img, "can_fail", can_fail)
    setattr(img, "deliverable", "buildinstall")
    try:
        img.volume_id = iso.get_volume_id(
            new_boot_iso_path,
            compose.conf.get("createiso_use_xorrisofs"),
        )
    except RuntimeError:
        # Volume id could not be read; treated as non-fatal here.
        pass
    # In this phase we should add to compose only the images that
    # will be used only as netinstall.
    # On this step lorax generates environment
    # for creating isos and create them.
    # On step `extra_isos` we overwrite the not needed iso `boot Minimal` by
    # new iso. It already contains necessary packages from included variants.
    # NOTE(review): direct indexing raises KeyError when `netinstall_variants`
    # is missing from the config — presumably the config schema supplies a
    # default; confirm.
    if variant.uid in compose.conf['netinstall_variants']:
        compose.im.add(variant.uid, arch, img)
    compose.log_info("[DONE ] %s" % msg)
|
||||
|
||||
|
||||
class BuildinstallThread(WorkerThread):
    """Worker thread that runs one buildinstall (lorax) task in runroot."""

    def process(self, item, num):
        # The variant is None unless lorax is used as buildinstall method.
        compose, arch, variant, cmd, pkgset_phase = item
        can_fail = compose.can_fail(variant, arch, "buildinstall")
        # `failable` turns a failure into a warning when the deliverable is
        # marked as allowed to fail for this variant/arch.
        with failable(compose, can_fail, variant, arch, "buildinstall"):
            try:
                self.worker(compose, arch, variant, cmd, pkgset_phase, num)
            except RuntimeError:
                # Surface the dependency-resolution error from lorax's log
                # before propagating the failure.
                self._print_depsolve_error(compose, arch, variant)
                raise
|
||||
|
||||
def _print_depsolve_error(self, compose, arch, variant):
    """Best effort: echo the 'Dependency check failed' section of
    pylorax.log (everything from the first match to the end of the file)
    into the compose error log.  Any problem reading the log is ignored.
    """
    try:
        log_file = os.path.join(_get_log_dir(compose, variant, arch), "pylorax.log")
        with open(log_file) as f:
            reporting = False
            for line in f:
                reporting = reporting or bool(
                    re.match("Dependency check failed", line)
                )
                if reporting:
                    compose.log_error(line.rstrip())
    except Exception:
        pass
|
||||
|
||||
def _generate_buildinstall_metadata(
    self, compose, arch, variant, cmd, buildroot_rpms, pkgset_phase
):
    """
    Generate buildinstall.metadata dict.

    :param Compose compose: Current compose.
    :param str arch: Current architecture.
    :param Variant variant: Compose variant.
    :param list cmd: List of command line arguments passed to buildinstall task.
    :param list buildroot_rpms: List of NVRAs of all RPMs installed in the
        buildinstall task's buildroot.
    :param PkgsetPhase pkgset_phase: Package set phase instance.
    :return: The buildinstall.metadata dict.
    """
    # Load the list of packages installed in the boot.iso.
    # The list of installed packages is logged by Lorax in the "pkglists"
    # directory. There is one file for each installed RPM and the name
    # of the file is the name of the RPM.
    # We need to resolve the name of each RPM back to its NVRA.
    installed_rpms = []
    log_fname = "buildinstall-%s-logs/dummy" % variant.uid
    log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname, False))
    pkglists_dir = os.path.join(log_dir, "pkglists")
    if os.path.exists(pkglists_dir):
        for pkg_name in os.listdir(pkglists_dir):
            for pkgset in pkgset_phase.package_sets:
                global_pkgset = pkgset["global"]
                # We actually do not care from which package_set the RPM
                # came from or if there are multiple versions/release of
                # the single RPM in more packages sets. We simply include
                # all RPMs with this name in the metadata.
                # Later when deciding if the buildinstall phase results
                # can be reused, we check that all the RPMs with this name
                # are still the same in old/new compose.
                # NOTE(review): this scans the whole file_cache once per
                # package name; a name->paths index would avoid the
                # quadratic walk if this ever shows up in profiles.
                for rpm_path, rpm_obj in global_pkgset.file_cache.items():
                    if rpm_obj.name == pkg_name:
                        installed_rpms.append(rpm_path)

    # Store the metadata in `buildinstall.metadata`.
    metadata = {
        "cmd": cmd,
        "buildroot_rpms": sorted(buildroot_rpms),
        "installed_rpms": sorted(installed_rpms),
    }
    return metadata
|
||||
|
||||
def _write_buildinstall_metadata(
    self, compose, arch, variant, cmd, buildroot_rpms, pkgset_phase
):
    """
    Write buildinstall.metadata file containing all the information about
    buildinstall phase input and environment.

    This file is later used to decide whether old buildinstall results can
    be reused instead of generating them again.

    :param Compose compose: Current compose.
    :param str arch: Current architecture.
    :param Variant variant: Compose variant.
    :param list cmd: List of command line arguments passed to buildinstall task.
    :param list buildroot_rpms: List of NVRAs of all RPMs installed in the
        buildinstall task's buildroot.
    :param PkgsetPhase pkgset_phase: Package set phase instance.
    """
    # Generate the list of `*-RPMs` log file.
    log_filename = ("buildinstall-%s" % variant.uid) if variant else "buildinstall"
    log_file = compose.paths.log.log_file(arch, log_filename + "-RPMs")
    with open(log_file, "w") as f:
        f.write("\n".join(buildroot_rpms))

    # Write buildinstall.metadata only if particular variant is defined.
    # The `variant` is `None` only if old "buildinstall" method is used.
    if not variant:
        return

    metadata = self._generate_buildinstall_metadata(
        compose, arch, variant, cmd, buildroot_rpms, pkgset_phase
    )

    log_fname = "buildinstall-%s-logs/dummy" % variant.uid
    log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
    metadata_path = os.path.join(log_dir, "buildinstall.metadata")
    # Pickled (not JSON) so arbitrary cmd structures round-trip; read back
    # by _load_old_buildinstall_metadata in a later compose.
    with open(metadata_path, "wb") as f:
        pickle.dump(metadata, f, protocol=pickle.HIGHEST_PROTOCOL)
|
||||
|
||||
def _load_old_buildinstall_metadata(self, compose, arch, variant):
|
||||
"""
|
||||
Helper method to load "buildinstall.metadata" from old compose.
|
||||
|
||||
:param Compose compose: Current compose.
|
||||
:param str arch: Current architecture.
|
||||
:param Variant variant: Compose variant.
|
||||
"""
|
||||
if not variant:
|
||||
return None
|
||||
|
||||
log_fname = "buildinstall-%s-logs/dummy" % variant.uid
|
||||
metadata = os.path.join(
|
||||
os.path.dirname(compose.paths.log.log_file(arch, log_fname)),
|
||||
"buildinstall.metadata",
|
||||
)
|
||||
old_metadata = compose.paths.old_compose_path(metadata)
|
||||
if not old_metadata:
|
||||
return None
|
||||
|
||||
compose.log_info("Loading old BUILDINSTALL phase metadata: %s", old_metadata)
|
||||
try:
|
||||
with open(old_metadata, "rb") as f:
|
||||
old_result = pickle.load(f)
|
||||
return old_result
|
||||
except Exception as e:
|
||||
compose.log_debug(
|
||||
"Failed to load old BUILDINSTALL phase metadata %s : %s"
|
||||
% (old_metadata, str(e))
|
||||
)
|
||||
return None
|
||||
|
||||
def _reuse_old_buildinstall_result(self, compose, arch, variant, cmd, pkgset_phase):
    """
    Try to reuse old buildinstall results.

    Reuse happens only when all of the following hold: reuse is enabled in
    config, old metadata exists, the pungi_buildinstall Koji plugin is
    used, the command line matches, the RPMs installed in the old boot.iso
    are unchanged, and the runroot buildroot RPMs are unchanged.

    :param Compose compose: Current compose.
    :param str arch: Current architecture.
    :param Variant variant: Compose variant.
    :param list cmd: List of command line arguments passed to buildinstall task.
    :param PkgsetPhase pkgset_phase: Package set phase instance.
    :return: True if old buildinstall phase results have been reused,
        None otherwise.
    """
    log_msg = "Cannot reuse old BUILDINSTALL phase results - %s"

    if not compose.conf["buildinstall_allow_reuse"]:
        compose.log_info(log_msg % "reuse of old buildinstall results is disabled.")
        return

    # Load the old buildinstall.metadata.
    old_metadata = self._load_old_buildinstall_metadata(compose, arch, variant)
    if old_metadata is None:
        compose.log_info(log_msg % "no old BUILDINSTALL metadata.")
        return

    # For now try to reuse only if pungi_buildinstall plugin is used.
    # This is the easiest approach, because we later need to filter out
    # some parts of `cmd` and for pungi_buildinstall, the `cmd` is a dict
    # which makes this easy.
    if not isinstance(old_metadata["cmd"], dict) or not isinstance(cmd, dict):
        compose.log_info(log_msg % "pungi_buildinstall plugin is not used.")
        return

    # Filter out "outputdir" and "sources" because they change every time.
    # The "sources" are not important, because we check the buildinstall
    # input on RPM level.
    cmd_copy = copy(cmd)
    for key in ["outputdir", "sources"]:
        cmd_copy.pop(key, None)
        old_metadata["cmd"].pop(key, None)

    # Do not reuse if command line arguments are not the same.
    if old_metadata["cmd"] != cmd_copy:
        compose.log_info(log_msg % "lorax command line arguments differ.")
        return

    # Check that the RPMs installed in the old boot.iso exist in the very
    # same versions/releases in this compose.
    for rpm_path in old_metadata["installed_rpms"]:
        found = False
        for pkgset in pkgset_phase.package_sets:
            global_pkgset = pkgset["global"]
            if rpm_path in global_pkgset.file_cache:
                found = True
                break
        if not found:
            compose.log_info(
                log_msg % "RPM %s does not exist in new compose." % rpm_path
            )
            return

    # Ask Koji for all the RPMs in the `runroot_tag` and check that
    # those installed in the old buildinstall buildroot are still in the
    # very same versions/releases.
    koji_wrapper = kojiwrapper.KojiWrapper(compose)
    rpms = koji_wrapper.koji_proxy.listTaggedRPMS(
        compose.conf.get("runroot_tag"), inherit=True, latest=True
    )[0]
    rpm_nvras = set()
    for rpm in rpms:
        rpm_nvras.add(kobo.rpmlib.make_nvra(rpm, add_rpm=False, force_epoch=False))
    for old_nvra in old_metadata["buildroot_rpms"]:
        if old_nvra not in rpm_nvras:
            compose.log_info(
                log_msg % "RPM %s does not exist in new buildroot." % old_nvra
            )
            return

    # We can reuse the old buildinstall results!
    compose.log_info("Reusing old BUILDINSTALL phase output")

    # Copy old buildinstall output to this compose.
    final_output_dir = compose.paths.work.buildinstall_dir(arch, variant=variant)
    old_final_output_dir = compose.paths.old_compose_path(final_output_dir)
    copy_all(old_final_output_dir, final_output_dir)

    # Copy old buildinstall logs to this compose.
    log_fname = "buildinstall-%s-logs/dummy" % variant.uid
    final_log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
    old_final_log_dir = compose.paths.old_compose_path(final_log_dir)
    if not os.path.exists(final_log_dir):
        makedirs(final_log_dir)
    copy_all(old_final_log_dir, final_log_dir)

    # Write the buildinstall metadata so next compose can reuse this compose.
    self._write_buildinstall_metadata(
        compose, arch, variant, cmd, old_metadata["buildroot_rpms"], pkgset_phase
    )

    return True
|
||||
|
||||
def worker(self, compose, arch, variant, cmd, pkgset_phase, num):
    """Run one buildinstall task: skip if output already exists, try to
    reuse the previous compose's result, otherwise execute lorax in
    runroot, collect the results/logs, write reuse metadata and tweak the
    produced trees into their final locations.
    """
    buildinstall_method = compose.conf["buildinstall_method"]
    lorax_use_koji_plugin = compose.conf["lorax_use_koji_plugin"]
    log_filename = ("buildinstall-%s" % variant.uid) if variant else "buildinstall"
    log_file = compose.paths.log.log_file(arch, log_filename)

    msg = "Running buildinstall for arch %s, variant %s" % (arch, variant)

    output_dir = compose.paths.work.buildinstall_dir(
        arch, allow_topdir_override=True, variant=variant
    )
    final_output_dir = compose.paths.work.buildinstall_dir(arch, variant=variant)

    if (
        os.path.isdir(output_dir)
        and os.listdir(output_dir)
        or os.path.isdir(final_output_dir)
        and os.listdir(final_output_dir)
    ):
        # output dir is *not* empty -> SKIP
        self.pool.log_warning(
            "[SKIP ] Buildinstall for arch %s, variant %s" % (arch, variant)
        )
        return

    self.pool.log_info("[BEGIN] %s" % msg)

    # Get list of packages which are needed in runroot.
    packages = []
    chown_paths = [output_dir]
    if buildinstall_method == "lorax":
        packages += ["lorax"]
        # Make sure the runroot task can also write the lorax log directory.
        chown_paths.append(_get_log_dir(compose, variant, arch))
    packages += get_arch_variant_data(
        compose.conf, "buildinstall_packages", arch, variant
    )
    if self._reuse_old_buildinstall_result(
        compose, arch, variant, cmd, pkgset_phase
    ):
        # Old results were copied over; only the per-tree tweaking and
        # bookkeeping remain.
        self.copy_files(compose, variant, arch)
        self.pool.finished_tasks.add((variant.uid if variant else None, arch))
        self.pool.reused_tasks.add((variant.uid if variant else None, arch))
        self.pool.log_info("[DONE ] %s" % msg)
        return

    # This should avoid a possible race condition with multiple processes
    # trying to get a kerberos ticket at the same time.
    # Kerberos authentication failed:
    #   Permission denied in replay cache code (-1765328215)
    time.sleep(num * 3)

    # Start the runroot task.
    runroot = Runroot(compose, phase="buildinstall")
    task_id = None
    if buildinstall_method == "lorax" and lorax_use_koji_plugin:
        task_id = runroot.run_pungi_buildinstall(
            cmd,
            log_file=log_file,
            arch=arch,
            packages=packages,
            weight=compose.conf["runroot_weights"].get("buildinstall"),
        )
    else:
        # Best effort: a missing log dir must not block the task itself.
        try:
            lorax_log_dir = _get_log_dir(compose, variant, arch)
        except Exception:
            lorax_log_dir = None
        runroot.run(
            cmd,
            log_file=log_file,
            arch=arch,
            packages=packages,
            mounts=[compose.topdir],
            weight=compose.conf["runroot_weights"].get("buildinstall"),
            chown_paths=chown_paths,
            log_dir=lorax_log_dir,
        )

    if final_output_dir != output_dir:
        # buildinstall_topdir was used: results and logs were written to
        # the override location and must be copied back into the compose.
        if not os.path.exists(final_output_dir):
            makedirs(final_output_dir)
        results_dir = os.path.join(output_dir, "results")
        copy_all(results_dir, final_output_dir)

        # Get the log_dir into which we should copy the resulting log files.
        log_fname = "buildinstall-%s-logs/dummy" % variant.uid
        final_log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
        if not os.path.exists(final_log_dir):
            makedirs(final_log_dir)
        log_dir = os.path.join(output_dir, "logs")
        copy_all(log_dir, final_log_dir)
    elif lorax_use_koji_plugin:
        # If Koji pungi-buildinstall is used, then the buildinstall results
        # are attached as outputs to the Koji task. Download and unpack
        # them to the correct location.
        download_and_extract_archive(
            compose, task_id, "results.tar.gz", final_output_dir
        )

        # Download the logs into proper location too.
        log_fname = "buildinstall-%s-logs/dummy" % variant.uid
        final_log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
        download_and_extract_archive(compose, task_id, "logs.tar.gz", final_log_dir)

    rpms = runroot.get_buildroot_rpms()
    self._write_buildinstall_metadata(
        compose, arch, variant, cmd, rpms, pkgset_phase
    )

    self.copy_files(compose, variant, arch)

    self.pool.finished_tasks.add((variant.uid if variant else None, arch))

    self.pool.log_info("[DONE ] %s" % msg)
|
||||
|
||||
def copy_files(self, compose, variant, arch):
    """Copy buildinstall output into the compose os trees of the relevant
    variant(s), fixing volume ids/kickstart and linking boot.iso on the way.
    """
    disc_type = compose.conf["disc_types"].get("dvd", "dvd")

    buildinstall_dir = compose.paths.work.buildinstall_dir(arch)

    # Lorax runs per-variant, so we need to tweak the source path
    # to include variant.
    if variant:
        buildinstall_dir = os.path.join(buildinstall_dir, variant.uid)

    # Find all relevant variants if lorax is not used.
    variants = (
        [variant]
        if variant
        else compose.get_variants(arch=arch, types=["self", "variant"])
    )
    for var in variants:
        os_tree = compose.paths.compose.os_tree(arch, var)
        # TODO: label is not used
        label = ""
        volid = get_volid(compose, arch, var, disc_type=disc_type)
        can_fail = compose.can_fail(var, arch, "buildinstall")
        tweak_buildinstall(
            compose,
            buildinstall_dir,
            os_tree,
            arch,
            var.uid,
            label,
            volid,
            self.pool.kickstart_file,
        )
        link_boot_iso(compose, arch, var, can_fail)
|
||||
|
||||
|
||||
def _get_log_dir(compose, variant, arch):
|
||||
"""Find directory where to store lorax logs in. If it's inside the compose,
|
||||
create the directory.
|
||||
"""
|
||||
if compose.conf.get("buildinstall_topdir"):
|
||||
log_dir = compose.paths.work.buildinstall_dir(
|
||||
arch, allow_topdir_override=True, variant=variant
|
||||
)
|
||||
return os.path.join(log_dir, "logs")
|
||||
|
||||
# The paths module will modify the filename (by inserting arch). But we
|
||||
# only care about the directory anyway.
|
||||
log_filename = "buildinstall-%s-logs/dummy" % variant.uid
|
||||
log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_filename))
|
||||
makedirs(log_dir)
|
||||
return log_dir
|
932
pungi/phases/createiso.py
Normal file
932
pungi/phases/createiso.py
Normal file
@ -0,0 +1,932 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import itertools
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import json
|
||||
|
||||
import productmd.treeinfo
|
||||
from productmd.images import Image
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
from kobo.shortcuts import run, relative_path, compute_file_checksums
|
||||
from six.moves import shlex_quote
|
||||
|
||||
from pungi.wrappers import iso
|
||||
from pungi.wrappers.createrepo import CreaterepoWrapper
|
||||
from pungi.wrappers import kojiwrapper
|
||||
from pungi.phases.base import PhaseBase, PhaseLoggerMixin
|
||||
from pungi.util import (
|
||||
makedirs,
|
||||
get_volid,
|
||||
get_arch_variant_data,
|
||||
failable,
|
||||
get_file_size,
|
||||
get_mtime,
|
||||
read_json_file,
|
||||
)
|
||||
from pungi.media_split import MediaSplitter, convert_media_size
|
||||
from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
|
||||
from pungi.runroot import Runroot
|
||||
|
||||
from .. import createiso
|
||||
|
||||
|
||||
class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
    """Phase that creates the installable ISO images for each variant/arch."""

    name = "createiso"

    def __init__(self, compose, buildinstall_phase):
        super(CreateisoPhase, self).__init__(compose)
        # Worker pool running the individual createiso commands.
        self.pool = ThreadPool(logger=self.logger)
        # Buildinstall phase, consulted to decide whether trees are bootable.
        self.bi = buildinstall_phase
|
||||
|
||||
def _find_rpms(self, path):
|
||||
"""Check if there are some RPMs in the path."""
|
||||
for _, _, files in os.walk(path):
|
||||
for fn in files:
|
||||
if fn.endswith(".rpm"):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_bootable(self, variant, arch):
|
||||
if arch == "src":
|
||||
return False
|
||||
if variant.type != "variant":
|
||||
return False
|
||||
skip = get_arch_variant_data(
|
||||
self.compose.conf, "buildinstall_skip", arch, variant
|
||||
)
|
||||
if skip == [True]:
|
||||
# Buildinstall is skipped for this tree. Can't create a bootable ISO.
|
||||
return False
|
||||
return bool(self.compose.conf.get("buildinstall_method", ""))
|
||||
|
||||
def _metadata_path(self, variant, arch, disc_num, disc_count):
|
||||
return self.compose.paths.log.log_file(
|
||||
arch,
|
||||
"createiso-%s-%d-%d" % (variant.uid, disc_num, disc_count),
|
||||
ext="json",
|
||||
)
|
||||
|
||||
def save_reuse_metadata(self, cmd, variant, arch, opts):
    """Save metadata for future composes to verify if the compose can be reused."""
    metadata = {"cmd": cmd, "opts": opts._asdict()}
    metadata_path = self._metadata_path(
        variant, arch, cmd["disc_num"], cmd["disc_count"]
    )
    with open(metadata_path, "w") as f:
        f.write(json.dumps(metadata, indent=2))
    return metadata
|
||||
|
||||
def _load_old_metadata(self, cmd, variant, arch):
|
||||
metadata_path = self._metadata_path(
|
||||
variant, arch, cmd["disc_num"], cmd["disc_count"]
|
||||
)
|
||||
old_path = self.compose.paths.old_compose_path(metadata_path)
|
||||
self.logger.info(
|
||||
"Loading old metadata for %s.%s from: %s", variant, arch, old_path
|
||||
)
|
||||
try:
|
||||
return read_json_file(old_path)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def perform_reuse(self, cmd, variant, arch, opts, iso_path):
    """
    Copy all related files from old compose to the new one. As a last step
    add the new image to metadata.

    :param iso_path: path of the ISO in the *old* compose to reuse
    :raises Exception: re-raises any linking failure after rolling back all
        links created so far
    """
    linker = OldFileLinker(self.logger)
    old_file_name = os.path.basename(iso_path)
    current_file_name = os.path.basename(cmd["iso_path"])
    try:
        # Hardlink ISO and manifest
        for suffix in ("", ".manifest"):
            linker.link(iso_path + suffix, cmd["iso_path"] + suffix)
        # Copy log files
        # The log file name includes filename of the image, so we need to
        # find old file with the old name, and rename it to the new name.
        log_file = self.compose.paths.log.log_file(
            arch, "createiso-%s" % current_file_name
        )
        old_log_file = self.compose.paths.old_compose_path(
            self.compose.paths.log.log_file(arch, "createiso-%s" % old_file_name)
        )
        linker.link(old_log_file, log_file)
        # Copy jigdo files
        if opts.jigdo_dir:
            old_jigdo_dir = self.compose.paths.old_compose_path(opts.jigdo_dir)
            for suffix in (".template", ".jigdo"):
                linker.link(
                    os.path.join(old_jigdo_dir, old_file_name) + suffix,
                    os.path.join(opts.jigdo_dir, current_file_name) + suffix,
                )
    except Exception:
        # A problem happened while linking some file, let's clean up
        # everything.
        linker.abort()
        raise
    # Add image to manifest
    add_iso_to_metadata(
        self.compose,
        variant,
        arch,
        cmd["iso_path"],
        bootable=cmd["bootable"],
        disc_num=cmd["disc_num"],
        disc_count=cmd["disc_count"],
    )
    # Notify listeners the same way a freshly built image would.
    if self.compose.notifier:
        self.compose.notifier.send(
            "createiso-imagedone",
            file=cmd["iso_path"],
            arch=arch,
            variant=str(variant),
        )
|
||||
|
||||
def try_reuse(self, cmd, variant, arch, opts):
    """Try to reuse image from previous compose.

    :returns bool: True if reuse was successful, False otherwise
    """
    if not self.compose.conf["createiso_allow_reuse"]:
        # Fix: previously returned None; docstring promises a bool and all
        # callers only test truthiness, so this is backward compatible.
        return False

    log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)
    current_metadata = self.save_reuse_metadata(cmd, variant, arch, opts)

    if opts.buildinstall_method and not self.bi.reused(variant, arch):
        # If buildinstall phase was not reused for some reason, we can not
        # reuse any bootable image. If a package change caused rebuild of
        # boot.iso, we would catch it here too, but there could be a
        # configuration change in lorax template which would remain
        # undetected.
        self.logger.info("%s - boot configuration changed", log_msg)
        return False

    # Check old compose configuration: extra_files and product_ids can be
    # reflected on ISO.
    old_config = self.compose.load_old_compose_config()
    if not old_config:
        self.logger.info("%s - no config for old compose", log_msg)
        return False

    # Disable reuse if unsigned packages are allowed. The older compose
    # could have unsigned packages, and those may have been signed since
    # then. We want to regenerate the ISO to have signatures.
    if None in self.compose.conf["sigkeys"]:
        self.logger.info("%s - unsigned packages are allowed", log_msg)
        return False

    # Options to skip when comparing configurations. These affect what
    # packages can be included, which we explicitly check later on, or
    # are irrelevant to ISO contents (osbs, osbuild). Hoisted out of the
    # loop below: the set is loop-invariant.
    config_whitelist = {
        "gather_lookaside_repos",
        "pkgset_koji_builds",
        "pkgset_koji_scratch_tasks",
        "pkgset_koji_module_builds",
        "osbs",
        "osbuild",
    }

    # Convert current configuration to JSON and back to encode it similarly
    # to the old one
    config = json.loads(json.dumps(self.compose.conf))
    for opt in self.compose.conf:
        if opt in config_whitelist:
            continue

        if old_config.get(opt) != config.get(opt):
            self.logger.info("%s - option %s differs", log_msg, opt)
            return False

    old_metadata = self._load_old_metadata(cmd, variant, arch)
    if not old_metadata:
        self.logger.info("%s - no old metadata found", log_msg)
        return False

    # Test if volume ID matches - volid can be generated dynamically based on
    # other values, and could change even if nothing else is different.
    if current_metadata["opts"]["volid"] != old_metadata["opts"]["volid"]:
        self.logger.info("%s - volume ID differs", log_msg)
        return False

    # Compare packages on the ISO.
    if compare_packages(
        old_metadata["opts"]["graft_points"],
        current_metadata["opts"]["graft_points"],
    ):
        self.logger.info("%s - packages differ", log_msg)
        return False

    try:
        self.perform_reuse(
            cmd,
            variant,
            arch,
            opts,
            old_metadata["cmd"]["iso_path"],
        )
        return True
    except Exception as exc:
        self.compose.log_error(
            "Error while reusing ISO for %s.%s: %s", variant, arch, exc
        )
        self.compose.traceback("createiso-reuse-%s-%s" % (variant, arch))
        return False
|
||||
|
||||
def run(self):
    """Collect all ISOs to build, try reusing them from an old compose, and
    queue the remaining ones onto the worker pool.
    """
    symlink_isos_to = self.compose.conf.get("symlink_isos_to")
    disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
    # Paths of all images this run intends to produce (for the notifier).
    deliverables = []

    commands = []
    for variant in self.compose.get_variants(
        types=["variant", "layered-product", "optional"]
    ):
        if variant.is_empty:
            continue
        # "src" is handled as an extra pseudo-arch next to the real ones.
        for arch in variant.arches + ["src"]:
            skip_iso = get_arch_variant_data(
                self.compose.conf, "createiso_skip", arch, variant
            )
            if skip_iso == [True]:
                self.logger.info(
                    "Skipping createiso for %s.%s due to config option"
                    % (variant, arch)
                )
                continue

            volid = get_volid(self.compose, arch, variant, disc_type=disc_type)
            os_tree = self.compose.paths.compose.os_tree(arch, variant)

            iso_dir = self.compose.paths.compose.iso_dir(
                arch, variant, symlink_to=symlink_isos_to
            )
            if not iso_dir:
                continue

            # No packages in the tree means there is nothing to put on media.
            if not self._find_rpms(os_tree):
                self.logger.warning(
                    "No RPMs found for %s.%s, skipping ISO" % (variant.uid, arch)
                )
                continue

            bootable = self._is_bootable(variant, arch)

            if bootable and not self.bi.succeeded(variant, arch):
                self.logger.warning(
                    "ISO should be bootable, but buildinstall failed. "
                    "Skipping for %s.%s" % (variant, arch)
                )
                continue

            # Bootable trees are never split across multiple discs.
            split_iso_data = split_iso(
                self.compose, arch, variant, no_split=bootable, logger=self.logger
            )
            disc_count = len(split_iso_data)

            for disc_num, iso_data in enumerate(split_iso_data):
                # Disc numbers are 1-based.
                disc_num += 1

                filename = self.compose.get_image_name(
                    arch, variant, disc_type=disc_type, disc_num=disc_num
                )
                iso_path = self.compose.paths.compose.iso_path(
                    arch, variant, filename, symlink_to=symlink_isos_to
                )
                if os.path.isfile(iso_path):
                    self.logger.warning(
                        "Skipping mkisofs, image already exists: %s", iso_path
                    )
                    continue
                deliverables.append(iso_path)

                graft_points = prepare_iso(
                    self.compose,
                    arch,
                    variant,
                    disc_num=disc_num,
                    disc_count=disc_count,
                    split_iso_data=iso_data,
                )

                cmd = {
                    "iso_path": iso_path,
                    "bootable": bootable,
                    "cmd": [],
                    "label": "",  # currently not used
                    "disc_num": disc_num,
                    "disc_count": disc_count,
                }

                # If the ISO directory is a symlink, the runroot task needs
                # the real target mounted as well.
                if os.path.islink(iso_dir):
                    cmd["mount"] = os.path.abspath(
                        os.path.join(os.path.dirname(iso_dir), os.readlink(iso_dir))
                    )

                opts = createiso.CreateIsoOpts(
                    output_dir=iso_dir,
                    iso_name=filename,
                    volid=volid,
                    graft_points=graft_points,
                    arch=arch,
                    supported=self.compose.supported,
                    hfs_compat=self.compose.conf["iso_hfs_ppc64le_compatible"],
                    use_xorrisofs=self.compose.conf.get("createiso_use_xorrisofs"),
                    iso_level=get_iso_level_config(self.compose, variant, arch),
                )

                if bootable:
                    opts = opts._replace(
                        buildinstall_method=self.compose.conf[
                            "buildinstall_method"
                        ],
                        boot_iso=os.path.join(os_tree, "images", "boot.iso"),
                    )

                if self.compose.conf["create_jigdo"]:
                    jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
                    opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)

                # Try to reuse
                if self.try_reuse(cmd, variant, arch, opts):
                    # Reuse was successful, go to next ISO
                    continue

                # Write out the shell script that the worker will execute.
                script_dir = self.compose.paths.work.tmp_dir(arch, variant)
                opts = opts._replace(script_dir=script_dir)
                script_file = os.path.join(script_dir, "createiso-%s.sh" % filename)
                with open(script_file, "w") as f:
                    createiso.write_script(opts, f)
                cmd["cmd"] = ["bash", script_file]
                commands.append((cmd, variant, arch))

    if self.compose.notifier:
        self.compose.notifier.send("createiso-targets", deliverables=deliverables)

    # One worker thread per queued image command.
    for cmd, variant, arch in commands:
        self.pool.add(CreateIsoThread(self.pool))
        self.pool.queue_put((self.compose, cmd, variant, arch))

    self.pool.start()
|
||||
|
||||
|
||||
def read_packages(graft_points):
    """Read packages that were listed in given graft points file.

    Only files under Packages directory are considered. Particularly this
    excludes .discinfo, .treeinfo and media.repo as well as repodata and
    any extra files.

    Extra files are easier to check by configuration (same name doesn't
    imply same content). Repodata depend entirely on included packages (and
    possibly product id certificate), but are affected by current time
    which can change checksum despite data being the same.
    """
    packages = set()
    with open(graft_points) as handle:
        for entry in handle:
            # Each line has the form "<iso path>=<source path>".
            if entry.startswith("Packages/") or "/Packages/" in entry:
                packages.add(entry.split("=", 1)[0])
    return packages
|
||||
|
||||
|
||||
def compare_packages(old_graft_points, new_graft_points):
    """Read packages from the two files and compare them.

    :returns bool: True if there are differences, False otherwise
    """
    return read_packages(old_graft_points) != read_packages(new_graft_points)
|
||||
|
||||
|
||||
class CreateIsoThread(WorkerThread):
    """Worker that builds a single ISO image via a runroot command."""

    def fail(self, compose, cmd, variant, arch):
        """Clean up after a failed build: drop the partial ISO and notify."""
        self.pool.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])
        try:
            # remove incomplete ISO
            os.unlink(cmd["iso_path"])
            # TODO: remove jigdo & template
        except OSError:
            # The file may not exist yet; nothing to clean up then.
            pass
        if compose.notifier:
            compose.notifier.send(
                "createiso-imagefail",
                file=cmd["iso_path"],
                arch=arch,
                variant=str(variant),
            )

    def process(self, item, num):
        """Pool entry point: unpack the queued item and run with failability."""
        compose, cmd, variant, arch = item
        can_fail = compose.can_fail(variant, arch, "iso")
        # failable() swallows the exception when this deliverable is allowed
        # to fail, otherwise it propagates and aborts the compose.
        with failable(
            compose, can_fail, variant, arch, "iso", logger=self.pool._logger
        ):
            self.worker(compose, cmd, variant, arch, num)

    def worker(self, compose, cmd, variant, arch, num):
        """Run the createiso command, record the image, clean the staging dir."""
        mounts = [compose.topdir]
        if "mount" in cmd:
            mounts.append(cmd["mount"])

        bootable = cmd["bootable"]
        log_file = compose.paths.log.log_file(
            arch, "createiso-%s" % os.path.basename(cmd["iso_path"])
        )

        msg = "Creating ISO (arch: %s, variant: %s): %s" % (
            arch,
            variant,
            os.path.basename(cmd["iso_path"]),
        )
        self.pool.log_info("[BEGIN] %s" % msg)

        try:
            run_createiso_command(
                num,
                compose,
                bootable,
                arch,
                cmd["cmd"],
                mounts,
                log_file,
                cmd["iso_path"],
            )
        except Exception:
            # Remove the partial image and notify before re-raising.
            self.fail(compose, cmd, variant, arch)
            raise

        add_iso_to_metadata(
            compose,
            variant,
            arch,
            cmd["iso_path"],
            cmd["bootable"],
            cmd["disc_num"],
            cmd["disc_count"],
        )

        # Delete staging directory if present.
        staging_dir = compose.paths.work.iso_staging_dir(
            arch, variant, filename=os.path.basename(cmd["iso_path"]), create_dir=False
        )
        if os.path.exists(staging_dir):
            try:
                shutil.rmtree(staging_dir)
            except Exception as e:
                # Cleanup failure is not fatal; just warn and continue.
                self.pool.log_warning(
                    "Failed to clean up staging dir: %s %s" % (staging_dir, str(e))
                )

        self.pool.log_info("[DONE ] %s" % msg)
        if compose.notifier:
            compose.notifier.send(
                "createiso-imagedone",
                file=cmd["iso_path"],
                arch=arch,
                variant=str(variant),
            )
|
||||
|
||||
|
||||
def add_iso_to_metadata(
    compose,
    variant,
    arch,
    iso_path,
    bootable,
    disc_num=1,
    disc_count=1,
    additional_variants=None,
):
    """Create an Image record for the ISO and register it in the image manifest.

    :param iso_path: absolute path of the ISO inside the compose
    :param additional_variants: if set, the image is marked as unified and
        the list is stored on the image
    :return: the created Image object
    """
    img = Image(compose.im)
    # Store the path relative to the compose top directory.
    img.path = iso_path.replace(compose.paths.compose.topdir(), "").lstrip("/")
    img.mtime = get_mtime(iso_path)
    img.size = get_file_size(iso_path)
    img.arch = arch
    # XXX: HARDCODED
    img.type = "dvd"
    img.format = "iso"
    img.disc_number = disc_num
    img.disc_count = disc_count
    img.bootable = bootable
    img.subvariant = variant.uid
    img.implant_md5 = iso.get_implanted_md5(iso_path, logger=compose._logger)
    if additional_variants:
        img.unified = True
        img.additional_variants = additional_variants
    setattr(img, "can_fail", compose.can_fail(variant, arch, "iso"))
    setattr(img, "deliverable", "iso")
    try:
        img.volume_id = iso.get_volume_id(
            iso_path,
            compose.conf.get("createiso_use_xorrisofs"),
        )
    except RuntimeError:
        # Volume ID could not be read; leave it unset.
        pass
    # Source images are registered under every binary arch of the variant.
    if arch == "src":
        for variant_arch in variant.arches:
            compose.im.add(variant.uid, variant_arch, img)
    else:
        compose.im.add(variant.uid, arch, img)
    return img
|
||||
|
||||
|
||||
def run_createiso_command(
    num, compose, bootable, arch, cmd, mounts, log_file, iso_path
):
    """Execute the createiso command in a runroot environment.

    :param num: worker number (currently unused here)
    :param cmd: the command list to execute (e.g. ["bash", script])
    :param mounts: directories that must be mounted inside the runroot
    :param iso_path: path of the resulting image, needed for the
        post-processing checksum fix with xorriso
    """
    packages = [
        "coreutils",
        "xorriso" if compose.conf.get("createiso_use_xorrisofs") else "genisoimage",
        "isomd5sum",
    ]
    if compose.conf["create_jigdo"]:
        packages.append("jigdo")
    if bootable:
        extra_packages = {
            "lorax": ["lorax", "which"],
        }
        packages.extend(extra_packages[compose.conf["buildinstall_method"]])

    runroot = Runroot(compose, phase="createiso")

    build_arch = arch
    # Non-bootable images are arch-independent, so they can be built on a
    # different (possibly less contended) architecture.
    if runroot.runroot_method == "koji" and not bootable:
        runroot_tag = compose.conf["runroot_tag"]
        koji_wrapper = kojiwrapper.KojiWrapper(compose)
        koji_proxy = koji_wrapper.koji_proxy
        tag_info = koji_proxy.getTag(runroot_tag)
        if not tag_info:
            raise RuntimeError('Tag "%s" does not exist.' % runroot_tag)
        tag_arches = tag_info["arches"].split(" ")

        if "x86_64" in tag_arches:
            # assign non-bootable images to x86_64 if possible
            build_arch = "x86_64"
        elif build_arch == "src":
            # pick random arch from available runroot tag arches
            build_arch = random.choice(tag_arches)

    runroot.run(
        cmd,
        log_file=log_file,
        arch=build_arch,
        packages=packages,
        mounts=mounts,
        weight=compose.conf["runroot_weights"].get("createiso"),
    )

    # xorriso-built bootable images may end up with stale .treeinfo
    # checksums; repair them after the fact.
    if bootable and compose.conf.get("createiso_use_xorrisofs"):
        fix_treeinfo_checksums(compose, iso_path, arch)
|
||||
|
||||
|
||||
def fix_treeinfo_checksums(compose, iso_path, arch):
    """It is possible for the ISO to contain a .treefile with incorrect
    checksums. By modifying the ISO (adding files) some of the images may
    change.

    This function fixes that after the fact by looking for incorrect checksums,
    recalculating them and updating the .treeinfo file. Since the size of the
    file doesn't change, this seems to not change any images.

    :param iso_path: image to inspect and (if needed) rewrite in place
    """
    modified = False
    with iso.mount(iso_path, compose._logger) as mountpoint:
        ti = productmd.TreeInfo()
        ti.load(os.path.join(mountpoint, ".treeinfo"))
        for image, (type_, expected) in ti.checksums.checksums.items():
            checksums = compute_file_checksums(os.path.join(mountpoint, image), [type_])
            actual = checksums[type_]
            if actual == expected:
                # Everything fine here, skip to next image.
                continue

            compose.log_debug("%s: %s: checksum mismatch", iso_path, image)
            # Update treeinfo with correct checksum
            ti.checksums.checksums[image] = (type_, actual)
            modified = True

    if not modified:
        compose.log_debug("%s: All checksums match, nothing to do.", iso_path)
        return

    # Fix: create the temporary directory *before* entering the try block.
    # Previously it was created inside, so a failure in mkdtemp would make
    # the finally clause raise NameError on the unbound `tmpdir`, masking
    # the original exception.
    tmpdir = compose.mkdtemp(arch, prefix="fix-checksum-")
    try:
        # Write modified .treeinfo
        ti_path = os.path.join(tmpdir, ".treeinfo")
        compose.log_debug("Storing modified .treeinfo in %s", ti_path)
        ti.dump(ti_path)
        # Write a modified DVD into a temporary path, that is atomically moved
        # over the original file.
        fixed_path = os.path.join(tmpdir, "fixed-checksum-dvd.iso")
        cmd = ["xorriso"]
        cmd.extend(
            itertools.chain.from_iterable(
                iso.xorriso_commands(arch, iso_path, fixed_path)
            )
        )
        cmd.extend(["-map", ti_path, ".treeinfo"])
        run(
            cmd,
            logfile=compose.paths.log.log_file(
                arch, "checksum-fix_generate_%s" % os.path.basename(iso_path)
            ),
        )
        # The modified ISO no longer has implanted MD5, so that needs to be
        # fixed again.
        compose.log_debug("Implanting new MD5 to %s", fixed_path)
        run(
            iso.get_implantisomd5_cmd(fixed_path, compose.supported),
            logfile=compose.paths.log.log_file(
                arch, "checksum-fix_implantisomd5_%s" % os.path.basename(iso_path)
            ),
        )
        # All done, move the updated image to the final location.
        compose.log_debug("Updating %s", iso_path)
        os.rename(fixed_path, iso_path)
    finally:
        shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def split_iso(compose, arch, variant, no_split=False, logger=None):
    """
    Split contents of the os/ directory for given tree into chunks fitting on ISO.

    All files from the directory are taken except for possible boot.iso image.
    Files added in extra_files phase are put on all disks.

    If `no_split` is set, we will pretend that the media is practically
    infinite so that everything goes on single disc. A warning is printed if
    the size is bigger than configured.

    :return: list of disc dicts as produced by MediaSplitter.split()
    """
    if not logger:
        logger = compose._logger
    media_size = compose.conf["iso_size"]
    media_reserve = compose.conf["split_iso_reserve"]
    # Usable capacity per disc = configured size minus reserved space.
    split_size = convert_media_size(media_size) - convert_media_size(media_reserve)
    # None tells MediaSplitter the capacity is unlimited (single disc).
    real_size = None if no_split else split_size

    ms = MediaSplitter(real_size, compose, logger=logger)

    os_tree = compose.paths.compose.os_tree(arch, variant)
    extra_files_dir = compose.paths.work.extra_files_dir(arch, variant)

    # scan extra files to mark them "sticky" -> they'll be on all media after split
    extra_files = set(["media.repo"])
    for root, dirs, files in os.walk(extra_files_dir):
        for fn in files:
            path = os.path.join(root, fn)
            rel_path = relative_path(path, extra_files_dir.rstrip("/") + "/")
            extra_files.add(rel_path)

    packages = []
    all_files = []
    all_files_ignore = []

    # boot.iso (as recorded in .treeinfo) never goes onto the split media.
    ti = productmd.treeinfo.TreeInfo()
    ti.load(os.path.join(os_tree, ".treeinfo"))
    boot_iso_rpath = ti.images.images.get(arch, {}).get("boot.iso", None)
    if boot_iso_rpath:
        all_files_ignore.append(boot_iso_rpath)
    if all_files_ignore:
        logger.debug("split_iso all_files_ignore = %s" % ", ".join(all_files_ignore))

    for root, dirs, files in os.walk(os_tree):
        # Skip the variant's own repodata directory; it is regenerated
        # per disc later.
        for dn in dirs[:]:
            repo_dir = os.path.join(root, dn)
            if repo_dir == os.path.join(
                compose.paths.compose.repository(arch, variant), "repodata"
            ):
                dirs.remove(dn)

        for fn in files:
            path = os.path.join(root, fn)
            rel_path = relative_path(path, os_tree.rstrip("/") + "/")
            sticky = rel_path in extra_files
            if rel_path in all_files_ignore:
                logger.info("split_iso: Skipping %s" % rel_path)
                continue
            if root.startswith(compose.paths.compose.packages(arch, variant)):
                packages.append((path, os.path.getsize(path), sticky))
            else:
                all_files.append((path, os.path.getsize(path), sticky))

    # Non-package files go in first so they end up on the first disc.
    for path, size, sticky in all_files + packages:
        ms.add_file(path, size, sticky)

    logger.debug("Splitting media for %s.%s:" % (variant.uid, arch))
    result = ms.split()
    if no_split and result[0]["size"] > split_size:
        logger.warning(
            "ISO for %s.%s does not fit on single media! It is %s bytes too big. "
            "(Total size: %s B)"
            % (variant.uid, arch, result[0]["size"] - split_size, result[0]["size"])
        )
    return result
|
||||
|
||||
|
||||
def prepare_iso(
    compose, arch, variant, disc_num=1, disc_count=None, split_iso_data=None
):
    """Prepare the working directory and graft points file for one ISO.

    Tweaks .treeinfo and .discinfo for the disc, regenerates repodata when
    the tree is split over multiple discs, optionally breaks hardlinks into
    a staging directory, and finally writes the graft points file.

    :return: path of the written graft points file
    """
    tree_dir = compose.paths.compose.os_tree(arch, variant)
    filename = compose.get_image_name(arch, variant, disc_num=disc_num)
    iso_dir = compose.paths.work.iso_dir(arch, filename)

    # modify treeinfo
    ti_path = os.path.join(tree_dir, ".treeinfo")
    ti = load_and_tweak_treeinfo(ti_path, disc_num, disc_count)

    copy_boot_images(tree_dir, iso_dir)

    # NOTE(review): callers appear to always pass disc_count; the default
    # None would make this comparison fail on Python 3 — confirm.
    if disc_count > 1:
        # remove repodata/repomd.xml from checksums, create a new one later
        if "repodata/repomd.xml" in ti.checksums.checksums:
            del ti.checksums.checksums["repodata/repomd.xml"]

        # rebuild repodata
        createrepo_c = compose.conf["createrepo_c"]
        createrepo_checksum = compose.conf["createrepo_checksum"]
        repo = CreaterepoWrapper(createrepo_c=createrepo_c)

        file_list = "%s-file-list" % iso_dir
        packages_dir = compose.paths.compose.packages(arch, variant)
        file_list_content = []
        # Collect only RPMs under the packages dir that land on this disc.
        for i in split_iso_data["files"]:
            if not i.endswith(".rpm"):
                continue
            if not i.startswith(packages_dir):
                continue
            rel_path = relative_path(i, tree_dir.rstrip("/") + "/")
            file_list_content.append(rel_path)

        if file_list_content:
            # write modified repodata only if there are packages available
            run("cp -a %s/repodata %s/" % (shlex_quote(tree_dir), shlex_quote(iso_dir)))
            with open(file_list, "w") as f:
                f.write("\n".join(file_list_content))
            cmd = repo.get_createrepo_cmd(
                tree_dir,
                update=True,
                database=True,
                skip_stat=True,
                pkglist=file_list,
                outputdir=iso_dir,
                workers=compose.conf["createrepo_num_workers"],
                checksum=createrepo_checksum,
            )
            run(cmd)
            # add repodata/repomd.xml back to checksums
            ti.checksums.add(
                "repodata/repomd.xml", createrepo_checksum, root_dir=iso_dir
            )

    new_ti_path = os.path.join(iso_dir, ".treeinfo")
    ti.dump(new_ti_path)

    # modify discinfo
    di_path = os.path.join(tree_dir, ".discinfo")
    data = read_discinfo(di_path)
    data["disc_numbers"] = [disc_num]
    new_di_path = os.path.join(iso_dir, ".discinfo")
    write_discinfo(new_di_path, **data)

    # Single disc grafts the whole tree; a split disc grafts only the files
    # assigned to this chunk.
    if not disc_count or disc_count == 1:
        data = iso.get_graft_points(compose.paths.compose.topdir(), [tree_dir, iso_dir])
    else:
        data = iso.get_graft_points(
            compose.paths.compose.topdir(),
            [iso._paths_from_list(tree_dir, split_iso_data["files"]), iso_dir],
        )

    if compose.conf["createiso_break_hardlinks"]:
        compose.log_debug(
            "Breaking hardlinks for ISO %s for %s.%s" % (filename, variant, arch)
        )
        break_hardlinks(
            data, compose.paths.work.iso_staging_dir(arch, variant, filename)
        )
        # Create hardlinks for files with duplicate contents.
        compose.log_debug(
            "Creating hardlinks for ISO %s for %s.%s" % (filename, variant, arch)
        )
        create_hardlinks(
            compose.paths.work.iso_staging_dir(arch, variant, filename),
            log_file=compose.paths.log.log_file(
                arch, "iso-hardlink-%s.log" % variant.uid
            ),
        )

    # TODO: /content /graft-points
    gp = "%s-graft-points" % iso_dir
    iso.write_graft_points(gp, data, exclude=["*/lost+found", "*/boot.iso"])
    return gp
|
||||
|
||||
|
||||
def load_and_tweak_treeinfo(ti_path, disc_num=1, disc_count=1):
    """Treeinfo on the media should not contain any reference to boot.iso and
    it should also have a valid [media] section.
    """
    ti = productmd.treeinfo.TreeInfo()
    ti.load(ti_path)
    ti.media.totaldiscs = disc_count or 1
    ti.media.discnum = disc_num

    # Drop boot.iso from every platform section, remembering the path(s)
    # it was registered under.
    dropped = set()
    for images in ti.images.images.values():
        if "boot.iso" in images:
            dropped.add(images.pop("boot.iso"))

    # Purge the corresponding checksum entries as well.
    for path in dropped:
        if path in ti.checksums.checksums:
            del ti.checksums.checksums[path]

    return ti
|
||||
|
||||
|
||||
def copy_boot_images(src, dest):
    """When mkisofs is called it tries to modify isolinux/isolinux.bin and
    images/boot.img. Therefore we need to make copies of them.
    """
    for rel in ("isolinux/isolinux.bin", "images/boot.img"):
        source = os.path.join(src, rel)
        if not os.path.exists(source):
            continue
        target = os.path.join(dest, rel)
        makedirs(os.path.dirname(target))
        shutil.copy2(source, target)
|
||||
|
||||
|
||||
def break_hardlinks(graft_points, staging_dir):
    """Iterate over graft points and copy any file that has more than 1
    hardlink into the staging directory. Replace the entry in the dict.
    """
    for key, src in graft_points.items():
        st = os.stat(src)
        # Only regular files with multiple links need a private copy.
        if not (stat.S_ISREG(st.st_mode) and st.st_nlink > 1):
            continue
        copy = os.path.join(staging_dir, src.lstrip("/"))
        makedirs(os.path.dirname(copy))
        shutil.copy2(src, copy)
        graft_points[key] = copy
|
||||
|
||||
|
||||
def create_hardlinks(staging_dir, log_file):
    """Create hardlinks within the staging directory.

    Should happen after break_hardlinks()
    """
    hardlink_cmd = ["/usr/sbin/hardlink", "-c", "-vv", staging_dir]
    run(hardlink_cmd, logfile=log_file, show_cmd=True)
|
||||
|
||||
|
||||
class OldFileLinker(object):
    """
    A wrapper around os.link that remembers which files were linked and can
    clean them up.
    """

    def __init__(self, logger):
        self.logger = logger
        # Destination paths created so far, in creation order.
        self.linked_files = []

    def link(self, src, dst):
        """Hardlink ``src`` to ``dst`` and remember the new path."""
        self.logger.debug("Hardlinking %s to %s", src, dst)
        os.link(src, dst)
        self.linked_files.append(dst)

    def abort(self):
        """Clean up all files created by this instance."""
        for created in self.linked_files:
            os.unlink(created)
|
||||
|
||||
|
||||
def get_iso_level_config(compose, variant, arch):
    """
    Get configured ISO level for this variant and architecture.
    """
    level = compose.conf.get("iso_level")
    # A scalar (or missing) value applies globally.
    if not isinstance(level, list):
        return level
    # Arch/variant specific configuration: the last matching entry wins.
    result = None
    for value in get_arch_variant_data(compose.conf, "iso_level", arch, variant):
        result = value
    return result
|
496
pungi/phases/createrepo.py
Normal file
496
pungi/phases/createrepo.py
Normal file
@ -0,0 +1,496 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
__all__ = ("create_variant_repo",)
|
||||
|
||||
import copy
|
||||
import errno
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import threading
|
||||
import xml.dom.minidom
|
||||
|
||||
import productmd.modules
|
||||
import productmd.rpms
|
||||
from kobo.shortcuts import relative_path, run
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
|
||||
from ..module_util import Modulemd, collect_module_defaults, collect_module_obsoletes
|
||||
from ..util import (
|
||||
get_arch_variant_data,
|
||||
read_single_module_stream_from_file,
|
||||
temp_dir,
|
||||
)
|
||||
from ..wrappers.createrepo import CreaterepoWrapper
|
||||
from ..wrappers.scm import get_dir_from_scm
|
||||
from .base import PhaseBase
|
||||
|
||||
CACHE_TOPDIR = "/var/cache/pungi/createrepo_c/"
|
||||
createrepo_lock = threading.Lock()
|
||||
createrepo_dirs = set()
|
||||
|
||||
|
||||
class CreaterepoPhase(PhaseBase):
    """Phase that generates repodata for all variants and architectures."""

    name = "createrepo"

    def __init__(self, compose, pkgset_phase=None):
        """Set up the worker pool and module metadata collector.

        :param pkgset_phase: optional pkgset phase; its last package set is
            used as a reference for createrepo
        """
        PhaseBase.__init__(self, compose)
        self.pool = ThreadPool(logger=self.compose._logger)
        self.modules_metadata = ModulesMetadata(compose)
        self.pkgset_phase = pkgset_phase

    def validate(self):
        """Check configuration consistency before the phase runs.

        :raises ValueError: when deltas are requested without an old compose
        """
        errors = []

        if not self.compose.old_composes and self.compose.conf.get("createrepo_deltas"):
            errors.append("Can not generate deltas without old compose")

        if errors:
            raise ValueError("\n".join(errors))

    def run(self):
        """Queue createrepo tasks for every non-empty variant and arch."""
        get_productids_from_scm(self.compose)
        reference_pkgset = None
        if self.pkgset_phase and self.pkgset_phase.package_sets:
            reference_pkgset = self.pkgset_phase.package_sets[-1]
        for i in range(self.compose.conf["createrepo_num_threads"]):
            self.pool.add(
                CreaterepoThread(self.pool, reference_pkgset, self.modules_metadata)
            )

        for variant in self.compose.get_variants():
            if variant.is_empty:
                continue

            if variant.uid in self.compose.conf.get("createrepo_extra_modulemd", {}):
                # Clone extra modulemd repository if it's configured.
                get_dir_from_scm(
                    self.compose.conf["createrepo_extra_modulemd"][variant.uid],
                    self.compose.paths.work.tmp_dir(variant=variant, create_dir=False),
                    compose=self.compose,
                )

            # One source-repo task per variant, plus binary and debuginfo
            # repo tasks per arch.
            self.pool.queue_put((self.compose, None, variant, "srpm"))
            for arch in variant.arches:
                self.pool.queue_put((self.compose, arch, variant, "rpm"))
                self.pool.queue_put((self.compose, arch, variant, "debuginfo"))

        self.pool.start()

    def stop(self):
        """Stop the phase and flush collected module metadata to disk."""
        super(CreaterepoPhase, self).stop()
        self.modules_metadata.write_modules_metadata()
|
||||
|
||||
|
||||
def create_variant_repo(
    compose, arch, variant, pkg_type, pkgset, modules_metadata=None
):
    """Create one repository for the given variant/arch/package type.

    Reads the package list from the compose's rpms.json manifest (never by
    scanning the filesystem), runs createrepo(_c), and then injects product
    certificates and modulemd metadata via modifyrepo where applicable.

    :param compose: compose object providing paths, config and logging
    :param arch: target architecture; None is only valid for pkg_type "srpm"
    :param variant: variant whose repo is being created
    :param pkg_type: one of "rpm", "srpm", "debuginfo"
    :param pkgset: optional package set whose per-arch repodata can be reused
        via createrepo's --update-md-path
    :param modules_metadata: optional ModulesMetadata collector for
        modules.json records
    :raises ValueError: when pkg_type is not one of the known types
    """
    # Map pkg_type -> (productmd category name, repo-path factory).
    # The factories close over arch/variant; calling them with no kwargs
    # yields the absolute repo directory.
    types = {
        "rpm": (
            "binary",
            lambda **kwargs: compose.paths.compose.repository(
                arch=arch, variant=variant, **kwargs
            ),
        ),
        "srpm": (
            "source",
            lambda **kwargs: compose.paths.compose.repository(
                arch="src", variant=variant, **kwargs
            ),
        ),
        "debuginfo": (
            "debug",
            lambda **kwargs: compose.paths.compose.debug_repository(
                arch=arch, variant=variant, **kwargs
            ),
        ),
    }

    # Nothing to do for empty variants, and a missing arch only makes sense
    # for source repos (which always live under arch "src").
    if variant.is_empty or (arch is None and pkg_type != "srpm"):
        compose.log_info(
            "[SKIP ] Creating repo (arch: %s, variant: %s)" % (arch, variant)
        )
        return

    createrepo_c = compose.conf["createrepo_c"]
    createrepo_checksum = compose.conf["createrepo_checksum"]
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    # Existing repodata to update from (--update-md-path); source repos use
    # the "global" pkgset paths entry, binary/debug repos the per-arch one.
    repo_dir_arch = None
    if pkgset:
        repo_dir_arch = pkgset.paths["global" if pkg_type == "srpm" else arch]

    try:
        repo_dir = types[pkg_type][1]()
    except KeyError:
        raise ValueError("Unknown package type: %s" % pkg_type)

    msg = "Creating repo (arch: %s, variant: %s): %s" % (arch, variant, repo_dir)

    # HACK: using global lock
    # This is important when addons put packages into parent variant directory.
    # There can't be multiple createrepo processes operating on the same
    # directory.
    with createrepo_lock:
        if repo_dir in createrepo_dirs:
            compose.log_warning("[SKIP ] Already in progress: %s" % msg)
            return
        createrepo_dirs.add(repo_dir)

    compose.log_info("[BEGIN] %s" % msg)

    # We only want delta RPMs for binary repos.
    with_deltas = pkg_type == "rpm" and _has_deltas(compose, variant, arch)

    rpms = set()
    # NOTE(review): rpm_nevras is collected below but never read in this
    # function — possibly left over from an earlier version.
    rpm_nevras = set()

    # read rpms from metadata rather than guessing it by scanning filesystem
    manifest_file = compose.paths.compose.metadata("rpms.json")
    manifest = productmd.rpms.Rpms()
    manifest.load(manifest_file)

    # Collect paths (relative to repo_dir) of all packages of the matching
    # category for this variant (and arch, when one is given).
    for rpms_arch, data in manifest.rpms.get(variant.uid, {}).items():
        if arch is not None and arch != rpms_arch:
            continue
        for srpm_data in data.values():
            for rpm_nevra, rpm_data in srpm_data.items():
                if types[pkg_type][0] != rpm_data["category"]:
                    continue
                path = os.path.join(compose.topdir, "compose", rpm_data["path"])
                rel_path = relative_path(path, repo_dir.rstrip("/") + "/")
                rpms.add(rel_path)
                rpm_nevras.add(str(rpm_nevra))

    # Write the sorted package list for createrepo's --pkglist.
    file_list = compose.paths.work.repo_package_list(arch, variant, pkg_type)
    with open(file_list, "w") as f:
        for rel_path in sorted(rpms):
            f.write("%s\n" % rel_path)

    # Only find last compose when we actually want delta RPMs.
    old_package_dirs = _get_old_package_dirs(compose, repo_dir) if with_deltas else None
    if old_package_dirs:
        # If we are creating deltas, we can not reuse existing metadata, as
        # that would stop deltas from being created.
        # This seems to only affect createrepo_c though.
        repo_dir_arch = None

    # Group file (comps) only applies to binary repos of variants that have it.
    comps_path = None
    if compose.has_comps and pkg_type == "rpm":
        comps_path = compose.paths.work.comps(arch=arch, variant=variant)

    # Optional shared createrepo_c cache, keyed by release and uid so that
    # concurrent composes of different releases do not collide.
    if compose.conf["createrepo_enable_cache"]:
        cachedir = os.path.join(
            CACHE_TOPDIR,
            "%s-%s" % (compose.conf["release_short"], os.getuid()),
        )
        if not os.path.exists(cachedir):
            try:
                os.makedirs(cachedir)
            except Exception as e:
                # Cache is best-effort: fall back to no cache on any failure.
                compose.log_warning(
                    "Cache disabled because cannot create cache dir %s %s"
                    % (cachedir, str(e))
                )
                cachedir = None
    else:
        cachedir = None
    cmd = repo.get_createrepo_cmd(
        repo_dir,
        update=True,
        database=compose.should_create_yum_database,
        skip_stat=True,
        pkglist=file_list,
        outputdir=repo_dir,
        workers=compose.conf["createrepo_num_workers"],
        groupfile=comps_path,
        update_md_path=repo_dir_arch,
        checksum=createrepo_checksum,
        deltas=with_deltas,
        oldpackagedirs=old_package_dirs,
        use_xz=compose.conf["createrepo_use_xz"],
        extra_args=compose.conf["createrepo_extra_args"],
        cachedir=cachedir,
    )
    log_file = compose.paths.log.log_file(
        arch, "createrepo-%s.%s" % (variant, pkg_type)
    )
    run(cmd, logfile=log_file, show_cmd=True)

    # call modifyrepo to inject productid
    product_id = compose.conf.get("product_id")
    if product_id and pkg_type == "rpm":
        # add product certificate to base (rpm) repo; skip source and debug
        product_id_path = compose.paths.work.product_id(arch, variant)
        if os.path.isfile(product_id_path):
            cmd = repo.get_modifyrepo_cmd(
                os.path.join(repo_dir, "repodata"), product_id_path, compress_type="gz"
            )
            log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
            run(cmd, logfile=log_file, show_cmd=True)
            # productinfo is not supported by modifyrepo in any way
            # this is a HACK to make CDN happy (dmach: at least I think,
            # need to confirm with dgregor)
            shutil.copy2(
                product_id_path, os.path.join(repo_dir, "repodata", "productid")
            )

    # call modifyrepo to inject modulemd if needed
    if pkg_type == "rpm" and arch in variant.arch_mmds and Modulemd is not None:
        mod_index = Modulemd.ModuleIndex()
        # (module_id, rpm artifact list) pairs destined for modules.json.
        metadata = []

        for module_id, mmd in variant.arch_mmds.get(arch, {}).items():
            if modules_metadata:
                module_rpms = mmd.get_rpm_artifacts()
                metadata.append((module_id, module_rpms))
            mod_index.add_module_stream(mmd)

        module_names = set(mod_index.get_module_names())
        defaults_dir = compose.paths.work.module_defaults_dir()
        overrides_dir = compose.conf.get("module_defaults_override_dir")
        collect_module_defaults(
            defaults_dir, module_names, mod_index, overrides_dir=overrides_dir
        )

        obsoletes_dir = compose.paths.work.module_obsoletes_dir()
        mod_index = collect_module_obsoletes(obsoletes_dir, module_names, mod_index)

        # Add extra modulemd files
        if variant.uid in compose.conf.get("createrepo_extra_modulemd", {}):
            compose.log_debug("Adding extra modulemd for %s.%s", variant.uid, arch)
            # The SCM checkout was placed here by CreaterepoPhase.run().
            dirname = compose.paths.work.tmp_dir(variant=variant, create_dir=False)
            for filepath in glob.glob(os.path.join(dirname, arch) + "/*.yaml"):
                module_stream = read_single_module_stream_from_file(filepath)
                if not mod_index.add_module_stream(module_stream):
                    raise RuntimeError(
                        "Failed parsing modulemd data from %s" % filepath
                    )
                # Add the module to metadata with dummy tag. We can't leave the
                # value empty, but we don't know what the correct tag is.
                nsvc = module_stream.get_nsvc()
                variant.module_uid_to_koji_tag[nsvc] = "DUMMY"
                metadata.append((nsvc, []))

        log_file = compose.paths.log.log_file(arch, "modifyrepo-modules-%s" % variant)
        add_modular_metadata(repo, repo_dir, mod_index, log_file)

        # NOTE(review): this loop assumes modules_metadata is not None whenever
        # `metadata` is non-empty; with extra modulemd configured and
        # modules_metadata=None this would raise — confirm callers always pass it.
        for module_id, module_rpms in metadata:
            modulemd_path = os.path.join(
                types[pkg_type][1](relative=True),
                find_file_in_repodata(repo_dir, "modules"),
            )
            modules_metadata.prepare_module_metadata(
                variant,
                arch,
                module_id,
                modulemd_path,
                types[pkg_type][0],
                list(module_rpms),
            )

    compose.log_info("[DONE ] %s" % msg)
|
||||
|
||||
|
||||
def add_modular_metadata(repo, repo_path, mod_index, log_file):
    """Inject modulemd ("modules" mdtype) into the repodata of *repo_path*.

    Does nothing when *mod_index* holds no modules, because dumping an empty
    index fails.
    """
    if not mod_index.get_module_names():
        return

    with temp_dir() as scratch:
        yaml_path = os.path.join(scratch, "modules.yaml")
        with open(yaml_path, "w") as handle:
            handle.write(mod_index.dump_to_string())

        modifyrepo_cmd = repo.get_modifyrepo_cmd(
            os.path.join(repo_path, "repodata"),
            yaml_path,
            mdtype="modules",
            compress_type="gz",
        )
        run(modifyrepo_cmd, logfile=log_file, show_cmd=True)
|
||||
|
||||
|
||||
def find_file_in_repodata(repo_path, type_):
    """Return the repomd.xml ``href`` for the metadata file of given type.

    Parses ``repodata/repomd.xml`` under *repo_path* and looks for a
    ``<data>`` element whose ``type`` attribute equals *type_*.

    :raises RuntimeError: when no matching entry exists.
    """
    repomd = os.path.join(repo_path, "repodata", "repomd.xml")
    document = xml.dom.minidom.parse(repomd)
    for data_node in document.getElementsByTagName("data"):
        if data_node.getAttribute("type") != type_:
            # Break reference cycles in nodes we are not going to use.
            data_node.unlink()
            continue
        location = data_node.getElementsByTagName("location")[0]
        return location.getAttribute("href")
    raise RuntimeError("No such file in repodata: %s" % type_)
|
||||
|
||||
|
||||
class CreaterepoThread(WorkerThread):
    """Pool worker that creates one repository per queued task."""

    def __init__(self, pool, reference_pkgset, modules_metadata):
        super(CreaterepoThread, self).__init__(pool)
        # Package set whose repodata may be reused by createrepo.
        self.reference_pkgset = reference_pkgset
        # Shared collector for modules.json records.
        self.modules_metadata = modules_metadata

    def process(self, item, num):
        """Unpack one queued task and build the corresponding repo."""
        task_compose, task_arch, task_variant, task_pkg_type = item
        create_variant_repo(
            task_compose,
            task_arch,
            task_variant,
            pkg_type=task_pkg_type,
            pkgset=self.reference_pkgset,
            modules_metadata=self.modules_metadata,
        )
|
||||
|
||||
|
||||
def _remove_tmp_dir(compose, tmp_dir):
    """Best-effort removal of a temporary SCM checkout directory."""
    try:
        shutil.rmtree(tmp_dir)
    except Exception as e:
        compose.log_warning("Failed to clean up tmp dir: %s %s" % (tmp_dir, str(e)))


def get_productids_from_scm(compose):
    """Fetch product certificates from SCM and install one per arch/variant.

    The ``product_id`` compose option is an scm_dict ({scm, repo, branch,
    dir}); the checkout is expected to contain files named
    ``$variant_uid-$arch-*.pem``. Each matching certificate is copied to
    the per-arch/variant work path.

    :raises RuntimeError: when a certificate is missing (and
        ``product_id_allow_missing`` is off) or when several certificates
        match one arch/variant pair.
    """
    product_id = compose.conf.get("product_id")
    if not product_id:
        compose.log_info("No product certificates specified")
        return

    product_id_allow_missing = compose.conf["product_id_allow_missing"]

    msg = "Getting product certificates from SCM..."
    compose.log_info("[BEGIN] %s" % msg)

    tmp_dir = compose.mkdtemp(prefix="pungi_")
    try:
        get_dir_from_scm(product_id, tmp_dir, compose=compose)
    except OSError as e:
        # BUGFIX: remove the checkout directory even when the SCM fetch
        # fails; previously it leaked on both of these paths.
        _remove_tmp_dir(compose, tmp_dir)
        if e.errno == errno.ENOENT and product_id_allow_missing:
            compose.log_warning("No product IDs in %s" % product_id)
            return
        raise

    if compose.conf["product_id_allow_name_prefix"]:
        # some layered products may use base product name before variant
        pattern = "%s/*%s-%s-*.pem"
    else:
        pattern = "%s/%s-%s-*.pem"

    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch):
            pem_files = glob.glob(pattern % (tmp_dir, variant.uid, arch))
            if not pem_files:
                warning = "No product certificate found (arch: %s, variant: %s)" % (
                    arch,
                    variant.uid,
                )
                if product_id_allow_missing:
                    compose.log_warning(warning)
                    continue
                else:
                    # Guarded cleanup so a failing rmtree cannot mask the
                    # RuntimeError below.
                    _remove_tmp_dir(compose, tmp_dir)
                    raise RuntimeError(warning)
            if len(pem_files) > 1:
                _remove_tmp_dir(compose, tmp_dir)
                raise RuntimeError(
                    "Multiple product certificates found (arch: %s, variant: %s): %s"
                    % (
                        arch,
                        variant.uid,
                        ", ".join(sorted([os.path.basename(i) for i in pem_files])),
                    )
                )
            product_id_path = compose.paths.work.product_id(arch, variant)
            shutil.copy2(pem_files[0], product_id_path)

    _remove_tmp_dir(compose, tmp_dir)
    compose.log_info("[DONE ] %s" % msg)
|
||||
|
||||
|
||||
def _get_old_package_dirs(compose, repo_dir):
    """Given a compose and a path to a repo in it, try to find corresponding
    repo in an older compose and return a list of paths to directories with
    packages in it.
    """
    if not compose.conf["createrepo_deltas"]:
        return None

    old_repo_dir = compose.paths.old_compose_path(
        repo_dir, allowed_statuses=["FINISHED", "FINISHED_INCOMPLETE"]
    )
    if not old_repo_dir:
        compose.log_info("No suitable old compose found in: %s" % compose.old_composes)
        return None

    packages_dir = os.path.join(old_repo_dir, "Packages")
    if compose.conf["hashed_directories"]:
        # Packages live in per-letter buckets; collect them all.
        return _find_package_dirs(packages_dir)
    return packages_dir
|
||||
|
||||
|
||||
def _find_package_dirs(base):
|
||||
"""Assuming the packages are in directories hashed by first letter, find
|
||||
all the buckets in given base.
|
||||
"""
|
||||
buckets = set()
|
||||
try:
|
||||
for subdir in os.listdir(base):
|
||||
bucket = os.path.join(base, subdir)
|
||||
if os.path.isdir(bucket):
|
||||
buckets.add(bucket)
|
||||
except OSError:
|
||||
# The directory does not exist, so no drpms for you!
|
||||
pass
|
||||
return sorted(buckets)
|
||||
|
||||
|
||||
def _has_deltas(compose, variant, arch):
|
||||
"""Check if delta RPMs are enabled for given variant and architecture."""
|
||||
key = "createrepo_deltas"
|
||||
if isinstance(compose.conf.get(key), bool):
|
||||
return compose.conf[key]
|
||||
return any(get_arch_variant_data(compose.conf, key, arch, variant))
|
||||
|
||||
|
||||
class ModulesMetadata(object):
    """Accumulates per-variant module records and writes modules.json."""

    def __init__(self, compose):
        # Prepare empty module metadata
        self.compose = compose
        self.modules_metadata_file = self.compose.paths.compose.metadata("modules.json")
        self.productmd_modules_metadata = productmd.modules.Modules()
        # Copy compose identification into the metadata header.
        header = self.productmd_modules_metadata.compose
        header.id = copy.copy(self.compose.compose_id)
        header.type = copy.copy(self.compose.compose_type)
        header.date = copy.copy(self.compose.compose_date)
        header.respin = copy.copy(self.compose.compose_respin)

    def write_modules_metadata(self):
        """Flush modules metadata into file."""
        self.compose.log_info(
            "Writing modules metadata: %s" % self.modules_metadata_file
        )
        self.productmd_modules_metadata.dump(self.modules_metadata_file)

    def prepare_module_metadata(
        self, variant, arch, nsvc, modulemd_path, category, module_rpms
    ):
        """Find koji tag which corresponds to the module and add record into
        module metadata structure.
        """
        koji_tag = variant.module_uid_to_koji_tag[nsvc]
        self.productmd_modules_metadata.add(
            variant.uid, arch, nsvc, koji_tag, modulemd_path, category, module_rpms
        )
|
141
pungi/phases/extra_files.py
Normal file
141
pungi/phases/extra_files.py
Normal file
@ -0,0 +1,141 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import copy
|
||||
import fnmatch
|
||||
|
||||
from productmd.extra_files import ExtraFiles
|
||||
|
||||
from pungi.util import get_arch_variant_data, pkg_is_rpm, copy_all
|
||||
from pungi.arch import split_name_arch
|
||||
from pungi.wrappers.scm import get_file_from_scm, get_dir_from_scm
|
||||
from pungi.phases.base import ConfigGuardedPhase
|
||||
from pungi import metadata
|
||||
|
||||
|
||||
class ExtraFilesPhase(ConfigGuardedPhase):
    """EXTRA_FILES"""

    name = "extra_files"

    def __init__(self, compose, pkgset_phase):
        super(ExtraFilesPhase, self).__init__(compose)
        # pkgset_phase provides package_sets
        self.pkgset_phase = pkgset_phase
        # Prepare metadata
        self.metadata = ExtraFiles()
        self.metadata.compose.id = self.compose.compose_id
        self.metadata.compose.type = self.compose.compose_type
        self.metadata.compose.date = self.compose.compose_date
        self.metadata.compose.respin = self.compose.compose_respin

    def run(self):
        """Copy configured extra files for every arch/variant, then write
        the global extra_files.json metadata."""
        for variant in self.compose.get_variants():
            if variant.is_empty:
                continue
            # Source "arch" is included so source-only extra files work too.
            for arch in variant.arches + ["src"]:
                file_configs = get_arch_variant_data(
                    self.compose.conf, self.name, arch, variant
                )
                if not file_configs:
                    self.compose.log_info(
                        "[SKIP ] No extra files (arch: %s, variant: %s)"
                        % (arch, variant.uid)
                    )
                    continue
                copy_extra_files(
                    self.compose,
                    file_configs,
                    arch,
                    variant,
                    self.pkgset_phase.package_sets,
                    self.metadata,
                )

        metadata_path = self.compose.paths.compose.metadata("extra_files.json")
        self.compose.log_info("Writing global extra files metadata: %s" % metadata_path)
        self.metadata.dump(metadata_path)
|
||||
|
||||
|
||||
def copy_extra_files(
    compose, cfg, arch, variant, package_sets, extra_metadata, checksum_type=None
):
    """Fetch configured extra files into the variant's os tree.

    :param compose: compose object (paths, config, logging)
    :param cfg: list of scm_dict configuration blocks for this arch/variant
    :param arch: architecture the files are copied for
    :param variant: variant the files are copied for
    :param package_sets: package sets used to resolve bare RPM names
    :param extra_metadata: productmd ExtraFiles object to populate
    :param checksum_type: checksum type(s) used in the metadata; defaults to
        the ``media_checksums`` compose option
    :raises RuntimeError: when an RPM pattern matches nothing in the package set
    """
    checksum_type = checksum_type or compose.conf["media_checksums"]
    var_dict = {
        "arch": arch,
        "variant_id": variant.id,
        "variant_id_lower": variant.id.lower(),
        "variant_uid": variant.uid,
        "variant_uid_lower": variant.uid.lower(),
    }

    msg = "Getting extra files (arch: %s, variant: %s)" % (arch, variant)
    compose.log_info("[BEGIN] %s" % msg)

    os_tree = compose.paths.compose.os_tree(arch, variant)
    extra_files_dir = compose.paths.work.extra_files_dir(arch, variant)

    for scm_dict in cfg:
        scm_dict = copy.deepcopy(scm_dict)
        # if scm is "rpm" and repo contains only a package name, find the
        # package(s) in package set
        if scm_dict["scm"] == "rpm" and not _is_external(scm_dict["repo"]):
            rpms = []
            pattern = scm_dict["repo"] % var_dict
            pkg_name, pkg_arch = split_name_arch(pattern)
            for package_set in package_sets:
                for pkgset_file in package_set[arch]:
                    pkg_obj = package_set[arch][pkgset_file]
                    if pkg_is_rpm(pkg_obj) and _pkg_matches(
                        pkg_obj, pkg_name, pkg_arch
                    ):
                        rpms.append(pkg_obj.file_path)
            if not rpms:
                raise RuntimeError(
                    "No package matching %s in the package set." % pattern
                )
            scm_dict["repo"] = rpms

        getter = get_file_from_scm if "file" in scm_dict else get_dir_from_scm
        target_path = os.path.join(
            extra_files_dir, scm_dict.get("target", "").lstrip("/")
        )
        getter(scm_dict, target_path, compose=compose)

    if os.listdir(extra_files_dir):
        metadata.populate_extra_files_metadata(
            extra_metadata,
            variant,
            arch,
            os_tree,
            copy_all(extra_files_dir, os_tree),
            # BUGFIX: honor the checksum_type argument; previously the
            # media_checksums option was re-read here, silently ignoring
            # any explicitly passed checksum_type.
            checksum_type,
            relative_root=compose.paths.compose.topdir(),
        )

    compose.log_info("[DONE ] %s" % msg)
|
||||
|
||||
|
||||
def _pkg_matches(pkg_obj, name_glob, arch):
|
||||
"""Check if `pkg_obj` matches name and arch."""
|
||||
return fnmatch.fnmatch(pkg_obj.name, name_glob) and (
|
||||
arch is None or arch == pkg_obj.arch
|
||||
)
|
||||
|
||||
|
||||
def _is_external(rpm):
|
||||
"""Check if path to rpm points outside of the compose: i.e. it is an
|
||||
absolute path or a URL."""
|
||||
return rpm.startswith("/") or "://" in rpm
|
554
pungi/phases/extra_isos.py
Normal file
554
pungi/phases/extra_isos.py
Normal file
@ -0,0 +1,554 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
from kobo.shortcuts import force_list
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
import productmd.treeinfo
|
||||
from productmd.extra_files import ExtraFiles
|
||||
|
||||
from pungi import createiso
|
||||
from pungi import metadata
|
||||
from pungi.phases.base import ConfigGuardedPhase, PhaseBase, PhaseLoggerMixin
|
||||
from pungi.phases.createiso import (
|
||||
add_iso_to_metadata,
|
||||
copy_boot_images,
|
||||
run_createiso_command,
|
||||
load_and_tweak_treeinfo,
|
||||
compare_packages,
|
||||
OldFileLinker,
|
||||
get_iso_level_config,
|
||||
)
|
||||
from pungi.util import (
|
||||
failable,
|
||||
get_format_substs,
|
||||
get_variant_data,
|
||||
get_volid,
|
||||
read_json_file,
|
||||
)
|
||||
from pungi.wrappers import iso
|
||||
from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
|
||||
|
||||
|
||||
class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
    """Phase building additional ISOs that combine multiple variants."""

    name = "extra_isos"

    def __init__(self, compose, buildinstall_phase):
        super(ExtraIsosPhase, self).__init__(compose)
        self.pool = ThreadPool(logger=self.logger)
        # Buildinstall phase is consulted when deciding whether bootable
        # images can be reused from an older compose.
        self.bi = buildinstall_phase

    def validate(self):
        """Warn about configured arches that the variant does not provide."""
        for variant in self.compose.get_variants(types=["variant"]):
            for config in get_variant_data(self.compose.conf, self.name, variant):
                unknown_arches = set(config.get("arches", [])) - set(variant.arches)
                if not unknown_arches:
                    continue
                self.compose.log_warning(
                    "Extra iso config for %s mentions non-existing arches: %s"
                    % (variant, ", ".join(sorted(unknown_arches)))
                )

    def run(self):
        """Queue one (config, variant, arch) task per ISO and start the pool."""
        work_items = []

        for variant in self.compose.get_variants(types=["variant"]):
            for config in get_variant_data(self.compose.conf, self.name, variant):
                arches = set(variant.arches)
                if config.get("arches"):
                    arches &= set(config["arches"])
                if not config["skip_src"]:
                    arches.add("src")
                work_items.extend((config, variant, arch) for arch in sorted(arches))

        for config, variant, arch in work_items:
            self.pool.add(ExtraIsosThread(self.pool, self.bi))
            self.pool.queue_put((self.compose, config, variant, arch))

        self.pool.start()
|
||||
|
||||
|
||||
class ExtraIsosThread(WorkerThread):
    """Pool worker that builds (or reuses) one extra ISO per queued task."""

    def __init__(self, pool, buildinstall_phase):
        super(ExtraIsosThread, self).__init__(pool)
        # Buildinstall phase; used to check whether its output was reused,
        # which gates reuse of bootable ISOs.
        self.bi = buildinstall_phase

    def process(self, item, num):
        """Entry point called by the pool with one (compose, config,
        variant, arch) task; failures are tolerated for failable arches."""
        self.num = num
        compose, config, variant, arch = item
        can_fail = arch in config.get("failable_arches", [])
        with failable(
            compose, can_fail, variant, arch, "extra_iso", logger=self.pool._logger
        ):
            self.worker(compose, config, variant, arch)

    def worker(self, compose, config, variant, arch):
        """Build the ISO for one arch/variant, reusing an older compose's
        image when possible, and record it in compose metadata."""
        filename = get_filename(compose, variant, arch, config.get("filename"))
        volid = get_volume_id(compose, variant, arch, config.get("volid", []))
        iso_dir = compose.paths.compose.iso_dir(arch, variant)
        iso_path = os.path.join(iso_dir, filename)

        prepare_media_metadata(compose, variant, arch)

        msg = "Creating ISO (arch: %s, variant: %s): %s" % (arch, variant, filename)
        self.pool.log_info("[BEGIN] %s" % msg)

        get_extra_files(compose, variant, arch, config.get("extra_files", []))

        # Source ISOs are never bootable; otherwise bootability follows the
        # presence of a buildinstall method in the config.
        bootable = arch != "src" and bool(compose.conf.get("buildinstall_method"))

        graft_points = get_iso_contents(
            compose,
            variant,
            arch,
            config["include_variants"],
            filename,
            bootable=bootable,
            inherit_extra_files=config.get("inherit_extra_files", False),
        )

        opts = createiso.CreateIsoOpts(
            output_dir=iso_dir,
            iso_name=filename,
            volid=volid,
            graft_points=graft_points,
            arch=arch,
            supported=compose.supported,
            hfs_compat=compose.conf["iso_hfs_ppc64le_compatible"],
            use_xorrisofs=compose.conf.get("createiso_use_xorrisofs"),
            iso_level=get_iso_level_config(compose, variant, arch),
        )
        os_tree = compose.paths.compose.os_tree(arch, variant)
        if compose.conf["create_jigdo"]:
            jigdo_dir = compose.paths.compose.jigdo_dir(arch, variant)
            opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)

        if bootable:
            opts = opts._replace(
                buildinstall_method=compose.conf["buildinstall_method"],
                boot_iso=os.path.join(os_tree, "images", "boot.iso"),
            )

        # Check if it can be reused.
        # The hash of this image's config snippet keys the reuse metadata:
        # any config change produces a different hash and blocks reuse.
        hash = hashlib.sha256()
        hash.update(json.dumps(config, sort_keys=True).encode("utf-8"))
        config_hash = hash.hexdigest()

        if not self.try_reuse(compose, variant, arch, config_hash, opts):
            # Reuse was not possible: generate a shell script that runs the
            # iso-building commands and execute it.
            script_dir = compose.paths.work.tmp_dir(arch, variant)
            opts = opts._replace(script_dir=script_dir)
            script_file = os.path.join(script_dir, "extraiso-%s.sh" % filename)
            with open(script_file, "w") as f:
                createiso.write_script(opts, f)

            run_createiso_command(
                self.num,
                compose,
                bootable,
                arch,
                ["bash", script_file],
                [compose.topdir],
                log_file=compose.paths.log.log_file(
                    arch, "extraiso-%s" % os.path.basename(iso_path)
                ),
                iso_path=iso_path,
            )

        img = add_iso_to_metadata(
            compose,
            variant,
            arch,
            iso_path,
            bootable,
            additional_variants=config["include_variants"],
        )
        img._max_size = config.get("max_size")

        # Always record reuse metadata so the *next* compose can reuse this
        # image regardless of whether this one was built or reused.
        save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path)

        self.pool.log_info("[DONE ] %s" % msg)

    def try_reuse(self, compose, variant, arch, config_hash, opts):
        """Attempt to hardlink the ISO from an older compose.

        Returns True when the old image was successfully reused; False (or
        None when reuse is disabled entirely) otherwise.
        """
        # Check explicit config
        if not compose.conf["extraiso_allow_reuse"]:
            return

        log_msg = "Cannot reuse ISO for %s.%s" % (variant, arch)

        if opts.buildinstall_method and not self.bi.reused(variant, arch):
            # If buildinstall phase was not reused for some reason, we can not
            # reuse any bootable image. If a package change caused rebuild of
            # boot.iso, we would catch it here too, but there could be a
            # configuration change in lorax template which would remain
            # undetected.
            # NOTE(review): log_info is called with lazy %-style args here but
            # with pre-formatted strings elsewhere in this file — confirm the
            # pool logger supports both call forms.
            self.pool.log_info("%s - boot configuration changed", log_msg)
            return False

        # Check old compose configuration: extra_files and product_ids can be
        # reflected on ISO.
        old_config = compose.load_old_compose_config()
        if not old_config:
            self.pool.log_info("%s - no config for old compose", log_msg)
            return False

        # Disable reuse if unsigned packages are allowed. The older compose
        # could have unsigned packages, and those may have been signed since
        # then. We want to regenerate the ISO to have signatures.
        if None in compose.conf["sigkeys"]:
            self.pool.log_info("%s - unsigned packages are allowed", log_msg)
            return False

        # Convert current configuration to JSON and back to encode it similarly
        # to the old one
        config = json.loads(json.dumps(compose.conf))
        for opt in compose.conf:
            # Skip a selection of options: these affect what packages can be
            # included, which we explicitly check later on.
            config_whitelist = set(
                [
                    "gather_lookaside_repos",
                    "pkgset_koji_builds",
                    "pkgset_koji_scratch_tasks",
                    "pkgset_koji_module_builds",
                ]
            )
            # Skip irrelevant options
            config_whitelist.update(["osbs", "osbuild"])
            if opt in config_whitelist:
                continue

            if old_config.get(opt) != config.get(opt):
                self.pool.log_info("%s - option %s differs", log_msg, opt)
                return False

        old_metadata = load_old_metadata(compose, variant, arch, config_hash)
        if not old_metadata:
            self.pool.log_info("%s - no old metadata found", log_msg)
            return False

        # Test if volume ID matches - volid can be generated dynamically based on
        # other values, and could change even if nothing else is different.
        if opts.volid != old_metadata["opts"]["volid"]:
            self.pool.log_info("%s - volume ID differs", log_msg)
            return False

        # Compare packages on the ISO.
        if compare_packages(
            old_metadata["opts"]["graft_points"],
            opts.graft_points,
        ):
            self.pool.log_info("%s - packages differ", log_msg)
            return False

        try:
            self.perform_reuse(
                compose,
                variant,
                arch,
                opts,
                old_metadata["opts"]["output_dir"],
                old_metadata["opts"]["iso_name"],
            )
            return True
        except Exception as exc:
            # Reuse is best-effort: on any failure fall back to building the
            # image from scratch.
            self.pool.log_error(
                "Error while reusing ISO for %s.%s: %s", variant, arch, exc
            )
            compose.traceback("extraiso-reuse-%s-%s-%s" % (variant, arch, config_hash))
            return False

    def perform_reuse(self, compose, variant, arch, opts, old_iso_dir, old_file_name):
        """
        Copy all related files from old compose to the new one. As a last step
        add the new image to metadata.
        """
        linker = OldFileLinker(self.pool._logger)
        old_iso_path = os.path.join(old_iso_dir, old_file_name)
        iso_path = os.path.join(opts.output_dir, opts.iso_name)
        try:
            # Hardlink ISO and manifest
            for suffix in ("", ".manifest"):
                linker.link(old_iso_path + suffix, iso_path + suffix)
            # Copy log files
            # The log file name includes filename of the image, so we need to
            # find old file with the old name, and rename it to the new name.
            log_file = compose.paths.log.log_file(arch, "extraiso-%s" % opts.iso_name)
            old_log_file = compose.paths.old_compose_path(
                compose.paths.log.log_file(arch, "extraiso-%s" % old_file_name)
            )
            linker.link(old_log_file, log_file)
            # Copy jigdo files
            if opts.jigdo_dir:
                old_jigdo_dir = compose.paths.old_compose_path(opts.jigdo_dir)
                for suffix in (".template", ".jigdo"):
                    linker.link(
                        os.path.join(old_jigdo_dir, old_file_name) + suffix,
                        os.path.join(opts.jigdo_dir, opts.iso_name) + suffix,
                    )
        except Exception:
            # A problem happened while linking some file, let's clean up
            # everything.
            linker.abort()
            raise
|
||||
|
||||
|
||||
def save_reuse_metadata(compose, variant, arch, config_hash, opts, iso_path):
    """
    Save metadata for possible reuse of this image. The file name is determined
    from the hash of a configuration snippet for this image. Any change in that
    configuration in next compose will change the hash and thus reuse will be
    blocked.
    """
    # NOTE: iso_path is currently unused; kept for interface compatibility.
    reuse_info = {"opts": opts._asdict()}
    target = compose.paths.log.log_file(
        arch,
        "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
        ext="json",
    )
    with open(target, "w") as handle:
        json.dump(reuse_info, handle, indent=2)
|
||||
|
||||
|
||||
def load_old_metadata(compose, variant, arch, config_hash):
    """Load reuse metadata written by the previous compose.

    Returns the parsed JSON document, or None when the file is missing or
    cannot be read.
    """
    current_path = compose.paths.log.log_file(
        arch,
        "extraiso-reuse-%s-%s-%s" % (variant.uid, arch, config_hash),
        ext="json",
    )
    previous_path = compose.paths.old_compose_path(current_path)
    try:
        return read_json_file(previous_path)
    except Exception:
        # Any failure here simply means reuse is not possible.
        return None
|
||||
|
||||
|
||||
def get_extra_files(compose, variant, arch, extra_files):
    """Clone the configured files into a directory from where they can be
    included in the ISO.
    """
    extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
    collected = []
    for scm_dict in extra_files:
        # A "file" key selects a single file from the SCM, otherwise a whole
        # directory is exported.
        if "file" in scm_dict:
            getter = get_file_from_scm
        else:
            getter = get_dir_from_scm
        target = scm_dict.get("target", "").lstrip("/")
        target_path = os.path.join(extra_files_dir, target).rstrip("/")
        for exported in getter(scm_dict, target_path, compose=compose):
            collected.append(os.path.join(target, exported))

    if collected:
        metadata.populate_extra_files_metadata(
            ExtraFiles(),
            variant,
            arch,
            extra_files_dir,
            collected,
            compose.conf["media_checksums"],
        )
|
||||
|
||||
|
||||
def get_iso_contents(
    compose, variant, arch, include_variants, filename, bootable, inherit_extra_files
):
    """Find all files that should be on the ISO. For bootable image we start
    with the boot configuration. Then for each variant we add packages,
    repodata and extra files. Finally we add top-level extra files.

    :param include_variants: list of additional variant UIDs to put on the ISO
    :param filename: file name of the ISO (used to locate the work dir)
    :param bootable: whether boot images should be grafted in
    :param inherit_extra_files: also include each variant's extra files
    :return: path to the written graft-points file
    """
    iso_dir = compose.paths.work.iso_dir(arch, filename)

    # Maps path-on-ISO -> path-on-disk.
    files = {}
    if bootable:
        buildinstall_dir = compose.paths.work.buildinstall_dir(arch, create_dir=False)
        # Lorax keeps its output in a per-variant subdirectory.
        if compose.conf["buildinstall_method"] == "lorax":
            buildinstall_dir = os.path.join(buildinstall_dir, variant.uid)

        copy_boot_images(buildinstall_dir, iso_dir)
        files = iso.get_graft_points(
            compose.paths.compose.topdir(), [buildinstall_dir, iso_dir]
        )

        # We need to point efiboot.img to compose/ tree, because it was
        # modified in buildinstall phase and the file in work/ has different
        # checksum to what is in the .treeinfo.
        if "images/efiboot.img" in files:
            files["images/efiboot.img"] = os.path.join(
                compose.paths.compose.os_tree(arch, variant), "images/efiboot.img"
            )

    # Main variant first, then every extra included variant.
    variants = [variant.uid] + include_variants
    for variant_uid in variants:
        var = compose.all_variants[variant_uid]

        # Get packages...
        package_dir = compose.paths.compose.packages(arch, var)
        for k, v in iso.get_graft_points(
            compose.paths.compose.topdir(), [package_dir]
        ).items():
            files[os.path.join(var.uid, "Packages", k)] = v

        # Get repodata...
        tree_dir = compose.paths.compose.repository(arch, var)
        repo_dir = os.path.join(tree_dir, "repodata")
        for k, v in iso.get_graft_points(
            compose.paths.compose.topdir(), [repo_dir]
        ).items():
            files[os.path.join(var.uid, "repodata", k)] = v

        if inherit_extra_files:
            # Get extra files...
            extra_files_dir = compose.paths.work.extra_files_dir(arch, var)
            for k, v in iso.get_graft_points(
                compose.paths.compose.topdir(), [extra_files_dir]
            ).items():
                files[os.path.join(var.uid, k)] = v

    extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)

    # Rewrite .treeinfo so the ISO lists all included variants; the copy in
    # the compose tree is updated in place as well.
    original_treeinfo = os.path.join(
        compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo"
    )
    tweak_treeinfo(
        compose,
        include_variants,
        original_treeinfo,
        os.path.join(extra_files_dir, ".treeinfo"),
    )
    tweak_repo_treeinfo(
        compose,
        include_variants,
        original_treeinfo,
        original_treeinfo,
    )

    # Add extra files specific for the ISO
    files.update(
        iso.get_graft_points(compose.paths.compose.topdir(), [extra_files_dir])
    )

    gp = "%s-graft-points" % iso_dir
    iso.write_graft_points(gp, files, exclude=["*/lost+found", "*/boot.iso"])
    return gp
|
||||
|
||||
|
||||
def tweak_repo_treeinfo(compose, include_variants, source_file, dest_file):
    """
    The method includes the variants to file .treeinfo of a variant. It takes
    the variants which are described
    by options `extra_isos -> include_variants`.

    The main variant keeps its local Packages/repository paths; every other
    variant is pointed at its own tree via a relative path.
    """
    ti = productmd.treeinfo.TreeInfo()
    ti.load(source_file)
    # The variant already present in the loaded file is treated as the main
    # one; iteration order determines which that is.
    main_variant = next(iter(ti.variants))
    for variant_uid in include_variants:
        variant = compose.all_variants[variant_uid]
        var = productmd.treeinfo.Variant(ti)
        var.id = variant.id
        var.uid = variant.uid
        var.name = variant.name
        var.type = variant.type
        ti.variants.add(var)

    for variant_id in ti.variants:
        var = ti.variants[variant_id]
        if variant_id == main_variant:
            # Main variant content lives next to this .treeinfo.
            var.paths.packages = 'Packages'
            var.paths.repository = '.'
        else:
            # Other variants are reached relative to the compose layout.
            var.paths.packages = os.path.join(
                '../../..',
                var.uid,
                var.arch,
                'os/Packages',
            )
            var.paths.repository = os.path.join(
                '../../..',
                var.uid,
                var.arch,
                'os',
            )
    ti.dump(dest_file, main_variant=main_variant)
|
||||
|
||||
|
||||
def tweak_treeinfo(compose, include_variants, source_file, dest_file):
    """Write a .treeinfo for the ISO that also lists the included variants,
    with each variant's Packages/repodata addressed by its UID.
    """
    ti = load_and_tweak_treeinfo(source_file)
    for uid in include_variants:
        source_variant = compose.all_variants[uid]
        entry = productmd.treeinfo.Variant(ti)
        entry.id = source_variant.id
        entry.uid = source_variant.uid
        entry.name = source_variant.name
        entry.type = source_variant.type
        ti.variants.add(entry)

    # On the ISO every variant lives in a directory named after its UID.
    for uid in ti.variants:
        entry = ti.variants[uid]
        entry.paths.packages = os.path.join(entry.uid, "Packages")
        entry.paths.repository = entry.uid

    ti.dump(dest_file)
|
||||
|
||||
|
||||
def get_filename(compose, variant, arch, format):
    """Return the file name for this ISO, optionally expanded from a custom
    format string.

    Raises RuntimeError when the format references an unknown substitution.
    """
    disc_type = compose.conf["disc_types"].get("dvd", "dvd")
    base_filename = compose.get_image_name(
        arch, variant, disc_type=disc_type, disc_num=1
    )
    if not format:
        return base_filename
    args = get_format_substs(
        compose,
        arch=arch,
        disc_type=disc_type,
        disc_num=1,
        suffix=".iso",
        filename=base_filename,
        variant=variant,
    )
    try:
        # Support both %-style and {}-style substitutions.
        return (format % args).format(**args)
    except KeyError as err:
        raise RuntimeError(
            "Failed to create image name: unknown format element: %s" % err
        )
|
||||
|
||||
|
||||
def get_volume_id(compose, variant, arch, formats):
    """Compute the volume ID for the extra ISO, with the regular ISO's volume
    ID available as a substitution in the given formats.
    """
    disc_type = compose.conf["disc_types"].get("dvd", "dvd")
    # Get volume ID for regular ISO so that we can substitute it in.
    base_volid = get_volid(compose, arch, variant, disc_type=disc_type)
    return get_volid(
        compose,
        arch,
        variant,
        disc_type=disc_type,
        formats=force_list(formats),
        volid=base_volid,
    )
|
||||
|
||||
|
||||
def prepare_media_metadata(compose, variant, arch):
    """Write a .discinfo and media.repo files to a directory that will be
    included on the ISO. It's possible to overwrite the files by using extra
    files.
    """
    target_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
    description = metadata.get_description(compose, variant, arch)
    metadata.create_media_repo(
        os.path.join(target_dir, "media.repo"), description, timestamp=None
    )
    metadata.create_discinfo(os.path.join(target_dir, ".discinfo"), description, arch)
|
1157
pungi/phases/gather/__init__.py
Normal file
1157
pungi/phases/gather/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
167
pungi/phases/gather/link.py
Normal file
167
pungi/phases/gather/link.py
Normal file
@ -0,0 +1,167 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
|
||||
import kobo.rpmlib
|
||||
|
||||
from pungi.linker import LinkerPool
|
||||
|
||||
|
||||
# TODO: global Linker instance - to keep hardlinks on dest?
|
||||
# DONE: show overall progress, not each file
|
||||
# TODO: (these should be logged separately)
|
||||
|
||||
|
||||
def _get_src_nevra(compose, pkg_obj, srpm_map):
    """Return source N-E:V-R.A.rpm; guess if necessary."""
    known = srpm_map.get(pkg_obj.sourcerpm, None)
    if known:
        return known
    # No SRPM was seen for this package: rebuild the NEVRA from the sourcerpm
    # file name, borrowing the epoch from the binary package.
    parsed = kobo.rpmlib.parse_nvra(pkg_obj.sourcerpm)
    parsed["epoch"] = pkg_obj.epoch
    guess = kobo.rpmlib.make_nvra(parsed, add_rpm=True, force_epoch=True)
    compose.log_warning(
        "Package %s has no SRPM available, guessing epoch: %s"
        % (pkg_obj.nevra, guess)
    )
    return guess
|
||||
|
||||
|
||||
def get_package_path(filename, hashed_directory=False):
    """Get path for filename. If ``hashed_directory`` is ``True``, the path
    will include a prefix based on the initial letter.

    >>> get_package_path('my-package.rpm')
    'my-package.rpm'
    >>> get_package_path('my-package.rpm', True)
    'm/my-package.rpm'
    >>> get_package_path('My-Package.rpm', True)
    'm/My-Package.rpm'
    """
    if not hashed_directory:
        return filename
    return os.path.join(filename[0].lower(), filename)
|
||||
|
||||
|
||||
def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={}):
    """Link gathered SRPMs, RPMs and debuginfo packages into the compose tree
    and record each one in the RPM manifest.

    :param pkg_map: dict with "srpm", "rpm" and "debuginfo" lists of packages
    :param pkg_sets: package sets used to look up package objects by path
    :param manifest: productmd-style manifest receiving one entry per package
    :param srpm_map: deliberately a shared mutable default — it accumulates
        SRPM file name -> NEVRA mappings across successive link_files() runs
        so later binary packages can resolve their source NEVRA
    """
    # srpm_map instance is shared between link_files() runs
    msg = "Linking packages (arch: %s, variant: %s)" % (arch, variant)
    compose.log_info("[BEGIN] %s" % msg)
    link_type = compose.conf["link_type"]

    pool = LinkerPool.with_workers(10, link_type, logger=compose._logger)

    hashed_directories = compose.conf["hashed_directories"]

    # Create temporary dict mapping package path to package object from pkgset
    # so we do not have to search all pkg_sets for every package in pkg_map.
    pkg_by_path = {}
    for pkg_set in pkg_sets:
        for path in pkg_set[arch]:
            pkg_by_path[path] = pkg_set[arch][path]

    # --- Source packages ---------------------------------------------------
    packages_dir = compose.paths.compose.packages("src", variant)
    packages_dir_relpath = compose.paths.compose.packages("src", variant, relative=True)
    for pkg in pkg_map["srpm"]:
        # Lookaside packages are served from elsewhere and never linked.
        if "lookaside" in pkg["flags"]:
            continue
        package_path = get_package_path(
            os.path.basename(pkg["path"]), hashed_directories
        )
        dst = os.path.join(packages_dir, package_path)
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
        nevra = pkg_obj.nevra
        manifest.add(
            variant.uid,
            arch,
            nevra,
            path=dst_relpath,
            sigkey=pkg_obj.signature,
            category="source",
        )

        # update srpm_map
        srpm_map.setdefault(pkg_obj.file_name, nevra)

    # --- Binary packages ---------------------------------------------------
    packages_dir = compose.paths.compose.packages(arch, variant)
    packages_dir_relpath = compose.paths.compose.packages(arch, variant, relative=True)
    for pkg in pkg_map["rpm"]:
        if "lookaside" in pkg["flags"]:
            continue
        package_path = get_package_path(
            os.path.basename(pkg["path"]), hashed_directories
        )
        dst = os.path.join(packages_dir, package_path)
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
        nevra = pkg_obj.nevra
        src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
        manifest.add(
            variant.uid,
            arch,
            nevra,
            path=dst_relpath,
            sigkey=pkg_obj.signature,
            category="binary",
            srpm_nevra=src_nevra,
        )

    # --- Debuginfo packages ------------------------------------------------
    packages_dir = compose.paths.compose.debug_packages(arch, variant)
    packages_dir_relpath = compose.paths.compose.debug_packages(
        arch, variant, relative=True
    )
    for pkg in pkg_map["debuginfo"]:
        if "lookaside" in pkg["flags"]:
            continue
        package_path = get_package_path(
            os.path.basename(pkg["path"]), hashed_directories
        )
        dst = os.path.join(packages_dir, package_path)
        dst_relpath = os.path.join(packages_dir_relpath, package_path)

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_by_path[pkg["path"]]
        nevra = pkg_obj.nevra
        src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
        manifest.add(
            variant.uid,
            arch,
            nevra,
            path=dst_relpath,
            sigkey=pkg_obj.signature,
            category="debug",
            srpm_nevra=src_nevra,
        )

    # All linking happens here, after every job has been queued.
    pool.start()
    pool.stop()
    compose.log_info("[DONE ] %s" % msg)
|
19
pungi/phases/gather/method.py
Normal file
19
pungi/phases/gather/method.py
Normal file
@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
class GatherMethodBase(object):
    """Common base class for gather methods.

    Holds the compose object that concrete methods operate on.
    """

    def __init__(self, compose):
        # Compose shared by all invocations of the method.
        self.compose = compose
|
24
pungi/phases/gather/methods/__init__.py
Normal file
24
pungi/phases/gather/methods/__init__.py
Normal file
@ -0,0 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
from .method_deps import GatherMethodDeps
|
||||
from .method_nodeps import GatherMethodNodeps
|
||||
from .method_hybrid import GatherMethodHybrid
|
||||
|
||||
# Mapping from the "gather_method" configuration value to the class
# implementing that method.
ALL_METHODS = {
    "deps": GatherMethodDeps,
    "nodeps": GatherMethodNodeps,
    "hybrid": GatherMethodHybrid,
}
|
286
pungi/phases/gather/methods/method_deps.py
Normal file
286
pungi/phases/gather/methods/method_deps.py
Normal file
@ -0,0 +1,286 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from kobo.shortcuts import run
|
||||
from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
|
||||
from kobo.rpmlib import parse_nvra
|
||||
|
||||
from pungi.util import get_arch_variant_data, temp_dir
|
||||
from pungi.wrappers.pungi import PungiWrapper
|
||||
|
||||
from pungi.arch import tree_arch_to_yum_arch
|
||||
import pungi.phases.gather
|
||||
from pungi.phases.pkgset.pkgsets import ExtendedRpmWrapper
|
||||
|
||||
import pungi.phases.gather.method
|
||||
|
||||
|
||||
class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
    """Depsolving gather method.

    Writes a pungi kickstart config for the arch/variant and runs the
    external depsolver, then validates signatures and dependencies of the
    result.
    """

    def __call__(
        self,
        arch,
        variant,
        packages,
        groups,
        filter_packages,
        multilib_whitelist,
        multilib_blacklist,
        package_sets,
        path_prefix=None,
        fulltree_excludes=None,
        prepopulate=None,
    ):
        # Returned structure has this shape:
        # result = {
        #     "rpm": [],
        #     "srpm": [],
        #     "debuginfo": [],
        # }

        # NOTE(review): self.source_name is not assigned in this class or the
        # visible base class — presumably set externally by the gather phase;
        # confirm before relying on it.
        write_pungi_config(
            self.compose,
            arch,
            variant,
            packages,
            groups,
            filter_packages,
            multilib_whitelist,
            multilib_blacklist,
            fulltree_excludes=fulltree_excludes,
            prepopulate=prepopulate,
            source_name=self.source_name,
            package_sets=package_sets,
        )
        result, missing_deps = resolve_deps(
            self.compose, arch, variant, source_name=self.source_name
        )
        # Fail hard on wrongly-signed packages or broken dependencies.
        raise_on_invalid_sigkeys(arch, variant, package_sets, result)
        check_deps(self.compose, arch, variant, missing_deps)
        return result
|
||||
|
||||
|
||||
def raise_on_invalid_sigkeys(arch, variant, package_sets, result):
    """
    Raises RuntimeError if some package in compose is signed with an invalid
    sigkey.
    """
    offenders = {}
    for package in result["rpm"]:
        pkg_name = parse_nvra(package["path"])["name"]
        for pkgset in package_sets:
            for forbidden in pkgset["global"].invalid_sigkey_rpms:
                if pkg_name == forbidden["name"]:
                    key = pkgset["global"].sigkey_ordering
                    offenders.setdefault(key, []).append(forbidden)

    if offenders:
        package_sets[0]["global"].raise_invalid_sigkeys_exception(offenders)
|
||||
|
||||
|
||||
def _format_packages(pkgs):
    """Sort packages and merge name with arch.

    :param pkgs: iterable of ``(package, arch)`` pairs where ``package`` is
        either a plain name string or an RPM wrapper object with ``.name``
    :return: sorted, deduplicated list of ``name`` / ``name.arch`` strings
    """
    result = set()
    for pkg, pkg_arch in pkgs:
        # isinstance instead of an exact type() comparison: subclasses of the
        # wrapper types expose .name too and must be treated the same way.
        if isinstance(pkg, (SimpleRpmWrapper, RpmWrapper, ExtendedRpmWrapper)):
            pkg_name = pkg.name
        else:
            pkg_name = pkg
        if pkg_arch:
            result.add("%s.%s" % (pkg_name, pkg_arch))
        else:
            result.add(pkg_name)
    return sorted(result)
|
||||
|
||||
|
||||
def write_pungi_config(
    compose,
    arch,
    variant,
    packages,
    groups,
    filter_packages,
    multilib_whitelist,
    multilib_blacklist,
    fulltree_excludes=None,
    prepopulate=None,
    source_name=None,
    package_sets=None,
):
    """write pungi config (kickstart) for arch/variant

    Collects the repositories (package sets, comps repos, lookaside repos)
    and package/group lists, then writes them as a kickstart file for the
    depsolver. Raises RuntimeError when nothing at all would be included.
    """
    pungi_wrapper = PungiWrapper()
    pungi_cfg = compose.paths.work.pungi_conf(
        variant=variant, arch=arch, source_name=source_name
    )

    compose.log_info(
        "Writing pungi config (arch: %s, variant: %s): %s", arch, variant, pungi_cfg
    )

    # One repo per package set that this variant is allowed to use.
    repos = {}
    for i, pkgset in enumerate(package_sets or []):
        if not variant.pkgsets or pkgset.name in variant.pkgsets:
            repos["pungi-repo-%d" % i] = pkgset.paths[arch]
    if compose.has_comps:
        repos["comps-repo"] = compose.paths.work.comps_repo(arch=arch, variant=variant)
    # Optional variants see comps of all sibling variants; addons and layered
    # products additionally see their parent's comps.
    if variant.type == "optional":
        for var in variant.parent.get_variants(
            arch=arch, types=["self", "variant", "addon", "layered-product"]
        ):
            repos["%s-comps" % var.uid] = compose.paths.work.comps_repo(
                arch=arch, variant=var
            )
    if variant.type in ["addon", "layered-product"]:
        repos["parent-comps"] = compose.paths.work.comps_repo(
            arch=arch, variant=variant.parent
        )

    lookaside_repos = {}
    for i, repo_url in enumerate(
        pungi.phases.gather.get_lookaside_repos(compose, arch, variant)
    ):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    packages_str = list(_format_packages(packages))
    filter_packages_str = list(_format_packages(filter_packages))

    if not groups and not packages_str and not prepopulate:
        raise RuntimeError(
            "No packages included in %s.%s "
            "(no comps groups, no input packages, no prepopulate)" % (variant.uid, arch)
        )

    pungi_wrapper.write_kickstart(
        ks_path=pungi_cfg,
        repos=repos,
        groups=groups,
        packages=packages_str,
        exclude_packages=filter_packages_str,
        lookaside_repos=lookaside_repos,
        fulltree_excludes=fulltree_excludes,
        multilib_whitelist=multilib_whitelist,
        multilib_blacklist=multilib_blacklist,
        prepopulate=prepopulate,
    )
|
||||
|
||||
|
||||
def resolve_deps(compose, arch, variant, source_name=None):
    """Run the external depsolver for arch/variant and parse its output.

    :return: tuple of (packages, broken_deps) as produced by
        PungiWrapper.parse_log. Raises RuntimeError when comps mentions
        missing packages and "require_all_comps_packages" is enabled.
    """
    pungi_wrapper = PungiWrapper()
    pungi_log = compose.paths.work.pungi_log(arch, variant, source_name=source_name)

    msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)

    compose.log_info("[BEGIN] %s" % msg)
    pungi_conf = compose.paths.work.pungi_conf(arch, variant, source_name=source_name)

    multilib_methods = get_arch_variant_data(compose.conf, "multilib", arch, variant)

    greedy_method = compose.conf["greedy_method"]

    # variant
    fulltree = compose.conf["gather_fulltree"]
    selfhosting = compose.conf["gather_selfhosting"]

    # profiling
    profiler = compose.conf["gather_profiler"]

    # optional
    if variant.type == "optional":
        fulltree = True
        selfhosting = True

    # addon
    if variant.type in ["addon", "layered-product"]:
        # packages having SRPM in parent variant are excluded from
        # fulltree (via %fulltree-excludes)
        fulltree = True
        selfhosting = False

    lookaside_repos = {}
    for i, repo_url in enumerate(
        pungi.phases.gather.get_lookaside_repos(compose, arch, variant)
    ):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    yum_arch = tree_arch_to_yum_arch(arch)
    tmp_dir = compose.paths.work.tmp_dir(arch, variant)
    cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
    # TODO: remove YUM code, fully migrate to DNF
    backends = {
        "yum": pungi_wrapper.get_pungi_cmd,
        "dnf": pungi_wrapper.get_pungi_cmd_dnf,
    }
    get_cmd = backends[compose.conf["gather_backend"]]
    cmd = get_cmd(
        pungi_conf,
        destdir=tmp_dir,
        name=variant.uid,
        selfhosting=selfhosting,
        fulltree=fulltree,
        arch=yum_arch,
        full_archlist=True,
        greedy=greedy_method,
        cache_dir=cache_dir,
        lookaside_repos=lookaside_repos,
        multilib_methods=multilib_methods,
        profiler=profiler,
    )
    # Use temp working directory directory as workaround for
    # https://bugzilla.redhat.com/show_bug.cgi?id=795137
    with temp_dir(prefix="pungi_") as work_dir:
        run(cmd, logfile=pungi_log, show_cmd=True, workdir=work_dir, env=os.environ)

    # Clean up tmp dir
    # Workaround for rpm not honoring sgid bit which only appears when yum is used.
    yumroot_dir = os.path.join(tmp_dir, "work", arch, "yumroot")
    if os.path.isdir(yumroot_dir):
        try:
            shutil.rmtree(yumroot_dir)
        except Exception as e:
            # Best-effort cleanup; a leftover directory is only a warning.
            compose.log_warning(
                "Failed to clean up tmp dir: %s %s" % (yumroot_dir, str(e))
            )

    with open(pungi_log, "r") as f:
        packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)

    if missing_comps_pkgs:
        log_msg = "Packages mentioned in comps do not exist for %s.%s: %s" % (
            variant.uid,
            arch,
            ", ".join(sorted(missing_comps_pkgs)),
        )
        compose.log_warning(log_msg)
        if compose.conf["require_all_comps_packages"]:
            raise RuntimeError(log_msg)

    compose.log_info("[DONE ] %s" % msg)
    return packages, broken_deps
|
||||
|
||||
|
||||
def check_deps(compose, arch, variant, missing_deps):
    """Abort the compose when unresolved dependencies were found.

    Logs every missing dependency and raises RuntimeError. No-op when the
    "check_deps" option is disabled or nothing is missing.
    """
    if not compose.conf["check_deps"]:
        return
    if not missing_deps:
        return
    for pkg in sorted(missing_deps):
        compose.log_error(
            "Unresolved dependencies for %s.%s in package %s: %s"
            % (variant, arch, pkg, sorted(missing_deps[pkg]))
        )
    raise RuntimeError("Unresolved dependencies detected")
|
580
pungi/phases/gather/methods/method_hybrid.py
Normal file
580
pungi/phases/gather/methods/method_hybrid.py
Normal file
@ -0,0 +1,580 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import gzip
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import createrepo_c as cr
|
||||
import kobo.rpmlib
|
||||
from kobo.shortcuts import run
|
||||
|
||||
import pungi.phases.gather.method
|
||||
from pungi import multilib_dnf
|
||||
from pungi.module_util import Modulemd
|
||||
from pungi.arch import get_valid_arches, tree_arch_to_yum_arch
|
||||
from pungi.phases.gather import _mk_pkg_map
|
||||
from pungi.util import get_arch_variant_data, pkg_is_debug, temp_dir, as_local_file
|
||||
from pungi.wrappers import fus
|
||||
from pungi.wrappers.comps import CompsWrapper
|
||||
|
||||
from .method_nodeps import expand_groups
|
||||
|
||||
|
||||
class FakePackage(object):
    """This imitates a DNF package object and can be passed to python-multilib
    library.
    """

    def __init__(self, pkg):
        self.pkg = pkg

    def __getattr__(self, attr):
        # Everything not overridden below is proxied to the wrapped package.
        return getattr(self.pkg, attr)

    @property
    def files(self):
        # createrepo_c.Package.files is a tuple, but its length differs across
        # versions. The constants define index at which the related value is
        # located.
        return [
            os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
            for entry in self.pkg.files
        ]

    @property
    def provides(self):
        # This is supposed to match what yum package object returns. It's a
        # nested tuple (name, flag, (epoch, version, release)). This code only
        # fills in the name, because that's all that python-multilib is using.
        result = []
        for entry in self.pkg.provides:
            name = entry[0].split()[0]
            result.append((name, None, (None, None, None)))
        return result
|
||||
|
||||
|
||||
class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(GatherMethodHybrid, self).__init__(*args, **kwargs)
|
||||
self.package_maps = {}
|
||||
self.packages = {}
|
||||
# Mapping from package name to set of langpack packages (stored as
|
||||
# names).
|
||||
self.langpacks = {}
|
||||
# Set of packages for which we already added langpacks.
|
||||
self.added_langpacks = set()
|
||||
# Set of NEVRAs of modular packages
|
||||
self.modular_packages = set()
|
||||
# Arch -> pkg name -> set of pkg object
|
||||
self.debuginfo = defaultdict(lambda: defaultdict(set))
|
||||
|
||||
# caches for processed packages
|
||||
self.processed_multilib = set()
|
||||
self.processed_debuginfo = set()
|
||||
|
||||
def _get_pkg_map(self, arch):
|
||||
"""Create a mapping from NEVRA to actual package object. This will be
|
||||
done once for each architecture, since the package set is the same for
|
||||
all variants.
|
||||
|
||||
The keys are in NEVRA format and only include the epoch if it's not
|
||||
zero. This makes it easier to query by results for the depsolver.
|
||||
"""
|
||||
if arch not in self.package_maps:
|
||||
pkg_map = {}
|
||||
for pkgset in self.package_sets:
|
||||
for pkg_arch in pkgset.package_sets[arch].rpms_by_arch:
|
||||
for pkg in pkgset.package_sets[arch].rpms_by_arch[pkg_arch]:
|
||||
pkg_map[_fmt_nevra(pkg, pkg_arch)] = pkg
|
||||
self.package_maps[arch] = pkg_map
|
||||
|
||||
return self.package_maps[arch]
|
||||
|
||||
def _prepare_packages(self):
|
||||
for repo_path in self.get_repos():
|
||||
md = cr.Metadata()
|
||||
md.locate_and_load_xml(repo_path)
|
||||
for key in md.keys():
|
||||
pkg = md.get(key)
|
||||
if pkg.arch in self.valid_arches:
|
||||
self.packages[_fmt_nevra(pkg, arch=pkg.arch)] = FakePackage(pkg)
|
||||
|
||||
def _get_package(self, nevra):
|
||||
if not self.packages:
|
||||
self._prepare_packages()
|
||||
return self.packages[nevra]
|
||||
|
||||
def _prepare_debuginfo(self):
|
||||
"""Prepare cache of debuginfo packages for easy access. The cache is
|
||||
indexed by package architecture and then by package name. There can be
|
||||
more than one debuginfo package with the same name.
|
||||
"""
|
||||
for pkgset in self.package_sets:
|
||||
for pkg_arch in pkgset.package_sets[self.arch].rpms_by_arch:
|
||||
for pkg in pkgset.package_sets[self.arch].rpms_by_arch[pkg_arch]:
|
||||
self.debuginfo[pkg.arch][pkg.name].add(pkg)
|
||||
|
||||
def _get_debuginfo(self, name, arch):
|
||||
if not self.debuginfo:
|
||||
self._prepare_debuginfo()
|
||||
return self.debuginfo.get(arch, {}).get(name, set())
|
||||
|
||||
def expand_list(self, patterns):
|
||||
"""Given a list of globs, create a list of package names matching any
|
||||
of the pattern.
|
||||
"""
|
||||
expanded = set()
|
||||
for pkgset in self.package_sets:
|
||||
for pkg_arch in pkgset.package_sets[self.arch].rpms_by_arch:
|
||||
for pkg in pkgset.package_sets[self.arch].rpms_by_arch[pkg_arch]:
|
||||
for pattern in patterns:
|
||||
if fnmatch(pkg.name, pattern):
|
||||
expanded.add(pkg)
|
||||
break
|
||||
return expanded
|
||||
|
||||
    def prepare_modular_packages(self):
        """Collect NEVRA strings of all modular RPM artifacts on the current
        arch across all variants into ``self.modular_packages``.

        Used later to stop wildcards from pulling in modular packages.
        """
        for var in self.compose.all_variants.values():
            for mmd in var.arch_mmds.get(self.arch, {}).values():
                self.modular_packages.update(mmd.get_rpm_artifacts())
|
||||
|
||||
    def prepare_langpacks(self, arch, variant):
        """Build ``self.langpacks``: for each comps langpack entry, the set
        of concrete package names in the package sets matching its pattern.

        Development, static and debuginfo packages are excluded. No-op when
        the compose has no comps file.
        """
        if not self.compose.has_comps:
            return
        comps_file = self.compose.paths.work.comps(arch, variant, create_dir=False)
        comps = CompsWrapper(comps_file)

        for name, install in comps.get_langpacks().items():
            # Replace %s with * for fnmatch.
            install_match = install % "*"
            self.langpacks[name] = set()
            for pkgset in self.package_sets:
                for pkg_arch in pkgset.package_sets[arch].rpms_by_arch:
                    for pkg in pkgset.package_sets[arch].rpms_by_arch[pkg_arch]:
                        if not fnmatch(pkg.name, install_match):
                            # Does not match the pattern, ignore...
                            continue
                        if pkg.name.endswith("-devel") or pkg.name.endswith("-static"):
                            continue
                        if pkg_is_debug(pkg):
                            continue
                        self.langpacks[name].add(pkg.name)
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
arch,
|
||||
variant,
|
||||
package_sets,
|
||||
packages=[],
|
||||
groups=[],
|
||||
multilib_whitelist=[],
|
||||
multilib_blacklist=[],
|
||||
filter_packages=[],
|
||||
prepopulate=[],
|
||||
**kwargs
|
||||
):
|
||||
self.arch = arch
|
||||
self.variant = variant
|
||||
self.valid_arches = get_valid_arches(arch, multilib=True)
|
||||
self.package_sets = package_sets
|
||||
|
||||
self.prepare_langpacks(arch, variant)
|
||||
self.prepare_modular_packages()
|
||||
|
||||
self.multilib_methods = get_arch_variant_data(
|
||||
self.compose.conf, "multilib", arch, variant
|
||||
)
|
||||
self.multilib = multilib_dnf.Multilib(
|
||||
self.multilib_methods,
|
||||
set(p.name for p in self.expand_list(multilib_blacklist)),
|
||||
set(p.name for p in self.expand_list(multilib_whitelist)),
|
||||
)
|
||||
|
||||
platform = get_platform(self.compose, variant, arch)
|
||||
|
||||
packages.update(
|
||||
expand_groups(self.compose, arch, variant, groups, set_pkg_arch=False)
|
||||
)
|
||||
|
||||
packages.update(tuple(pkg.rsplit(".", 1)) for pkg in prepopulate)
|
||||
|
||||
# Filters are received as tuples (name, arch), we should convert it to
|
||||
# strings.
|
||||
filters = [_fmt_pkg(*p) for p in filter_packages]
|
||||
|
||||
cache_prefix = "fus-cache-%s-%s-%s-" % (self.compose.compose_id, variant, arch)
|
||||
with temp_dir(prefix=cache_prefix) as cache_dir:
|
||||
nvrs, out_modules = self.run_solver(
|
||||
variant, arch, packages, platform, filters, cache_dir=cache_dir
|
||||
)
|
||||
filter_modules(variant, arch, out_modules)
|
||||
return expand_packages(
|
||||
self._get_pkg_map(arch),
|
||||
pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
|
||||
nvrs,
|
||||
filter_packages=filter_packages,
|
||||
)
|
||||
# maybe check invalid sigkeys
|
||||
|
||||
def get_repos(self):
|
||||
repos = []
|
||||
for pkgset in self.package_sets:
|
||||
if self.variant.pkgsets and pkgset.name not in self.variant.pkgsets:
|
||||
continue
|
||||
repos.append(pkgset.paths[self.arch])
|
||||
return repos
|
||||
|
||||
    def run_solver(self, variant, arch, packages, platform, filter_packages, cache_dir):
        """Iteratively run the external ``fus`` depsolver until a fixed point.

        Each iteration writes a config with the (new) input packages, runs
        fus, and feeds newly discovered multilib, debuginfo and langpack
        packages back as input for the next iteration. Stops when an
        iteration adds nothing new.

        :returns: tuple (results, result_modules) — the accumulated solver
            output tuples and the set of kept module NSVCs.
        """
        repos = self.get_repos()
        results = set()
        result_modules = set()

        # Modules only need to be given to the solver once (first iteration).
        modules = []
        for mmd in variant.arch_mmds.get(arch, {}).values():
            modules.append("%s:%s" % (mmd.get_module_name(), mmd.get_stream_name()))

        # Expand any "*" globs in the requested package names.
        input_packages = []
        for pkg_name, pkg_arch in packages:
            input_packages.extend(self._expand_wildcard(pkg_name, pkg_arch))

        step = 0

        while True:
            step += 1
            conf_file = self.compose.paths.work.fus_conf(arch, variant, step)
            fus.write_config(conf_file, sorted(modules), sorted(input_packages))
            cmd = fus.get_cmd(
                conf_file,
                tree_arch_to_yum_arch(arch),
                repos,
                pungi.phases.gather.get_lookaside_repos(self.compose, arch, variant),
                platform=platform,
                filter_packages=filter_packages,
            )
            logfile = self.compose.paths.log.log_file(
                arch, "hybrid-depsolver-%s-iter-%d" % (variant, step)
            )
            # Adding this environment variable will tell GLib not to prefix
            # any log messages with the PID of the fus process (which is quite
            # useless for us anyway).
            env = os.environ.copy()
            env["G_MESSAGES_PREFIXED"] = ""
            env["XDG_CACHE_HOME"] = cache_dir
            self.compose.log_debug(
                "[BEGIN] Running fus (arch: %s, variant: %s)" % (arch, variant)
            )
            run(cmd, logfile=logfile, show_cmd=True, env=env)
            # Solver results are read back from its log file.
            output, out_modules = fus.parse_output(logfile)
            self.compose.log_debug(
                "[DONE ] Running fus (arch: %s, variant: %s)" % (arch, variant)
            )
            # No need to resolve modules again. They are not going to change.
            modules = []
            # Reset input packages as well to only solve newly added things.
            input_packages = []
            # Preserve the results from this iteration.
            results.update(output)
            result_modules.update(out_modules)

            # Feed multilib counterparts of freshly solved packages back in.
            new_multilib = self.add_multilib(variant, arch, output)
            input_packages.extend(
                _fmt_pkg(pkg_name, pkg_arch)
                for pkg_name, pkg_arch in sorted(new_multilib)
            )

            # Same for matching debuginfo/debugsource packages...
            new_debuginfo = self.add_debuginfo(arch, output)
            input_packages.extend(
                _fmt_pkg(pkg_name, pkg_arch)
                for pkg_name, pkg_arch in sorted(new_debuginfo)
            )

            # ...and langpacks triggered by the new packages.
            new_langpacks = self.add_langpacks(output)
            input_packages.extend(new_langpacks)

            if not input_packages:
                # Nothing new was added, we can stop now.
                break

        return results, result_modules
|
||||
|
||||
def add_multilib(self, variant, arch, nvrs):
|
||||
added = set()
|
||||
if not self.multilib_methods:
|
||||
return []
|
||||
|
||||
for nvr, pkg_arch, flags in nvrs:
|
||||
if (nvr, pkg_arch) in self.processed_multilib:
|
||||
continue
|
||||
self.processed_multilib.add((nvr, pkg_arch))
|
||||
|
||||
if "modular" in flags:
|
||||
continue
|
||||
|
||||
if pkg_arch != arch:
|
||||
# Not a native package, not checking to add multilib
|
||||
continue
|
||||
|
||||
nevr = kobo.rpmlib.parse_nvr(nvr)
|
||||
|
||||
for add_arch in self.valid_arches:
|
||||
if add_arch == arch:
|
||||
continue
|
||||
try:
|
||||
multilib_candidate = self._get_package("%s.%s" % (nvr, add_arch))
|
||||
except KeyError:
|
||||
continue
|
||||
if self.multilib.is_multilib(multilib_candidate):
|
||||
added.add((nevr["name"], add_arch))
|
||||
|
||||
return added
|
||||
|
||||
    def add_debuginfo(self, arch, nvrs):
        """Inspect solver output and return (name, arch) tuples of debuginfo
        and debugsource packages from the same builds as the solved packages.

        Each (nvr, arch) pair is only examined once per compose run; modular
        packages are skipped.
        """
        added = set()

        for nvr, pkg_arch, flags in nvrs:
            if (nvr, pkg_arch) in self.processed_debuginfo:
                continue
            self.processed_debuginfo.add((nvr, pkg_arch))

            if "modular" in flags:
                continue

            pkg = self._get_package("%s.%s" % (nvr, pkg_arch))

            # There are two ways how the debuginfo package can be named. We
            # want to get them all.
            source_name = kobo.rpmlib.parse_nvra(pkg.rpm_sourcerpm)["name"]
            for debuginfo_name in [
                "%s-debuginfo" % pkg.name,
                "%s-debugsource" % source_name,
            ]:
                debuginfo = self._get_debuginfo(debuginfo_name, pkg_arch)
                for dbg in debuginfo:
                    # For each debuginfo package that matches on name and
                    # architecture, we also need to check if it comes from the
                    # same build.
                    if dbg.sourcerpm == pkg.rpm_sourcerpm:
                        added.add((dbg.name, dbg.arch))

        return added
|
||||
|
||||
def add_langpacks(self, nvrs):
|
||||
if not self.langpacks:
|
||||
return set()
|
||||
|
||||
added = set()
|
||||
for nvr, pkg_arch, flags in nvrs:
|
||||
if "modular" in flags:
|
||||
continue
|
||||
name = nvr.rsplit("-", 2)[0]
|
||||
if name in self.added_langpacks:
|
||||
# This package is already processed.
|
||||
continue
|
||||
added.update(self.langpacks.get(name, []))
|
||||
self.added_langpacks.add(name)
|
||||
|
||||
return sorted(added)
|
||||
|
||||
    def _expand_wildcard(self, pkg_name, pkg_arch):
        """Expand a possibly-globbed package name into concrete NEVRA strings.

        Names without "*" are returned as a single "name[.arch]" entry.
        Globs are matched against all packages in the package sets, skipping
        debuginfo, arch mismatches and modular packages.
        """
        if "*" not in pkg_name:
            # Plain name, nothing to expand.
            return [_fmt_pkg(pkg_name, pkg_arch)]

        packages = []

        for pkg in self.expand_list([pkg_name]):
            if pkg_is_debug(pkg):
                # No debuginfo
                continue

            if pkg_arch:
                if pkg_arch != pkg.arch:
                    # Arch is specified and does not match, skip the package.
                    continue
            else:
                if pkg.arch not in ("noarch", self.arch):
                    # No arch specified and package does not match
                    continue

            # Modular artifact lists always include the epoch (0 if unset),
            # so build the strict form for comparison.
            strict_nevra = "%s-%s:%s-%s.%s" % (
                pkg.name,
                pkg.epoch or "0",
                pkg.version,
                pkg.release,
                pkg.arch,
            )
            if strict_nevra in self.modular_packages:
                # Wildcards should not match modular packages.
                continue

            packages.append(_fmt_nevra(pkg, pkg.arch))

        return packages
|
||||
|
||||
|
||||
def iter_platforms_in_repo(url):
    """Find all platform streams that any module in the given repo requires
    at runtime. Yields lists of stream names (possibly empty).
    """
    repomd = os.path.join(url, "repodata/repomd.xml")
    with as_local_file(repomd) as url_:
        repomd = cr.Repomd(url_)
    for rec in repomd.records:
        if rec.type != "modules":
            continue
        # No with statement on Python 2.6 for GzipFile...
        record_url = os.path.join(url, rec.location_href)
        with as_local_file(record_url) as url_:
            gzipped_file = gzip.GzipFile(url_, "rb")
            mod_index = Modulemd.ModuleIndex.new()
            # strict=False: tolerate unknown fields in the metadata.
            mod_index.update_from_string(gzipped_file.read().decode("utf-8"), False)
            gzipped_file.close()
        for module_name in mod_index.get_module_names():
            module = mod_index.get_module(module_name)
            for module_stream in module.get_all_streams():
                # Normalize to stream metadata version 2 before reading deps.
                module_stream = module_stream.upgrade(2)
                for dep in module_stream.get_dependencies():
                    yield dep.get_runtime_streams("platform")
|
||||
|
||||
|
||||
def get_platform_from_lookasides(compose, variant, arch):
    """Find a set of all platform dependencies in all lookaside repos."""
    platforms = set()
    for repo in pungi.phases.gather.get_lookaside_repos(compose, arch, variant):
        # fus._prep_path normalizes the repo location into a usable URL/path.
        for ps in iter_platforms_in_repo(fus._prep_path(repo)):
            platforms.update(ps)
    return platforms
|
||||
|
||||
|
||||
def get_platform(compose, variant, arch):
    """Find platform stream for modules. Raises RuntimeError if there are
    conflicting requests.

    Platform streams are collected from the lookaside repos and from the
    runtime dependencies of all modules in all variants on this arch.
    Returns None when no module requests any platform.
    """
    platforms = get_platform_from_lookasides(compose, variant, arch)

    for var in compose.all_variants.values():
        for mmd in var.arch_mmds.get(arch, {}).values():
            for dep in mmd.get_dependencies():
                streams = dep.get_runtime_streams("platform")
                if streams:
                    platforms.update(streams)

    if len(platforms) > 1:
        raise RuntimeError("There are conflicting requests for platform.")

    # At most one element at this point, so the pick is deterministic.
    return list(platforms)[0] if platforms else None
|
||||
|
||||
|
||||
def _fmt_pkg(pkg_name, arch):
|
||||
if arch:
|
||||
pkg_name += ".%s" % arch
|
||||
return pkg_name
|
||||
|
||||
|
||||
def _nevra(**kwargs):
|
||||
if kwargs.get("epoch") not in (None, "", 0, "0"):
|
||||
return "%(name)s-%(epoch)s:%(version)s-%(release)s.%(arch)s" % kwargs
|
||||
return "%(name)s-%(version)s-%(release)s.%(arch)s" % kwargs
|
||||
|
||||
|
||||
def _fmt_nevra(pkg, arch):
    """Format *pkg* as a NEVRA string using the explicitly given *arch*
    (which may differ from ``pkg.arch``, e.g. for source packages).
    """
    return _nevra(
        name=pkg.name,
        epoch=pkg.epoch,
        version=pkg.version,
        release=pkg.release,
        arch=arch,
    )
|
||||
|
||||
|
||||
def _get_srpm_nevra(pkg):
    """Return the NEVRA string of *pkg*'s source RPM.

    The sourcerpm filename carries no epoch, so the binary package's epoch
    is used as a fallback.
    """
    nevra = kobo.rpmlib.parse_nvra(pkg.sourcerpm)
    nevra["epoch"] = nevra["epoch"] or pkg.epoch
    return _nevra(**nevra)
|
||||
|
||||
|
||||
def _make_result(paths):
|
||||
return [{"path": path, "flags": []} for path in sorted(paths)]
|
||||
|
||||
|
||||
def get_repo_packages(path):
    """Extract file names of all packages in the given repository."""

    packages = set()

    # Called by createrepo_c for every package in primary metadata.
    def callback(pkg):
        packages.add(os.path.basename(pkg.location_href))

    repomd = os.path.join(path, "repodata/repomd.xml")
    with as_local_file(repomd) as url_:
        repomd = cr.Repomd(url_)
    for rec in repomd.records:
        if rec.type != "primary":
            continue
        record_url = os.path.join(path, rec.location_href)
        with as_local_file(record_url) as url_:
            # do_files=False: file lists are not needed, only names.
            cr.xml_parse_primary(url_, pkgcb=callback, do_files=False)

    return packages
|
||||
|
||||
|
||||
def expand_packages(nevra_to_pkg, lookasides, nvrs, filter_packages):
    """For each package add source RPM.

    :param nevra_to_pkg: mapping from NEVRA string to package object.
    :param lookasides: lookaside repo locations; packages present there are
        excluded from the result.
    :param nvrs: solver output tuples (nvr, arch, flags).
    :param filter_packages: (name, arch) tuples; matching SRPMs are dropped.
    :returns: package map built by ``_mk_pkg_map`` (defined elsewhere in
        this module) with rpm/srpm/debuginfo path lists.
    """
    # This will serve as the final result. We collect sets of paths to the
    # packages.
    rpms = set()
    srpms = set()
    debuginfo = set()

    filters = set(filter_packages)

    lookaside_packages = set()
    for repo in lookasides:
        lookaside_packages.update(get_repo_packages(repo))

    for nvr, pkg_arch, flags in nvrs:
        pkg = nevra_to_pkg["%s.%s" % (nvr, pkg_arch)]
        if os.path.basename(pkg.file_path) in lookaside_packages:
            # Fus can return lookaside package in output if the package is
            # explicitly listed as input. This can happen during comps
            # expansion.
            continue
        if pkg_is_debug(pkg):
            debuginfo.add(pkg.file_path)
        else:
            rpms.add(pkg.file_path)

        try:
            srpm_nevra = _get_srpm_nevra(pkg)
            srpm = nevra_to_pkg[srpm_nevra]
            if (srpm.name, "src") in filters:
                # Filtered package, skipping
                continue
            if os.path.basename(srpm.file_path) not in lookaside_packages:
                srpms.add(srpm.file_path)
        except KeyError:
            # Didn't find source RPM.. this should be logged
            pass

    return _mk_pkg_map(_make_result(rpms), _make_result(srpms), _make_result(debuginfo))
|
||||
|
||||
|
||||
def filter_modules(variant, arch, nsvcs_to_keep):
    """Remove any arch-specific module metadata from the module if it's not
    listed in the list to keep. This will ultimately cause the module to not
    be included in the final repodata and module metadata.
    """
    mmds = variant.arch_mmds.get(arch, {})
    # Iterate over a snapshot of the keys so entries can be deleted safely.
    for nsvc in list(mmds):
        if nsvc not in nsvcs_to_keep:
            del mmds[nsvc]
|
184
pungi/phases/gather/methods/method_nodeps.py
Normal file
184
pungi/phases/gather/methods/method_nodeps.py
Normal file
@ -0,0 +1,184 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from pprint import pformat
|
||||
import re
|
||||
import six
|
||||
|
||||
import pungi.arch
|
||||
from pungi.util import pkg_is_rpm, pkg_is_srpm, pkg_is_debug
|
||||
from pungi.wrappers.comps import CompsWrapper
|
||||
from pungi.phases.pkgset.pkgsets import ExtendedRpmWrapper
|
||||
|
||||
import pungi.phases.gather.method
|
||||
from kobo.pkgset import SimpleRpmWrapper, RpmWrapper
|
||||
|
||||
|
||||
class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
    """Gather method that takes exactly the requested packages (plus their
    SRPMs and debuginfo) without running any dependency solving.
    """

    def __call__(self, arch, variant, *args, **kwargs):
        """Open a per-variant log file and delegate to :meth:`worker`."""
        fname = "gather-nodeps-%s" % variant.uid
        if self.source_name:
            fname += "-" + self.source_name
        log_file = self.compose.paths.log.log_file(arch, fname)
        with open(log_file, "w") as log:
            return self.worker(log, arch, variant, *args, **kwargs)

    def worker(
        self,
        log,
        arch,
        variant,
        pkgs,
        groups,
        filter_packages,
        multilib_whitelist,
        multilib_blacklist,
        package_sets,
        path_prefix=None,
        fulltree_excludes=None,
        prepopulate=None,
    ):
        """Collect rpm/srpm/debuginfo entries matching the input.

        ``pkgs`` entries are either glob strings or RPM wrapper objects;
        matching binary packages are added, then SRPMs of added packages,
        then debuginfo packages whose SRPM and a compatible binary arch were
        included. The filter/multilib/fulltree/prepopulate parameters are
        accepted for interface compatibility but not used here.
        """
        result = {
            "rpm": [],
            "srpm": [],
            "debuginfo": [],
        }

        group_packages = expand_groups(self.compose, arch, variant, groups)
        packages = pkgs | group_packages
        log.write("Requested packages:\n%s\n" % pformat(packages))

        seen_rpms = {}
        seen_srpms = {}

        valid_arches = pungi.arch.get_valid_arches(arch, multilib=True)
        compatible_arches = {}
        for i in valid_arches:
            compatible_arches[i] = pungi.arch.get_compatible_arches(i)

        log.write("\nGathering rpms\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_rpm(pkg):
                continue
            for gathered_pkg, pkg_arch in packages:
                # String entries are shell-style globs: translate to a regex
                # anchored at the end and match the package name.
                if isinstance(gathered_pkg, six.string_types) and not re.match(
                    gathered_pkg.replace(".", "\\.")
                    .replace("+", "\\+")
                    .replace("*", ".*")
                    + "$",
                    pkg.name,
                ):
                    continue
                # Wrapper objects must match on exact NEVRA.
                elif (
                    type(gathered_pkg)
                    in [SimpleRpmWrapper, RpmWrapper, ExtendedRpmWrapper]
                    and pkg.nevra != gathered_pkg.nevra
                ):
                    continue
                if (
                    pkg_arch is not None
                    and pkg.arch != pkg_arch
                    and pkg.arch != "noarch"
                ):
                    continue
                # NOTE(review): there is no break here, so a package matching
                # several requested entries is appended once per match —
                # confirm whether downstream deduplicates.
                result["rpm"].append({"path": pkg.file_path, "flags": ["input"]})
                seen_rpms.setdefault(pkg.name, set()).add(pkg.arch)
                seen_srpms.setdefault(pkg.sourcerpm, set()).add(pkg.arch)
                log.write(
                    "Added %s (matched %s.%s) (sourcerpm: %s)\n"
                    % (pkg, gathered_pkg, pkg_arch, pkg.sourcerpm)
                )

        log.write("\nGathering source rpms\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_srpm(pkg):
                continue
            # seen_srpms is keyed by the sourcerpm filename of added binaries.
            if pkg.file_name in seen_srpms:
                result["srpm"].append({"path": pkg.file_path, "flags": ["input"]})
                log.write("Adding %s\n" % pkg)

        log.write("\nGathering debuginfo packages\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_debug(pkg):
                continue
            if pkg.sourcerpm not in seen_srpms:
                log.write("Not considering %s: corresponding srpm not included\n" % pkg)
                continue
            pkg_arches = set(compatible_arches[pkg.arch]) - set(["noarch"])
            seen_arches = set(seen_srpms[pkg.sourcerpm]) - set(["noarch"])
            if not (pkg_arches & seen_arches):
                # We only want to pull in a debuginfo if we have a binary
                # package for a compatible arch. Noarch packages should not
                # pull debuginfo (they would pull in all architectures).
                log.write("Not including %s: no package for this arch\n" % pkg)
                continue
            result["debuginfo"].append({"path": pkg.file_path, "flags": ["input"]})
            log.write("Adding %s\n" % pkg)

        return result
|
||||
|
||||
|
||||
def expand_groups(compose, arch, variant, groups, set_pkg_arch=True):
    """Read comps file filtered for given architecture and variant and return
    all packages in given groups.

    :param set_pkg_arch: when True, each package is tagged with the tree
        arch; when False the arch part is None.
    :returns: A set of tuples (pkg_name, arch)
    :raises KeyError: when a group is not found in any applicable comps file.
    """
    if not groups:
        # No groups, nothing to do (this also covers case when there is no
        # comps file.
        return set()
    comps = []
    comps_file = compose.paths.work.comps(arch, variant, create_dir=False)
    comps.append(CompsWrapper(comps_file))

    # Nested variants can also resolve groups against their parent's comps,
    # and optional variants against all sibling variants.
    if variant and variant.parent:
        parent_comps_file = compose.paths.work.comps(
            arch, variant.parent, create_dir=False
        )
        comps.append(CompsWrapper(parent_comps_file))

        if variant.type == "optional":
            for v in variant.parent.variants.values():
                if v.id == variant.id:
                    continue
                comps_file = compose.paths.work.comps(arch, v, create_dir=False)
                if os.path.exists(comps_file):
                    comps.append(CompsWrapper(comps_file))

    packages = set()
    pkg_arch = arch if set_pkg_arch else None
    for group in groups:
        found = False
        ex = None
        for c in comps:
            try:
                packages.update([(pkg, pkg_arch) for pkg in c.get_packages(group)])
                found = True
                break
            except KeyError as e:
                # Remember the error; the group may exist in a later comps.
                ex = e

        if not found:
            # Group was in none of the comps files: re-raise the last error.
            raise ex

    return packages
|
||||
|
||||
|
||||
def iterate_packages(package_sets, arch):
    """Yield every package object for *arch* from all package sets."""
    for pkgset in package_sets:
        arch_set = pkgset[arch]
        for key in arch_set:
            yield arch_set[key]
|
19
pungi/phases/gather/source.py
Normal file
19
pungi/phases/gather/source.py
Normal file
@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
class GatherSourceBase(object):
    """Common base for all gather sources; holds the compose object."""

    def __init__(self, compose):
        # Sources need the compose for configuration and path lookups.
        self.compose = compose
|
26
pungi/phases/gather/sources/__init__.py
Normal file
26
pungi/phases/gather/sources/__init__.py
Normal file
@ -0,0 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
from .source_comps import GatherSourceComps
|
||||
from .source_json import GatherSourceJson
|
||||
from .source_module import GatherSourceModule
|
||||
from .source_none import GatherSourceNone
|
||||
|
||||
# Registry of available gather sources, keyed by the name used in the
# compose configuration.
ALL_SOURCES = {
    "comps": GatherSourceComps,
    "json": GatherSourceJson,
    "module": GatherSourceModule,
    "none": GatherSourceNone,
}
|
42
pungi/phases/gather/sources/source_comps.py
Normal file
42
pungi/phases/gather/sources/source_comps.py
Normal file
@ -0,0 +1,42 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
Get a package list based on comps.xml.
|
||||
|
||||
Input format:
|
||||
see comps.dtd
|
||||
|
||||
Output:
|
||||
set([(rpm_name, rpm_arch or None)])
|
||||
"""
|
||||
|
||||
|
||||
from pungi.wrappers.comps import CompsWrapper
|
||||
import pungi.phases.gather.source
|
||||
|
||||
|
||||
class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):
    """Gather source returning all comps groups for the variant."""

    def __call__(self, arch, variant):
        """Return (packages, groups); packages is always empty here.

        Without a configured comps file both sets are empty.
        """
        if not self.compose.conf.get("comps_file"):
            return set(), set()

        comps = CompsWrapper(self.compose.paths.work.comps(arch=arch, variant=variant))
        groups = set(comps.get_comps_groups())
        return set(), groups
|
62
pungi/phases/gather/sources/source_json.py
Normal file
62
pungi/phases/gather/sources/source_json.py
Normal file
@ -0,0 +1,62 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
Get a package list based on a JSON mapping.
|
||||
|
||||
Input format:
|
||||
{
|
||||
variant: {
|
||||
tree_arch: {
|
||||
rpm_name: [rpm_arch, rpm_arch, ... (or None for any/best arch)],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Output:
|
||||
set([(rpm_name, rpm_arch or None)])
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import pungi.phases.gather.source
|
||||
|
||||
|
||||
class GatherSourceJson(pungi.phases.gather.source.GatherSourceBase):
    """Gather source reading an explicit package mapping from a JSON file."""

    def __call__(self, arch, variant):
        """Return (packages, groups) for the given tree arch and variant.

        When "gather_source_mapping" is not configured, both sets are empty.
        When variant is None, packages for all variants are merged.
        """
        json_path = self.compose.conf.get("gather_source_mapping")
        if not json_path:
            return set(), set()
        with open(os.path.join(self.compose.config_dir, json_path), "r") as f:
            mapping = json.load(f)

        # Either every variant uid in the mapping, or just the requested one.
        variant_uids = list(mapping) if variant is None else [variant.uid]

        packages = set()
        for uid in variant_uids:
            for pkg_name, pkg_arches in mapping.get(uid, {}).get(arch, {}).items():
                for pkg_arch in pkg_arches:
                    packages.add((pkg_name, pkg_arch))
        return packages, set()
|
52
pungi/phases/gather/sources/source_module.py
Normal file
52
pungi/phases/gather/sources/source_module.py
Normal file
@ -0,0 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
Get a package list based on modulemd metadata loaded in pkgset phase. Each
|
||||
modulemd file contains a list of exact RPM NEVRAs that should be include, so
|
||||
just go over all modules in a given variant and join all lists together.
|
||||
"""
|
||||
|
||||
|
||||
import pungi.arch
|
||||
import pungi.phases.gather.source
|
||||
|
||||
|
||||
class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
    """Gather source returning exact RPM artifacts of the variant's modules."""

    def __call__(self, arch, variant):
        """Return (packages, groups); groups is always empty here.

        Packages are returned as (rpm_object, None) tuples for every RPM of
        a compatible arch that is listed in some module's artifact list.
        """
        groups = set()
        packages = set()

        # Check if there is a variant. The method only makes sense for variants.
        if variant is None:
            return packages, groups

        compatible_arches = pungi.arch.get_compatible_arches(arch, multilib=True)

        for nsvc, module_stream in variant.arch_mmds.get(arch, {}).items():
            # All RPMs of this module's package set on any compatible arch.
            available_rpms = sum(
                (
                    variant.nsvc_to_pkgset[nsvc].rpms_by_arch.get(a, [])
                    for a in compatible_arches
                ),
                [],
            )
            to_include = set(module_stream.get_rpm_artifacts())
            for rpm_obj in available_rpms:
                if rpm_obj.nevra in to_include:
                    packages.add((rpm_obj, None))

        return packages, groups
|
33
pungi/phases/gather/sources/source_none.py
Normal file
33
pungi/phases/gather/sources/source_none.py
Normal file
@ -0,0 +1,33 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
"""
|
||||
Get an empty package list.
|
||||
|
||||
Input:
|
||||
none
|
||||
|
||||
Output:
|
||||
set()
|
||||
"""
|
||||
|
||||
|
||||
import pungi.phases.gather.source
|
||||
|
||||
|
||||
class GatherSourceNone(pungi.phases.gather.source.GatherSourceBase):
    """Gather source contributing no packages and no groups."""

    def __call__(self, arch, variant):
        """Return empty package and group sets for any input."""
        return set(), set()
|
526
pungi/phases/image_build.py
Normal file
526
pungi/phases/image_build.py
Normal file
@ -0,0 +1,526 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import copy
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from kobo import shortcuts
|
||||
|
||||
from pungi.util import makedirs, get_mtime, get_file_size, failable, log_failed_task
|
||||
from pungi.util import as_local_file, translate_path, get_repo_urls, version_generator
|
||||
from pungi.phases import base
|
||||
from pungi.linker import Linker
|
||||
from pungi.wrappers.kojiwrapper import KojiWrapper
|
||||
from kobo.threads import ThreadPool, WorkerThread
|
||||
from kobo.shortcuts import force_list
|
||||
from productmd.images import Image
|
||||
from productmd.rpms import Rpms
|
||||
|
||||
|
||||
# This is a mapping from formats to file extensions. The format is what koji
# image-build command expects as argument, and the extension is what the file
# name will be ending with. The extensions are used to filter out which task
# results will be pulled into the compose.
# A format may legitimately map to several extensions (e.g. docker archives
# can be gzip- or xz-compressed); any listed extension is accepted.
EXTENSIONS = {
    "docker": ["tar.gz", "tar.xz"],
    "iso": ["iso"],
    "liveimg-squashfs": ["liveimg.squashfs"],
    "qcow": ["qcow"],
    "qcow2": ["qcow2"],
    "raw": ["raw"],
    "raw-xz": ["raw.xz"],
    "rhevm-ova": ["rhevm.ova"],
    "tar-gz": ["tar.gz"],
    "vagrant-hyperv": ["vagrant-hyperv.box"],
    "vagrant-libvirt": ["vagrant-libvirt.box"],
    "vagrant-virtualbox": ["vagrant-virtualbox.box"],
    "vagrant-vmware-fusion": ["vagrant-vmware-fusion.box"],
    "vdi": ["vdi"],
    "vmdk": ["vmdk"],
    "vpc": ["vhd"],
    "vhd-compressed": ["vhd.gz", "vhd.xz"],
    "vsphere-ova": ["vsphere.ova"],
}
|
||||
|
||||
|
||||
class ImageBuildPhase(
    base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
):
    """class for wrapping up koji image-build"""

    name = "image_build"

    def __init__(self, compose, buildinstall_phase=None):
        super(ImageBuildPhase, self).__init__(compose)
        # Worker pool; one CreateImageBuildThread is added per queued task.
        self.pool = ThreadPool(logger=self.logger)
        # Handed to worker threads via the task queue; they check whether
        # buildinstall results were reused before reusing image results.
        self.buildinstall_phase = buildinstall_phase

    def _get_install_tree(self, image_conf, variant):
        """
        Get a path to os tree for a variant specified in `install_tree_from` or
        current variant. If the config is set, it will be removed from the
        dict.
        """
        if variant.type != "variant":
            # Buildinstall only runs for top-level variants. Nested variants
            # need to re-use install tree from parent.
            variant = variant.parent

        install_tree_from = image_conf.pop("install_tree_from", variant.uid)
        if "://" in install_tree_from:
            # It's a URL, return it unchanged
            return install_tree_from
        if install_tree_from.startswith("/"):
            # It's a path on local filesystem.
            return translate_path(self.compose, install_tree_from)

        # Otherwise it must name another variant in this compose.
        install_tree_source = self.compose.all_variants.get(install_tree_from)
        if not install_tree_source:
            raise RuntimeError(
                "There is no variant %s to get install tree from "
                "when building image for %s." % (install_tree_from, variant.uid)
            )
        return translate_path(
            self.compose,
            self.compose.paths.compose.os_tree(
                "$arch", install_tree_source, create_dir=False
            ),
        )

    def _get_repo(self, image_conf, variant):
        """
        Get a comma separated list of repos. First included are those
        explicitly listed in config, followed by repo for current variant
        if it's not included in the list already.
        """
        repos = shortcuts.force_list(image_conf.get("repo", []))

        if not variant.is_empty and variant.uid not in repos:
            repos.append(variant.uid)

        return ",".join(get_repo_urls(self.compose, repos, arch="$arch"))

    def _get_arches(self, image_conf, arches):
        """Intersect configured arches (if any) with *arches*; return sorted list."""
        if "arches" in image_conf["image-build"]:
            arches = set(image_conf["image-build"].get("arches", [])) & arches
        return sorted(arches)

    def _set_release(self, image_conf):
        """If release is set explicitly to None, replace it with date and respin."""
        if "release" in image_conf:
            image_conf["release"] = (
                version_generator(self.compose, image_conf["release"])
                or self.compose.image_release
            )

    def run(self):
        """Queue one image-build task per configured image in each variant."""
        for variant in self.compose.get_variants():
            # Images are never built for the src arch.
            arches = set([x for x in variant.arches if x != "src"])

            for image_conf in self.get_config_block(variant):
                # We will modify the data, so we need to make a copy to
                # prevent problems in next iteration where the original
                # value is needed.
                image_conf = copy.deepcopy(image_conf)
                # Kept unmodified; written to reuse metadata so a later run
                # can detect config changes.
                original_image_conf = copy.deepcopy(image_conf)

                # image_conf is passed to get_image_build_cmd as dict

                image_conf["image-build"]["arches"] = self._get_arches(
                    image_conf, arches
                )
                if not image_conf["image-build"]["arches"]:
                    continue

                # Replace possible ambiguous ref name with explicit hash.
                ksurl = self.get_ksurl(image_conf["image-build"])
                if ksurl:
                    image_conf["image-build"]["ksurl"] = ksurl

                image_conf["image-build"]["variant"] = variant

                image_conf["image-build"]["install_tree"] = self._get_install_tree(
                    image_conf["image-build"], variant
                )

                release = self.get_release(image_conf["image-build"])
                if release:
                    image_conf["image-build"]["release"] = release

                image_conf["image-build"]["version"] = self.get_version(
                    image_conf["image-build"]
                )
                image_conf["image-build"]["target"] = self.get_config(
                    image_conf["image-build"], "target"
                )

                # Pungi config can either contain old [(format, suffix)], or
                # just list of formats, or a single format.
                formats = []
                for format in force_list(image_conf["image-build"]["format"]):
                    formats.append(
                        format[0] if isinstance(format, (tuple, list)) else format
                    )
                image_conf["image-build"]["format"] = formats
                image_conf["image-build"]["repo"] = self._get_repo(
                    image_conf["image-build"], variant
                )

                # "failable" lists arches allowed to fail; "*" means all of
                # them may fail.
                can_fail = image_conf["image-build"].pop("failable", [])
                if can_fail == ["*"]:
                    can_fail = image_conf["image-build"]["arches"]
                if can_fail:
                    image_conf["image-build"]["can_fail"] = sorted(can_fail)

                # Everything the worker thread needs, with paths precomputed.
                cmd = {
                    "original_image_conf": original_image_conf,
                    "image_conf": image_conf,
                    "conf_file": self.compose.paths.work.image_build_conf(
                        image_conf["image-build"]["variant"],
                        image_name=image_conf["image-build"]["name"],
                        image_type="-".join(formats),
                        arches=image_conf["image-build"]["arches"],
                    ),
                    "image_dir": self.compose.paths.compose.image_dir(variant),
                    "relative_image_dir": self.compose.paths.compose.image_dir(
                        variant, relative=True
                    ),
                    "link_type": self.compose.conf["link_type"],
                    "scratch": image_conf["image-build"].pop("scratch", False),
                }
                self.pool.add(CreateImageBuildThread(self.pool))
                self.pool.queue_put((self.compose, cmd, self.buildinstall_phase))

        self.pool.start()
|
||||
|
||||
|
||||
class CreateImageBuildThread(WorkerThread):
    """Worker thread running a single koji image-build task and pulling its
    results into the compose (or reusing results from an older compose).
    """

    def fail(self, compose, cmd):
        """Log the failure; called from ``worker`` when the koji task fails."""
        self.pool.log_error("CreateImageBuild failed.")

    def process(self, item, num):
        """Unpack a queued task and run it under the failability guard."""
        compose, cmd, buildinstall_phase = item
        variant = cmd["image_conf"]["image-build"]["variant"]
        subvariant = cmd["image_conf"]["image-build"].get("subvariant", variant.uid)
        self.failable_arches = cmd["image_conf"]["image-build"].get("can_fail", "")
        # The whole deliverable is failable only if every arch may fail.
        self.can_fail = (
            self.failable_arches == cmd["image_conf"]["image-build"]["arches"]
        )
        with failable(
            compose,
            self.can_fail,
            variant,
            "*",
            "image-build",
            subvariant,
            logger=self.pool._logger,
        ):
            self.worker(num, compose, variant, subvariant, cmd, buildinstall_phase)

    def worker(self, num, compose, variant, subvariant, cmd, buildinstall_phase):
        """Run the image-build task: try reuse first, otherwise invoke koji,
        then link results into the compose and record reuse metadata.
        """
        arches = cmd["image_conf"]["image-build"]["arches"]
        formats = "-".join(cmd["image_conf"]["image-build"]["format"])
        dash_arches = "-".join(arches)
        log_file = compose.paths.log.log_file(
            dash_arches, "imagebuild-%s-%s-%s" % (variant.uid, subvariant, formats)
        )
        # NOTE(review): assumes log_file ends with a 4-character ".log"
        # suffix -- confirm against compose.paths.log.log_file naming.
        metadata_file = log_file[:-4] + ".reuse.json"

        # Checksum repomd.xml of every external (non-variant) repo so a later
        # run can detect changes. None disables reuse for this task.
        external_repo_checksum = {}
        try:
            for repo in cmd["original_image_conf"]["image-build"]["repo"]:
                if repo in compose.all_variants:
                    continue
                with as_local_file(
                    os.path.join(repo, "repodata/repomd.xml")
                ) as filename:
                    with open(filename, "rb") as f:
                        external_repo_checksum[repo] = hashlib.sha256(
                            f.read()
                        ).hexdigest()
        except Exception as e:
            external_repo_checksum = None
            self.pool.log_info(
                "Can't calculate checksum of repomd.xml of external repo - %s" % str(e)
            )

        if self._try_to_reuse(
            compose,
            variant,
            subvariant,
            metadata_file,
            log_file,
            cmd,
            external_repo_checksum,
            buildinstall_phase,
        ):
            return

        msg = (
            "Creating image (formats: %s, arches: %s, variant: %s, subvariant: %s)"
            % (formats, dash_arches, variant, subvariant)
        )
        self.pool.log_info("[BEGIN] %s" % msg)

        koji_wrapper = KojiWrapper(compose)

        # writes conf file for koji image-build
        self.pool.log_info(
            "Writing image-build config for %s.%s into %s"
            % (variant, dash_arches, cmd["conf_file"])
        )

        koji_cmd = koji_wrapper.get_image_build_cmd(
            cmd["image_conf"], conf_file_dest=cmd["conf_file"], scratch=cmd["scratch"]
        )

        # avoid race conditions?
        # Kerberos authentication failed:
        # Permission denied in replay cache code (-1765328215)
        # [workaround] Increased time delay from 3 to 10 sec until the issue in
        # koji gets fixed https://pagure.io/koji/issue/2138
        time.sleep(num * 10)
        output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
        self.pool.log_debug("build-image outputs: %s" % (output))
        if output["retcode"] != 0:
            self.fail(compose, cmd)
            raise RuntimeError(
                "ImageBuild task failed: %s. See %s for more details."
                % (output["task_id"], log_file)
            )

        # copy image to images/
        image_infos = []

        paths = koji_wrapper.get_image_paths(
            output["task_id"],
            callback=lambda arch: log_failed_task(
                compose, variant, arch, "image-build", subvariant
            ),
        )

        # Keep only task results whose file extension matches one of the
        # requested formats (see EXTENSIONS).
        for arch, paths in paths.items():
            for path in paths:
                for format in cmd["image_conf"]["image-build"]["format"]:
                    for suffix in EXTENSIONS[format]:
                        if path.endswith(suffix):
                            image_infos.append(
                                {
                                    "path": path,
                                    "suffix": suffix,
                                    "type": format,
                                    "arch": arch,
                                }
                            )
                            break

        self._link_images(compose, variant, subvariant, cmd, image_infos)
        self._write_reuse_metadata(
            compose, metadata_file, cmd, image_infos, external_repo_checksum
        )

        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))

    def _link_images(self, compose, variant, subvariant, cmd, image_infos):
        """Link images to compose and update image manifest.

        :param Compose compose: Current compose.
        :param Variant variant: Current variant.
        :param str subvariant:
        :param dict cmd: Dict of params for image-build.
        :param dict image_infos: Dict contains image info.
        """
        # The usecase here is that you can run koji image-build with multiple --format
        # It's ok to do it serialized since we're talking about max 2 images per single
        # image_build record
        linker = Linker(logger=self.pool._logger)
        for image_info in image_infos:
            image_dir = cmd["image_dir"] % {"arch": image_info["arch"]}
            makedirs(image_dir)
            relative_image_dir = cmd["relative_image_dir"] % {
                "arch": image_info["arch"]
            }

            # let's not change filename of koji outputs
            image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

            src_file = compose.koji_downloader.get_file(
                os.path.realpath(image_info["path"])
            )
            linker.link(src_file, image_dest, link_type=cmd["link_type"])

            # Update image manifest
            img = Image(compose.im)
            img.type = image_info["type"]
            img.format = image_info["suffix"]
            img.path = os.path.join(relative_image_dir, os.path.basename(image_dest))
            img.mtime = get_mtime(image_dest)
            img.size = get_file_size(image_dest)
            img.arch = image_info["arch"]
            img.disc_number = 1  # We don't expect multiple disks
            img.disc_count = 1
            img.bootable = False
            img.subvariant = subvariant
            setattr(img, "can_fail", self.can_fail)
            setattr(img, "deliverable", "image-build")
            compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)

    def _try_to_reuse(
        self,
        compose,
        variant,
        subvariant,
        metadata_file,
        log_file,
        cmd,
        external_repo_checksum,
        buildinstall_phase,
    ):
        """Try to reuse images from old compose.

        :param Compose compose: Current compose.
        :param Variant variant: Current variant.
        :param str subvariant:
        :param str metadata_file: Path to reuse metadata file.
        :param str log_file: Path to log file.
        :param dict cmd: Dict of params for image-build.
        :param dict external_repo_checksum: Dict contains checksum of repomd.xml
            or None if can't get checksum.
        :param BuildinstallPhase buildinstall_phase: buildinstall phase of
            current compose.
        """
        log_msg = "Cannot reuse old image_build phase results - %s"
        if not compose.conf["image_build_allow_reuse"]:
            self.pool.log_info(
                log_msg % "reuse of old image_build results is disabled."
            )
            return False

        if external_repo_checksum is None:
            self.pool.log_info(
                log_msg % "Can't ensure that external repo is not changed."
            )
            return False

        old_metadata_file = compose.paths.old_compose_path(metadata_file)
        if not old_metadata_file:
            self.pool.log_info(log_msg % "Can't find old reuse metadata file")
            return False

        try:
            old_metadata = self._load_reuse_metadata(old_metadata_file)
        except Exception as e:
            self.pool.log_info(
                log_msg % "Can't load old reuse metadata file: %s" % str(e)
            )
            return False

        if old_metadata["cmd"]["original_image_conf"] != cmd["original_image_conf"]:
            self.pool.log_info(log_msg % "image_build config changed")
            return False

        # Make sure external repo does not change
        if (
            old_metadata["external_repo_checksum"] is None
            or old_metadata["external_repo_checksum"] != external_repo_checksum
        ):
            self.pool.log_info(log_msg % "External repo may be changed")
            return False

        # Make sure buildinstall phase is reused
        for arch in cmd["image_conf"]["image-build"]["arches"]:
            if buildinstall_phase and not buildinstall_phase.reused(variant, arch):
                self.pool.log_info(log_msg % "buildinstall phase changed")
                return False

        # Make sure packages in variant not change
        rpm_manifest_file = compose.paths.compose.metadata("rpms.json")
        rpm_manifest = Rpms()
        rpm_manifest.load(rpm_manifest_file)

        old_rpm_manifest_file = compose.paths.old_compose_path(rpm_manifest_file)
        old_rpm_manifest = Rpms()
        old_rpm_manifest.load(old_rpm_manifest_file)

        # NOTE(review): assumes the original config's "repo" value is a list;
        # a plain string would be iterated per-character -- confirm schema.
        for repo in cmd["original_image_conf"]["image-build"]["repo"]:
            if repo not in compose.all_variants:
                # External repos are checked using other logic.
                continue
            for arch in cmd["image_conf"]["image-build"]["arches"]:
                if (
                    rpm_manifest.rpms[variant.uid][arch]
                    != old_rpm_manifest.rpms[variant.uid][arch]
                ):
                    self.pool.log_info(
                        log_msg % "Packages in %s.%s changed." % (variant.uid, arch)
                    )
                    return False

        self.pool.log_info(
            "Reusing images from old compose for variant %s" % variant.uid
        )
        try:
            self._link_images(
                compose, variant, subvariant, cmd, old_metadata["image_infos"]
            )
        except Exception as e:
            self.pool.log_info(log_msg % "Can't link images %s" % str(e))
            return False

        old_log_file = compose.paths.old_compose_path(log_file)
        try:
            shutil.copy2(old_log_file, log_file)
        except Exception as e:
            self.pool.log_info(
                log_msg % "Can't copy old log_file: %s %s" % (old_log_file, str(e))
            )
            return False

        self._write_reuse_metadata(
            compose,
            metadata_file,
            cmd,
            old_metadata["image_infos"],
            external_repo_checksum,
        )

        return True

    def _write_reuse_metadata(
        self, compose, metadata_file, cmd, image_infos, external_repo_checksum
    ):
        """Write metadata file.

        :param Compose compose: Current compose.
        :param str metadata_file: Path to reuse metadata file.
        :param dict cmd: Dict of params for image-build.
        :param dict image_infos: Dict contains image info.
        :param dict external_repo_checksum: Dict contains checksum of repomd.xml
            or None if can't get checksum.
        """
        msg = "Writing reuse metadata file: %s" % metadata_file
        self.pool.log_info(msg)

        cmd_copy = copy.deepcopy(cmd)
        # The variant entry is an object, not plain data; drop it so the rest
        # of cmd can be serialized as JSON.
        del cmd_copy["image_conf"]["image-build"]["variant"]

        data = {
            "cmd": cmd_copy,
            "image_infos": image_infos,
            "external_repo_checksum": external_repo_checksum,
        }
        try:
            with open(metadata_file, "w") as f:
                json.dump(data, f, indent=4)
        except Exception as e:
            # Best effort: failure to write metadata only prevents reuse next
            # time, it does not fail the build.
            self.pool.log_info("%s Failed: %s" % (msg, str(e)))

    def _load_reuse_metadata(self, metadata_file):
        """Load metadata file.

        :param str metadata_file: Path to reuse metadata file.
        """
        with open(metadata_file, "r") as f:
            return json.load(f)
|
206
pungi/phases/image_checksum.py
Normal file
206
pungi/phases/image_checksum.py
Normal file
@ -0,0 +1,206 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
from kobo import shortcuts
|
||||
from collections import defaultdict
|
||||
import threading
|
||||
|
||||
from .base import PhaseBase
|
||||
from ..util import get_format_substs, get_file_size
|
||||
|
||||
|
||||
# Reported by ImageChecksumPhase.validate when a single combined checksum
# file is requested but more than one digest algorithm is configured.
MULTIPLE_CHECKSUMS_ERROR = (
    'Config option "media_checksum_one_file" requires only one '
    'checksum to be configured in "media_checksums".'
)
|
||||
|
||||
|
||||
class ImageChecksumPhase(PhaseBase):
    """Go through images specified in image manifest and generate their
    checksums. The manifest will be updated with the checksums.
    """

    name = "image_checksum"

    def __init__(self, compose):
        super(ImageChecksumPhase, self).__init__(compose)
        self.checksums = self.compose.conf["media_checksums"]
        self.one_file = self.compose.conf["media_checksum_one_file"]

    def skip(self):
        # Never skip this phase:
        # * with no images it is a cheap no-op
        # * with images, checksums must be computed or writing metadata
        #   will fail
        return False

    def validate(self):
        # A single combined checksum file can only hold one digest type.
        if self.one_file and len(self.checksums) != 1:
            raise ValueError(MULTIPLE_CHECKSUMS_ERROR)

    def _get_images(self):
        """Returns a mapping from directories to sets of ``Image``s.

        The paths to dirs are absolute.
        """
        root = self.compose.paths.compose.topdir()
        manifest = self.compose.im.images
        grouped = {}
        for variant in manifest:
            for arch in manifest[variant]:
                for img in manifest[variant][arch]:
                    location = os.path.dirname(os.path.join(root, img.path))
                    grouped.setdefault((variant, arch, location), set()).add(img)
        return grouped

    def _get_base_filename(self, variant, arch, **kwargs):
        # The configured base name supports both %-style and {}-style
        # substitutions; apply both, then append a dash separator.
        template = self.compose.conf["media_checksum_base_filename"]
        if not template:
            return template
        substs = get_format_substs(
            self.compose, variant=variant, arch=arch, **kwargs
        )
        return (template % substs).format(**substs) + "-"

    def run(self):
        make_checksums(
            self.compose.paths.compose.topdir(),
            self.compose.im,
            self.checksums,
            self.one_file,
            self._get_base_filename,
        )
|
||||
|
||||
|
||||
def _compute_checksums(
|
||||
results,
|
||||
cache,
|
||||
variant,
|
||||
arch,
|
||||
path,
|
||||
images,
|
||||
checksum_types,
|
||||
base_checksum_name_gen,
|
||||
one_file,
|
||||
results_lock,
|
||||
cache_lock,
|
||||
):
|
||||
for image in images:
|
||||
filename = os.path.basename(image.path)
|
||||
full_path = os.path.join(path, filename)
|
||||
if not os.path.exists(full_path):
|
||||
continue
|
||||
|
||||
filesize = image.size or get_file_size(full_path)
|
||||
|
||||
cache_lock.acquire()
|
||||
if full_path not in cache:
|
||||
cache_lock.release()
|
||||
# Source ISO is listed under each binary architecture. There's no
|
||||
# point in checksumming it twice, so we can just remember the
|
||||
# digest from first run..
|
||||
checksum_value = shortcuts.compute_file_checksums(full_path, checksum_types)
|
||||
with cache_lock:
|
||||
cache[full_path] = checksum_value
|
||||
else:
|
||||
cache_lock.release()
|
||||
|
||||
with cache_lock:
|
||||
digests = cache[full_path]
|
||||
|
||||
for checksum, digest in digests.items():
|
||||
# Update metadata with the checksum
|
||||
image.add_checksum(None, checksum, digest)
|
||||
# If not turned of, create the file-specific checksum file
|
||||
if not one_file:
|
||||
checksum_filename = os.path.join(
|
||||
path, "%s.%sSUM" % (filename, checksum.upper())
|
||||
)
|
||||
with results_lock:
|
||||
results[checksum_filename].add(
|
||||
(filename, filesize, checksum, digest)
|
||||
)
|
||||
|
||||
if one_file:
|
||||
dirname = os.path.basename(path)
|
||||
base_checksum_name = base_checksum_name_gen(
|
||||
variant, arch, dirname=dirname
|
||||
)
|
||||
checksum_filename = base_checksum_name + "CHECKSUM"
|
||||
else:
|
||||
base_checksum_name = base_checksum_name_gen(variant, arch)
|
||||
checksum_filename = "%s%sSUM" % (base_checksum_name, checksum.upper())
|
||||
checksum_path = os.path.join(path, checksum_filename)
|
||||
|
||||
with results_lock:
|
||||
results[checksum_path].add((filename, filesize, checksum, digest))
|
||||
|
||||
|
||||
def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen):
    """Checksum every image in the manifest, one worker thread per directory,
    then write out the collected checksum files.
    """
    results = defaultdict(set)
    cache = {}
    results_lock = threading.Lock()  # guards the shared results dict
    cache_lock = threading.Lock()  # guards the shared digest cache

    # Spawn one worker per (variant, arch, directory) group.
    workers = []
    for (variant, arch, path), images in get_images(topdir, im).items():
        worker = threading.Thread(
            target=_compute_checksums,
            args=[
                results,
                cache,
                variant,
                arch,
                path,
                images,
                checksum_types,
                base_checksum_name_gen,
                one_file,
                results_lock,
                cache_lock,
            ],
        )
        worker.start()
        workers.append(worker)

    # Wait until every worker is done before writing anything out.
    for worker in workers:
        worker.join()

    for checksum_file, entries in results.items():
        dump_checksums(checksum_file, entries)
|
||||
|
||||
|
||||
def dump_checksums(checksum_file, data):
    """Write checksums to file.

    :param checksum_file: where to write the checksums
    :param data: an iterable of tuples (filename, filesize, checksum_type, hash)
    """
    lines = []
    for filename, filesize, alg, checksum in sorted(data):
        # A size comment followed by a BSD-style checksum line per entry.
        lines.append("# %s: %s bytes\n" % (filename, filesize))
        lines.append("%s (%s) = %s\n" % (alg.upper(), filename, checksum))
    with open(checksum_file, "w") as f:
        f.writelines(lines)
|
||||
|
||||
|
||||
def get_images(top_dir, manifest):
    """Group images from the manifest by location.

    Returns a mapping from (variant, arch, dir) keys to lists of ``Image``
    objects, where *dir* is the absolute directory holding the image.
    """
    grouped = {}
    for variant in manifest.images:
        for arch in manifest.images[variant]:
            for image in manifest.images[variant][arch]:
                location = os.path.dirname(os.path.join(top_dir, image.path))
                grouped.setdefault((variant, arch, location), []).append(image)
    return grouped
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user