diff --git a/SOURCES/0070-chore-deps-update-peter-evans-create-or-update-comme.patch b/SOURCES/0070-chore-deps-update-peter-evans-create-or-update-comme.patch
new file mode 100644
index 0000000..effc875
--- /dev/null
+++ b/SOURCES/0070-chore-deps-update-peter-evans-create-or-update-comme.patch
@@ -0,0 +1,26 @@
+From 1340bd735f731087aad53c8159a3616298fe0f57 Mon Sep 17 00:00:00 2001
+From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
+Date: Mon, 10 Nov 2025 18:43:05 +0000
+Subject: [PATCH 070/111] chore(deps): update
+ peter-evans/create-or-update-comment action to v5
+
+---
+ .github/workflows/pr-welcome-msg.yml | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/.github/workflows/pr-welcome-msg.yml b/.github/workflows/pr-welcome-msg.yml
+index f056fb79..4c12ab2a 100644
+--- a/.github/workflows/pr-welcome-msg.yml
++++ b/.github/workflows/pr-welcome-msg.yml
+@@ -14,7 +14,7 @@ jobs:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Create comment
+- uses: peter-evans/create-or-update-comment@v4
++ uses: peter-evans/create-or-update-comment@v5
+ with:
+ issue-number: ${{ github.event.pull_request.number }}
+ body: |
+--
+2.52.0
+
diff --git a/SOURCES/0071-fix-cgroups-v1-inhibitor-remediation.patch b/SOURCES/0071-fix-cgroups-v1-inhibitor-remediation.patch
new file mode 100644
index 0000000..c08e187
--- /dev/null
+++ b/SOURCES/0071-fix-cgroups-v1-inhibitor-remediation.patch
@@ -0,0 +1,28 @@
+From 4feb11f7d4d0a265611e5d2f80b91c05116885b7 Mon Sep 17 00:00:00 2001
+From: Peter Mocary
+Date: Fri, 21 Nov 2025 15:16:31 +0100
+Subject: [PATCH 071/111] fix cgroups-v1 inhibitor remediation
+
+The inhibitor that checks if cgroups-v1 are enabled generated a remediation
+command with an incorrect form. This patch fixes the separator used
+for kernel arguments that need to be removed.
+---
+ .../actors/inhibitcgroupsv1/libraries/inhibitcgroupsv1.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/repos/system_upgrade/el9toel10/actors/inhibitcgroupsv1/libraries/inhibitcgroupsv1.py b/repos/system_upgrade/el9toel10/actors/inhibitcgroupsv1/libraries/inhibitcgroupsv1.py
+index 6c891f22..0a38ace3 100644
+--- a/repos/system_upgrade/el9toel10/actors/inhibitcgroupsv1/libraries/inhibitcgroupsv1.py
++++ b/repos/system_upgrade/el9toel10/actors/inhibitcgroupsv1/libraries/inhibitcgroupsv1.py
+@@ -48,7 +48,7 @@ def process():
+ [
+ "grubby",
+ "--update-kernel=ALL",
+- '--remove-args="{}"'.format(",".join(remediation_cmd_args)),
++ '--remove-args="{}"'.format(" ".join(remediation_cmd_args)),
+ ],
+ ],
+ ),
+--
+2.52.0
+
diff --git a/SOURCES/0072-Update-rhel-gpg-signatures-map.patch b/SOURCES/0072-Update-rhel-gpg-signatures-map.patch
new file mode 100644
index 0000000..643c363
--- /dev/null
+++ b/SOURCES/0072-Update-rhel-gpg-signatures-map.patch
@@ -0,0 +1,41 @@
+From 3f5bb62e03a33e43893234d5912570b3dad15f82 Mon Sep 17 00:00:00 2001
+From: Matej Matuska
+Date: Fri, 19 Sep 2025 16:37:14 +0200
+Subject: [PATCH 072/111] Update rhel gpg-signatures map
+
+Add auxiliary key 2, auxiliary key 3 and the old signing key to
+"keys". These were handled in obsoleted-keys, but not used here.
+
+Add beta key to keys obsoleted in rhel 8.
+---
+ .../common/files/distro/rhel/gpg-signatures.json | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json
+index 3cc67f82..5b27e197 100644
+--- a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json
++++ b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json
+@@ -4,14 +4,18 @@
+ "5326810137017186",
+ "938a80caf21541eb",
+ "fd372689897da07a",
+- "45689c882fa658e0"
++ "45689c882fa658e0",
++ "f76f66c3d4082792",
++ "5054e4a45a6340b3",
++ "219180cddb42a60e"
+ ],
+ "obsoleted-keys": {
+ "7": [],
+ "8": [
+ "gpg-pubkey-2fa658e0-45700c69",
+ "gpg-pubkey-37017186-45761324",
+- "gpg-pubkey-db42a60e-37ea5438"
++ "gpg-pubkey-db42a60e-37ea5438",
++ "gpg-pubkey-897da07a-3c979a7f"
+ ],
+ "9": ["gpg-pubkey-d4082792-5b32db75"],
+ "10": ["gpg-pubkey-fd431d51-4ae0493b"]
+--
+2.52.0
+
diff --git a/SOURCES/0073-Update-centos-gpg-signatures-map.patch b/SOURCES/0073-Update-centos-gpg-signatures-map.patch
new file mode 100644
index 0000000..4c222ca
--- /dev/null
+++ b/SOURCES/0073-Update-centos-gpg-signatures-map.patch
@@ -0,0 +1,28 @@
+From 7e345c872b073022c459f40ae404a8a38a90038b Mon Sep 17 00:00:00 2001
+From: Matej Matuska
+Date: Fri, 19 Sep 2025 16:41:22 +0200
+Subject: [PATCH 073/111] Update centos gpg-signatures map
+
+Add SIG Extras key to "keys".
+---
+ .../common/files/distro/centos/gpg-signatures.json | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json
+index fe85e03c..1be56176 100644
+--- a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json
++++ b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json
+@@ -1,8 +1,9 @@
+ {
+ "keys": [
+ "24c6a8a7f4a80eb5",
++ "4eb84e71f2ee9d55",
+ "05b555b38483c65d",
+- "4eb84e71f2ee9d55"
++ "1ff6a2171d997668"
+ ],
+ "obsoleted-keys": {
+ "10": ["gpg-pubkey-8483c65d-5ccc5b19"]
+--
+2.52.0
+
diff --git a/SOURCES/0074-gpg-almalinux-Remove-Eurolinux-Tuxcare-GPG-key.patch b/SOURCES/0074-gpg-almalinux-Remove-Eurolinux-Tuxcare-GPG-key.patch
new file mode 100644
index 0000000..8727ba8
--- /dev/null
+++ b/SOURCES/0074-gpg-almalinux-Remove-Eurolinux-Tuxcare-GPG-key.patch
@@ -0,0 +1,32 @@
+From 4dff2a8d33fafc65e6c76687cc62006f82f7360a Mon Sep 17 00:00:00 2001
+From: Matej Matuska
+Date: Mon, 24 Nov 2025 00:23:37 +0100
+Subject: [PATCH 074/111] gpg/almalinux: Remove Eurolinux Tuxcare GPG key
+
+Only RPM GPG keys provided by the distribution should be included in the
+gpg-signatures.json files as they are used to detect which packages are
+1st party.
+
+The Eurolinux Tuxcare GPG key is 3rd party, therefore it should not be
+included.
+---
+ .../common/files/distro/almalinux/gpg-signatures.json | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json
+index 24bc93ba..18b6c516 100644
+--- a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json
++++ b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json
+@@ -3,8 +3,7 @@
+ "51d6647ec21ad6ea",
+ "d36cb86cb86b3716",
+ "2ae81e8aced7258b",
+- "429785e181b961a5",
+- "d07bf2a08d50eb66"
++ "429785e181b961a5"
+ ],
+ "obsoleted-keys": {
+ "7": [],
+--
+2.52.0
+
diff --git a/SOURCES/0075-removeobsoletegpgkeys-Adjust-for-converting.patch b/SOURCES/0075-removeobsoletegpgkeys-Adjust-for-converting.patch
new file mode 100644
index 0000000..0db766a
--- /dev/null
+++ b/SOURCES/0075-removeobsoletegpgkeys-Adjust-for-converting.patch
@@ -0,0 +1,532 @@
+From 37a071df9242e10821b8d6ab7a0e727ffb7e871d Mon Sep 17 00:00:00 2001
+From: Matej Matuska
+Date: Fri, 19 Sep 2025 17:18:48 +0200
+Subject: [PATCH 075/111] removeobsoletegpgkeys: Adjust for converting
+
+When doing upgrade + conversion, removing obsolete GPG keys from the current
+(or target for that matter) distro doesn't make sense, because we are
+moving to a different distro.
+Instead, all distro provided keys from the source distro need to be
+removed, as the target distro uses its own keys. Those are imported
+elsewhere later during the upgrade.
+
+A new list is added to the gpg-signatures.json maps, which contains the
+names of the fake RPMs "generated" upon importing a GPG key into the RPM
+DB.
+These are in the order the key IDs ("keys" in the map) are in, however
+the mapping is not always 1:1 (e.g. the Centos SIG Extras keys).
+
+The key id could be mapped to the RPM names, however since the RPM NVR
+format is:
+gpg-pubkey--
+there could be a collision between the key IDs.
+
+Some key RPMs are missing in the Alma Linux map as I couldn't find out what
+keys some of the fingerprints correspond to.
+
+Jira: RHEL-110190
+
+I added annotations to the keys at:
+https://github.com/oamg/leapp-repository/wiki/gpg%E2%80%90signatures.json-key-annotations.
+---
+ .../actors/removeobsoletegpgkeys/actor.py | 11 +-
+ .../libraries/removeobsoleterpmgpgkeys.py | 50 +++-
+ .../tests/test_removeobsoleterpmgpgkeys.py | 244 ++++++++++++------
+ .../distro/almalinux/gpg-signatures.json | 12 +-
+ .../files/distro/centos/gpg-signatures.json | 12 +-
+ .../files/distro/rhel/gpg-signatures.json | 20 +-
+ 6 files changed, 229 insertions(+), 120 deletions(-)
+
+diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py
+index 5674ee3f..58b15a84 100644
+--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py
++++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py
+@@ -8,9 +8,14 @@ class RemoveObsoleteGpgKeys(Actor):
+ """
+ Remove obsoleted RPM GPG keys.
+
+- New version might make existing RPM GPG keys obsolete. This might be caused
+- for example by the hashing algorithm becoming deprecated or by the key
+- getting replaced.
++ The definition of what keys are considered obsolete depends on whether the
++ upgrade also does a conversion:
++ - If not converting, the obsolete keys are those that are no longer valid
++ on the target version. This might be caused for example by the hashing
++ algorithm becoming deprecated or by the key getting replaced.
Note that ++ only keys provided by the vendor of the OS are handled. ++ - If converting, the obsolete keys are all of the keys provided by the ++ vendor of the source distribution. + + A DNFWorkaround is registered to actually remove the keys. + """ +diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py +index df08e6fa..7d047395 100644 +--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py ++++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py +@@ -1,3 +1,5 @@ ++import itertools ++ + from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id + from leapp.libraries.common.config.version import get_target_major_version + from leapp.libraries.common.distro import get_distribution_data +@@ -6,18 +8,25 @@ from leapp.libraries.stdlib import api + from leapp.models import DNFWorkaround, InstalledRPM + + ++def _is_key_installed(key): ++ """ ++ :param key: The NVR of the gpg key RPM (e.g. gpg-pubkey-1d997668-61bae63b) ++ """ ++ name, version, release = key.rsplit("-", 2) ++ return has_package(InstalledRPM, name, version=version, release=release) ++ ++ + def _get_obsolete_keys(): + """ +- Return keys obsoleted in target and previous versions ++ Get keys obsoleted in target and previous major versions + """ + distribution = get_target_distro_id() +- obsoleted_keys_map = get_distribution_data(distribution).get('obsoleted-keys', {}) ++ obsoleted_keys_map = get_distribution_data(distribution).get("obsoleted-keys", {}) + keys = [] + for version in range(7, int(get_target_major_version()) + 1): + try: + for key in obsoleted_keys_map[str(version)]: +- name, version, release = key.rsplit("-", 2) +- if has_package(InstalledRPM, name, version=version, release=release): ++ if _is_key_installed(key): + keys.append(key) + except KeyError: + pass +@@ -25,6 +34,22 @@ def _get_obsolete_keys(): + return keys + + ++def _get_source_distro_keys(): ++ """ ++ Get all known keys of the source distro ++ ++ This includes keys from all relevant previous OS versions as all of those ++ might be present on the system. 
++ """ ++ distribution = get_source_distro_id() ++ keys = get_distribution_data(distribution).get("keys", {}) ++ return [ ++ key ++ for key in itertools.chain.from_iterable(keys.values()) ++ if _is_key_installed(key) ++ ] ++ ++ + def register_dnfworkaround(keys): + api.produce( + DNFWorkaround( +@@ -36,13 +61,12 @@ def register_dnfworkaround(keys): + + + def process(): +- if get_source_distro_id() != get_target_distro_id(): +- # TODO adjust for conversions, in the current state it would not have +- # any effect, just skip it +- return +- +- keys = _get_obsolete_keys() +- if not keys: +- return ++ if get_source_distro_id() == get_target_distro_id(): ++ # only upgrading - remove keys obsoleted in previous versions ++ keys = _get_obsolete_keys() ++ else: ++ # also converting - we need to remove all keys from the source distro ++ keys = _get_source_distro_keys() + +- register_dnfworkaround(keys) ++ if keys: ++ register_dnfworkaround(keys) +diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py +index b78174cc..8b9b842b 100644 +--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py ++++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py +@@ -1,77 +1,79 @@ + import os ++import unittest.mock as mock + + import pytest + + from leapp.libraries.actor import removeobsoleterpmgpgkeys +-from leapp.libraries.common.config.version import get_target_major_version +-from leapp.libraries.common.rpms import has_package + from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked + from leapp.libraries.stdlib import api +-from leapp.models import DNFWorkaround, InstalledRPM, RPM ++from leapp.models import InstalledRPM, RPM + ++_CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + +-def _get_test_installedrpm(): +- return InstalledRPM( ++ ++def common_folder_path_mocked(folder): ++ return os.path.join(_CUR_DIR, "../../../files/", folder) ++ ++ ++def test_is_key_installed(monkeypatch): ++ installed_rpms = InstalledRPM( + items=[ + RPM( +- name='gpg-pubkey', +- version='d4082792', +- release='5b32db75', +- epoch='0', +- packager='Red Hat, Inc. (auxiliary key 2) ', +- arch='noarch', +- pgpsig='' ++ name="gpg-pubkey", ++ version="d4082792", ++ release="5b32db75", ++ epoch="0", ++ packager="Red Hat, Inc. (auxiliary key 2) ", ++ arch="noarch", ++ pgpsig="", + ), + RPM( +- name='gpg-pubkey', +- version='2fa658e0', +- release='45700c69', +- epoch='0', +- packager='Red Hat, Inc. (auxiliary key) ', +- arch='noarch', +- pgpsig='' ++ name="gpg-pubkey", ++ version="2fa658e0", ++ release="45700c69", ++ epoch="0", ++ packager="Red Hat, Inc. 
(auxiliary key) ", ++ arch="noarch", ++ pgpsig="", + ), + RPM( +- name='gpg-pubkey', +- version='12345678', +- release='abcdefgh', +- epoch='0', +- packager='made up', +- arch='noarch', +- pgpsig='' ++ name="gpg-pubkey", ++ version="12345678", ++ release="abcdefgh", ++ epoch="0", ++ packager="made up", ++ arch="noarch", ++ pgpsig="", + ), + ] + ) + ++ monkeypatch.setattr( ++ api, "current_actor", CurrentActorMocked(msgs=[installed_rpms]) ++ ) ++ ++ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-d4082792-5b32db75") ++ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-2fa658e0-45700c69") ++ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-12345678-abcdefgh") ++ assert not removeobsoleterpmgpgkeys._is_key_installed( ++ "gpg-pubkey-db42a60e-37ea5438" ++ ) ++ + + @pytest.mark.parametrize( + "version, expected", + [ +- (9, ["gpg-pubkey-d4082792-5b32db75", "gpg-pubkey-2fa658e0-45700c69"]), +- (8, ["gpg-pubkey-2fa658e0-45700c69"]) ++ ("9", ["gpg-pubkey-d4082792-5b32db75", "gpg-pubkey-2fa658e0-45700c69"]), ++ ("8", ["gpg-pubkey-2fa658e0-45700c69"]) + ] + ) + def test_get_obsolete_keys(monkeypatch, version, expected): +- def get_target_major_version_mocked(): +- return version +- +- monkeypatch.setattr( +- removeobsoleterpmgpgkeys, +- "get_target_major_version", +- get_target_major_version_mocked, +- ) +- ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_ver=version)) ++ monkeypatch.setattr(api, "get_common_folder_path", common_folder_path_mocked) + monkeypatch.setattr( +- api, +- "current_actor", +- CurrentActorMocked( +- msgs=[_get_test_installedrpm()] +- ), ++ removeobsoleterpmgpgkeys, "_is_key_installed", lambda key: key in expected + ) + +- cur_dir = os.path.dirname(os.path.abspath(__file__)) +- monkeypatch.setattr(api, 'get_common_folder_path', lambda folder: os.path.join(cur_dir, '../../../files/', folder)) +- + keys = removeobsoleterpmgpgkeys._get_obsolete_keys() + assert set(keys) == set(expected) + +@@ -79,50 +81,83 @@ def test_get_obsolete_keys(monkeypatch, version, expected): + @pytest.mark.parametrize( + "version, obsoleted_keys, expected", + [ +- (10, None, []), +- (10, {}, []), +- (10, {"8": ["gpg-pubkey-888-abc"], "10": ["gpg-pubkey-10-10"]}, ["gpg-pubkey-888-abc", "gpg-pubkey-10-10"]), +- (9, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-999-def", "gpg-pubkey-888-abc"]), +- (8, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-888-abc"]) +- ] ++ ("10", None, []), ++ ("10", {}, []), ++ ( ++ "10", ++ {"8": ["gpg-pubkey-888-abc"], "10": ["gpg-pubkey-10-10"]}, ++ ["gpg-pubkey-888-abc", "gpg-pubkey-10-10"], ++ ), ++ ( ++ "9", ++ {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ++ ["gpg-pubkey-999-def", "gpg-pubkey-888-abc"], ++ ), ++ ( ++ "8", ++ {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ++ ["gpg-pubkey-888-abc"], ++ ), ++ ], + ) +-def test_get_obsolete_keys_incomplete_data(monkeypatch, version, obsoleted_keys, expected): +- def get_target_major_version_mocked(): +- return version ++def test_get_obsolete_keys_incomplete_data( ++ monkeypatch, version, obsoleted_keys, expected ++): ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_ver=version)) ++ monkeypatch.setattr( ++ removeobsoleterpmgpgkeys, "_is_key_installed", lambda key: key in expected ++ ) + + def get_distribution_data_mocked(_distro): + if obsoleted_keys is None: + return {} +- return {'obsoleted-keys': obsoleted_keys} +- +- def has_package_mocked(*args, **kwargs): +- return 
True ++ return {"obsoleted-keys": obsoleted_keys} + + monkeypatch.setattr( +- removeobsoleterpmgpgkeys, +- "get_target_major_version", +- get_target_major_version_mocked, ++ removeobsoleterpmgpgkeys, "get_distribution_data", get_distribution_data_mocked + ) + +- monkeypatch.setattr( +- removeobsoleterpmgpgkeys, +- "get_distribution_data", +- get_distribution_data_mocked, +- ) ++ keys = removeobsoleterpmgpgkeys._get_obsolete_keys() ++ assert set(keys) == set(expected) + +- monkeypatch.setattr( +- removeobsoleterpmgpgkeys, +- "has_package", +- has_package_mocked, +- ) + ++@pytest.mark.parametrize( ++ "distro, expected", ++ [ ++ ( ++ "centos", ++ [ ++ "gpg-pubkey-8483c65d-5ccc5b19", ++ "gpg-pubkey-1d997668-621e3cac", ++ "gpg-pubkey-1d997668-61bae63b", ++ ], ++ ), ++ ( ++ "rhel", ++ [ ++ "gpg-pubkey-fd431d51-4ae0493b", ++ "gpg-pubkey-37017186-45761324", ++ "gpg-pubkey-f21541eb-4a5233e8", ++ "gpg-pubkey-897da07a-3c979a7f", ++ "gpg-pubkey-2fa658e0-45700c69", ++ "gpg-pubkey-d4082792-5b32db75", ++ "gpg-pubkey-5a6340b3-6229229e", ++ "gpg-pubkey-db42a60e-37ea5438", ++ ], ++ ), ++ ], ++) ++def test_get_source_distro_keys(monkeypatch, distro, expected): ++ """ ++ Test that the correct keys are returned for each distro. ++ """ ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(src_distro=distro)) ++ monkeypatch.setattr(api, "get_common_folder_path", common_folder_path_mocked) + monkeypatch.setattr( +- api, +- "current_actor", +- CurrentActorMocked(), ++ removeobsoleterpmgpgkeys, "_is_key_installed", lambda _key: True + ) + +- keys = removeobsoleterpmgpgkeys._get_obsolete_keys() ++ keys = removeobsoleterpmgpgkeys._get_source_distro_keys() + assert set(keys) == set(expected) + + +@@ -134,16 +169,61 @@ def test_get_obsolete_keys_incomplete_data(monkeypatch, version, obsoleted_keys, + ] + ) + def test_workaround_should_register(monkeypatch, keys, should_register): +- def get_obsolete_keys_mocked(): +- return keys +- + monkeypatch.setattr( +- removeobsoleterpmgpgkeys, +- '_get_obsolete_keys', +- get_obsolete_keys_mocked ++ removeobsoleterpmgpgkeys, "_get_obsolete_keys", lambda: keys + ) +- monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(api, "produce", produce_mocked()) + monkeypatch.setattr(api, "current_actor", CurrentActorMocked()) + + removeobsoleterpmgpgkeys.process() + assert api.produce.called == should_register ++ ++ ++def test_process(monkeypatch): ++ """ ++ Test that the correct path is taken depending on whether also converting ++ """ ++ obsolete = ["gpg-pubkey-12345678-abcdefgh"] ++ source_distro = ["gpg-pubkey-87654321-hgfedcba"] ++ ++ monkeypatch.setattr( ++ removeobsoleterpmgpgkeys, "_get_obsolete_keys", lambda: obsolete ++ ) ++ monkeypatch.setattr( ++ removeobsoleterpmgpgkeys, "_get_source_distro_keys", lambda: source_distro, ++ ) ++ ++ # upgrade only path ++ monkeypatch.setattr( ++ api, "current_actor", CurrentActorMocked(src_distro="rhel", dst_distro="rhel") ++ ) ++ with mock.patch( ++ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" ++ ): ++ removeobsoleterpmgpgkeys.process() ++ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( ++ obsolete ++ ) ++ ++ # upgrade + conversion paths ++ monkeypatch.setattr( ++ api, "current_actor", CurrentActorMocked(src_distro="rhel", dst_distro="centos") ++ ) ++ with mock.patch( ++ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" ++ ): ++ removeobsoleterpmgpgkeys.process() ++ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( 
++ source_distro ++ ) ++ ++ monkeypatch.setattr( ++ api, "current_actor", CurrentActorMocked(src_distro="centos", dst_distro="rhel") ++ ) ++ with mock.patch( ++ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" ++ ): ++ removeobsoleterpmgpgkeys.process() ++ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( ++ source_distro ++ ) +diff --git a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json +index 18b6c516..b17e8a66 100644 +--- a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json ++++ b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json +@@ -1,10 +1,10 @@ + { +- "keys": [ +- "51d6647ec21ad6ea", +- "d36cb86cb86b3716", +- "2ae81e8aced7258b", +- "429785e181b961a5" +- ], ++ "keys": { ++ "51d6647ec21ad6ea": ["gpg-pubkey-3abb34f8-5ffd890e"], ++ "d36cb86cb86b3716": ["gpg-pubkey-ced7258b-6525146f"], ++ "2ae81e8aced7258b": ["gpg-pubkey-b86b3716-61e69f29"], ++ "429785e181b961a5": ["gpg-pubkey-81b961a5-64106f70"] ++ }, + "obsoleted-keys": { + "7": [], + "8": [], +diff --git a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json +index 1be56176..1092ff58 100644 +--- a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json ++++ b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json +@@ -1,10 +1,10 @@ + { +- "keys": [ +- "24c6a8a7f4a80eb5", +- "4eb84e71f2ee9d55", +- "05b555b38483c65d", +- "1ff6a2171d997668" +- ], ++ "keys": { ++ "24c6a8a7f4a80eb5": [], ++ "4eb84e71f2ee9d55": [], ++ "05b555b38483c65d": ["gpg-pubkey-8483c65d-5ccc5b19"], ++ "1ff6a2171d997668": ["gpg-pubkey-1d997668-621e3cac", "gpg-pubkey-1d997668-61bae63b"] ++ }, + "obsoleted-keys": { + "10": ["gpg-pubkey-8483c65d-5ccc5b19"] + } +diff --git a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json +index 5b27e197..d6c2328d 100644 +--- a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json ++++ b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json +@@ -1,14 +1,14 @@ + { +- "keys": [ +- "199e2f91fd431d51", +- "5326810137017186", +- "938a80caf21541eb", +- "fd372689897da07a", +- "45689c882fa658e0", +- "f76f66c3d4082792", +- "5054e4a45a6340b3", +- "219180cddb42a60e" +- ], ++ "keys": { ++ "199e2f91fd431d51": ["gpg-pubkey-fd431d51-4ae0493b"], ++ "5326810137017186": ["gpg-pubkey-37017186-45761324"], ++ "938a80caf21541eb": ["gpg-pubkey-f21541eb-4a5233e8"], ++ "fd372689897da07a": ["gpg-pubkey-897da07a-3c979a7f"], ++ "45689c882fa658e0": ["gpg-pubkey-2fa658e0-45700c69"], ++ "f76f66c3d4082792": ["gpg-pubkey-d4082792-5b32db75"], ++ "5054e4a45a6340b3": ["gpg-pubkey-5a6340b3-6229229e"], ++ "219180cddb42a60e": ["gpg-pubkey-db42a60e-37ea5438"] ++ }, + "obsoleted-keys": { + "7": [], + "8": [ +-- +2.52.0 + diff --git a/SOURCES/0076-docs-Add-doc-about-community-upgrades.patch b/SOURCES/0076-docs-Add-doc-about-community-upgrades.patch new file mode 100644 index 0000000..0549342 --- /dev/null +++ b/SOURCES/0076-docs-Add-doc-about-community-upgrades.patch @@ -0,0 +1,228 @@ +From eabab8c496a7d6a76ff1aa0d7e34b0345530e30a Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Mon, 24 Nov 2025 16:44:53 +0100 +Subject: [PATCH 076/111] docs: Add doc about community upgrades + +The existing coding and PR workflow guidelines are split into separate +pages under "Contrbuting" and the 
new doc is added there as well. + +Jira: RHEL-110563 +--- + .../coding-guidelines.md} | 53 +------------------ + .../source/contributing/community-upgrades.md | 39 ++++++++++++++ + docs/source/contributing/index.rst | 18 +++++++ + docs/source/contributing/pr-guidelines.md | 48 +++++++++++++++++ + docs/source/index.rst | 2 +- + 5 files changed, 107 insertions(+), 53 deletions(-) + rename docs/source/{contrib-and-devel-guidelines.md => contributing/coding-guidelines.md} (68%) + create mode 100644 docs/source/contributing/community-upgrades.md + create mode 100644 docs/source/contributing/index.rst + create mode 100644 docs/source/contributing/pr-guidelines.md + +diff --git a/docs/source/contrib-and-devel-guidelines.md b/docs/source/contributing/coding-guidelines.md +similarity index 68% +rename from docs/source/contrib-and-devel-guidelines.md +rename to docs/source/contributing/coding-guidelines.md +index 3229c8a4..d06d0200 100644 +--- a/docs/source/contrib-and-devel-guidelines.md ++++ b/docs/source/contributing/coding-guidelines.md +@@ -1,5 +1,4 @@ +-# Contribution and development guidelines +-## Code guidelines ++# Coding guidelines + + Your code should follow the [Python Coding Guidelines](https://leapp.readthedocs.io/en/latest/contributing.html#follow-python-coding-guidelines) used for the leapp project. On top of these rules follow instructions + below. +@@ -84,53 +83,3 @@ guaranteed to exist and executable. + The use of the {py:mod}`subprocess` library is forbidden in leapp repositories. + Use of the library would require very good reasoning, why the + {py:func}`~leapp.libraries.stdlib.run` function cannot be used. +- +-## Commits and pull requests (PRs) +-### PR description +-The description should contain information about all introduced changes: +-* What has been changed +-* How it has been changed +-* The reason for the change +-* How could people try/test the PR +-* Reference to a Jira ticket, Github issue, ... if applicable +- +-Good description provides all information for readers without the need to +-read the code. Note that reviewers can decline to review the PR with a poor +-description. +- +-### Commit messages +-When your pull-request is ready to be reviewed, every commit needs to include +-a title and a body continuing a description of the change --- what problem is +-being solved and how. The end of the commit body should contain Jira issue +-number (if applicable), GitHub issue that is being fixed, etc.: +-``` +- Commit title +- +- Commit message body on multiple lines +- +- Jira-ref: +-``` +- +-Note that good commit message should provide information in similar way like +-the PR description. Poorly written commit messages can block the merge of PR +-or proper review. +- +-### Granularity of commits +-The granularity of commits depends strongly on the problem being solved. However, +-a large number of small commits is typically undesired. If possible, aim a +-Git history such that commits can be reverted individually, without requiring reverting +-numerous other dependent commits in order to get the `main` branch into a working state. +- +-Note that commits fixing problems of other commits in the PR are expected to be +-squashed before the final review and merge of the PR. Using of `git commit --fixup ...` +-and `git commit --squash ...` commands can help you to prepare such commits +-properly in advance and make the rebase later easier using `git rebase -i --autosquash`. +-We suggest you to get familiar with these commands as it can make your work really +-easier. 
Note that when you are starting to get higher number of such fixing commits +-in your PR, it's good practice to use the rebase more often. High numbers of such +-commits could make the final rebase more tricky in the end. So your PR should not +-have more than 15 commits at any time. +- +-### Create a separate git branch for your changes +-TBD +- +diff --git a/docs/source/contributing/community-upgrades.md b/docs/source/contributing/community-upgrades.md +new file mode 100644 +index 00000000..cbec0a24 +--- /dev/null ++++ b/docs/source/contributing/community-upgrades.md +@@ -0,0 +1,39 @@ ++# Community upgrades for Centos-like distros ++ ++In the past, this project was solely focused on Red Hat Enterprise Linux upgrades. Recently, we've been extending and refactoring the `leapp-repository` codebase to allow upgrades of other distributions, such as CentOS Stream and also upgrades + conversions between different distributions in one step. ++ ++This document outlines the state of support for upgrades of distributions other than RHEL. Note that support in this case doesn't mean what the codebase allows, but what the core leapp team supports in terms of issues, bugfixes, feature requests, testing, etc. ++ ++RHEL upgrades and upgrades + conversions *to* RHEL are the only officially supported upgrade paths and are the primary focus of leapp developers. However, we are open to and welcome contributions from the community, allowing other upgrade (and conversion) paths in the codebase. For example, we've already integrated a contribution introducing upgrade paths for Alma Linux upgrades. ++ ++This does not mean that we won't offer help outside of the outlined scope, but it is primarily up to the contributors contributing a particular upgrade path to maintain and test it. Also, it can take us some time to get to such PRs, so be patient please. ++ ++Upon agreement we can also update the upgrade paths (in `upgrade_paths.json`) when there is a new release of the particular distribution. However note that we might include some upgrade paths required for conversions *to* RHEL on top of that. ++ ++Contributions improving the overall upgrade experience are also welcome, as they always have been. ++ ++```{note} ++By default, upgrade + conversion paths are automatically derived from upgrade paths. If this is not desired or other paths are required, feel free to open a pull request or open a [discussion](https://github.com/oamg/leapp-repository/discussions) on that topic. ++``` ++ ++## How to contribute ++ ++Currently, the process for enabling upgrades and conversions for other distributions is not fully documented. In the meantime you can use the [pull request introducing Alma Linux upgrades](https://github.com/oamg/leapp-repository/pull/1391/) as reference. However, note that the leapp upgrade data files have special rules for updates, described below. ++ ++### Leapp data files ++ ++#### repomap.json ++ ++To use correct target repositories during the upgrade automatically, the `repomap.json` data file needs to be updated to cover repositories of the newly added distribution. However, the file cannot be updated manually as its content is generated, hence any manual changes would be overwritten with the next update. 
Currently there is not straightforward way for the community to update our generators, but you can ++ ++- submit a separate PR of how the resulting `repomap.json` file should look like, for an example you can take a look at [this PR](https://github.com/oamg/leapp-repository/pull/1395) ++- or provide the list of repositories (possibly also architectures) present on the distribution ++ ++and we will update the generators accordingly, asking you to review the result then. We are discussing an improvement to make this more community friendly. ++ ++#### pes-events.json and device_driver_deprecation_data.json ++ ++Both PES events and device driver deprecation data only contain data for RHEL in the upstream `leapp-repository` and we will not include any data unrelated to RHEL. If you find a bug in the data, you can open a bug in the [RHEL Jira](https://issues.redhat.com/) for the `leapp-repository` component. ++ ++Before contributing, make sure your PR conforms to our {doc}`Coding guidelines` ++ and {doc}`PR guidelines`. +diff --git a/docs/source/contributing/index.rst b/docs/source/contributing/index.rst +new file mode 100644 +index 00000000..ebdc9151 +--- /dev/null ++++ b/docs/source/contributing/index.rst +@@ -0,0 +1,18 @@ ++Contributing ++======================================================== ++ ++.. toctree:: ++ :maxdepth: 4 ++ :caption: Contents: ++ :glob: ++ ++ coding-guidelines ++ pr-guidelines ++ community-upgrades ++ ++.. Indices and tables ++.. ================== ++.. ++.. * :ref:`genindex` ++.. * :ref:`modindex` ++.. * :ref:`search` +diff --git a/docs/source/contributing/pr-guidelines.md b/docs/source/contributing/pr-guidelines.md +new file mode 100644 +index 00000000..4f6ee4fe +--- /dev/null ++++ b/docs/source/contributing/pr-guidelines.md +@@ -0,0 +1,48 @@ ++# Commits and pull requests (PRs) ++## PR description ++The description should contain information about all introduced changes: ++* What has been changed ++* How it has been changed ++* The reason for the change ++* How could people try/test the PR ++* Reference to a Jira ticket, Github issue, ... if applicable ++ ++Good description provides all information for readers without the need to ++read the code. Note that reviewers can decline to review the PR with a poor ++description. ++ ++## Commit messages ++When your pull-request is ready to be reviewed, every commit needs to include ++a title and a body continuing a description of the change --- what problem is ++being solved and how. The end of the commit body should contain Jira issue ++number (if applicable), GitHub issue that is being fixed, etc.: ++``` ++ Commit title ++ ++ Commit message body on multiple lines ++ ++ Jira-ref: ++``` ++ ++Note that good commit message should provide information in similar way like ++the PR description. Poorly written commit messages can block the merge of PR ++or proper review. ++ ++## Granularity of commits ++The granularity of commits depends strongly on the problem being solved. However, ++a large number of small commits is typically undesired. If possible, aim a ++Git history such that commits can be reverted individually, without requiring reverting ++numerous other dependent commits in order to get the `main` branch into a working state. ++ ++Note that commits fixing problems of other commits in the PR are expected to be ++squashed before the final review and merge of the PR. 
Using of `git commit --fixup ...` ++and `git commit --squash ...` commands can help you to prepare such commits ++properly in advance and make the rebase later easier using `git rebase -i --autosquash`. ++We suggest you to get familiar with these commands as it can make your work really ++easier. Note that when you are starting to get higher number of such fixing commits ++in your PR, it's good practice to use the rebase more often. High numbers of such ++commits could make the final rebase more tricky in the end. So your PR should not ++have more than 15 commits at any time. ++ ++## Create a separate git branch for your changes ++TBD +diff --git a/docs/source/index.rst b/docs/source/index.rst +index 27537ca4..ed68f751 100644 +--- a/docs/source/index.rst ++++ b/docs/source/index.rst +@@ -21,7 +21,7 @@ providing Red Hat Enterprise Linux in-place upgrade functionality. + upgrade-architecture-and-workflow/index + configuring-ipu/index + libraries-and-api/index +- contrib-and-devel-guidelines ++ contributing/index + faq + + .. Indices and tables +-- +2.52.0 + diff --git a/SOURCES/0077-Point-the-test-pipelines-to-the-next-branch-1461.patch b/SOURCES/0077-Point-the-test-pipelines-to-the-next-branch-1461.patch new file mode 100644 index 0000000..3fd9b8d --- /dev/null +++ b/SOURCES/0077-Point-the-test-pipelines-to-the-next-branch-1461.patch @@ -0,0 +1,34 @@ +From 3fe0ed289460919b4f08e1ceb1f1be17dd99b302 Mon Sep 17 00:00:00 2001 +From: Daniel Diblik <8378124+danmyway@users.noreply.github.com> +Date: Fri, 5 Dec 2025 16:09:06 +0100 +Subject: [PATCH 077/111] Point the test pipelines to the next branch (#1461) + +--- + .packit.yaml | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/.packit.yaml b/.packit.yaml +index 0c3f682a..83b7ce6a 100644 +--- a/.packit.yaml ++++ b/.packit.yaml +@@ -110,7 +110,7 @@ jobs: + job: tests + trigger: ignore + fmf_url: "https://gitlab.cee.redhat.com/oamg/leapp-tests" +- fmf_ref: "main" ++ fmf_ref: "next" + use_internal_tf: True + labels: + - sanity +@@ -447,7 +447,7 @@ jobs: + job: tests + trigger: ignore + fmf_url: "https://gitlab.cee.redhat.com/oamg/leapp-tests" +- fmf_ref: "main" ++ fmf_ref: "next" + use_internal_tf: True + labels: + - sanity +-- +2.52.0 + diff --git a/SOURCES/0078-boot-fix-deps-when-bindmounting-boot-to-sysroot-boot.patch b/SOURCES/0078-boot-fix-deps-when-bindmounting-boot-to-sysroot-boot.patch new file mode 100644 index 0000000..e39a9c4 --- /dev/null +++ b/SOURCES/0078-boot-fix-deps-when-bindmounting-boot-to-sysroot-boot.patch @@ -0,0 +1,34 @@ +From 2fb6beaec3e2f9badf5bf2956e4523c1b588b657 Mon Sep 17 00:00:00 2001 +From: Michal Hecko +Date: Thu, 27 Nov 2025 22:30:31 +0100 +Subject: [PATCH 078/111] boot: fix deps when bindmounting /boot to + /sysroot/boot + +When /boot is a separate partition, we bindmount what=/sysroot/boot to +/boot, so that we can perform necessary checks when booting with FIPS +enabled. However, the current solution contains incorrect unit +dependencies: it requires sysroot-boot.target. There is no such target, +and the correct value should be sysroot-boot.mount. This patch corrects +the dependencies. 
+---
+ .../mount_units_generator/files/bundled_units/boot.mount | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount
+index 869c5e4c..531f6c75 100644
+--- a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount
++++ b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount
+@@ -1,8 +1,8 @@
+ [Unit]
+ DefaultDependencies=no
+ Before=local-fs.target
+-After=sysroot-boot.target
+-Requires=sysroot-boot.target
++After=sysroot-boot.mount
++Requires=sysroot-boot.mount
+
+ [Mount]
+ What=/sysroot/boot
+--
+2.52.0
+
diff --git a/SOURCES/0079-handle-multipath-devices-in-upgrade-initramfs.patch b/SOURCES/0079-handle-multipath-devices-in-upgrade-initramfs.patch
new file mode 100644
index 0000000..34e068b
--- /dev/null
+++ b/SOURCES/0079-handle-multipath-devices-in-upgrade-initramfs.patch
@@ -0,0 +1,1511 @@
+From 84b9fa49440f32e1747f697be6c36342facd762e Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Michal=20He=C4=8Dko?=
+Date: Sat, 6 Dec 2025 14:34:48 +0100
+Subject: [PATCH 079/111] handle multipath devices in upgrade initramfs
+
+Previously, multipath configs were scanned (and optionally modified)
+only for the 8>9 upgrade paths. Known backward-compatibility issues were
+fixed in the ApplicationsPhase, but any support for mounting multipath
+devices was lacking. This patch refactors the handling of multipath so
+that:
+1) all multipath-related configs are copied into the target uspace
+2) any changes done in the ApplicationsPhase are performed on these
+ copies instead
+3) our modified multipath configs are included in the initramfs
+4) the dracut 'multipath' module is enabled when building the upgrade initramfs
+5) system configs are patched so that in the end they will look the same
+ as if modified in the ApplicationsPhase. This is achieved by simply
+ copying our modified configs and replacing the old system ones.
+
+These changes avoid reaching into the config-parsing code. An ugly
+consequence of this is that some models (MultipathConfFacts8to9) needed
+to be placed into the common repository, although they are only used for
+8>9 upgrades. To address this ugliness would require designing and
+producing a message that would contain the entire contents of the
+primary multipath config (if we want to avoid parsing it twice).
+ +Introduces new models / messages: + * MultipathInfo + * MultipathConfigUpdatesInfo + +jira-ref: RHEL-14712, RHEL-124306 (dev task) +--- + .../libraries/upgradeinitramfsgenerator.py | 1 + + .../actors/multipath/config_reader/actor.py | 28 +++ + .../libraries/multipathconfread.py | 71 +++---- + .../tests/files/all_the_things.conf | 0 + .../config_reader}/tests/files/allow_usb.conf | 2 +- + .../tests/files}/complicated.conf | 2 +- + .../tests/files/conf1.d/empty.conf | 0 + .../files/conf1.d/nothing_important.conf | 0 + .../tests/files/conf2.d/all_true.conf | 0 + .../config_reader}/tests/files/conf3.d/README | 0 + .../tests/files/converted_the_things.conf | 0 + .../tests/files/default_rhel8.conf | 0 + .../config_reader}/tests/files/empty.conf | 0 + .../config_reader}/tests/files/empty_dir.conf | 0 + .../tests/files/missing_dir.conf | 0 + .../tests/files/no_defaults.conf | 2 +- + .../tests/files/no_foreign.conf | 2 +- + .../tests/files/not_set_dir.conf | 0 + .../tests/files/set_in_dir.conf | 0 + .../tests/files/two_defaults.conf | 0 + .../tests/test_multipath_conf_read_8to9.py | 92 ++++++--- + .../multipath/system_conf_patcher/actor.py | 23 +++ + .../libraries/system_config_patcher.py | 17 ++ + .../tests/test_config_patcher.py | 41 ++++ + .../multipath/target_uspace_configs/actor.py | 22 +++ + .../target_uspace_multipath_configs.py | 66 +++++++ + .../system_upgrade/common/models/multipath.py | 78 ++++++++ + .../actor.py | 19 +- + .../libraries/multipathconfupdate.py | 43 ++++- + .../tests/files/after/all_the_things.conf | 0 + .../tests/files/after/allow_usb.conf | 2 +- + .../tests/files/after/complicated.conf | 2 +- + .../tests/files/after/conf2.d/all_true.conf | 0 + .../tests/files/after/default_rhel8.conf | 0 + .../tests/files/after/empty.conf | 0 + .../tests/files/after/empty_dir.conf | 0 + .../tests/files/after/missing_dir.conf | 0 + .../tests/files/after/no_defaults.conf | 2 +- + .../tests/files/after/no_foreign.conf | 2 +- + .../tests/files/after/not_set_dir.conf | 0 + .../tests/files/after/two_defaults.conf | 0 + .../tests/files/before/all_the_things.conf | 0 + .../tests/files/before/allow_usb.conf | 2 +- + .../tests/files/before}/complicated.conf | 2 +- + .../tests/files/before/conf1.d/empty.conf | 0 + .../before/conf1.d/nothing_important.conf | 0 + .../tests/files/before/conf2.d/all_true.conf | 0 + .../tests/files/before/conf3.d/README | 0 + .../files/before/converted_the_things.conf | 0 + .../tests/files/before/default_rhel8.conf | 0 + .../tests/files/before/empty.conf | 0 + .../tests/files/before/empty_dir.conf | 0 + .../tests/files/before/missing_dir.conf | 0 + .../tests/files/before/no_defaults.conf | 2 +- + .../tests/files/before/no_foreign.conf | 2 +- + .../tests/files/before/not_set_dir.conf | 0 + .../tests/files/before/set_in_dir.conf | 0 + .../tests/files/before/two_defaults.conf | 0 + .../tests/test_multipath_conf_update_8to9.py | 179 ++++++++++++++++++ + .../actors/multipathconfread/actor.py | 33 ---- + .../tests/test_multipath_conf_update_8to9.py | 119 ------------ + .../el8toel9/models/multipathconffacts.py | 30 --- + 62 files changed, 604 insertions(+), 282 deletions(-) + create mode 100644 repos/system_upgrade/common/actors/multipath/config_reader/actor.py + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/libraries/multipathconfread.py (54%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/all_the_things.conf (100%) + rename 
repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/allow_usb.conf (99%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfupdate/tests/files/before => common/actors/multipath/config_reader/tests/files}/complicated.conf (99%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/conf1.d/empty.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/conf1.d/nothing_important.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/conf2.d/all_true.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/conf3.d/README (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/converted_the_things.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/default_rhel8.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/empty.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/empty_dir.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/missing_dir.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/no_defaults.conf (99%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/no_foreign.conf (99%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/not_set_dir.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/set_in_dir.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/files/two_defaults.conf (100%) + rename repos/system_upgrade/{el8toel9/actors/multipathconfread => common/actors/multipath/config_reader}/tests/test_multipath_conf_read_8to9.py (58%) + create mode 100644 repos/system_upgrade/common/actors/multipath/system_conf_patcher/actor.py + create mode 100644 repos/system_upgrade/common/actors/multipath/system_conf_patcher/libraries/system_config_patcher.py + create mode 100644 repos/system_upgrade/common/actors/multipath/system_conf_patcher/tests/test_config_patcher.py + create mode 100644 repos/system_upgrade/common/actors/multipath/target_uspace_configs/actor.py + create mode 100644 repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py + create mode 100644 repos/system_upgrade/common/models/multipath.py + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/actor.py (57%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/libraries/multipathconfupdate.py (67%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/all_the_things.conf (100%) + rename 
repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/allow_usb.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/complicated.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/conf2.d/all_true.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/default_rhel8.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/empty.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/empty_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/missing_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/no_defaults.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/no_foreign.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/not_set_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/after/two_defaults.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/all_the_things.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/allow_usb.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfread/tests/files => multipath_upgrade_conf_patcher/tests/files/before}/complicated.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/conf1.d/empty.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/conf1.d/nothing_important.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/conf2.d/all_true.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/conf3.d/README (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/converted_the_things.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/default_rhel8.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/empty.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/empty_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/missing_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/no_defaults.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => 
multipath_upgrade_conf_patcher}/tests/files/before/no_foreign.conf (99%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/not_set_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/set_in_dir.conf (100%) + rename repos/system_upgrade/el8toel9/actors/{multipathconfupdate => multipath_upgrade_conf_patcher}/tests/files/before/two_defaults.conf (100%) + create mode 100644 repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/test_multipath_conf_update_8to9.py + delete mode 100644 repos/system_upgrade/el8toel9/actors/multipathconfread/actor.py + delete mode 100644 repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/test_multipath_conf_update_8to9.py + delete mode 100644 repos/system_upgrade/el8toel9/models/multipathconffacts.py + +diff --git a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py +index f7e4a8af..eefdb41a 100644 +--- a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py ++++ b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py +@@ -193,6 +193,7 @@ def _copy_files(context, files): + context.remove_tree(file_task.dst) + context.copytree_to(file_task.src, file_task.dst) + else: ++ context.makedirs(os.path.dirname(file_task.dst)) + context.copy_to(file_task.src, file_task.dst) + + +diff --git a/repos/system_upgrade/common/actors/multipath/config_reader/actor.py b/repos/system_upgrade/common/actors/multipath/config_reader/actor.py +new file mode 100644 +index 00000000..a7238a25 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/actor.py +@@ -0,0 +1,28 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import multipathconfread ++from leapp.models import DistributionSignedRPM, MultipathConfFacts8to9, MultipathInfo ++from leapp.tags import FactsPhaseTag, IPUWorkflowTag ++ ++ ++class MultipathConfRead(Actor): ++ """ ++ Read multipath configuration files and extract the necessary information ++ ++ Related files: ++ - /etc/multipath.conf ++ - /etc/multipath/ - any files inside the directory ++ - /etc/xdrdevices.conf ++ ++ Two kinds of messages are generated: ++ - MultipathInfo - general information about multipath, version agnostic ++ - upgrade-path-specific messages such as MultipathConfFacts8to9 (produced only ++ when upgrading from 8 to 9) ++ """ ++ ++ name = 'multipath_conf_read' ++ consumes = (DistributionSignedRPM,) ++ produces = (MultipathInfo, MultipathConfFacts8to9) ++ tags = (FactsPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ multipathconfread.scan_and_emit_multipath_info() +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py b/repos/system_upgrade/common/actors/multipath/config_reader/libraries/multipathconfread.py +similarity index 54% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py +rename to repos/system_upgrade/common/actors/multipath/config_reader/libraries/multipathconfread.py +index 5b1cef50..e733500b 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py ++++ 
b/repos/system_upgrade/common/actors/multipath/config_reader/libraries/multipathconfread.py +@@ -2,15 +2,10 @@ import errno + import os + + from leapp.libraries.common import multipathutil ++from leapp.libraries.common.config.version import get_source_major_version + from leapp.libraries.common.rpms import has_package + from leapp.libraries.stdlib import api +-from leapp.models import ( +- CopyFile, +- DistributionSignedRPM, +- MultipathConfFacts8to9, +- MultipathConfig8to9, +- TargetUserSpaceUpgradeTasks +-) ++from leapp.models import DistributionSignedRPM, MultipathConfFacts8to9, MultipathConfig8to9, MultipathInfo + + _regexes = ('vendor', 'product', 'revision', 'product_blacklist', 'devnode', + 'wwid', 'property', 'protocol') +@@ -88,46 +83,30 @@ def is_processable(): + return res + + +-def get_multipath_conf_facts(config_file='/etc/multipath.conf'): +- res_configs = [] +- conf = _parse_config(config_file) +- if not conf: +- return None +- res_configs.append(conf) +- if conf.config_dir: +- res_configs.extend(_parse_config_dir(conf.config_dir)) +- else: +- res_configs.extend(_parse_config_dir('/etc/multipath/conf.d')) +- return MultipathConfFacts8to9(configs=res_configs) +- ++def scan_and_emit_multipath_info(default_config_path='/etc/multipath.conf'): ++ if not is_processable(): ++ return + +-def produce_copy_to_target_task(): +- """ +- Produce task to copy files into the target userspace ++ primary_config = _parse_config(default_config_path) ++ if not primary_config: ++ api.current_logger().debug( ++ 'Primary multipath config /etc/multipath.conf is not present - multipath ' ++ 'is not used.' ++ ) ++ mpath_info = MultipathInfo(is_configured=False) ++ api.produce(mpath_info) ++ return + +- The multipath configuration files are needed when the upgrade init ramdisk +- is generated to ensure we are able to boot into the upgrade environment +- and start the upgrade process itself. By this msg it's told that these +- files/dirs will be available when the upgrade init ramdisk is generated. ++ multipath_info = MultipathInfo( ++ is_configured=True, ++ config_dir=primary_config.config_dir or '/etc/multipath/conf.d' ++ ) ++ api.produce(multipath_info) + +- See TargetUserSpaceUpgradeTasks and UpgradeInitramfsTasks for more info. +- """ +- # TODO(pstodulk): move the function to the multipathconfcheck actor +- # and get rid of the hardcoded stuff. +- # - The current behaviour looks from the user POV same as before this +- # * commit. I am going to keep the proper fix for additional PR as we do +- # * not want to make the current PR even more complex than now and the solution +- # * is not so trivial. +- # - As well, I am missing some information around xDR devices, which are +- # * possibly not handled correctly (maybe missing some executables?..) +- # * Update: practically we do not have enough info about xDR drivers, but +- # * discussed with Ben Marzinski, as the multipath dracut module includes +- # * the xDR utils stuff, we should handle it in the same way. 
+- # * See xdrgetuid, xdrgetinfo (these two utils are now missing in our initramfs) +- copy_files = [] +- for fname in ['/etc/multipath.conf', '/etc/multipath', '/etc/xdrdevices.conf']: +- if os.path.exists(fname): +- copy_files.append(CopyFile(src=fname)) ++ # Handle upgrade-path-specific config actions ++ if get_source_major_version() == '8': ++ secondary_configs = _parse_config_dir(multipath_info.config_dir) ++ all_configs = [primary_config] + secondary_configs + +- if copy_files: +- api.produce(TargetUserSpaceUpgradeTasks(copy_files=copy_files)) ++ config_facts_for_8to9 = MultipathConfFacts8to9(configs=all_configs) ++ api.produce(config_facts_for_8to9) +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/all_the_things.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/all_the_things.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/all_the_things.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/all_the_things.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/allow_usb.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/allow_usb.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/allow_usb.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/allow_usb.conf +index 57b6f97b..39681b85 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/allow_usb.conf ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/allow_usb.conf +@@ -1074,5 +1074,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/complicated.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/complicated.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/complicated.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/complicated.conf +index 23d93ecf..c889461c 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/complicated.conf ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/complicated.conf +@@ -1103,5 +1103,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf1.d/empty.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf1.d/empty.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf1.d/empty.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf1.d/empty.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf1.d/nothing_important.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf1.d/nothing_important.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf1.d/nothing_important.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf1.d/nothing_important.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf2.d/all_true.conf 
b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf2.d/all_true.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf2.d/all_true.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf2.d/all_true.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf3.d/README b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf3.d/README +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/conf3.d/README +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/conf3.d/README +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/converted_the_things.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/converted_the_things.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/converted_the_things.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/converted_the_things.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/default_rhel8.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/default_rhel8.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/default_rhel8.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/default_rhel8.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/empty.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/empty.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/empty.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/empty.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/empty_dir.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/empty_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/empty_dir.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/empty_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/missing_dir.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/missing_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/missing_dir.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/missing_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_defaults.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_defaults.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_defaults.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_defaults.conf +index f7885ca8..ec8ddee2 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_defaults.conf ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_defaults.conf +@@ -1045,5 +1045,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git 
a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_foreign.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_foreign.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_foreign.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_foreign.conf +index 9525731c..87f9a24c 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/no_foreign.conf ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/no_foreign.conf +@@ -1085,5 +1085,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/not_set_dir.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/not_set_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/not_set_dir.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/not_set_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/set_in_dir.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/set_in_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/set_in_dir.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/set_in_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/two_defaults.conf b/repos/system_upgrade/common/actors/multipath/config_reader/tests/files/two_defaults.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/two_defaults.conf +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/files/two_defaults.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/test_multipath_conf_read_8to9.py b/repos/system_upgrade/common/actors/multipath/config_reader/tests/test_multipath_conf_read_8to9.py +similarity index 58% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/test_multipath_conf_read_8to9.py +rename to repos/system_upgrade/common/actors/multipath/config_reader/tests/test_multipath_conf_read_8to9.py +index 9134e1d7..e593a857 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/test_multipath_conf_read_8to9.py ++++ b/repos/system_upgrade/common/actors/multipath/config_reader/tests/test_multipath_conf_read_8to9.py +@@ -1,7 +1,11 @@ + import os + ++import pytest ++ + from leapp.libraries.actor import multipathconfread +-from leapp.models import MultipathConfig8to9 ++from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import MultipathConfFacts8to9, MultipathConfig8to9, MultipathInfo + + TEST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files') + +@@ -100,45 +104,71 @@ def test_parse_config(): + assert_config(config, expected_data) + + +-def test_get_facts_missing_dir(monkeypatch): ++@pytest.mark.parametrize( ++ ('primary_config', 'expected_configs'), ++ [ ++ ('missing_dir.conf', [missing_dir_conf]), ++ ('empty_dir.conf', [empty_dir_conf]), ++ ('not_set_dir.conf', [not_set_dir_conf, empty1_conf, nothing_important_conf]), ++ ('set_in_dir.conf', [set_in_dir_conf, all_true_conf]), ++ ] ++) ++def 
test_get_facts_missing_dir(monkeypatch, primary_config, expected_configs): + monkeypatch.setattr(multipathconfread, '_parse_config_orig', multipathconfread._parse_config, raising=False) + monkeypatch.setattr(multipathconfread, '_parse_config', mock_parse_config) ++ monkeypatch.setattr(multipathconfread, 'is_processable', lambda: True) + +- facts = multipathconfread.get_multipath_conf_facts(os.path.join(TEST_DIR, 'missing_dir.conf')) +- assert facts +- assert len(facts.configs) == 1 +- assert_config(facts.configs[0], missing_dir_conf) ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) + ++ actor_mock = CurrentActorMocked(src_ver='8.10', dst_ver='9.6') ++ monkeypatch.setattr(api, 'current_actor', actor_mock) + +-def test_get_facts_empty_dir(monkeypatch): +- monkeypatch.setattr(multipathconfread, '_parse_config_orig', multipathconfread._parse_config, raising=False) +- monkeypatch.setattr(multipathconfread, '_parse_config', mock_parse_config) ++ config_to_use = os.path.join(TEST_DIR, primary_config) ++ multipathconfread.scan_and_emit_multipath_info(config_to_use) + +- facts = multipathconfread.get_multipath_conf_facts(os.path.join(TEST_DIR, 'empty_dir.conf')) +- assert facts +- assert len(facts.configs) == 1 +- assert_config(facts.configs[0], empty_dir_conf) ++ assert produce_mock.called + ++ general_info = [msg for msg in produce_mock.model_instances if isinstance(msg, MultipathInfo)] ++ assert len(general_info) == 1 ++ assert general_info[0].is_configured ++ # general_info[0].config_dir is with the MultipathConfFacts8to9 messages below + +-def test_get_facts_not_set_dir(monkeypatch): +- monkeypatch.setattr(multipathconfread, '_parse_config_orig', multipathconfread._parse_config, raising=False) +- monkeypatch.setattr(multipathconfread, '_parse_config', mock_parse_config) ++ msgs = [msg for msg in produce_mock.model_instances if isinstance(msg, MultipathConfFacts8to9)] ++ assert len(msgs) == 1 + +- expected_configs = (not_set_dir_conf, empty1_conf, nothing_important_conf) +- facts = multipathconfread.get_multipath_conf_facts(os.path.join(TEST_DIR, 'not_set_dir.conf')) +- assert facts +- assert len(facts.configs) == 3 +- for i in range(len(facts.configs)): +- assert_config(facts.configs[i], expected_configs[i]) ++ actual_configs = msgs[0].configs ++ assert len(actual_configs) == len(expected_configs) + ++ for actual_config, expected_config in zip(actual_configs, expected_configs): ++ assert_config(actual_config, expected_config) + +-def test_get_facts_set_in_dir(monkeypatch): +- monkeypatch.setattr(multipathconfread, '_parse_config_orig', multipathconfread._parse_config, raising=False) +- monkeypatch.setattr(multipathconfread, '_parse_config', mock_parse_config) + +- expected_configs = (set_in_dir_conf, all_true_conf) +- facts = multipathconfread.get_multipath_conf_facts(os.path.join(TEST_DIR, 'set_in_dir.conf')) +- assert facts +- assert len(facts.configs) == 2 +- for i in range(len(facts.configs)): +- assert_config(facts.configs[i], expected_configs[i]) ++def test_only_general_info_is_produced_on_9to10(monkeypatch): ++ default_config_path = '/etc/multipath.conf' ++ ++ def parse_config_mock(path): ++ assert path == default_config_path ++ return MultipathConfig8to9(pathname=path) ++ ++ monkeypatch.setattr(multipathconfread, '_parse_config', parse_config_mock) ++ monkeypatch.setattr(multipathconfread, 'is_processable', lambda: True) ++ ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) ++ ++ actor_mock = 
CurrentActorMocked(src_ver='9.6', dst_ver='10.0') ++ monkeypatch.setattr(api, 'current_actor', actor_mock) ++ ++ multipathconfread.scan_and_emit_multipath_info(default_config_path) ++ ++ assert produce_mock.called ++ ++ general_info_msgs = [msg for msg in produce_mock.model_instances if isinstance(msg, MultipathInfo)] ++ assert len(general_info_msgs) == 1 ++ general_info = general_info_msgs[0] ++ assert general_info.is_configured ++ assert general_info.config_dir == '/etc/multipath/conf.d' ++ ++ msgs = [msg for msg in produce_mock.model_instances if isinstance(msg, MultipathConfFacts8to9)] ++ assert not msgs +diff --git a/repos/system_upgrade/common/actors/multipath/system_conf_patcher/actor.py b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/actor.py +new file mode 100644 +index 00000000..44d4fd3b +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/actor.py +@@ -0,0 +1,23 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import system_config_patcher ++from leapp.models import MultipathConfigUpdatesInfo ++from leapp.tags import ApplicationsPhaseTag, IPUWorkflowTag ++ ++ ++class MultipathSystemConfigPatcher(Actor): ++ """ ++ Propagate any modified multipath configs to the source system. ++ ++ We copy, modify and use multipath configs from the source system in the upgrade initramfs ++ as the configs might be incompatible with the target system. Once the upgrade is performed, ++ actual system's configs need to be modified in the same fashion. This is achieved by simply ++ copying our modified multipath configs that were used to upgrade the system. ++ """ ++ ++ name = 'multipath_system_config_patcher' ++ consumes = (MultipathConfigUpdatesInfo,) ++ produces = () ++ tags = (ApplicationsPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ system_config_patcher.patch_system_configs() +diff --git a/repos/system_upgrade/common/actors/multipath/system_conf_patcher/libraries/system_config_patcher.py b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/libraries/system_config_patcher.py +new file mode 100644 +index 00000000..0d873322 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/libraries/system_config_patcher.py +@@ -0,0 +1,17 @@ ++import shutil ++ ++from leapp.libraries.stdlib import api ++from leapp.models import MultipathConfigUpdatesInfo ++ ++ ++def patch_system_configs(): ++ for config_updates in api.consume(MultipathConfigUpdatesInfo): ++ for modified_config in config_updates.updates: ++ api.current_logger().debug( ++ 'Copying modified multipath config {} to {}.'.format( ++ modified_config.updated_config_location, ++ modified_config.target_path ++ ) ++ ) ++ ++ shutil.copy(modified_config.updated_config_location, modified_config.target_path) +diff --git a/repos/system_upgrade/common/actors/multipath/system_conf_patcher/tests/test_config_patcher.py b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/tests/test_config_patcher.py +new file mode 100644 +index 00000000..1151fb69 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/system_conf_patcher/tests/test_config_patcher.py +@@ -0,0 +1,41 @@ ++import shutil ++ ++from leapp.libraries.actor import system_config_patcher ++from leapp.libraries.common.testutils import CurrentActorMocked ++from leapp.libraries.stdlib import api ++from leapp.models import MultipathConfigUpdatesInfo, UpdatedMultipathConfig ++ ++ ++def test_config_patcher(monkeypatch): ++ modified_configs = [ ++ UpdatedMultipathConfig( ++ 
updated_config_location='/var/lib/leapp/planned_conf_modifications/etc/multipath.conf', ++ target_path='/etc/multipath.conf' ++ ), ++ UpdatedMultipathConfig( ++ updated_config_location='/var/lib/leapp/planned_conf_modifications/etc/multipath/conf.d/myconfig.conf', ++ target_path='/etc/multipath/conf.d/myconfig.conf' ++ ) ++ ] ++ config_update_info = MultipathConfigUpdatesInfo(updates=modified_configs) ++ ++ actor_mock = CurrentActorMocked(msgs=[config_update_info]) ++ monkeypatch.setattr(api, 'current_actor', actor_mock) ++ ++ copies_performed = [] ++ ++ def copy_mock(src, dst, *args, **kwargs): ++ copies_performed.append((src, dst)) ++ ++ monkeypatch.setattr(shutil, 'copy', copy_mock) ++ system_config_patcher.patch_system_configs() ++ ++ expected_copies = [ ++ ('/var/lib/leapp/planned_conf_modifications/etc/multipath.conf', '/etc/multipath.conf'), ++ ( ++ '/var/lib/leapp/planned_conf_modifications/etc/multipath/conf.d/myconfig.conf', ++ '/etc/multipath/conf.d/myconfig.conf' ++ ) ++ ] ++ ++ assert sorted(copies_performed) == expected_copies +diff --git a/repos/system_upgrade/common/actors/multipath/target_uspace_configs/actor.py b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/actor.py +new file mode 100644 +index 00000000..bfe0219e +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/actor.py +@@ -0,0 +1,22 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import target_uspace_multipath_configs ++from leapp.models import MultipathConfigUpdatesInfo, MultipathInfo, TargetUserSpaceUpgradeTasks, UpgradeInitramfsTasks ++from leapp.tags import IPUWorkflowTag, TargetTransactionChecksPhaseTag ++ ++ ++class RequestMultipathConfsInTargetUserspace(Actor): ++ """ ++ Aggregates information about multipath configs. ++ ++ Produces uniform information consisting of copy instructions about which ++ multipath configs (original/updated) should be put into the target ++ userspace. 
++ """ ++ ++ name = 'request_multipath_conf_in_target_userspace' ++ consumes = (MultipathInfo, MultipathConfigUpdatesInfo) ++ produces = (TargetUserSpaceUpgradeTasks, UpgradeInitramfsTasks) ++ tags = (TargetTransactionChecksPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ target_uspace_multipath_configs.process() +diff --git a/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py +new file mode 100644 +index 00000000..0deda56b +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py +@@ -0,0 +1,66 @@ ++import os ++ ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ CopyFile, ++ DracutModule, ++ MultipathConfigUpdatesInfo, ++ MultipathInfo, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks ++) ++ ++ ++def request_mpath_dracut_module_for_upgrade_initramfs(): ++ multipath_mod = DracutModule(name='multipath') ++ request = UpgradeInitramfsTasks(include_dracut_modules=[multipath_mod]) ++ api.produce(request) ++ ++ ++def request_mpath_confs(multipath_info): ++ files_to_put_into_uspace = { # source system path -> target uspace destination ++ '/etc/multipath.conf': '/etc/multipath.conf' # default config ++ } ++ ++ if os.path.exists(multipath_info.config_dir): ++ for filename in os.listdir(multipath_info.config_dir): ++ config_path = os.path.join(multipath_info.config_dir, filename) ++ if not config_path.endswith('.conf'): ++ api.current_logger().debug( ++ 'Skipping {} as it does not have .conf extension'.format(config_path) ++ ) ++ continue ++ files_to_put_into_uspace[config_path] = config_path ++ ++ for config_updates in api.consume(MultipathConfigUpdatesInfo): ++ for update in config_updates.updates: ++ files_to_put_into_uspace[update.updated_config_location] = update.target_path ++ ++ # Note: original implementation would copy the /etc/multipath directory, which contains ++ # /etc/multipath/conf.d location for drop-in files. The current logic includes it automatically, ++ # if the user does not override this default location. In case that the default drop-in location ++ # is changed, this new location is used. ++ additional_files = ['/etc/xdrdevices.conf'] ++ for additional_file in additional_files: ++ if os.path.exists(additional_file): ++ files_to_put_into_uspace[additional_file] = additional_file ++ ++ copy_tasks = [] ++ for source_system_path, target_uspace_path in files_to_put_into_uspace.items(): ++ task = CopyFile(src=source_system_path, dst=target_uspace_path) ++ copy_tasks.append(task) ++ ++ tasks = TargetUserSpaceUpgradeTasks(copy_files=copy_tasks) ++ api.produce(tasks) ++ ++ ++def process(): ++ multipath_info = next(api.consume(MultipathInfo), None) ++ if not multipath_info: ++ api.current_logger().debug( ++ 'Received no MultipathInfo message. No configfiles will ' ++ 'be requested to be placed into target userspace.' 
++ ) ++ return ++ request_mpath_confs(multipath_info) ++ request_mpath_dracut_module_for_upgrade_initramfs() +diff --git a/repos/system_upgrade/common/models/multipath.py b/repos/system_upgrade/common/models/multipath.py +new file mode 100644 +index 00000000..1d1c53b5 +--- /dev/null ++++ b/repos/system_upgrade/common/models/multipath.py +@@ -0,0 +1,78 @@ ++from leapp.models import fields, Model ++from leapp.topics import SystemInfoTopic ++ ++ ++class MultipathInfo(Model): ++ """ Available information about multipath devices of the source system. """ ++ topic = SystemInfoTopic ++ ++ is_configured = fields.Boolean(default=False) ++ """ ++ True if multipath is configured on the system. ++ ++ Detected based on checking whether /etc/multipath.conf exists. ++ """ ++ ++ config_dir = fields.Nullable(fields.String()) ++ """ Value of config_dir in the defaults section. None if not set. """ ++ ++ ++class UpdatedMultipathConfig(Model): ++ """ Information about a multipath config that needed to be modified for the target system. """ ++ topic = SystemInfoTopic ++ ++ updated_config_location = fields.String() ++ """ Location of the updated config that should be propagated to the source system. """ ++ ++ target_path = fields.String() ++ """ Location where the updated config should be placed. """ ++ ++ ++class MultipathConfigUpdatesInfo(Model): ++ """ Aggregate information about multipath configs that were updated. """ ++ topic = SystemInfoTopic ++ ++ updates = fields.List(fields.Model(UpdatedMultipathConfig), default=[]) ++ """ Collection of multipath config updates that must be performed during the upgrade. """ ++ ++ ++class MultipathConfig8to9(Model): ++ """ ++ Model representing information about a multipath configuration file important for the 8>9 upgrade path. ++ ++ Note: This model is in the common repository due to technical reasons ++ (reusing parser code in a single actor), and it should not be emitted on ++ non-8to9 upgrade paths. In the future, this model will likely be moved into ++ el8toel9 repository. ++ """ ++ topic = SystemInfoTopic ++ ++ pathname = fields.String() ++ """Config file path name""" ++ ++ config_dir = fields.Nullable(fields.String()) ++ """Value of config_dir in the defaults section. None if not set""" ++ ++ enable_foreign_exists = fields.Boolean(default=False) ++ """True if enable_foreign is set in the defaults section""" ++ ++ invalid_regexes_exist = fields.Boolean(default=False) ++ """True if any regular expressions have the value of "*" """ ++ ++ allow_usb_exists = fields.Boolean(default=False) ++ """True if allow_usb_devices is set in the defaults section.""" ++ ++ ++class MultipathConfFacts8to9(Model): ++ """ ++ Model representing information from multipath configuration files important for the 8>9 upgrade path. ++ ++ Note: This model is in the common repository due to technical reasons ++ (reusing parser code in a single actor), and it should not be emitted on ++ non-8to9 upgrade paths. In the future, this model will likely be moved into ++ el8toel9 repository. 
++ """ ++ topic = SystemInfoTopic ++ ++ configs = fields.List(fields.Model(MultipathConfig8to9), default=[]) ++ """List of multipath configuration files""" +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/actor.py b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/actor.py +similarity index 57% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/actor.py +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/actor.py +index 6c3ef41b..ce6a1ebc 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/actor.py ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/actor.py +@@ -1,27 +1,26 @@ + from leapp.actors import Actor + from leapp.libraries.actor import multipathconfupdate +-from leapp.models import MultipathConfFacts8to9 +-from leapp.tags import ApplicationsPhaseTag, IPUWorkflowTag ++from leapp.models import MultipathConfFacts8to9, MultipathConfigUpdatesInfo ++from leapp.tags import IPUWorkflowTag, TargetTransactionChecksPhaseTag + + +-class MultipathConfUpdate8to9(Actor): ++class MultipathUpgradeConfUpdate8to9(Actor): + """ +- Modifies multipath configuration files on the target RHEL-9 system so that +- they will run properly. This is done in three ways ++ Modifies multipath configuration files on the target RHEL-9 upgrade userspace so that ++ we can mount multipath devices during the upgrade. This is done in three ways + 1. Adding the allow_usb_devices and enable_foreign options to + /etc/multipath.conf if they are not present, to retain RHEL-8 behavior + 2. Converting any "*" regular expression strings to ".*" + """ + +- name = 'multipath_conf_update_8to9' ++ name = 'multipath_upgrade_conf_update_8to9' + consumes = (MultipathConfFacts8to9,) +- produces = () +- tags = (ApplicationsPhaseTag, IPUWorkflowTag) ++ produces = (MultipathConfigUpdatesInfo,) ++ tags = (TargetTransactionChecksPhaseTag, IPUWorkflowTag) + + def process(self): + facts = next(self.consume(MultipathConfFacts8to9), None) + if facts is None: +- self.log.debug('Skipping execution. No MultipathConfFacts8to9 has ' +- 'been produced') ++ self.log.debug('Skipping execution. 
No MultipathConfFacts8to9 has been produced') + return + multipathconfupdate.update_configs(facts) +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/libraries/multipathconfupdate.py b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/libraries/multipathconfupdate.py +similarity index 67% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/libraries/multipathconfupdate.py +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/libraries/multipathconfupdate.py +index 9e49d78f..2dfde7b1 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/libraries/multipathconfupdate.py ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/libraries/multipathconfupdate.py +@@ -1,4 +1,11 @@ ++import os ++import shutil ++ + from leapp.libraries.common import multipathutil ++from leapp.libraries.stdlib import api ++from leapp.models import MultipathConfigUpdatesInfo, UpdatedMultipathConfig ++ ++MODIFICATIONS_STORE_PATH = '/var/lib/leapp/proposed_modifications' + + _regexes = ('vendor', 'product', 'revision', 'product_blacklist', 'devnode', + 'wwid', 'property', 'protocol') +@@ -71,10 +78,37 @@ def _update_config(need_foreign, need_allow_usb, config): + return contents + + ++def prepare_destination_for_file(file_path): ++ dirname = os.path.dirname(file_path) ++ os.makedirs(dirname, exist_ok=True) ++ ++ ++def prepare_place_for_config_modifications(workspace_path=MODIFICATIONS_STORE_PATH): ++ if os.path.exists(workspace_path): ++ shutil.rmtree(workspace_path) ++ os.mkdir(workspace_path) ++ ++ + def update_configs(facts): + need_foreign = not any(x for x in facts.configs if x.enable_foreign_exists) + need_allow_usb = not any(x for x in facts.configs if x.allow_usb_exists) ++ ++ config_updates = [] ++ prepare_place_for_config_modifications() ++ + for config in facts.configs: ++ original_config_location = config.pathname ++ ++ rootless_path = config.pathname.lstrip('/') ++ path_to_config_copy = os.path.join(MODIFICATIONS_STORE_PATH, rootless_path) ++ api.current_logger().debug( ++ 'Instead of modifying {}, preparing modified config at {}'.format( ++ config.pathname, ++ path_to_config_copy ++ ) ++ ) ++ updated_config_location = path_to_config_copy ++ + contents = _update_config(need_foreign, need_allow_usb, config) + need_foreign = False + need_allow_usb = False +@@ -83,4 +117,11 @@ def update_configs(facts): + config file. 
+ """ + if contents: +- multipathutil.write_config(config.pathname, contents) ++ prepare_destination_for_file(updated_config_location) ++ multipathutil.write_config(updated_config_location, contents) ++ ++ update = UpdatedMultipathConfig(updated_config_location=updated_config_location, ++ target_path=original_config_location) ++ config_updates.append(update) ++ ++ api.produce(MultipathConfigUpdatesInfo(updates=config_updates)) +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/all_the_things.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/all_the_things.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/all_the_things.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/all_the_things.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/allow_usb.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/allow_usb.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/allow_usb.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/allow_usb.conf +index e7a9c23e..0d7ad283 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/allow_usb.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/allow_usb.conf +@@ -1075,5 +1075,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/complicated.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/complicated.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/complicated.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/complicated.conf +index cbfaf801..31d3b61d 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/complicated.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/complicated.conf +@@ -1104,5 +1104,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/conf2.d/all_true.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/conf2.d/all_true.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/conf2.d/all_true.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/conf2.d/all_true.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/default_rhel8.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/default_rhel8.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/default_rhel8.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/default_rhel8.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/empty.conf 
b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/empty.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/empty.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/empty.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/empty_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/empty_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/empty_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/empty_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/missing_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/missing_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/missing_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/missing_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_defaults.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_defaults.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_defaults.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_defaults.conf +index 02d7c1a2..d50d6a71 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_defaults.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_defaults.conf +@@ -1045,7 +1045,7 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } + + defaults { # section added by Leapp +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_foreign.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_foreign.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_foreign.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_foreign.conf +index 9abffc40..d3d29c29 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/no_foreign.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/no_foreign.conf +@@ -1086,5 +1086,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/not_set_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/not_set_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/not_set_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/not_set_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/two_defaults.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/two_defaults.conf +similarity index 100% 
+rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/after/two_defaults.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/after/two_defaults.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/all_the_things.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/all_the_things.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/all_the_things.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/all_the_things.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/allow_usb.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/allow_usb.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/allow_usb.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/allow_usb.conf +index 57b6f97b..39681b85 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/allow_usb.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/allow_usb.conf +@@ -1074,5 +1074,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/complicated.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/complicated.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/complicated.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/complicated.conf +index 23d93ecf..c889461c 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/tests/files/complicated.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/complicated.conf +@@ -1103,5 +1103,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf1.d/empty.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf1.d/empty.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf1.d/empty.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf1.d/empty.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf1.d/nothing_important.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf1.d/nothing_important.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf1.d/nothing_important.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf1.d/nothing_important.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf2.d/all_true.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf2.d/all_true.conf +similarity index 100% +rename from 
repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf2.d/all_true.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf2.d/all_true.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf3.d/README b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf3.d/README +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/conf3.d/README +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/conf3.d/README +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/converted_the_things.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/converted_the_things.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/converted_the_things.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/converted_the_things.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/default_rhel8.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/default_rhel8.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/default_rhel8.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/default_rhel8.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/empty.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/empty.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/empty.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/empty.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/empty_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/empty_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/empty_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/empty_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/missing_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/missing_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/missing_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/missing_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_defaults.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_defaults.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_defaults.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_defaults.conf +index f7885ca8..ec8ddee2 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_defaults.conf 
++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_defaults.conf +@@ -1045,5 +1045,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_foreign.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_foreign.conf +similarity index 99% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_foreign.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_foreign.conf +index 9525731c..87f9a24c 100644 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/no_foreign.conf ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/no_foreign.conf +@@ -1085,5 +1085,5 @@ multipaths { + multipath { + wwid "33333333000001388" + alias "foo" +- } ++ } + } +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/not_set_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/not_set_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/not_set_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/not_set_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/set_in_dir.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/set_in_dir.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/set_in_dir.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/set_in_dir.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/two_defaults.conf b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/two_defaults.conf +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/files/before/two_defaults.conf +rename to repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/files/before/two_defaults.conf +diff --git a/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/test_multipath_conf_update_8to9.py b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/test_multipath_conf_update_8to9.py +new file mode 100644 +index 00000000..4ca73791 +--- /dev/null ++++ b/repos/system_upgrade/el8toel9/actors/multipath_upgrade_conf_patcher/tests/test_multipath_conf_update_8to9.py +@@ -0,0 +1,179 @@ ++import os ++ ++import pytest ++ ++from leapp.libraries.actor import multipathconfupdate ++from leapp.libraries.common import multipathutil ++from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import MultipathConfFacts8to9, MultipathConfig8to9 ++ ++BEFORE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/before') ++AFTER_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/after') ++ ++ ++def build_config(pathname, config_dir, enable_foreign_exists, invalid_regexes_exist, allow_usb_exists): ++ return MultipathConfig8to9( ++ pathname=pathname, ++ config_dir=config_dir, ++ 
enable_foreign_exists=enable_foreign_exists, ++ invalid_regexes_exist=invalid_regexes_exist, ++ allow_usb_exists=allow_usb_exists, ++ ) ++ ++ ++def build_facts(confs): ++ return MultipathConfFacts8to9(configs=confs) ++ ++ ++def mock_read_config(path): ++ """convert to full pathname""" ++ return multipathutil.read_config_orig(os.path.join(BEFORE_DIR, path)) ++ ++ ++default_rhel8_conf = build_config( ++ 'default_rhel8.conf', None, True, False, False) ++ ++all_the_things_conf = build_config( ++ 'all_the_things.conf', None, False, True, False) ++ ++converted_the_things_conf = build_config( ++ 'converted_the_things.conf', None, True, False, True) ++ ++idempotent_conf = build_config( ++ 'converted_the_things.conf', None, False, True, False) ++ ++complicated_conf = build_config( ++ 'complicated.conf', '/etc/multipath/conf.d', True, True, False) ++ ++no_foreign_conf = build_config( ++ 'no_foreign.conf', None, False, True, True) ++ ++allow_usb_conf = build_config( ++ 'allow_usb.conf', None, False, False, True) ++ ++no_defaults_conf = build_config( ++ 'no_defaults.conf', None, False, True, False) ++ ++two_defaults_conf = build_config( ++ 'two_defaults.conf', None, True, False, False) ++ ++empty_conf = build_config( ++ 'empty.conf', None, False, False, False) ++ ++missing_dir_conf = build_config( ++ 'missing_dir.conf', 'missing', False, True, False) ++ ++not_set_dir_conf = build_config( ++ 'not_set_dir.conf', 'conf1.d', False, True, False) ++ ++empty1_conf = build_config( ++ 'conf1.d/empty.conf', None, False, False, False) ++ ++nothing_important_conf = build_config( ++ 'conf1.d/nothing_important.conf', 'this_gets_ignored', False, False, False) ++ ++set_in_dir_conf = build_config( ++ 'set_in_dir.conf', 'conf2.d', False, False, False) ++ ++all_true_conf = build_config( ++ 'conf2.d/all_true.conf', None, True, True, True) ++ ++empty_dir_conf = build_config( ++ 'empty_dir.conf', 'conf3.d', False, False, False) ++ ++ ++@pytest.mark.parametrize( ++ 'config_facts', ++ [ ++ build_facts([default_rhel8_conf]), ++ build_facts([all_the_things_conf]), ++ build_facts([converted_the_things_conf]), ++ build_facts([complicated_conf]), ++ build_facts([no_foreign_conf]), ++ build_facts([allow_usb_conf]), ++ build_facts([no_defaults_conf]), ++ build_facts([two_defaults_conf]), ++ build_facts([empty_conf]), ++ build_facts([missing_dir_conf]), ++ build_facts([empty_dir_conf]), ++ build_facts([not_set_dir_conf, empty1_conf, nothing_important_conf]), ++ build_facts([set_in_dir_conf, all_true_conf]), ++ build_facts([idempotent_conf]) ++ ] ++) ++def test_all_facts(monkeypatch, config_facts): ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) ++ ++ config_writes = {} ++ ++ def write_config_mock(location, contents): ++ config_writes[location] = contents ++ ++ monkeypatch.setattr(multipathutil, 'read_config_orig', multipathutil.read_config, raising=False) ++ monkeypatch.setattr(multipathutil, 'read_config', mock_read_config) ++ monkeypatch.setattr(multipathutil, 'write_config', write_config_mock) ++ monkeypatch.setattr(multipathconfupdate, 'prepare_destination_for_file', lambda file_path: None) ++ monkeypatch.setattr(multipathconfupdate, 'prepare_place_for_config_modifications', lambda: None) ++ ++ multipathconfupdate.update_configs(config_facts) ++ ++ config_updates = {} ++ for config_updates_msg in produce_mock.model_instances: ++ for update in config_updates_msg.updates: ++ config_updates[update.target_path] = 
update.updated_config_location ++ ++ for config in config_facts.configs: ++ expected_conf_location = os.path.join(AFTER_DIR, config.pathname) ++ ++ if config.pathname not in config_updates: ++ assert not os.path.exists(expected_conf_location) ++ continue ++ ++ updated_config_location = config_updates[config.pathname] ++ actual_contents = config_writes[updated_config_location] ++ ++ updated_config_expected_location = os.path.join( ++ multipathconfupdate.MODIFICATIONS_STORE_PATH, ++ config.pathname.lstrip('/') ++ ) ++ ++ assert updated_config_location == updated_config_expected_location ++ ++ expected_contents = multipathutil.read_config_orig(expected_conf_location) ++ assert actual_contents == expected_contents ++ ++ ++def test_proposed_config_updates_store(monkeypatch): ++ """ Check whether configs are being stored in the expected path. """ ++ config = MultipathConfig8to9( ++ pathname='/etc/multipath.conf.d/xy.conf', ++ config_dir='', ++ enable_foreign_exists=False, ++ invalid_regexes_exist=False, ++ allow_usb_exists=False, ++ ) ++ ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) ++ ++ config_writes = {} ++ ++ def write_config_mock(location, contents): ++ config_writes[location] = contents ++ ++ monkeypatch.setattr(multipathutil, 'write_config', write_config_mock) ++ monkeypatch.setattr(multipathconfupdate, '_update_config', lambda *args: 'new config content') ++ monkeypatch.setattr(multipathconfupdate, 'prepare_destination_for_file', lambda file_path: None) ++ monkeypatch.setattr(multipathconfupdate, 'prepare_place_for_config_modifications', lambda: None) ++ ++ multipathconfupdate.update_configs(MultipathConfFacts8to9(configs=[config])) ++ ++ expected_updated_config_path = os.path.join( ++ multipathconfupdate.MODIFICATIONS_STORE_PATH, ++ 'etc/multipath.conf.d/xy.conf' ++ ) ++ assert expected_updated_config_path in config_writes +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/actor.py b/repos/system_upgrade/el8toel9/actors/multipathconfread/actor.py +deleted file mode 100644 +index 2b41ae8b..00000000 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfread/actor.py ++++ /dev/null +@@ -1,33 +0,0 @@ +-from leapp.actors import Actor +-from leapp.libraries.actor import multipathconfread +-from leapp.models import DistributionSignedRPM, MultipathConfFacts8to9, TargetUserSpaceUpgradeTasks +-from leapp.tags import FactsPhaseTag, IPUWorkflowTag +- +- +-class MultipathConfRead8to9(Actor): +- """ +- Read multipath configuration files and extract the necessary information +- +- Related files: +- - /etc/multipath.conf +- - /etc/multipath/ - any files inside the directory +- - /etc/xdrdevices.conf +- +- As well, create task (msg) to copy all needed multipath files into +- the target container as the files are needed to create proper initramfs. +- This covers the files mentioned above. 
+- """ +- +- name = 'multipath_conf_read_8to9' +- consumes = (DistributionSignedRPM,) +- produces = (MultipathConfFacts8to9, TargetUserSpaceUpgradeTasks) +- tags = (FactsPhaseTag, IPUWorkflowTag) +- +- def process(self): +- if multipathconfread.is_processable(): +- res = multipathconfread.get_multipath_conf_facts() +- if res: +- self.produce(res) +- # Create task to copy multipath config files Iff facts +- # are generated +- multipathconfread.produce_copy_to_target_task() +diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/test_multipath_conf_update_8to9.py b/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/test_multipath_conf_update_8to9.py +deleted file mode 100644 +index c18d6b85..00000000 +--- a/repos/system_upgrade/el8toel9/actors/multipathconfupdate/tests/test_multipath_conf_update_8to9.py ++++ /dev/null +@@ -1,119 +0,0 @@ +-import os +- +-from leapp.libraries.actor import multipathconfupdate +-from leapp.libraries.common import multipathutil +-from leapp.models import MultipathConfFacts8to9, MultipathConfig8to9 +- +-BEFORE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/before') +-AFTER_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files/after') +- +-converted_data = {} +- +- +-def build_config(pathname, config_dir, enable_foreign_exists, invalid_regexes_exist, allow_usb_exists): +- return MultipathConfig8to9( +- pathname=pathname, +- config_dir=config_dir, +- enable_foreign_exists=enable_foreign_exists, +- invalid_regexes_exist=invalid_regexes_exist, +- allow_usb_exists=allow_usb_exists, +- ) +- +- +-def build_facts(confs): +- return MultipathConfFacts8to9(configs=confs) +- +- +-def mock_read_config(path): +- """convert to full pathname""" +- return multipathutil.read_config_orig(os.path.join(BEFORE_DIR, path)) +- +- +-def mock_write_config(path, contents): +- converted_data[path] = contents +- +- +-default_rhel8_conf = build_config( +- 'default_rhel8.conf', None, True, False, False) +- +-all_the_things_conf = build_config( +- 'all_the_things.conf', None, False, True, False) +- +-converted_the_things_conf = build_config( +- 'converted_the_things.conf', None, True, False, True) +- +-idempotent_conf = build_config( +- 'converted_the_things.conf', None, False, True, False) +- +-complicated_conf = build_config( +- 'complicated.conf', '/etc/multipath/conf.d', True, True, False) +- +-no_foreign_conf = build_config( +- 'no_foreign.conf', None, False, True, True) +- +-allow_usb_conf = build_config( +- 'allow_usb.conf', None, False, False, True) +- +-no_defaults_conf = build_config( +- 'no_defaults.conf', None, False, True, False) +- +-two_defaults_conf = build_config( +- 'two_defaults.conf', None, True, False, False) +- +-empty_conf = build_config( +- 'empty.conf', None, False, False, False) +- +-missing_dir_conf = build_config( +- 'missing_dir.conf', 'missing', False, True, False) +- +-not_set_dir_conf = build_config( +- 'not_set_dir.conf', 'conf1.d', False, True, False) +- +-empty1_conf = build_config( +- 'conf1.d/empty.conf', None, False, False, False) +- +-nothing_important_conf = build_config( +- 'conf1.d/nothing_important.conf', 'this_gets_ignored', False, False, False) +- +-set_in_dir_conf = build_config( +- 'set_in_dir.conf', 'conf2.d', False, False, False) +- +-all_true_conf = build_config( +- 'conf2.d/all_true.conf', None, True, True, True) +- +-empty_dir_conf = build_config( +- 'empty_dir.conf', 'conf3.d', False, False, False) +- +-facts_list = [build_facts([default_rhel8_conf]), +- 
build_facts([all_the_things_conf]), +- build_facts([converted_the_things_conf]), +- build_facts([complicated_conf]), +- build_facts([no_foreign_conf]), +- build_facts([allow_usb_conf]), +- build_facts([no_defaults_conf]), +- build_facts([two_defaults_conf]), +- build_facts([empty_conf]), +- build_facts([missing_dir_conf]), +- build_facts([empty_dir_conf]), +- build_facts([not_set_dir_conf, empty1_conf, nothing_important_conf]), +- build_facts([set_in_dir_conf, all_true_conf]), +- build_facts([idempotent_conf])] +- +- +-def _test_facts(facts): +- multipathconfupdate.update_configs(facts) +- for config in facts.configs: +- expected_data = multipathutil.read_config_orig(os.path.join(AFTER_DIR, config.pathname)) +- if config.pathname in converted_data: +- assert converted_data[config.pathname] == expected_data +- else: +- assert expected_data is None +- +- +-def test_all_facts(monkeypatch): +- monkeypatch.setattr(multipathutil, 'read_config_orig', multipathutil.read_config, raising=False) +- monkeypatch.setattr(multipathutil, 'read_config', mock_read_config) +- monkeypatch.setattr(multipathutil, 'write_config', mock_write_config) +- for facts in facts_list: +- _test_facts(facts) +- converted_data.clear() +diff --git a/repos/system_upgrade/el8toel9/models/multipathconffacts.py b/repos/system_upgrade/el8toel9/models/multipathconffacts.py +deleted file mode 100644 +index 91d3ce35..00000000 +--- a/repos/system_upgrade/el8toel9/models/multipathconffacts.py ++++ /dev/null +@@ -1,30 +0,0 @@ +-from leapp.models import fields, Model +-from leapp.topics import SystemInfoTopic +- +- +-class MultipathConfig8to9(Model): +- """Model representing information about a multipath configuration file""" +- topic = SystemInfoTopic +- +- pathname = fields.String() +- """Config file path name""" +- +- config_dir = fields.Nullable(fields.String()) +- """Value of config_dir in the defaults section. None if not set""" +- +- enable_foreign_exists = fields.Boolean(default=False) +- """True if enable_foreign is set in the defaults section""" +- +- invalid_regexes_exist = fields.Boolean(default=False) +- """True if any regular expressions have the value of "*" """ +- +- allow_usb_exists = fields.Boolean(default=False) +- """True if allow_usb_devices is set in the defaults section.""" +- +- +-class MultipathConfFacts8to9(Model): +- """Model representing information from multipath configuration files""" +- topic = SystemInfoTopic +- +- configs = fields.List(fields.Model(MultipathConfig8to9), default=[]) +- """List of multipath configuration files""" +-- +2.52.0 + diff --git a/SOURCES/0080-Fix-remediation-command-to-wrap-it-with-quotes.patch b/SOURCES/0080-Fix-remediation-command-to-wrap-it-with-quotes.patch new file mode 100644 index 0000000..9d6db8f --- /dev/null +++ b/SOURCES/0080-Fix-remediation-command-to-wrap-it-with-quotes.patch @@ -0,0 +1,30 @@ +From 07e2ec22c2f1aae09a318b48562712f1477799b9 Mon Sep 17 00:00:00 2001 +From: karolinku +Date: Mon, 1 Dec 2025 16:51:48 +0100 +Subject: [PATCH 080/111] Fix remediation command to wrap it with quotes + +In checkrootsymlinks actor, remediation command did not include +necessary double quotes, what makes the command syntactically +incorrect and not able to apply. 
+ +Jira: RHEL-30447 +--- + repos/system_upgrade/common/actors/checkrootsymlinks/actor.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/repos/system_upgrade/common/actors/checkrootsymlinks/actor.py b/repos/system_upgrade/common/actors/checkrootsymlinks/actor.py +index c35272b2..7b89bf7a 100644 +--- a/repos/system_upgrade/common/actors/checkrootsymlinks/actor.py ++++ b/repos/system_upgrade/common/actors/checkrootsymlinks/actor.py +@@ -55,7 +55,7 @@ class CheckRootSymlinks(Actor): + os.path.relpath(item.target, '/'), + os.path.join('/', item.name)]) + commands.append(command) +- rem_commands = [['sh', '-c', ' && '.join(commands)]] ++ rem_commands = [['sh', '-c', '"{}"'.format(' && '.join(commands))]] + # Generate reports about non-utf8 absolute links presence + nonutf_count = len(absolute_links_nonutf) + if nonutf_count > 0: +-- +2.52.0 + diff --git a/SOURCES/0081-Add-upstream-doc-about-running-single-actor.patch b/SOURCES/0081-Add-upstream-doc-about-running-single-actor.patch new file mode 100644 index 0000000..72d742b --- /dev/null +++ b/SOURCES/0081-Add-upstream-doc-about-running-single-actor.patch @@ -0,0 +1,188 @@ +From 2c1ecc24b1b6bbba074a7b6cd2dab994ab26a6cb Mon Sep 17 00:00:00 2001 +From: karolinku +Date: Wed, 23 Apr 2025 11:46:07 +0200 +Subject: [PATCH 081/111] Add upstream doc about running single actor + +JIRA: RHELMISC-11596 +--- + .../tutorials/howto-single-actor-run.md | 155 ++++++++++++++++++ + docs/source/tutorials/index.rst | 1 + + 2 files changed, 156 insertions(+) + create mode 100644 docs/source/tutorials/howto-single-actor-run.md + +diff --git a/docs/source/tutorials/howto-single-actor-run.md b/docs/source/tutorials/howto-single-actor-run.md +new file mode 100644 +index 00000000..728ca083 +--- /dev/null ++++ b/docs/source/tutorials/howto-single-actor-run.md +@@ -0,0 +1,155 @@ ++# Running a single Actor ++ ++During development or debugging of actors there may appear a need of running single actor instead of the entire workflow. The advantages of such approach include: ++- **Time and resource efficiency** - Running the entire workflow takes time and resources. Source system is scanned, information is collected and stored, in-place upgrade process goes through several phases. All these actions take time, actors are run multiple times during debugging or development process, so preparing single actor execution lets us save time. ++- **Isolation of problem** - When debugged issue is related to single actor, this approach allows to isolate the issue without interference from other actors. ++ ++ ++```{hint} ++In practice, running a single actor for debugging does not have to be the best way to start when you do not have much experience with Leapp and IPU yet. However, in some cases it's still very valuable and helpful. ++``` ++ ++The execution of an actor using the `snactor` tool seems simple. In case of system upgrade leapp repositories it's not so straightforward and ++it can be quite complicated. In this guide we share our experience how to use `snactor` correctly, describing typical problems that developers hit. ++ ++There are two main approaches: ++- **Running an actor with an empty or non-existent leapp database** -- applicable when a crafted data (or no data at all) is needed. Usually during development. ++- **Running an actor with leapp database filled by previous leapp execution** -- useful for debugging when the leapp.db file is available and want to run the actor in the same context as it has been previously executed when an error occurred. 
++ ++```{note} ++The leapp database refers to the `leapp.db` file. In case of using snactor, it's by default present in the `.leapp` directory of the used leapp repository ++scope. ++``` ++ ++````{tip} ++Cleaning the database can be managed with `snactor` tool command: ++```shell ++snactor messages clear ++``` ++In other way, the database file can be also simply removed instead of using snactor. ++```` ++ ++ ++Since an actor seems to be an independent piece of code, there is a dependency chain to resolve inside a workflow, especially around consumed messages and configuration which have to be resolved. When running entire In-Place Upgrade process, those dependencies needed for each actor are satisfied by assignment of each actor to specific phase, where actors emit and consume messages in desired sequence. Single actor usually needs specific list of such requirements, which can be fulfilled by manual preparation of this dependency chain. This very limited amount of resources needed for single actor can be called minimal context. ++ ++ ++## Running a single actor with minimal context ++ ++It is possible to run a single actor without proceeding with `leapp preupgrade` machinery. ++This solution is based on the snactor tool. However, this solution requires minimal context to run. ++ ++As mentioned before and described in [article](https://leapp.readthedocs.io/en/stable/building-blocks-and-architecture.html#architecture-overview) ++about workflow architecture, most of the actors are part of the produce/consume chain of messages. Important step in this procedure is to recreate the sequence of actors to be run to fulfill a chain of dependencies and provide necessary variables. ++ ++Let's explain these steps based on a real case. The following example will be based on the `scan_fips` actor. ++ ++ ++### Initial configuration ++ ++All actors (even those which are not depending on any message emitted by other actors) depend on some initial configuration which is provided by the `ipu_workflow_config` [actor](https://github.com/oamg/leapp-repository/blob/main/repos/system_upgrade/common/actors/ipuworkflowconfig/libraries/ipuworkflowconfig.py). No matter what actor you would like to run, the first step is always to run the `ipu_workflow_config` actor. ++ ++Due to some missing initial variables, which usually are set by the framework, those variables need to be exported manually. Note that following vars are example ones, adjust them to your needs depending on your system configuration: ++```shell ++ ++export LEAPP_UPGRADE_PATH_FLAVOUR=default ++export LEAPP_UPGRADE_PATH_TARGET_RELEASE=9.8 ++export LEAPP_TARGET_OS=9.8 ++``` ++ ++The `ipu_workflow_config` actor produces `IPUWorkflow` message, which contains all required initial config, so at the beginning execute: ++ ++```shell ++snactor run ipu_workflow_config --print-output --save-output ++``` ++ ++```{note} ++Option `--save-output` is necessary to preserve output of this command in Leapp database. Without saving the message, it will not be available for other actors. Option *--print-output* is optional. ++``` ++ ++### Resolving actor's message dependencies ++ ++All basic information what actor consumes and produce can be found in each `actor.py` [code](https://github.com/oamg/leapp-repository/blob/main/repos/system_upgrade/common/actors/scanfips/actor.py#L13-L14). In case of `scan_fips` actor it's: ++ ++```shell ++ consumes = (KernelCmdline,) ++ produces = (FIPSInfo,) ++``` ++ ++This actor consumes one message and produces another. 
Now we need to track the consumed message, which is `KernelCmdline`. Grep the cloned repository to find that the actor which produces such [message](https://github.com/oamg/leapp-repository/blob/main/repos/system_upgrade/common/actors/scankernelcmdline/actor.py#L14) is `scan_kernel_cmdline`. ++ ++```shell ++snactor run scan_kernel_cmdline --print-output --save-output --actor-config IPUConfig ++``` ++ ++```{note} ++Important step here is to point out what actor config needs to be used, `IPUConfig` in that case. ++This parameter needs to be specified every time you want to run an actor, pointing to proper configuration. ++``` ++ ++This [scan_kernel_cmdline](https://github.com/oamg/leapp-repository/blob/main/repos/system_upgrade/common/actors/scankernelcmdline/actor.py#L13) doesn't consume anything: `consumes = ()`. So finally the desired actor can be run: ++ ++```shell ++snactor run scan_fips --print-output --save-output --actor-config IPUConfig ++``` ++ ++### Limitations ++Note that not all cases will be as simple as the presented one, sometimes actors depend on multiple messages originating from other actors, requiring longer session of environment recreation. ++ ++Also actors designed to run on other architectures will not be able to run. ++ ++## Run single actor with existing database ++ ++In contrast to the previous paragraph, where we operated only on self-created minimal context, the tutorial below will explain how to work with existing or provided context. ++Sometimes - especially for debugging and reproduction of the bug it is very convenient to use provided Leapp database *leapp.db*. This is a file containing all information needed to run Leapp framework on a system, including messages and configurations. Usually all necessary environment for actors is set up by ++first run of `leapp preupgrade` command, when starting from scratch. In this case, we already have `leapp.db` (e.g. transferred from other system) database file. ++ ++Every new run of `leapp` command creates another entry in the database. It creates ++another row in execution table with specific ID, so each context can be easily tracked and ++reproduced. ++ ++See the list of executions using the [leapp-inspector](https://leapp-repository.readthedocs.io/latest/tutorials/troubleshooting-debugging.html#troubleshooting-with-leapp-inspector) tool. ++ ++```shell ++leapp-inspector --db path/to/leapp.db executions ++``` ++Example output: ++```shell ++################################################################## ++ Executions of Leapp ++################################################################## ++Execution | Timestamp ++------------------------------------ | --------------------------- ++d146e105-fafd-43a2-a791-54e141eeab9c | 2025-11-26T19:39:20.563594Z ++b7fd5dca-a49f-4af7-b70c-8bbcc28a4338 | 2025-11-26T19:39:38.034070Z ++50b5289f-be4d-4206-a6e0-73e3caa1f9ed | 2025-11-26T19:41:40.401273Z ++ ++``` ++ ++ ++To determine which context (execution) `leapp` will run, there are two variables: `LEAPP_DEBUG_PRESERVE_CONTEXT` ++and `LEAPP_EXECUTION_ID`. When the `LEAPP_DEBUG_PRESERVE_CONTEXT` is set to 1 and the environment has ++`LEAPP_EXECUTION_ID` set, the `LEAPP_EXECUTION_ID` is not overwritten with snactor's execution ID. ++This allows the developer to run actors in the same way as if the actor was run during the last leapp's ++execution, thus, avoiding to rerun the entire upgrade process. 
++ ++ ++Set variables: ++```shell ++ ++export LEAPP_DEBUG_PRESERVE_CONTEXT=1 ++export LEAPP_EXECUTION_ID=50b5289f-be4d-4206-a6e0-73e3caa1f9ed ++``` ++ ++Run desired actors or the entire upgrade process safely now. Output will not be preserved as another context entry. ++```shell ++ ++snactor run --config /etc/leapp/leapp.conf --actor-config IPUConfig --print-output ++``` ++ ++```{note} ++Point to `leapp.conf` file with *--config* option. By default this file is located in `/etc/leapp/` and, among others, it contains Leapp database (`leapp.db`) location. When working with given database, either adjust configuration file or place database file in default location. ++``` ++ ++### Limitations ++ ++Even though the context was provided, it is not possible to run actors which are designed for different architecture than source system. +diff --git a/docs/source/tutorials/index.rst b/docs/source/tutorials/index.rst +index a04fc183..6059e76a 100644 +--- a/docs/source/tutorials/index.rst ++++ b/docs/source/tutorials/index.rst +@@ -19,6 +19,7 @@ write leapp actors for **In-Place Upgrades (IPU)** with the leapp framework. + + setup-devel-env + howto-first-actor-upgrade ++ howto-single-actor-run + custom-content + configurable-actors + templates/index +-- +2.52.0 + diff --git a/SOURCES/0082-add-handling-for-LVM-configuration.patch b/SOURCES/0082-add-handling-for-LVM-configuration.patch new file mode 100644 index 0000000..7c75147 --- /dev/null +++ b/SOURCES/0082-add-handling-for-LVM-configuration.patch @@ -0,0 +1,615 @@ +From 80169c215d6c59cfe86b3ac2fe9553fc3cf61836 Mon Sep 17 00:00:00 2001 +From: Peter Mocary +Date: Thu, 4 Dec 2025 14:52:41 +0100 +Subject: [PATCH 082/111] add handling for LVM configuration + +The relevant user LVM configuration is now copied into the target userspace +container along with enabling LVM dracut module for upgrade initramfs creation. +The LVM configuration is copied into the target userspace container when +lvm2 package in installed. Based on the configuration, the devices file +is also copied in when present and enabled. The --nolvmconf option used when +executing dracut is changed into --lvmconf instead if the files are +copied into the target userspace container. 
+ +Jira: RHEL-14712 +--- + .../common/actors/checklvm/actor.py | 24 +++ + .../actors/checklvm/libraries/checklvm.py | 74 ++++++++ + .../actors/checklvm/tests/test_checklvm.py | 92 +++++++++ + .../upgradeinitramfsgenerator/actor.py | 2 + + .../libraries/upgradeinitramfsgenerator.py | 36 ++-- + .../common/actors/scanlvmconfig/actor.py | 18 ++ + .../scanlvmconfig/libraries/scanlvmconfig.py | 52 ++++++ + .../scanlvmconfig/tests/test_scanlvmconfig.py | 176 ++++++++++++++++++ + .../system_upgrade/common/models/lvmconfig.py | 26 +++ + 9 files changed, 487 insertions(+), 13 deletions(-) + create mode 100644 repos/system_upgrade/common/actors/checklvm/actor.py + create mode 100644 repos/system_upgrade/common/actors/checklvm/libraries/checklvm.py + create mode 100644 repos/system_upgrade/common/actors/checklvm/tests/test_checklvm.py + create mode 100644 repos/system_upgrade/common/actors/scanlvmconfig/actor.py + create mode 100644 repos/system_upgrade/common/actors/scanlvmconfig/libraries/scanlvmconfig.py + create mode 100644 repos/system_upgrade/common/actors/scanlvmconfig/tests/test_scanlvmconfig.py + create mode 100644 repos/system_upgrade/common/models/lvmconfig.py + +diff --git a/repos/system_upgrade/common/actors/checklvm/actor.py b/repos/system_upgrade/common/actors/checklvm/actor.py +new file mode 100644 +index 00000000..167698db +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checklvm/actor.py +@@ -0,0 +1,24 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor.checklvm import check_lvm ++from leapp.models import DistributionSignedRPM, LVMConfig, TargetUserSpaceUpgradeTasks, UpgradeInitramfsTasks ++from leapp.reporting import Report ++from leapp.tags import ChecksPhaseTag, IPUWorkflowTag ++ ++ ++class CheckLVM(Actor): ++ """ ++ Check if the LVM is installed and ensure the target userspace container ++ and initramfs are prepared to support it. ++ ++ The LVM configuration files are copied into the target userspace container ++ so that the dracut is able to use them while creating the initramfs. ++ The dracut LVM module is enabled by this actor as well. ++ """ ++ ++ name = 'check_lvm' ++ consumes = (DistributionSignedRPM, LVMConfig) ++ produces = (Report, TargetUserSpaceUpgradeTasks, UpgradeInitramfsTasks) ++ tags = (ChecksPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ check_lvm() +diff --git a/repos/system_upgrade/common/actors/checklvm/libraries/checklvm.py b/repos/system_upgrade/common/actors/checklvm/libraries/checklvm.py +new file mode 100644 +index 00000000..073bfbf4 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checklvm/libraries/checklvm.py +@@ -0,0 +1,74 @@ ++import os ++ ++from leapp import reporting ++from leapp.libraries.common.rpms import has_package ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ CopyFile, ++ DistributionSignedRPM, ++ DracutModule, ++ LVMConfig, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks ++) ++ ++LVM_CONFIG_PATH = '/etc/lvm/lvm.conf' ++LVM_DEVICES_FILE_PATH_PREFIX = '/etc/lvm/devices' ++ ++ ++def _report_filter_detection(): ++ title = 'LVM filter definition detected.' ++ summary = ( ++ 'Beginning with RHEL 9, LVM devices file is used by default to select devices used by ' ++ f'LVM. Since leapp detected the use of LVM filter in the {LVM_CONFIG_PATH} configuration ' ++ 'file, the configuration won\'t be modified to use devices file during the upgrade and ' ++ 'the LVM filter will remain in use after the upgrade.' 
++ ) ++ ++ remediation_hint = ( ++ 'While not required, switching to the LVM devices file from the LVM filter is possible ' ++ 'using the following command. The command uses the existing LVM filter to create the system.devices ' ++ 'file which is then used instead of the LVM filter. Before running the command, ' ++ f'make sure that \'use_devicesfile=1\' is set in {LVM_CONFIG_PATH}.' ++ ) ++ remediation_command = ['vgimportdevices'] ++ ++ reporting.create_report([ ++ reporting.Title(title), ++ reporting.Summary(summary), ++ reporting.Remediation(hint=remediation_hint, commands=[remediation_command]), ++ reporting.ExternalLink( ++ title='Limiting LVM device visibility and usage', ++ url='https://red.ht/limiting-lvm-devices-visibility-and-usage', ++ ), ++ reporting.Severity(reporting.Severity.INFO), ++ ]) ++ ++ ++def check_lvm(): ++ if not has_package(DistributionSignedRPM, 'lvm2'): ++ return ++ ++ lvm_config = next(api.consume(LVMConfig), None) ++ if not lvm_config: ++ return ++ ++ lvm_devices_file_path = os.path.join(LVM_DEVICES_FILE_PATH_PREFIX, lvm_config.devices.devicesfile) ++ lvm_devices_file_exists = os.path.isfile(lvm_devices_file_path) ++ ++ filters_used = not lvm_config.devices.use_devicesfile or not lvm_devices_file_exists ++ if filters_used: ++ _report_filter_detection() ++ ++ api.current_logger().debug('Including lvm dracut module.') ++ api.produce(UpgradeInitramfsTasks(include_dracut_modules=[DracutModule(name='lvm')])) ++ ++ copy_files = [] ++ api.current_logger().debug('Copying "{}" to the target userspace.'.format(LVM_CONFIG_PATH)) ++ copy_files.append(CopyFile(src=LVM_CONFIG_PATH)) ++ ++ if lvm_devices_file_exists and lvm_config.devices.use_devicesfile: ++ api.current_logger().debug('Copying "{}" to the target userspace.'.format(lvm_devices_file_path)) ++ copy_files.append(CopyFile(src=lvm_devices_file_path)) ++ ++ api.produce(TargetUserSpaceUpgradeTasks(copy_files=copy_files)) +diff --git a/repos/system_upgrade/common/actors/checklvm/tests/test_checklvm.py b/repos/system_upgrade/common/actors/checklvm/tests/test_checklvm.py +new file mode 100644 +index 00000000..a7da8050 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checklvm/tests/test_checklvm.py +@@ -0,0 +1,92 @@ ++import os ++ ++import pytest ++ ++from leapp.libraries.actor import checklvm ++from leapp.libraries.common.testutils import produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ DistributionSignedRPM, ++ LVMConfig, ++ LVMConfigDevicesSection, ++ RPM, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks ++) ++ ++ ++def test_check_lvm_when_lvm_not_installed(monkeypatch): ++ def consume_mocked(model): ++ if model == LVMConfig: ++ assert False ++ if model == DistributionSignedRPM: ++ yield DistributionSignedRPM(items=[]) ++ ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(api, 'consume', consume_mocked) ++ ++ checklvm.check_lvm() ++ ++ assert not api.produce.called ++ ++ ++@pytest.mark.parametrize( ++ ('config', 'create_report', 'devices_file_exists'), ++ [ ++ (LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=False)), True, False), ++ (LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=True)), False, True), ++ (LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=True)), True, False), ++ (LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=False, devicesfile="test.devices")), True, False), ++ (LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=True, devicesfile="test.devices")), False, True), ++ 
(LVMConfig(devices=LVMConfigDevicesSection(use_devicesfile=True, devicesfile="test.devices")), True, False), ++ ] ++) ++def test_scan_when_lvm_installed(monkeypatch, config, create_report, devices_file_exists): ++ lvm_package = RPM( ++ name='lvm2', ++ version='2', ++ release='1', ++ epoch='1', ++ packager='', ++ arch='x86_64', ++ pgpsig='RSA/SHA256, Mon 01 Jan 1970 00:00:00 AM -03, Key ID 199e2f91fd431d51' ++ ) ++ ++ def isfile_mocked(_): ++ return devices_file_exists ++ ++ def consume_mocked(model): ++ if model == LVMConfig: ++ yield config ++ if model == DistributionSignedRPM: ++ yield DistributionSignedRPM(items=[lvm_package]) ++ ++ def report_filter_detection_mocked(): ++ assert create_report ++ ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(api, 'consume', consume_mocked) ++ monkeypatch.setattr(os.path, 'isfile', isfile_mocked) ++ monkeypatch.setattr(checklvm, '_report_filter_detection', report_filter_detection_mocked) ++ ++ checklvm.check_lvm() ++ ++ # The lvm is installed, thus the dracut module is enabled and at least the lvm.conf is copied ++ assert api.produce.called == 2 ++ assert len(api.produce.model_instances) == 2 ++ ++ expected_copied_files = [checklvm.LVM_CONFIG_PATH] ++ if devices_file_exists and config.devices.use_devicesfile: ++ devices_file_path = os.path.join(checklvm.LVM_DEVICES_FILE_PATH_PREFIX, config.devices.devicesfile) ++ expected_copied_files.append(devices_file_path) ++ ++ for produced_model in api.produce.model_instances: ++ assert isinstance(produced_model, (UpgradeInitramfsTasks, TargetUserSpaceUpgradeTasks)) ++ ++ if isinstance(produced_model, UpgradeInitramfsTasks): ++ assert len(produced_model.include_dracut_modules) == 1 ++ assert produced_model.include_dracut_modules[0].name == 'lvm' ++ else: ++ assert len(produced_model.copy_files) == len(expected_copied_files) ++ for file in produced_model.copy_files: ++ assert file.src in expected_copied_files +diff --git a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/actor.py b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/actor.py +index d99bab48..c0c93036 100644 +--- a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/actor.py ++++ b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/actor.py +@@ -6,6 +6,7 @@ from leapp.models import ( + BootContent, + FIPSInfo, + LiveModeConfig, ++ LVMConfig, + TargetOSInstallationImage, + TargetUserSpaceInfo, + TargetUserSpaceUpgradeTasks, +@@ -31,6 +32,7 @@ class UpgradeInitramfsGenerator(Actor): + consumes = ( + FIPSInfo, + LiveModeConfig, ++ LVMConfig, + RequiredUpgradeInitramPackages, # deprecated + TargetOSInstallationImage, + TargetUserSpaceInfo, +diff --git a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py +index eefdb41a..03447b7c 100644 +--- a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py ++++ b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py +@@ -12,6 +12,7 @@ from leapp.models import UpgradeDracutModule # deprecated + from leapp.models import ( + BootContent, + LiveModeConfig, ++ LVMConfig, + TargetOSInstallationImage, + TargetUserSpaceInfo, + TargetUserSpaceUpgradeTasks, +@@ -364,20 +365,29 @@ def generate_initram_disk(context): + def 
fmt_module_list(module_list): + return ','.join(mod.name for mod in module_list) + ++ env_variables = [ ++ 'LEAPP_KERNEL_VERSION={kernel_version}', ++ 'LEAPP_ADD_DRACUT_MODULES="{dracut_modules}"', ++ 'LEAPP_KERNEL_ARCH={arch}', ++ 'LEAPP_ADD_KERNEL_MODULES="{kernel_modules}"', ++ 'LEAPP_DRACUT_INSTALL_FILES="{files}"' ++ ] ++ ++ if next(api.consume(LVMConfig), None): ++ env_variables.append('LEAPP_DRACUT_LVMCONF="1"') ++ ++ env_variables = ' '.join(env_variables) ++ env_variables = env_variables.format( ++ kernel_version=_get_target_kernel_version(context), ++ dracut_modules=fmt_module_list(initramfs_includes.dracut_modules), ++ kernel_modules=fmt_module_list(initramfs_includes.kernel_modules), ++ arch=api.current_actor().configuration.architecture, ++ files=' '.join(initramfs_includes.files) ++ ) ++ cmd = os.path.join('/', INITRAM_GEN_SCRIPT_NAME) ++ + # FIXME: issue #376 +- context.call([ +- '/bin/sh', '-c', +- 'LEAPP_KERNEL_VERSION={kernel_version} ' +- 'LEAPP_ADD_DRACUT_MODULES="{dracut_modules}" LEAPP_KERNEL_ARCH={arch} ' +- 'LEAPP_ADD_KERNEL_MODULES="{kernel_modules}" ' +- 'LEAPP_DRACUT_INSTALL_FILES="{files}" {cmd}'.format( +- kernel_version=_get_target_kernel_version(context), +- dracut_modules=fmt_module_list(initramfs_includes.dracut_modules), +- kernel_modules=fmt_module_list(initramfs_includes.kernel_modules), +- arch=api.current_actor().configuration.architecture, +- files=' '.join(initramfs_includes.files), +- cmd=os.path.join('/', INITRAM_GEN_SCRIPT_NAME)) +- ], env=env) ++ context.call(['/bin/sh', '-c', f'{env_variables} {cmd}'], env=env) + + boot_files_info = copy_boot_files(context) + return boot_files_info +diff --git a/repos/system_upgrade/common/actors/scanlvmconfig/actor.py b/repos/system_upgrade/common/actors/scanlvmconfig/actor.py +new file mode 100644 +index 00000000..23ed032d +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scanlvmconfig/actor.py +@@ -0,0 +1,18 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import scanlvmconfig ++from leapp.models import DistributionSignedRPM, LVMConfig ++from leapp.tags import FactsPhaseTag, IPUWorkflowTag ++ ++ ++class ScanLVMConfig(Actor): ++ """ ++ Scan LVM configuration. 
++ """ ++ ++ name = 'scan_lvm_config' ++ consumes = (DistributionSignedRPM,) ++ produces = (LVMConfig,) ++ tags = (FactsPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ scanlvmconfig.scan() +diff --git a/repos/system_upgrade/common/actors/scanlvmconfig/libraries/scanlvmconfig.py b/repos/system_upgrade/common/actors/scanlvmconfig/libraries/scanlvmconfig.py +new file mode 100644 +index 00000000..37755e7c +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scanlvmconfig/libraries/scanlvmconfig.py +@@ -0,0 +1,52 @@ ++import os ++ ++from leapp.libraries.common.config import version ++from leapp.libraries.common.rpms import has_package ++from leapp.libraries.stdlib import api ++from leapp.models import DistributionSignedRPM, LVMConfig, LVMConfigDevicesSection ++ ++LVM_CONFIG_PATH = '/etc/lvm/lvm.conf' ++ ++ ++def _lvm_config_devices_parser(lvm_config_lines): ++ in_section = False ++ config = {} ++ for line in lvm_config_lines: ++ line = line.split("#", 1)[0].strip() ++ if not line: ++ continue ++ if "devices {" in line: ++ in_section = True ++ continue ++ if in_section and "}" in line: ++ in_section = False ++ if in_section: ++ value = line.split("=", 1) ++ config[value[0].strip()] = value[1].strip().strip('"') ++ return config ++ ++ ++def _read_config_lines(path): ++ with open(path) as lvm_conf_file: ++ return lvm_conf_file.readlines() ++ ++ ++def scan(): ++ if not has_package(DistributionSignedRPM, 'lvm2'): ++ return ++ ++ if not os.path.isfile(LVM_CONFIG_PATH): ++ api.current_logger().debug('The "{}" is not present on the system.'.format(LVM_CONFIG_PATH)) ++ return ++ ++ lvm_config_lines = _read_config_lines(LVM_CONFIG_PATH) ++ devices_section = _lvm_config_devices_parser(lvm_config_lines) ++ ++ lvm_config_devices = LVMConfigDevicesSection(use_devicesfile=int(version.get_source_major_version()) > 8) ++ if 'devicesfile' in devices_section: ++ lvm_config_devices.devicesfile = devices_section['devicesfile'] ++ ++ if 'use_devicesfile' in devices_section and devices_section['use_devicesfile'] in ['0', '1']: ++ lvm_config_devices.use_devicesfile = devices_section['use_devicesfile'] == '1' ++ ++ api.produce(LVMConfig(devices=lvm_config_devices)) +diff --git a/repos/system_upgrade/common/actors/scanlvmconfig/tests/test_scanlvmconfig.py b/repos/system_upgrade/common/actors/scanlvmconfig/tests/test_scanlvmconfig.py +new file mode 100644 +index 00000000..26728fd8 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scanlvmconfig/tests/test_scanlvmconfig.py +@@ -0,0 +1,176 @@ ++import os ++ ++import pytest ++ ++from leapp.libraries.actor import scanlvmconfig ++from leapp.libraries.common.config import version ++from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import DistributionSignedRPM, LVMConfig, LVMConfigDevicesSection, RPM ++ ++ ++@pytest.mark.parametrize( ++ ("config_as_lines", "config_as_dict"), ++ [ ++ ([], {}), ++ ( ++ ['devices {\n', ++ '\t# comment\n' ++ '}\n'], ++ {} ++ ), ++ ( ++ ['global {\n', ++ 'use_lvmetad = 1\n', ++ '}\n'], ++ {} ++ ), ++ ( ++ ['devices {\n', ++ 'filter = [ "r|/dev/cdrom|", "a|.*|" ]\n', ++ 'use_devicesfile=0\n', ++ 'devicesfile="file-name.devices"\n', ++ '}'], ++ {'filter': '[ "r|/dev/cdrom|", "a|.*|" ]', ++ 'use_devicesfile': '0', ++ 'devicesfile': 'file-name.devices'} ++ ), ++ ( ++ ['devices {\n', ++ 'use_devicesfile = 1\n', ++ 'devicesfile = "file-name.devices"\n', ++ ' }\n'], ++ {'use_devicesfile': '1', ++ 'devicesfile': 'file-name.devices'} ++ ), ++ ( ++ 
['devices {\n', ++ ' # comment\n', ++ 'use_devicesfile = 1 # comment\n', ++ '#devicesfile = "file-name.devices"\n', ++ ' }\n'], ++ {'use_devicesfile': '1'} ++ ), ++ ( ++ ['config {\n', ++ '# configuration section\n', ++ '\tabort_on_errors = 1\n', ++ '\tprofile_dir = "/etc/lvm/prifile\n', ++ '}\n', ++ 'devices {\n', ++ ' \n', ++ '\tfilter = ["a|.*|"] \n', ++ '\tuse_devicesfile=0\n', ++ '}\n', ++ 'allocation {\n', ++ '\tcling_tag_list = [ "@site1", "@site2" ]\n', ++ '\tcache_settings {\n', ++ '\t}\n', ++ '}\n' ++ ], ++ {'filter': '["a|.*|"]', 'use_devicesfile': '0'} ++ ), ++ ] ++ ++) ++def test_lvm_config_devices_parser(config_as_lines, config_as_dict): ++ lvm_config = scanlvmconfig._lvm_config_devices_parser(config_as_lines) ++ assert lvm_config == config_as_dict ++ ++ ++def test_scan_when_lvm_not_installed(monkeypatch): ++ def isfile_mocked(_): ++ assert False ++ ++ def read_config_lines_mocked(_): ++ assert False ++ ++ msgs = [ ++ DistributionSignedRPM(items=[]) ++ ] ++ ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(os.path, 'isfile', isfile_mocked) ++ monkeypatch.setattr(scanlvmconfig, '_read_config_lines', read_config_lines_mocked) ++ ++ scanlvmconfig.scan() ++ ++ assert not api.produce.called ++ ++ ++@pytest.mark.parametrize( ++ ('source_major_version', 'devices_section_dict', 'produced_devices_section'), ++ [ ++ ('8', {}, LVMConfigDevicesSection(use_devicesfile=False)), ++ ('9', {}, LVMConfigDevicesSection(use_devicesfile=True)), ++ ('8', { ++ 'use_devicesfile': '0', ++ }, LVMConfigDevicesSection(use_devicesfile=False, ++ devicesfile='system.devices') ++ ), ++ ('9', { ++ 'use_devicesfile': '0', ++ 'devicesfile': 'file-name.devices' ++ }, LVMConfigDevicesSection(use_devicesfile=False, ++ devicesfile='file-name.devices') ++ ), ++ ++ ('8', { ++ 'use_devicesfile': '1', ++ 'devicesfile': 'file-name.devices' ++ }, LVMConfigDevicesSection(use_devicesfile=True, ++ devicesfile='file-name.devices') ++ ), ++ ('9', { ++ 'use_devicesfile': '1', ++ }, LVMConfigDevicesSection(use_devicesfile=True, ++ devicesfile='system.devices') ++ ), ++ ++ ] ++ ++) ++def test_scan_when_lvm_installed(monkeypatch, source_major_version, devices_section_dict, produced_devices_section): ++ ++ def isfile_mocked(file): ++ assert file == scanlvmconfig.LVM_CONFIG_PATH ++ return True ++ ++ def read_config_lines_mocked(file): ++ assert file == scanlvmconfig.LVM_CONFIG_PATH ++ return ["test_line"] ++ ++ def lvm_config_devices_parser_mocked(lines): ++ assert lines == ["test_line"] ++ return devices_section_dict ++ ++ lvm_package = RPM( ++ name='lvm2', ++ version='2', ++ release='1', ++ epoch='1', ++ packager='', ++ arch='x86_64', ++ pgpsig='RSA/SHA256, Mon 01 Jan 1970 00:00:00 AM -03, Key ID 199e2f91fd431d51' ++ ) ++ ++ msgs = [ ++ DistributionSignedRPM(items=[lvm_package]) ++ ] ++ ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(version, 'get_source_major_version', lambda: source_major_version) ++ monkeypatch.setattr(os.path, 'isfile', isfile_mocked) ++ monkeypatch.setattr(scanlvmconfig, '_read_config_lines', read_config_lines_mocked) ++ monkeypatch.setattr(scanlvmconfig, '_lvm_config_devices_parser', lvm_config_devices_parser_mocked) ++ ++ scanlvmconfig.scan() ++ ++ assert api.produce.called == 1 ++ assert len(api.produce.model_instances) == 1 ++ ++ produced_model = api.produce.model_instances[0] ++ assert 
isinstance(produced_model, LVMConfig) ++ assert produced_model.devices == produced_devices_section +diff --git a/repos/system_upgrade/common/models/lvmconfig.py b/repos/system_upgrade/common/models/lvmconfig.py +new file mode 100644 +index 00000000..ab5e7815 +--- /dev/null ++++ b/repos/system_upgrade/common/models/lvmconfig.py +@@ -0,0 +1,26 @@ ++from leapp.models import fields, Model ++from leapp.topics import SystemInfoTopic ++ ++ ++class LVMConfigDevicesSection(Model): ++ """The devices section from the LVM configuration.""" ++ topic = SystemInfoTopic ++ ++ use_devicesfile = fields.Boolean() ++ """ ++ Determines whether only the devices in the devices file are used by LVM. Note ++ that the default value changed on the RHEL 9 to True. ++ """ ++ ++ devicesfile = fields.String(default="system.devices") ++ """ ++ Defines the name of the devices file that should be used. The default devices ++ file is located in '/etc/lvm/devices/system.devices'. ++ """ ++ ++ ++class LVMConfig(Model): ++ """LVM configuration split into sections.""" ++ topic = SystemInfoTopic ++ ++ devices = fields.Model(LVMConfigDevicesSection) +-- +2.52.0 + diff --git a/SOURCES/0083-multipath-do-not-crash-when-there-is-no-multipath.co.patch b/SOURCES/0083-multipath-do-not-crash-when-there-is-no-multipath.co.patch new file mode 100644 index 0000000..bbb57d3 --- /dev/null +++ b/SOURCES/0083-multipath-do-not-crash-when-there-is-no-multipath.co.patch @@ -0,0 +1,152 @@ +From c960a70efc2d9fbbd9819b0276bcd9fbac2416e9 Mon Sep 17 00:00:00 2001 +From: Michal Hecko +Date: Mon, 8 Dec 2025 15:49:09 +0100 +Subject: [PATCH 083/111] multipath: do not crash when there is no + multipath.conf + +Our newly introduced handling of multipath in the upgrade initramfs has +a bug when it tries to check whether multipath_info.config_dir exists. +However, when multipath config does not exists, a default message with +multipath_info.config_dir=None is produced, causing an unhandled +exception. This patch fixes the issue. Moreover, an additional issue +when updated configs are not *guaranteed* to be placed into the target +uspace was discovered. 
+--- + .../target_uspace_multipath_configs.py | 16 +++- + .../tests/test_target_uspace_configs.py | 86 +++++++++++++++++++ + 2 files changed, 101 insertions(+), 1 deletion(-) + create mode 100644 repos/system_upgrade/common/actors/multipath/target_uspace_configs/tests/test_target_uspace_configs.py + +diff --git a/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py +index 0deda56b..72afc477 100644 +--- a/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py ++++ b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/libraries/target_uspace_multipath_configs.py +@@ -34,6 +34,11 @@ def request_mpath_confs(multipath_info): + + for config_updates in api.consume(MultipathConfigUpdatesInfo): + for update in config_updates.updates: ++ # Detect /etc/multipath.conf > /etc/multipath.conf, and replace it with the patched ++ # version PATCHED > /etc/multipath.conf ++ if update.target_path in files_to_put_into_uspace: ++ del files_to_put_into_uspace[update.target_path] ++ + files_to_put_into_uspace[update.updated_config_location] = update.target_path + + # Note: original implementation would copy the /etc/multipath directory, which contains +@@ -56,11 +61,20 @@ def request_mpath_confs(multipath_info): + + def process(): + multipath_info = next(api.consume(MultipathInfo), None) ++ + if not multipath_info: + api.current_logger().debug( +- 'Received no MultipathInfo message. No configfiles will ' ++ 'Received no MultipathInfo message. No config files will ' ++ 'be requested to be placed into target userspace.' ++ ) ++ return ++ ++ if not multipath_info.is_configured: ++ api.current_logger().debug( ++ 'Multipath is not configured. No config files will ' + 'be requested to be placed into target userspace.' + ) + return ++ + request_mpath_confs(multipath_info) + request_mpath_dracut_module_for_upgrade_initramfs() +diff --git a/repos/system_upgrade/common/actors/multipath/target_uspace_configs/tests/test_target_uspace_configs.py b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/tests/test_target_uspace_configs.py +new file mode 100644 +index 00000000..ffb63322 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/multipath/target_uspace_configs/tests/test_target_uspace_configs.py +@@ -0,0 +1,86 @@ ++import os ++import shutil ++ ++import pytest ++ ++from leapp.libraries.actor import target_uspace_multipath_configs as actor_lib ++from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ MultipathConfigUpdatesInfo, ++ MultipathInfo, ++ TargetUserSpaceUpgradeTasks, ++ UpdatedMultipathConfig, ++ UpgradeInitramfsTasks ++) ++ ++ ++@pytest.mark.parametrize( ++ ('multipath_info', 'should_produce'), ++ [ ++ (None, False), # No multipath info message ++ (MultipathInfo(is_configured=False), False), # Multipath is not configured ++ (MultipathInfo(is_configured=True, config_dir='/etc/multipath/conf.d'), True) ++ ] ++) ++def test_production_conditions(monkeypatch, multipath_info, should_produce): ++ """ Test whether messages are produced under right conditions. 
""" ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) ++ ++ msgs = [multipath_info] if multipath_info else [] ++ if multipath_info and multipath_info.is_configured: ++ update = UpdatedMultipathConfig( ++ updated_config_location='/var/lib/leapp/proposed_changes/etc/multipath/conf.d/config.conf', ++ target_path='/etc/multipath/conf.d/config.conf' ++ ) ++ msgs.append(MultipathConfigUpdatesInfo(updates=[update])) ++ ++ actor_mock = CurrentActorMocked(msgs=msgs) ++ monkeypatch.setattr(api, 'current_actor', actor_mock) ++ ++ def listdir_mock(path): ++ assert path == '/etc/multipath/conf.d' ++ return ['config.conf', 'config-not-to-be-touched.conf'] ++ ++ def exists_mock(path): ++ return path == '/etc/multipath/conf.d' ++ ++ monkeypatch.setattr(os.path, 'exists', exists_mock) ++ monkeypatch.setattr(os, 'listdir', listdir_mock) ++ ++ actor_lib.process() ++ ++ if should_produce: ++ _target_uspace_tasks = [ ++ msg for msg in produce_mock.model_instances if isinstance(msg, TargetUserSpaceUpgradeTasks) ++ ] ++ assert len(_target_uspace_tasks) == 1 ++ ++ target_uspace_tasks = _target_uspace_tasks[0] ++ ++ copies = sorted((copy.src, copy.dst) for copy in target_uspace_tasks.copy_files) ++ expected_copies = [ ++ ( ++ '/etc/multipath.conf', ++ '/etc/multipath.conf' ++ ), ++ ( ++ '/var/lib/leapp/proposed_changes/etc/multipath/conf.d/config.conf', ++ '/etc/multipath/conf.d/config.conf' ++ ), ++ ( ++ '/etc/multipath/conf.d/config-not-to-be-touched.conf', ++ '/etc/multipath/conf.d/config-not-to-be-touched.conf' ++ ) ++ ] ++ assert copies == sorted(expected_copies) ++ ++ _upgrade_initramfs_tasks = [m for m in produce_mock.model_instances if isinstance(m, UpgradeInitramfsTasks)] ++ assert len(_upgrade_initramfs_tasks) == 1 ++ upgrade_initramfs_tasks = _upgrade_initramfs_tasks[0] ++ ++ dracut_modules = [dracut_mod.name for dracut_mod in upgrade_initramfs_tasks.include_dracut_modules] ++ assert dracut_modules == ['multipath'] ++ else: ++ assert not produce_mock.called +-- +2.52.0 + diff --git a/SOURCES/0084-Replace-distro-specific-packages-during-conversion.patch b/SOURCES/0084-Replace-distro-specific-packages-during-conversion.patch new file mode 100644 index 0000000..518f66d --- /dev/null +++ b/SOURCES/0084-Replace-distro-specific-packages-during-conversion.patch @@ -0,0 +1,470 @@ +From 5b3ccd99ece89f880acf42162e456710ea13b1d4 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Tue, 18 Nov 2025 17:46:11 +0100 +Subject: [PATCH 084/111] Replace distro specific packages during conversion + +There are certain packages that are distribution specific and need to be +replaced in the DNF upgrade transaction with their target distro +counterpart when converting during the upgrade. For example the release +and logos packages. Some packages, such as packages containing +repository definitions or GPG keys, need to be removed without any +replacement. + +This patch introduces a new convert/swapdistropackages actor to +accomplish this. Currently only packages that need to be handled during +CS->RHEL and AL->RHEL conversion are handled, however the actor contains +a config dict to easily add more paths. 
+ +Jira: RHEL-110568 +--- + .../convert/swapdistropackages/actor.py | 20 ++ + .../libraries/swapdistropackages.py | 111 +++++++ + .../tests/test_swapdistropackages.py | 291 ++++++++++++++++++ + 3 files changed, 422 insertions(+) + create mode 100644 repos/system_upgrade/common/actors/convert/swapdistropackages/actor.py + create mode 100644 repos/system_upgrade/common/actors/convert/swapdistropackages/libraries/swapdistropackages.py + create mode 100644 repos/system_upgrade/common/actors/convert/swapdistropackages/tests/test_swapdistropackages.py + +diff --git a/repos/system_upgrade/common/actors/convert/swapdistropackages/actor.py b/repos/system_upgrade/common/actors/convert/swapdistropackages/actor.py +new file mode 100644 +index 00000000..f8d9c446 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/swapdistropackages/actor.py +@@ -0,0 +1,20 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import swapdistropackages ++from leapp.models import DistributionSignedRPM, RpmTransactionTasks ++from leapp.tags import ChecksPhaseTag, IPUWorkflowTag ++ ++ ++class SwapDistroPackages(Actor): ++ """ ++ Swap distribution specific packages. ++ ++ Does nothing if not converting. ++ """ ++ ++ name = 'swap_distro_packages' ++ consumes = (DistributionSignedRPM,) ++ produces = (RpmTransactionTasks,) ++ tags = (IPUWorkflowTag, ChecksPhaseTag) ++ ++ def process(self): ++ swapdistropackages.process() +diff --git a/repos/system_upgrade/common/actors/convert/swapdistropackages/libraries/swapdistropackages.py b/repos/system_upgrade/common/actors/convert/swapdistropackages/libraries/swapdistropackages.py +new file mode 100644 +index 00000000..f7e2ce68 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/swapdistropackages/libraries/swapdistropackages.py +@@ -0,0 +1,111 @@ ++import fnmatch ++ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id ++from leapp.libraries.common.config.version import get_target_major_version ++from leapp.libraries.stdlib import api ++from leapp.models import DistributionSignedRPM, RpmTransactionTasks ++ ++# Config for swapping distribution-specific RPMs ++# The keys can be in 2 "formats": ++# (, ) ++# (, , ) ++# The "swap" dict maps packages on the source distro to their replacements on ++# the target distro ++# The "remove" set lists packages or glob pattern for matching packages from ++# the source distro to remove without any replacement. 
++_CONFIG = { ++ ("centos", "rhel"): { ++ "swap": { ++ "centos-logos": "redhat-logos", ++ "centos-logos-httpd": "redhat-logos-httpd", ++ "centos-logos-ipa": "redhat-logos-ipa", ++ "centos-indexhtml": "redhat-indexhtml", ++ "centos-backgrounds": "redhat-backgrounds", ++ "centos-stream-release": "redhat-release", ++ }, ++ "remove": { ++ "centos-gpg-keys", ++ "centos-stream-repos", ++ # various release packages, typically contain repofiles ++ "centos-release-*", ++ # present on Centos (not Stream) 8, let's include them if they are potentially leftover ++ "centos-linux-release", ++ "centos-linux-repos", ++ "centos-obsolete-packages", ++ }, ++ }, ++ ("almalinux", "rhel"): { ++ "swap": { ++ "almalinux-logos": "redhat-logos", ++ "almalinux-logos-httpd": "redhat-logos-httpd", ++ "almalinux-logos-ipa": "redhat-logos-ipa", ++ "almalinux-indexhtml": "redhat-indexhtml", ++ "almalinux-backgrounds": "redhat-backgrounds", ++ "almalinux-release": "redhat-release", ++ }, ++ "remove": { ++ "almalinux-repos", ++ "almalinux-gpg-keys", ++ ++ "almalinux-release-*", ++ "centos-release-*", ++ "elrepo-release", ++ "epel-release", ++ }, ++ }, ++} ++ ++ ++def _get_config(source_distro, target_distro, target_major): ++ key = (source_distro, target_distro, target_major) ++ config = _CONFIG.get(key) ++ if config: ++ return config ++ ++ key = (source_distro, target_distro) ++ return _CONFIG.get(key) ++ ++ ++def _glob_match_rpms(rpms, pattern): ++ return [rpm for rpm in rpms if fnmatch.fnmatch(rpm, pattern)] ++ ++ ++def _make_transaction_tasks(config, rpms): ++ to_install = set() ++ to_remove = set() ++ for source_pkg, target_pkg in config.get("swap", {}).items(): ++ if source_pkg in rpms: ++ to_remove.add(source_pkg) ++ to_install.add(target_pkg) ++ ++ for pkg in config.get("remove", {}): ++ matches = _glob_match_rpms(rpms, pkg) ++ to_remove.update(matches) ++ ++ return RpmTransactionTasks(to_install=list(to_install), to_remove=list(to_remove)) ++ ++ ++def process(): ++ rpms_msg = next(api.consume(DistributionSignedRPM), None) ++ if not rpms_msg: ++ raise StopActorExecutionError("Did not receive DistributionSignedRPM message") ++ ++ source_distro = get_source_distro_id() ++ target_distro = get_target_distro_id() ++ ++ if source_distro == target_distro: ++ return ++ ++ config = _get_config(source_distro, target_distro, get_target_major_version()) ++ if not config: ++ api.current_logger().warning( ++ "Could not find config for handling distro specific packages for {}->{} upgrade.".format( ++ source_distro, target_distro ++ ) ++ ) ++ return ++ ++ rpms = {rpm.name for rpm in rpms_msg.items} ++ task = _make_transaction_tasks(config, rpms) ++ api.produce(task) +diff --git a/repos/system_upgrade/common/actors/convert/swapdistropackages/tests/test_swapdistropackages.py b/repos/system_upgrade/common/actors/convert/swapdistropackages/tests/test_swapdistropackages.py +new file mode 100644 +index 00000000..99bb9c20 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/swapdistropackages/tests/test_swapdistropackages.py +@@ -0,0 +1,291 @@ ++from unittest import mock ++ ++import pytest ++ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.actor import swapdistropackages ++from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import DistributionSignedRPM, RPM, RpmTransactionTasks ++ ++ ++def test_get_config(monkeypatch): ++ test_config = { ++ ("centos", "rhel"): { ++ "swap": {"pkgA": "pkgB"}, ++ 
"remove": { ++ "pkgC", ++ }, ++ }, ++ ("centos", "rhel", 10): {"swap": {"pkg1": "pkg2"}}, ++ } ++ monkeypatch.setattr(swapdistropackages, "_CONFIG", test_config) ++ ++ expect = { ++ "swap": {"pkgA": "pkgB"}, ++ "remove": { ++ "pkgC", ++ }, ++ } ++ # fallback to (centos, rhel) when there is no target version specific config ++ cfg = swapdistropackages._get_config("centos", "rhel", 9) ++ assert cfg == expect ++ ++ # has it's own target version specific config ++ cfg = swapdistropackages._get_config("centos", "rhel", 10) ++ assert cfg == {"swap": {"pkg1": "pkg2"}} ++ ++ # not mapped ++ cfg = swapdistropackages._get_config("almalinux", "rhel", 9) ++ assert not cfg ++ ++ ++@pytest.mark.parametrize( ++ "rpms,config,expected", ++ [ ++ ( ++ ["pkgA", "pkgB", "pkgC"], ++ { ++ "swap": {"pkgA": "pkgB"}, ++ "remove": { ++ "pkgC", ++ }, ++ }, ++ RpmTransactionTasks(to_install=["pkgB"], to_remove=["pkgA", "pkgC"]), ++ ), ++ # only some pkgs present ++ ( ++ ["pkg1", "pkgA", "pkg-other"], ++ { ++ "swap": {"pkgX": "pkgB", "pkg1": "pkg2"}, ++ "remove": {"pkg*"}, ++ }, ++ RpmTransactionTasks( ++ to_install=["pkg2"], to_remove=["pkgA", "pkg1", "pkg-other"] ++ ), ++ ), ++ ( ++ ["pkgA", "pkgB"], ++ {}, ++ RpmTransactionTasks(to_install=[], to_remove=[]), ++ ), ++ ], ++) ++def test__make_transaction_tasks(rpms, config, expected): ++ tasks = swapdistropackages._make_transaction_tasks(config, rpms) ++ assert set(tasks.to_install) == set(expected.to_install) ++ assert set(tasks.to_remove) == set(expected.to_remove) ++ ++ ++def test_process_ok(monkeypatch): ++ def _msg_pkgs(pkgnames): ++ rpms = [] ++ for name in pkgnames: ++ rpms.append(RPM( ++ name=name, ++ epoch="0", ++ packager="packager", ++ version="1.2", ++ release="el9", ++ arch="noarch", ++ pgpsig="", ++ )) ++ return DistributionSignedRPM(items=rpms) ++ ++ rpms = [ ++ "centos-logos", ++ "centos-logos-httpd", ++ "centos-logos-ipa", ++ "centos-indexhtml", ++ "centos-backgrounds", ++ "centos-stream-release", ++ "centos-gpg-keys", ++ "centos-stream-repos", ++ "centos-linux-release", ++ "centos-linux-repos", ++ "centos-obsolete-packages", ++ "centos-release-automotive", ++ "centos-release-automotive-experimental", ++ "centos-release-autosd", ++ "centos-release-ceph-pacific", ++ "centos-release-ceph-quincy", ++ "centos-release-ceph-reef", ++ "centos-release-ceph-squid", ++ "centos-release-ceph-tentacle", ++ "centos-release-cloud", ++ "centos-release-gluster10", ++ "centos-release-gluster11", ++ "centos-release-gluster9", ++ "centos-release-hyperscale", ++ "centos-release-hyperscale-experimental", ++ "centos-release-hyperscale-experimental-testing", ++ "centos-release-hyperscale-spin", ++ "centos-release-hyperscale-spin-testing", ++ "centos-release-hyperscale-testing", ++ "centos-release-isa-override", ++ "centos-release-kmods", ++ "centos-release-kmods-kernel", ++ "centos-release-kmods-kernel-6", ++ "centos-release-messaging", ++ "centos-release-nfs-ganesha4", ++ "centos-release-nfs-ganesha5", ++ "centos-release-nfs-ganesha6", ++ "centos-release-nfs-ganesha7", ++ "centos-release-nfs-ganesha8", ++ "centos-release-nfv-common", ++ "centos-release-nfv-openvswitch", ++ "centos-release-okd-4", ++ "centos-release-openstack-antelope", ++ "centos-release-openstack-bobcat", ++ "centos-release-openstack-caracal", ++ "centos-release-openstack-dalmatian", ++ "centos-release-openstack-epoxy", ++ "centos-release-openstack-yoga", ++ "centos-release-openstack-zed", ++ "centos-release-openstackclient-xena", ++ "centos-release-opstools", ++ "centos-release-ovirt45", ++ 
"centos-release-ovirt45-testing", ++ "centos-release-proposed_updates", ++ "centos-release-rabbitmq-38", ++ "centos-release-samba414", ++ "centos-release-samba415", ++ "centos-release-samba416", ++ "centos-release-samba417", ++ "centos-release-samba418", ++ "centos-release-samba419", ++ "centos-release-samba420", ++ "centos-release-samba421", ++ "centos-release-samba422", ++ "centos-release-samba423", ++ "centos-release-storage-common", ++ "centos-release-virt-common", ++ ] ++ curr_actor_mocked = CurrentActorMocked( ++ src_distro="centos", ++ dst_distro="rhel", ++ msgs=[_msg_pkgs(rpms)], ++ ) ++ monkeypatch.setattr(api, 'current_actor', curr_actor_mocked) ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, 'produce', produce_mock) ++ ++ swapdistropackages.process() ++ ++ expected = RpmTransactionTasks( ++ to_install=[ ++ "redhat-logos", ++ "redhat-logos-httpd", ++ "redhat-logos-ipa", ++ "redhat-indexhtml", ++ "redhat-backgrounds", ++ "redhat-release", ++ ], ++ to_remove=rpms, ++ ) ++ ++ assert produce_mock.called == 1 ++ produced = produce_mock.model_instances[0] ++ assert set(produced.to_install) == set(expected.to_install) ++ assert set(produced.to_remove) == set(expected.to_remove) ++ ++ ++def test_process_no_config_skip(monkeypatch): ++ curr_actor_mocked = CurrentActorMocked( ++ src_distro="distroA", dst_distro="distroB", msgs=[DistributionSignedRPM()] ++ ) ++ monkeypatch.setattr(api, "current_actor", curr_actor_mocked) ++ monkeypatch.setattr(swapdistropackages, "_get_config", lambda *args: None) ++ monkeypatch.setattr(api, "current_logger", logger_mocked()) ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, "produce", produce_mock) ++ ++ swapdistropackages.process() ++ ++ assert produce_mock.called == 0 ++ assert ( ++ "Could not find config for handling distro specific packages for distroA->distroB upgrade" ++ ) in api.current_logger.warnmsg[0] ++ ++ ++@pytest.mark.parametrize("distro", ["rhel", "centos"]) ++def test_process_not_converting_skip(monkeypatch, distro): ++ curr_actor_mocked = CurrentActorMocked( ++ src_distro=distro, dst_distro=distro, msgs=[DistributionSignedRPM()] ++ ) ++ monkeypatch.setattr(api, "current_actor", curr_actor_mocked) ++ monkeypatch.setattr(api, "current_logger", logger_mocked()) ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, "produce", produce_mock) ++ ++ with mock.patch( ++ "leapp.libraries.actor.swapdistropackages._get_config" ++ ) as _get_config_mocked: ++ swapdistropackages.process() ++ _get_config_mocked.assert_not_called() ++ assert produce_mock.called == 0 ++ ++ ++def test_process_no_rpms_mgs(monkeypatch): ++ curr_actor_mocked = CurrentActorMocked(src_distro='centos', dst_distro='rhel') ++ monkeypatch.setattr(api, "current_actor", curr_actor_mocked) ++ produce_mock = produce_mocked() ++ monkeypatch.setattr(api, "produce", produce_mock) ++ ++ with pytest.raises( ++ StopActorExecutionError, ++ match="Did not receive DistributionSignedRPM message" ++ ): ++ swapdistropackages.process() ++ ++ assert produce_mock.called == 0 ++ ++ ++@pytest.mark.parametrize( ++ "pattern, expect", ++ [ ++ ( ++ "centos-release-*", ++ [ ++ "centos-release-samba420", ++ "centos-release-okd-4", ++ "centos-release-opstools", ++ ], ++ ), ++ ( ++ "almalinux-release-*", ++ [ ++ "almalinux-release-testing", ++ "almalinux-release-devel", ++ ], ++ ), ++ ( ++ "epel-release", ++ ["epel-release"], ++ ), ++ ], ++) ++def test_glob_match_rpms(pattern, expect): ++ """ ++ A simple test making sure the fnmatch works correctly for RPM names ++ since it 
was originally meant for filepaths. ++ """ ++ ++ TEST_GLOB_RPMS = [ ++ "centos-release-samba420", ++ "centos-stream-repos", ++ "centos-release-okd-4", ++ "centos-release", ++ "centos-release-opstools", ++ "release-centos", ++ "almalinux-release-devel", ++ "almalinux-release", ++ "almalinux-repos", ++ "release-almalinux", ++ "vim", ++ "epel-release", ++ "almalinux-release-testing", ++ "gcc-devel" ++ ] ++ actual = swapdistropackages._glob_match_rpms(TEST_GLOB_RPMS, pattern) ++ assert set(actual) == set(expect) +-- +2.52.0 + diff --git a/SOURCES/0085-Enable-CentOS-Stream-test-pipelines.patch b/SOURCES/0085-Enable-CentOS-Stream-test-pipelines.patch new file mode 100644 index 0000000..fbb0ec1 --- /dev/null +++ b/SOURCES/0085-Enable-CentOS-Stream-test-pipelines.patch @@ -0,0 +1,145 @@ +From 78e226508a201c16354a8acfd5238787872505a8 Mon Sep 17 00:00:00 2001 +From: Daniel Diblik +Date: Mon, 10 Nov 2025 16:04:19 +0100 +Subject: [PATCH 085/111] Enable CentOS Stream test pipelines + +Signed-off-by: Daniel Diblik +--- + .packit.yaml | 103 +++++++++++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 103 insertions(+) + +diff --git a/.packit.yaml b/.packit.yaml +index 83b7ce6a..e158c7e4 100644 +--- a/.packit.yaml ++++ b/.packit.yaml +@@ -460,6 +460,15 @@ jobs: + tmt: + plan_filter: 'tag:9to10' + environments: ++ - &tmt-env-settings-centos9to10 ++ tmt: ++ context: &tmt-context-centos9to10 ++ distro: "centos-9" ++ distro_target: "centos-10" ++ settings: ++ provisioning: ++ tags: ++ BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-96to100 + tmt: + context: &tmt-context-96to100 +@@ -478,6 +487,15 @@ jobs: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test ++ - &tmt-env-settings-centos9torhel101 ++ tmt: ++ context: &tmt-context-centos9torhel101 ++ distro: "centos-9" ++ distro_target: "rhel-10.1" ++ settings: ++ provisioning: ++ tags: ++ BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-98to102 + tmt: + context: &tmt-context-98to102 +@@ -487,6 +505,15 @@ jobs: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test ++ - &tmt-env-settings-centos9torhel102 ++ tmt: ++ context: &tmt-context-centos9torhel102 ++ distro: "centos-9" ++ distro_target: "rhel-10.2" ++ settings: ++ provisioning: ++ tags: ++ BusinessUnit: sst_upgrades@leapp_upstream_test + + - &sanity-abstract-9to10-aws + <<: *sanity-abstract-9to10 +@@ -705,3 +732,79 @@ jobs: + env: + <<: *env-98to102 + ++# ###################################################################### # ++# ########################## CentOS Stream ############################# # ++# ###################################################################### # ++ ++# ###################################################################### # ++# ###################### CentOS Stream > RHEL ########################## # ++# ###################################################################### # ++ ++# ###################################################################### # ++# ############################ 9 > 10.1 ################################ # ++# ###################################################################### # ++ ++- &sanity-centos9torhel101 ++ <<: *sanity-abstract-9to10 ++ trigger: pull_request ++ identifier: sanity-CentOS9toRHEL10.1 ++ targets: ++ epel-9-x86_64: ++ distros: [CentOS-Stream-9] ++ tf_extra_params: ++ test: ++ tmt: ++ plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' ++ environments: ++ - *tmt-env-settings-centos9torhel101 ++ env: &env-centos9to101 ++ 
SOURCE_RELEASE: "9" ++ TARGET_RELEASE: "10.1" ++ ++# ###################################################################### # ++# ############################ 9 > 10.2 ################################ # ++# ###################################################################### # ++ ++- &sanity-centos9torhel102 ++ <<: *sanity-abstract-9to10 ++ trigger: pull_request ++ identifier: sanity-CentOS9toRHEL10.2 ++ targets: ++ epel-9-x86_64: ++ distros: [CentOS-Stream-9] ++ tf_extra_params: ++ test: ++ tmt: ++ plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' ++ name: ++ environments: ++ - *tmt-env-settings-centos9torhel102 ++ env: &env-centos9torhel102 ++ SOURCE_RELEASE: "9" ++ TARGET_RELEASE: "10.2" ++ ++# ###################################################################### # ++# ################## CentOS Stream > CentOS Stream ##################### # ++# ###################################################################### # ++ ++# ###################################################################### # ++# ############################## 9 > 10 ################################ # ++# ###################################################################### # ++ ++- &sanity-centos-9to10 ++ <<: *sanity-abstract-9to10 ++ trigger: pull_request ++ identifier: sanity-CentOS9to10 ++ targets: ++ epel-9-x86_64: ++ distros: [CentOS-Stream-9] ++ tf_extra_params: ++ test: ++ tmt: ++ plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' ++ environments: ++ - *tmt-env-settings-centos9to10 ++ env: &env-centos9to10 ++ SOURCE_RELEASE: "9" ++ TARGET_RELEASE: "10" ++ TARGET_OS: "centos" +-- +2.52.0 + diff --git a/SOURCES/0086-docs-Fix-search-not-working.patch b/SOURCES/0086-docs-Fix-search-not-working.patch new file mode 100644 index 0000000..135570a --- /dev/null +++ b/SOURCES/0086-docs-Fix-search-not-working.patch @@ -0,0 +1,26 @@ +From 4ddf53061291db9b9bbd921a320ba2f306a2ffc8 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Mon, 8 Dec 2025 14:03:30 +0100 +Subject: [PATCH 086/111] docs: Fix search not working + +The jquery.js file was not getting properly put into the +build/html/_static/ directory. Removing this line seems to fix that. +--- + docs/source/conf.py | 1 - + 1 file changed, 1 deletion(-) + +diff --git a/docs/source/conf.py b/docs/source/conf.py +index a0e6a1de..dd39d3fa 100644 +--- a/docs/source/conf.py ++++ b/docs/source/conf.py +@@ -40,7 +40,6 @@ exclude_patterns = [] + + html_static_path = ['_static'] + html_theme = 'sphinx_rtd_theme' +-html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + + pygments_style = 'sphinx' + +-- +2.52.0 + diff --git a/SOURCES/0087-Handle-invalid-values-for-case-sensitive-SSH-options.patch b/SOURCES/0087-Handle-invalid-values-for-case-sensitive-SSH-options.patch new file mode 100644 index 0000000..7e1bae3 --- /dev/null +++ b/SOURCES/0087-Handle-invalid-values-for-case-sensitive-SSH-options.patch @@ -0,0 +1,95 @@ +From 4105452bc89b36359124f5a20d17b73b7512a928 Mon Sep 17 00:00:00 2001 +From: karolinku +Date: Mon, 15 Dec 2025 12:16:03 +0100 +Subject: [PATCH 087/111] Handle invalid values for case-sensitive SSH options + +Catch ModelViolationError when parsing sshd configuration files that +contain invalid values for case-sensitive options like PermitRootLogin +and UsePrivilegeSeparation. + +This change provides a clear error message +explaining that arguments are case-sensitive and lists the valid values +based on the model definition. 
+ +Jira: RHEL-19247 +--- + .../libraries/readopensshconfig.py | 26 ++++++++++++++++++- + ..._readopensshconfig_opensshconfigscanner.py | 13 ++++++++++ + 2 files changed, 38 insertions(+), 1 deletion(-) + +diff --git a/repos/system_upgrade/common/actors/opensshconfigscanner/libraries/readopensshconfig.py b/repos/system_upgrade/common/actors/opensshconfigscanner/libraries/readopensshconfig.py +index 50e37092..f467676b 100644 +--- a/repos/system_upgrade/common/actors/opensshconfigscanner/libraries/readopensshconfig.py ++++ b/repos/system_upgrade/common/actors/opensshconfigscanner/libraries/readopensshconfig.py +@@ -7,6 +7,7 @@ from leapp.exceptions import StopActorExecutionError + from leapp.libraries.common.rpms import check_file_modification + from leapp.libraries.stdlib import api + from leapp.models import OpenSshConfig, OpenSshPermitRootLogin ++from leapp.models.fields import ModelViolationError + + CONFIG = '/etc/ssh/sshd_config' + DEPRECATED_DIRECTIVES = ['showpatchlevel'] +@@ -60,12 +61,35 @@ def parse_config(config, base_config=None, current_cfg_depth=0): + # convert deprecated alias + if value == "without-password": + value = "prohibit-password" +- v = OpenSshPermitRootLogin(value=value, in_match=in_match) ++ try: ++ v = OpenSshPermitRootLogin(value=value, in_match=in_match) ++ except ModelViolationError: ++ valid_values = OpenSshPermitRootLogin.value.serialize()['choices'] ++ raise StopActorExecutionError( ++ 'Invalid SSH configuration: Invalid value for PermitRootLogin', ++ details={ ++ 'details': 'Invalid value "{}" for PermitRootLogin in {}. ' ++ 'Arguments for SSH configuration options are case-sensitive. ' ++ 'Valid values are: {}.' ++ .format(value, CONFIG, ', '.join(valid_values)) ++ } ++ ) + ret.permit_root_login.append(v) + + elif el[0].lower() == 'useprivilegeseparation': + # Record only first occurrence, which is effective + if not ret.use_privilege_separation: ++ valid_values = OpenSshConfig.use_privilege_separation.serialize()['choices'] ++ if value not in valid_values: ++ raise StopActorExecutionError( ++ 'Invalid SSH configuration: Invalid value for UsePrivilegeSeparation', ++ details={ ++ 'details': 'Invalid value "{}" for UsePrivilegeSeparation in {}. ' ++ 'Arguments for SSH configuration options are case-sensitive. ' ++ 'Valid values are: {}.' 
++ .format(value, CONFIG, ', '.join(valid_values)) ++ } ++ ) + ret.use_privilege_separation = value + + elif el[0].lower() == 'protocol': +diff --git a/repos/system_upgrade/common/actors/opensshconfigscanner/tests/test_readopensshconfig_opensshconfigscanner.py b/repos/system_upgrade/common/actors/opensshconfigscanner/tests/test_readopensshconfig_opensshconfigscanner.py +index 64c16f7f..1a6a1c9f 100644 +--- a/repos/system_upgrade/common/actors/opensshconfigscanner/tests/test_readopensshconfig_opensshconfigscanner.py ++++ b/repos/system_upgrade/common/actors/opensshconfigscanner/tests/test_readopensshconfig_opensshconfigscanner.py +@@ -351,6 +351,19 @@ def test_produce_config(): + assert cfg.subsystem_sftp == 'internal-sftp' + + ++@pytest.mark.parametrize('config_line,option_name,invalid_value', [ ++ ('PermitRootLogin NO', 'PermitRootLogin', 'NO'), ++ ('UsePrivilegeSeparation YES', 'UsePrivilegeSeparation', 'YES'), ++]) ++def test_parse_config_invalid_option_case(config_line, option_name, invalid_value): ++ config = [config_line] ++ ++ with pytest.raises(StopActorExecutionError) as err: ++ parse_config(config) ++ ++ assert str(err.value).startswith('Invalid SSH configuration') ++ ++ + def test_actor_execution(current_actor_context): + current_actor_context.run() + assert current_actor_context.consume(OpenSshConfig) +-- +2.52.0 + diff --git a/SOURCES/0088-pes_events_scanner-Also-remove-RHEL-9-events-in-remo.patch b/SOURCES/0088-pes_events_scanner-Also-remove-RHEL-9-events-in-remo.patch new file mode 100644 index 0000000..84b6950 --- /dev/null +++ b/SOURCES/0088-pes_events_scanner-Also-remove-RHEL-9-events-in-remo.patch @@ -0,0 +1,38 @@ +From f66867ab6dfcc998bf8df39753639936d5552048 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 18:28:54 +0200 +Subject: [PATCH 088/111] pes_events_scanner: Also remove RHEL 9 events in + remove_leapp_related_events() + +--- + .../peseventsscanner/libraries/pes_events_scanner.py | 9 +++++---- + 1 file changed, 5 insertions(+), 4 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +index 67e517d1..02107314 100644 +--- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py ++++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +@@ -492,15 +492,16 @@ def apply_transaction_configuration(source_pkgs, transaction_configuration): + + + def remove_leapp_related_events(events): +- # NOTE(ivasilev) Need to revisit this once rhel9->rhel10 upgrades become a thing +- leapp_pkgs = rpms.get_leapp_dep_packages( +- major_version=['7', '8']) + rpms.get_leapp_packages(major_version=['7', '8']) ++ major_vers = ['7', '8', '9'] ++ leapp_pkgs = rpms.get_leapp_dep_packages(major_vers) + rpms.get_leapp_packages(major_vers) + res = [] + for event in events: + if not any(pkg.name in leapp_pkgs for pkg in event.in_pkgs): + res.append(event) + else: +- api.current_logger().debug('Filtered out leapp related event, event id: {}'.format(event.id)) ++ api.current_logger().debug( ++ 'Filtered out leapp related event, event id: {}'.format(event.id) ++ ) + return res + + +-- +2.52.0 + diff --git a/SOURCES/0089-lib-overlaygen-Fix-possibly-unbound-var.patch b/SOURCES/0089-lib-overlaygen-Fix-possibly-unbound-var.patch new file mode 100644 index 0000000..f2f9ea5 --- /dev/null +++ b/SOURCES/0089-lib-overlaygen-Fix-possibly-unbound-var.patch @@ -0,0 +1,26 @@ +From 
0dce9ea14e28804746e10c40e659fbe525f6787a Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:18:39 +0200 +Subject: [PATCH 089/111] lib/overlaygen: Fix possibly unbound var + +--- + repos/system_upgrade/common/libraries/overlaygen.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/repos/system_upgrade/common/libraries/overlaygen.py b/repos/system_upgrade/common/libraries/overlaygen.py +index 83dc33b8..81342557 100644 +--- a/repos/system_upgrade/common/libraries/overlaygen.py ++++ b/repos/system_upgrade/common/libraries/overlaygen.py +@@ -670,8 +670,8 @@ def _overlay_disk_size_old(): + """ + Convenient function to retrieve the overlay disk size + """ ++ env_size = get_env('LEAPP_OVL_SIZE', '2048') + try: +- env_size = get_env('LEAPP_OVL_SIZE', '2048') + disk_size = int(env_size) + except ValueError: + disk_size = 2048 +-- +2.52.0 + diff --git a/SOURCES/0090-lib-rhui-Remove-RHEL-7-RHUI-setups.patch b/SOURCES/0090-lib-rhui-Remove-RHEL-7-RHUI-setups.patch new file mode 100644 index 0000000..1f766ff --- /dev/null +++ b/SOURCES/0090-lib-rhui-Remove-RHEL-7-RHUI-setups.patch @@ -0,0 +1,124 @@ +From bdcd9440b1ca3130e40d98233d60b76bdd674b3b Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 17:36:19 +0200 +Subject: [PATCH 090/111] lib/rhui: Remove RHEL 7 RHUI setups + +--- + .../checkrhui/tests/component_test_checkrhui.py | 2 +- + repos/system_upgrade/common/libraries/rhui.py | 17 +---------------- + 2 files changed, 2 insertions(+), 17 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py +index 2e6f279e..7fa2112f 100644 +--- a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py ++++ b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py +@@ -108,7 +108,7 @@ def mk_cloud_map(variants): + ] + ) + def test_determine_rhui_src_variant(monkeypatch, extra_pkgs, rhui_setups, expected_result): +- actor = CurrentActorMocked(src_ver='7.9', config=_make_default_config(all_rhui_cfg)) ++ actor = CurrentActorMocked(src_ver='8.10', config=_make_default_config(all_rhui_cfg)) + monkeypatch.setattr(api, 'current_actor', actor) + installed_pkgs = {'zip', 'zsh', 'bash', 'grubby'}.union(set(extra_pkgs)) + +diff --git a/repos/system_upgrade/common/libraries/rhui.py b/repos/system_upgrade/common/libraries/rhui.py +index c90c8c14..e200075f 100644 +--- a/repos/system_upgrade/common/libraries/rhui.py ++++ b/repos/system_upgrade/common/libraries/rhui.py +@@ -8,9 +8,6 @@ from leapp.libraries.common.config.version import get_source_major_version, get_ + from leapp.libraries.stdlib import api + from leapp.utils.deprecation import deprecated + +-# when on AWS and upgrading from RHEL 7, we need also Python2 version of "Amazon-id" dnf +-# plugin which is served by "leapp-rhui-aws" rpm package (please note this package is not +-# in any RH official repository but only in "rhui-client-config-*" repo) + DNF_PLUGIN_PATH_PY2 = '/usr/lib/python2.7/site-packages/dnf-plugins/' + YUM_REPOS_PATH = '/etc/yum.repos.d' + +@@ -101,7 +98,7 @@ class RHUIFamily: + + + def mk_rhui_setup(clients=None, leapp_pkg='', mandatory_files=None, optional_files=None, +- extra_info=None, os_version='7.0', arch=arch.ARCH_X86_64, content_channel=ContentChannel.GA, ++ extra_info=None, os_version='8.0', arch=arch.ARCH_X86_64, content_channel=ContentChannel.GA, + 
files_supporting_client_operation=None): + + os_version_fragments = os_version.split('.') +@@ -131,7 +128,6 @@ def mk_rhui_setup(clients=None, leapp_pkg='', mandatory_files=None, optional_fil + # the search for target equivalent to setups sharing the same family, and thus reducing a chance of error. + RHUI_SETUPS = { + RHUIFamily(RHUIProvider.AWS, client_files_folder='aws'): [ +- mk_rhui_setup(clients={'rh-amazon-rhui-client'}, optional_files=[], os_version='7'), + mk_rhui_setup(clients={'rh-amazon-rhui-client'}, leapp_pkg='leapp-rhui-aws', + mandatory_files=[ + ('rhui-client-config-server-8.crt', RHUI_PKI_PRODUCT_DIR), +@@ -171,7 +167,6 @@ RHUI_SETUPS = { + ], os_version='10'), + ], + RHUIFamily(RHUIProvider.AWS, arch=arch.ARCH_ARM64, client_files_folder='aws'): [ +- mk_rhui_setup(clients={'rh-amazon-rhui-client-arm'}, optional_files=[], os_version='7', arch=arch.ARCH_ARM64), + mk_rhui_setup(clients={'rh-amazon-rhui-client'}, leapp_pkg='leapp-rhui-aws', + mandatory_files=[ + ('rhui-client-config-server-8.crt', RHUI_PKI_PRODUCT_DIR), +@@ -209,8 +204,6 @@ RHUI_SETUPS = { + ], os_version='10'), + ], + RHUIFamily(RHUIProvider.AWS, variant=RHUIVariant.SAP, client_files_folder='aws-sap-e4s'): [ +- mk_rhui_setup(clients={'rh-amazon-rhui-client-sap-bundle'}, optional_files=[], os_version='7', +- content_channel=ContentChannel.E4S), + mk_rhui_setup(clients={'rh-amazon-rhui-client-sap-bundle-e4s'}, leapp_pkg='leapp-rhui-aws-sap-e4s', + mandatory_files=[ + ('rhui-client-config-server-8-sap-bundle.crt', RHUI_PKI_PRODUCT_DIR), +@@ -265,8 +258,6 @@ RHUI_SETUPS = { + ], os_version='10', content_channel=ContentChannel.E4S), + ], + RHUIFamily(RHUIProvider.AZURE, client_files_folder='azure'): [ +- mk_rhui_setup(clients={'rhui-azure-rhel7'}, os_version='7', +- extra_info={'agent_pkg': 'WALinuxAgent'}), + mk_rhui_setup(clients={'rhui-azure-rhel8'}, leapp_pkg='leapp-rhui-azure', + mandatory_files=[('leapp-azure.repo', YUM_REPOS_PATH)], + optional_files=[ +@@ -298,7 +289,6 @@ RHUI_SETUPS = { + os_version='10'), + ], + RHUIFamily(RHUIProvider.AZURE, variant=RHUIVariant.SAP_APPS, client_files_folder='azure-sap-apps'): [ +- mk_rhui_setup(clients={'rhui-azure-rhel7-base-sap-apps'}, os_version='7', content_channel=ContentChannel.EUS), + mk_rhui_setup(clients={'rhui-azure-rhel8-sapapps'}, leapp_pkg='leapp-rhui-azure-sap', + mandatory_files=[('leapp-azure-sap-apps.repo', YUM_REPOS_PATH)], + optional_files=[ +@@ -336,7 +326,6 @@ RHUI_SETUPS = { + os_version='10', content_channel=ContentChannel.EUS), + ], + RHUIFamily(RHUIProvider.AZURE, variant=RHUIVariant.SAP_HA, client_files_folder='azure-sap-ha'): [ +- mk_rhui_setup(clients={'rhui-azure-rhel7-base-sap-ha'}, os_version='7', content_channel=ContentChannel.E4S), + mk_rhui_setup(clients={'rhui-azure-rhel8-sap-ha'}, leapp_pkg='leapp-rhui-azure-sap', + mandatory_files=[('leapp-azure-sap-ha.repo', YUM_REPOS_PATH)], + optional_files=[ +@@ -374,8 +363,6 @@ RHUI_SETUPS = { + os_version='10', content_channel=ContentChannel.E4S), + ], + RHUIFamily(RHUIProvider.GOOGLE, client_files_folder='google'): [ +- mk_rhui_setup(clients={'google-rhui-client-rhel7'}, os_version='7'), +- mk_rhui_setup(clients={'google-rhui-client-rhel7-els'}, os_version='7'), + mk_rhui_setup(clients={'google-rhui-client-rhel8'}, leapp_pkg='leapp-rhui-google', + mandatory_files=[('leapp-google.repo', YUM_REPOS_PATH)], + files_supporting_client_operation=['leapp-google.repo'], +@@ -386,7 +373,6 @@ RHUI_SETUPS = { + os_version='9'), + ], + RHUIFamily(RHUIProvider.GOOGLE, variant=RHUIVariant.SAP, 
client_files_folder='google-sap'): [ +- mk_rhui_setup(clients={'google-rhui-client-rhel79-sap'}, os_version='7', content_channel=ContentChannel.E4S), + mk_rhui_setup(clients={'google-rhui-client-rhel8-sap'}, leapp_pkg='leapp-rhui-google-sap', + mandatory_files=[('leapp-google-sap.repo', YUM_REPOS_PATH)], + files_supporting_client_operation=['leapp-google-sap.repo'], +@@ -401,7 +387,6 @@ RHUI_SETUPS = { + os_version='9', content_channel=ContentChannel.E4S), + ], + RHUIFamily(RHUIProvider.ALIBABA, client_files_folder='alibaba'): [ +- mk_rhui_setup(clients={'client-rhel7'}, os_version='7'), + mk_rhui_setup(clients={'aliyun_rhui_rhel8'}, leapp_pkg='leapp-rhui-alibaba', + mandatory_files=[('leapp-alibaba.repo', YUM_REPOS_PATH)], + optional_files=[ +-- +2.52.0 + diff --git a/SOURCES/0091-lib-rhui-Remove-deprecated-code-and-setups-map.patch b/SOURCES/0091-lib-rhui-Remove-deprecated-code-and-setups-map.patch new file mode 100644 index 0000000..1400544 --- /dev/null +++ b/SOURCES/0091-lib-rhui-Remove-deprecated-code-and-setups-map.patch @@ -0,0 +1,311 @@ +From 6ab7f341c706ca32f8344c214e421e43fe657bae Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 12:13:14 +0200 +Subject: [PATCH 091/111] lib/rhui: Remove deprecated code and setups map + +--- + .../tests/component_test_checkrhui.py | 10 - + repos/system_upgrade/common/libraries/rhui.py | 250 ------------------ + 2 files changed, 260 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py +index 7fa2112f..f0820c86 100644 +--- a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py ++++ b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py +@@ -53,16 +53,6 @@ def mk_setup_info(): + return TargetRHUISetupInfo(preinstall_tasks=pre_tasks, postinstall_tasks=post_tasks) + + +-def iter_known_rhui_setups(): +- for upgrade_path, providers in rhui.RHUI_CLOUD_MAP.items(): +- for provider_variant, variant_description in providers.items(): +- src_clients = variant_description['src_pkg'] +- if isinstance(src_clients, str): +- src_clients = {src_clients, } +- +- yield provider_variant, upgrade_path, src_clients +- +- + def mk_cloud_map(variants): + upg_path = {} + for variant_desc in variants: +diff --git a/repos/system_upgrade/common/libraries/rhui.py b/repos/system_upgrade/common/libraries/rhui.py +index e200075f..7639a64f 100644 +--- a/repos/system_upgrade/common/libraries/rhui.py ++++ b/repos/system_upgrade/common/libraries/rhui.py +@@ -1,12 +1,8 @@ + import os + from collections import namedtuple + +-import six +- + from leapp.libraries.common.config import architecture as arch + from leapp.libraries.common.config.version import get_source_major_version, get_target_major_version +-from leapp.libraries.stdlib import api +-from leapp.utils.deprecation import deprecated + + DNF_PLUGIN_PATH_PY2 = '/usr/lib/python2.7/site-packages/dnf-plugins/' + YUM_REPOS_PATH = '/etc/yum.repos.d' +@@ -435,220 +431,6 @@ RHUI_SETUPS = { + } + + +-# DEPRECATED, use RHUI_SETUPS instead +-RHUI_CLOUD_MAP = { +- '7to8': { +- 'aws': { +- 'src_pkg': 'rh-amazon-rhui-client', +- 'target_pkg': 'rh-amazon-rhui-client', +- 'leapp_pkg': 'leapp-rhui-aws', +- 'leapp_pkg_repo': 'leapp-aws.repo', +- 'files_map': [ +- ('rhui-client-config-server-8.crt', RHUI_PKI_PRODUCT_DIR), +- ('rhui-client-config-server-8.key', RHUI_PKI_DIR), +- ('cdn.redhat.com-chain.crt', 
RHUI_PKI_DIR), +- (AWS_DNF_PLUGIN_NAME, DNF_PLUGIN_PATH_PY2), +- ('leapp-aws.repo', YUM_REPOS_PATH) +- ], +- }, +- 'aws-sap-e4s': { +- 'src_pkg': 'rh-amazon-rhui-client-sap-bundle', +- 'target_pkg': 'rh-amazon-rhui-client-sap-bundle-e4s', +- 'leapp_pkg': 'leapp-rhui-aws-sap-e4s', +- 'leapp_pkg_repo': 'leapp-aws-sap-e4s.repo', +- 'files_map': [ +- ('rhui-client-config-server-8-sap-bundle.crt', RHUI_PKI_PRODUCT_DIR), +- ('rhui-client-config-server-8-sap-bundle.key', RHUI_PKI_DIR), +- ('cdn.redhat.com-chain.crt', RHUI_PKI_DIR), +- (AWS_DNF_PLUGIN_NAME, DNF_PLUGIN_PATH_PY2), +- ('leapp-aws-sap-e4s.repo', YUM_REPOS_PATH) +- ], +- }, +- 'azure': { +- 'src_pkg': 'rhui-azure-rhel7', +- 'target_pkg': 'rhui-azure-rhel8', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure', +- 'leapp_pkg_repo': 'leapp-azure.repo', +- 'files_map': [ +- ('leapp-azure.repo', YUM_REPOS_PATH) +- ], +- }, +- 'azure-sap-apps': { +- 'src_pkg': 'rhui-azure-rhel7-base-sap-apps', +- 'target_pkg': 'rhui-azure-rhel8-sapapps', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure-sap', +- 'leapp_pkg_repo': 'leapp-azure-sap-apps.repo', +- 'files_map': [ +- ('leapp-azure-sap-apps.repo', YUM_REPOS_PATH), +- ], +- }, +- 'azure-sap-ha': { +- 'src_pkg': 'rhui-azure-rhel7-base-sap-ha', +- 'target_pkg': 'rhui-azure-rhel8-sap-ha', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure-sap', +- 'leapp_pkg_repo': 'leapp-azure-sap-ha.repo', +- 'files_map': [ +- ('leapp-azure-sap-ha.repo', YUM_REPOS_PATH) +- ], +- }, +- 'google': { +- 'src_pkg': 'google-rhui-client-rhel7', +- 'target_pkg': 'google-rhui-client-rhel8', +- 'leapp_pkg': 'leapp-rhui-google', +- 'leapp_pkg_repo': 'leapp-google.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-google.repo', YUM_REPOS_PATH) +- ], +- }, +- 'google-sap': { +- 'src_pkg': 'google-rhui-client-rhel79-sap', +- 'target_pkg': 'google-rhui-client-rhel8-sap', +- 'leapp_pkg': 'leapp-rhui-google-sap', +- 'leapp_pkg_repo': 'leapp-google-sap.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-google-sap.repo', YUM_REPOS_PATH) +- ], +- }, +- 'alibaba': { +- 'src_pkg': 'client-rhel7', +- 'target_pkg': 'aliyun_rhui_rhel8', +- 'leapp_pkg': 'leapp-rhui-alibaba', +- 'leapp_pkg_repo': 'leapp-alibaba.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-alibaba.repo', YUM_REPOS_PATH) +- ], +- } +- }, +- '8to9': { +- 'aws': { +- 'src_pkg': 'rh-amazon-rhui-client', +- 'target_pkg': 'rh-amazon-rhui-client', +- 'leapp_pkg': 'leapp-rhui-aws', +- 'leapp_pkg_repo': 'leapp-aws.repo', +- 'files_map': [ +- ('rhui-client-config-server-9.crt', RHUI_PKI_PRODUCT_DIR), +- ('rhui-client-config-server-9.key', RHUI_PKI_DIR), +- ('cdn.redhat.com-chain.crt', RHUI_PKI_DIR), +- ('leapp-aws.repo', YUM_REPOS_PATH) +- ], +- }, +- 'aws-sap-e4s': { +- 'src_pkg': 'rh-amazon-rhui-client-sap-bundle-e4s', +- 'target_pkg': 'rh-amazon-rhui-client-sap-bundle-e4s', +- 'leapp_pkg': 'leapp-rhui-aws-sap-e4s', +- 'leapp_pkg_repo': 'leapp-aws-sap-e4s.repo', +- 'files_map': [ +- ('rhui-client-config-server-9-sap-bundle.crt', RHUI_PKI_PRODUCT_DIR), +- ('rhui-client-config-server-9-sap-bundle.key', RHUI_PKI_DIR), +- ('cdn.redhat.com-chain.crt', RHUI_PKI_DIR), +- ('leapp-aws-sap-e4s.repo', YUM_REPOS_PATH) +- ], +- }, +- 'azure': { +- 'src_pkg': 'rhui-azure-rhel8', +- 'target_pkg': 'rhui-azure-rhel9', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 
'leapp-rhui-azure', +- 'leapp_pkg_repo': 'leapp-azure.repo', +- 'files_map': [ +- ('leapp-azure.repo', YUM_REPOS_PATH) +- ], +- }, +- # FIXME(mhecko): This entry is identical to the azure one, since we have no EUS content yet, therefore, it +- # # serves only the purpose of containing the name of rhui client package to correctly detect +- # # cloud provider. Trying to work around this entry by specifying --channel, will result in +- # # failures - there is no repomapping for EUS content, and the name of target pkg differs on EUS. +- # # If the EUS image is available sooner than the 'azure-eus' entry gets modified, the user can +- # # still upgrade to non-EUS, and switch the newly upgraded system to EUS manually. +- 'azure-eus': { +- 'src_pkg': 'rhui-azure-rhel8-eus', +- 'target_pkg': 'rhui-azure-rhel9', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure-eus', +- 'leapp_pkg_repo': 'leapp-azure.repo', +- 'files_map': [ +- ('leapp-azure.repo', YUM_REPOS_PATH) +- ], +- }, +- 'azure-sap-ha': { +- 'src_pkg': 'rhui-azure-rhel8-sap-ha', +- 'target_pkg': 'rhui-azure-rhel9-sap-ha', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure-sap', +- 'leapp_pkg_repo': 'leapp-azure-sap-ha.repo', +- 'files_map': [ +- ('leapp-azure-sap-ha.repo', YUM_REPOS_PATH) +- ], +- }, +- 'azure-sap-apps': { +- 'src_pkg': 'rhui-azure-rhel8-sapapps', +- 'target_pkg': 'rhui-azure-rhel9-sapapps', +- 'agent_pkg': 'WALinuxAgent', +- 'leapp_pkg': 'leapp-rhui-azure-sap', +- 'leapp_pkg_repo': 'leapp-azure-sap-apps.repo', +- 'files_map': [ +- ('leapp-azure-sap-apps.repo', YUM_REPOS_PATH) +- ], +- }, +- 'google': { +- 'src_pkg': 'google-rhui-client-rhel8', +- 'target_pkg': 'google-rhui-client-rhel9', +- 'leapp_pkg': 'leapp-rhui-google', +- 'leapp_pkg_repo': 'leapp-google.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-google.repo', YUM_REPOS_PATH) +- ], +- }, +- 'google-sap': { +- 'src_pkg': 'google-rhui-client-rhel8-sap', +- 'target_pkg': 'google-rhui-client-rhel9-sap', +- 'leapp_pkg': 'leapp-rhui-google-sap', +- 'leapp_pkg_repo': 'leapp-google-sap.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-google-sap.repo', YUM_REPOS_PATH) +- ], +- }, +- 'alibaba': { +- 'src_pkg': 'aliyun_rhui_rhel8', +- 'target_pkg': 'aliyun_rhui_rhel9', +- 'leapp_pkg': 'leapp-rhui-alibaba', +- 'leapp_pkg_repo': 'leapp-alibaba.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-alibaba.repo', YUM_REPOS_PATH) +- ], +- }, +- }, +- '9to10': { +- 'alibaba': { +- 'src_pkg': 'aliyun_rhui_rhel9', +- 'target_pkg': 'aliyun_rhui_rhel10', +- 'leapp_pkg': 'leapp-rhui-alibaba', +- 'leapp_pkg_repo': 'leapp-alibaba.repo', +- 'files_map': [ +- ('content.crt', RHUI_PKI_PRODUCT_DIR), +- ('key.pem', RHUI_PKI_DIR), +- ('leapp-alibaba.repo', YUM_REPOS_PATH) +- ], +- }, +- } +-} +- +- + def get_upg_path(): + """ + Get upgrade path in specific string format +@@ -658,38 +440,6 @@ def get_upg_path(): + return '{0}to{1}'.format(source_major_version, target_major_version) + + +-@deprecated(since='2023-07-27', message='This functionality has been replaced with the RHUIInfo message.') +-def gen_rhui_files_map(): +- """ +- Generate RHUI files map based on architecture and upgrade path +- """ +- arch = api.current_actor().configuration.architecture +- upg_path = get_upg_path() +- +- cloud_map = RHUI_CLOUD_MAP +- # for the moment the only arch related difference in RHUI package naming is 
on ARM +- if arch == 'aarch64': +- cloud_map[get_upg_path()]['aws']['src_pkg'] = 'rh-amazon-rhui-client-arm' +- +- files_map = dict((k, v['files_map']) for k, v in six.iteritems(cloud_map[upg_path])) +- return files_map +- +- +-@deprecated(since='2023-07-27', message='This functionality has been integrated into target_userspace_creator.') +-def copy_rhui_data(context, provider): +- """ +- Copy relevant RHUI certificates and key into the target userspace container +- """ +- rhui_dir = api.get_common_folder_path('rhui') +- data_dir = os.path.join(rhui_dir, provider) +- +- context.call(['mkdir', '-p', RHUI_PKI_PRODUCT_DIR]) +- context.call(['mkdir', '-p', RHUI_PKI_PRIVATE_DIR]) +- +- for path_ in gen_rhui_files_map().get(provider, ()): +- context.copy_to(os.path.join(data_dir, path_[0]), path_[1]) +- +- + def get_all_known_rhui_pkgs_for_current_upg(): + upg_major_versions = (get_source_major_version(), get_target_major_version()) + +-- +2.52.0 + diff --git a/SOURCES/0092-lib-gpg-Remove-RHEL-7-workarounds.patch b/SOURCES/0092-lib-gpg-Remove-RHEL-7-workarounds.patch new file mode 100644 index 0000000..1b6469d --- /dev/null +++ b/SOURCES/0092-lib-gpg-Remove-RHEL-7-workarounds.patch @@ -0,0 +1,139 @@ +From 8e0265729f92741665a0465d6d9ad0e7fafbc4ef Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 12:14:33 +0200 +Subject: [PATCH 092/111] lib/gpg: Remove RHEL 7 "workarounds" + +--- + repos/system_upgrade/common/libraries/gpg.py | 15 ++---- + .../common/libraries/tests/test_gpg.py | 48 +++++-------------- + 2 files changed, 16 insertions(+), 47 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/gpg.py b/repos/system_upgrade/common/libraries/gpg.py +index 9990cdcf..4c5133e7 100644 +--- a/repos/system_upgrade/common/libraries/gpg.py ++++ b/repos/system_upgrade/common/libraries/gpg.py +@@ -1,7 +1,7 @@ + import os + + from leapp.libraries.common import config +-from leapp.libraries.common.config.version import get_source_major_version, get_target_major_version ++from leapp.libraries.common.config.version import get_target_major_version + from leapp.libraries.stdlib import api, run + from leapp.models import GpgKey + +@@ -28,18 +28,11 @@ def _gpg_show_keys(key_path): + """ + Show keys in given file in version-agnostic manner + +- This runs gpg --show-keys (EL8) or gpg --with-fingerprints (EL7) +- to verify the given file exists, is readable and contains valid +- OpenPGP key data, which is printed in parsable format (--with-colons). ++ This runs gpg --show-keys to verify the given file exists, is readable and ++ contains valid OpenPGP key data, which is printed in parsable format (--with-colons). 
+ """ + try: +- cmd = ['gpg2'] +- # RHEL7 gnupg requires different switches to get the same output +- if get_source_major_version() == '7': +- cmd.append('--with-fingerprint') +- else: +- cmd.append('--show-keys') +- cmd += ['--with-colons', key_path] ++ cmd = ['gpg2', '--show-keys', '--with-colons', key_path] + # TODO: discussed, most likely the checked=False will be dropped + # and error will be handled in other functions + return run(cmd, split=True, checked=False) +diff --git a/repos/system_upgrade/common/libraries/tests/test_gpg.py b/repos/system_upgrade/common/libraries/tests/test_gpg.py +index 1394e60d..ec44f921 100644 +--- a/repos/system_upgrade/common/libraries/tests/test_gpg.py ++++ b/repos/system_upgrade/common/libraries/tests/test_gpg.py +@@ -12,8 +12,6 @@ from leapp.models import GpgKey, InstalledRPM, RPM + + + @pytest.mark.parametrize('target, product_type, distro, exp', [ +- ('8.6', 'beta', 'rhel', '../../files/distro/rhel/rpm-gpg/8beta'), +- ('8.8', 'htb', 'rhel', '../../files/distro/rhel/rpm-gpg/8'), + ('9.0', 'beta', 'rhel', '../../files/distro/rhel/rpm-gpg/9beta'), + ('9.2', 'ga', 'rhel', '../../files/distro/rhel/rpm-gpg/9'), + ('10.0', 'ga', 'rhel', '../../files/distro/rhel/rpm-gpg/10'), +@@ -30,14 +28,9 @@ def test_get_path_to_gpg_certs(monkeypatch, target, product_type, distro, exp): + assert p == exp + + +-def is_rhel7(): +- return int(distro.major_version()) < 8 +- +- + @pytest.mark.skipif(distro.id() not in ("rhel", "centos"), reason="Requires RHEL or CentOS for valid results.") + def test_gpg_show_keys(loaded_leapp_repository, monkeypatch): +- src = '7.9' if is_rhel7() else '8.6' +- current_actor = CurrentActorMocked(src_ver=src, release_id='rhel') ++ current_actor = CurrentActorMocked(src_ver='8.10', release_id='rhel') + monkeypatch.setattr(api, 'current_actor', current_actor) + + # python2 compatibility :/ +@@ -50,11 +43,8 @@ def test_gpg_show_keys(loaded_leapp_repository, monkeypatch): + # non-existing file + non_existent_path = os.path.join(dirpath, 'nonexistent') + res = gpg._gpg_show_keys(non_existent_path) +- if is_rhel7(): +- err_msg = "gpg: can't open `{}'".format(non_existent_path) +- else: +- err_msg = "gpg: can't open '{}': No such file or directory\n".format(non_existent_path) + assert not res['stdout'] ++ err_msg = "gpg: can't open '{}': No such file or directory\n".format(non_existent_path) + assert err_msg in res['stderr'] + assert res['exit_code'] == 2 + +@@ -67,13 +57,8 @@ def test_gpg_show_keys(loaded_leapp_repository, monkeypatch): + f.write('test') + + res = gpg._gpg_show_keys(no_key_path) +- if is_rhel7(): +- err_msg = ('gpg: no valid OpenPGP data found.\n' +- 'gpg: processing message failed: Unknown system error\n') +- else: +- err_msg = 'gpg: no valid OpenPGP data found.\n' + assert not res['stdout'] +- assert res['stderr'] == err_msg ++ assert res['stderr'] == 'gpg: no valid OpenPGP data found.\n' + assert res['exit_code'] == 2 + + fp = gpg._parse_fp_from_gpg(res) +@@ -89,24 +74,15 @@ def test_gpg_show_keys(loaded_leapp_repository, monkeypatch): + finally: + shutil.rmtree(dirpath) + +- if is_rhel7(): +- assert len(res['stdout']) == 4 +- assert res['stdout'][0] == ('pub:-:4096:1:199E2F91FD431D51:1256212795:::-:' +- 'Red Hat, Inc. (release key 2) :') +- assert res['stdout'][1] == 'fpr:::::::::567E347AD0044ADE55BA8A5F199E2F91FD431D51:' +- assert res['stdout'][2] == ('pub:-:4096:1:5054E4A45A6340B3:1646863006:::-:' +- 'Red Hat, Inc. 
(auxiliary key 3) :') +- assert res['stdout'][3] == 'fpr:::::::::7E4624258C406535D56D6F135054E4A45A6340B3:' +- else: +- assert len(res['stdout']) == 6 +- assert res['stdout'][0] == 'pub:-:4096:1:199E2F91FD431D51:1256212795:::-:::scSC::::::23::0:' +- assert res['stdout'][1] == 'fpr:::::::::567E347AD0044ADE55BA8A5F199E2F91FD431D51:' +- assert res['stdout'][2] == ('uid:-::::1256212795::DC1CAEC7997B3575101BB0FCAAC6191792660D8F::' +- 'Red Hat, Inc. (release key 2) ::::::::::0:') +- assert res['stdout'][3] == 'pub:-:4096:1:5054E4A45A6340B3:1646863006:::-:::scSC::::::23::0:' +- assert res['stdout'][4] == 'fpr:::::::::7E4624258C406535D56D6F135054E4A45A6340B3:' +- assert res['stdout'][5] == ('uid:-::::1646863006::DA7F68E3872D6E7BDCE05225E7EB5F3ACDD9699F::' +- 'Red Hat, Inc. (auxiliary key 3) ::::::::::0:') ++ assert len(res['stdout']) == 6 ++ assert res['stdout'][0] == 'pub:-:4096:1:199E2F91FD431D51:1256212795:::-:::scSC::::::23::0:' ++ assert res['stdout'][1] == 'fpr:::::::::567E347AD0044ADE55BA8A5F199E2F91FD431D51:' ++ assert res['stdout'][2] == ('uid:-::::1256212795::DC1CAEC7997B3575101BB0FCAAC6191792660D8F::' ++ 'Red Hat, Inc. (release key 2) ::::::::::0:') ++ assert res['stdout'][3] == 'pub:-:4096:1:5054E4A45A6340B3:1646863006:::-:::scSC::::::23::0:' ++ assert res['stdout'][4] == 'fpr:::::::::7E4624258C406535D56D6F135054E4A45A6340B3:' ++ assert res['stdout'][5] == ('uid:-::::1646863006::DA7F68E3872D6E7BDCE05225E7EB5F3ACDD9699F::' ++ 'Red Hat, Inc. (auxiliary key 3) ::::::::::0:') + + err = '{}/trustdb.gpg: trustdb created'.format(dirpath) + assert err in res['stderr'] +-- +2.52.0 + diff --git a/SOURCES/0093-lib-rpms-Update-tests-for-9-10.patch b/SOURCES/0093-lib-rpms-Update-tests-for-9-10.patch new file mode 100644 index 0000000..8b56a52 --- /dev/null +++ b/SOURCES/0093-lib-rpms-Update-tests-for-9-10.patch @@ -0,0 +1,158 @@ +From 9557e64f84af0097ce45b0187381d4d5a097679d Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 18:27:37 +0200 +Subject: [PATCH 093/111] lib/rpms: Update tests for 9->10 + +--- + repos/system_upgrade/common/libraries/rpms.py | 55 ++++++++------- + .../common/libraries/tests/test_rpms.py | 67 +++++++++++++++++-- + 2 files changed, 92 insertions(+), 30 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/rpms.py b/repos/system_upgrade/common/libraries/rpms.py +index 8f98c1a4..11a31882 100644 +--- a/repos/system_upgrade/common/libraries/rpms.py ++++ b/repos/system_upgrade/common/libraries/rpms.py +@@ -18,30 +18,39 @@ class LeappComponents: + TOOLS = 'tools' + + ++# NOTE: need to keep package for dropped upgrade paths so peseventsscanner can drop ++# related PES events + _LEAPP_PACKAGES_MAP = { +- LeappComponents.FRAMEWORK: {'7': {'pkgs': ['leapp', 'python2-leapp'], +- 'deps': ['leapp-deps']}, +- '8': {'pkgs': ['leapp', 'python3-leapp'], +- 'deps': ['leapp-deps']}, +- '9': {'pkgs': ['leapp', 'python3-leapp'], +- 'deps': ['leapp-deps']} +- }, +- LeappComponents.REPOSITORY: {'7': {'pkgs': ['leapp-upgrade-el7toel8'], +- 'deps': ['leapp-upgrade-el7toel8-deps']}, +- '8': {'pkgs': ['leapp-upgrade-el8toel9', 'leapp-upgrade-el8toel9-fapolicyd'], +- 'deps': ['leapp-upgrade-el8toel9-deps']}, +- '9': {'pkgs': ['leapp-upgrade-el9toel10', 'leapp-upgrade-el9toel10-fapolicyd'], +- 'deps': ['leapp-upgrade-el9toel10-deps']} +- }, +- LeappComponents.COCKPIT: {'7': {'pkgs': ['cockpit-leapp']}, +- '8': {'pkgs': ['cockpit-leapp']}, +- '9': {'pkgs': ['cockpit-leapp']}, +- }, +- LeappComponents.TOOLS: {'7': {'pkgs': ['snactor']}, +- '8': {'pkgs': ['snactor']}, +- 
'9': {'pkgs': ['snactor']} +- } +- } ++ LeappComponents.FRAMEWORK: { ++ '7': {'pkgs': ['leapp', 'python2-leapp'], 'deps': ['leapp-deps']}, ++ '8': {'pkgs': ['leapp', 'python3-leapp'], 'deps': ['leapp-deps']}, ++ '9': {'pkgs': ['leapp', 'python3-leapp'], 'deps': ['leapp-deps']}, ++ }, ++ LeappComponents.REPOSITORY: { ++ '7': { ++ 'pkgs': ['leapp-upgrade-el7toel8'], ++ 'deps': ['leapp-upgrade-el7toel8-deps'], ++ }, ++ '8': { ++ 'pkgs': ['leapp-upgrade-el8toel9', 'leapp-upgrade-el8toel9-fapolicyd'], ++ 'deps': ['leapp-upgrade-el8toel9-deps'], ++ }, ++ '9': { ++ 'pkgs': ['leapp-upgrade-el9toel10', 'leapp-upgrade-el9toel10-fapolicyd'], ++ 'deps': ['leapp-upgrade-el9toel10-deps'], ++ }, ++ }, ++ LeappComponents.COCKPIT: { ++ '7': {'pkgs': ['cockpit-leapp']}, ++ '8': {'pkgs': ['cockpit-leapp']}, ++ '9': {'pkgs': ['cockpit-leapp']}, ++ }, ++ LeappComponents.TOOLS: { ++ '7': {'pkgs': ['snactor']}, ++ '8': {'pkgs': ['snactor']}, ++ '9': {'pkgs': ['snactor']}, ++ }, ++} + + GET_LEAPP_PACKAGES_DEFAULT_COMPONENTS = frozenset((LeappComponents.FRAMEWORK, + LeappComponents.REPOSITORY, +diff --git a/repos/system_upgrade/common/libraries/tests/test_rpms.py b/repos/system_upgrade/common/libraries/tests/test_rpms.py +index 13f87651..c9d7f420 100644 +--- a/repos/system_upgrade/common/libraries/tests/test_rpms.py ++++ b/repos/system_upgrade/common/libraries/tests/test_rpms.py +@@ -36,13 +36,66 @@ def test_parse_config_modification(): + assert _parse_config_modification(data, "/etc/ssh/sshd_config") + + +-@pytest.mark.parametrize('major_version,component,result', [ +- (None, None, ['leapp', 'python3-leapp', 'leapp-upgrade-el8toel9', 'leapp-upgrade-el8toel9-fapolicyd', 'snactor']), +- ('7', None, ['leapp', 'python2-leapp', 'leapp-upgrade-el7toel8', 'snactor']), +- (['7', '8'], None, ['leapp', 'python2-leapp', 'leapp-upgrade-el7toel8', +- 'python3-leapp', 'leapp-upgrade-el8toel9', 'leapp-upgrade-el8toel9-fapolicyd', 'snactor']), +- ('8', 'framework', ['leapp', 'python3-leapp']), +- ]) ++@pytest.mark.parametrize( ++ "major_version,component,result", ++ [ ++ ( ++ None, ++ None, ++ [ ++ "leapp", ++ "python3-leapp", ++ "leapp-upgrade-el8toel9", ++ "leapp-upgrade-el8toel9-fapolicyd", ++ "snactor", ++ ], ++ ), ++ ("7", None, ["leapp", "python2-leapp", "leapp-upgrade-el7toel8", "snactor"]), ++ ( ++ "8", ++ None, ++ [ ++ "leapp", ++ "python3-leapp", ++ "leapp-upgrade-el8toel9", ++ "leapp-upgrade-el8toel9-fapolicyd", ++ "snactor", ++ ], ++ ), ++ ( ++ ["7", "8"], ++ None, ++ [ ++ "leapp", ++ "python2-leapp", ++ "leapp-upgrade-el7toel8", ++ "python3-leapp", ++ "leapp-upgrade-el8toel9", ++ "leapp-upgrade-el8toel9-fapolicyd", ++ "snactor", ++ ], ++ ), ++ ( ++ ["8", "9"], ++ None, ++ [ ++ "leapp", ++ "python3-leapp", ++ "leapp-upgrade-el8toel9", ++ "leapp-upgrade-el8toel9-fapolicyd", ++ "leapp-upgrade-el9toel10", ++ "leapp-upgrade-el9toel10-fapolicyd", ++ "snactor", ++ ], ++ ), ++ ("8", "framework", ["leapp", "python3-leapp"]), ++ ( ++ "9", ++ "repository", ++ ["leapp-upgrade-el9toel10", "leapp-upgrade-el9toel10-fapolicyd"], ++ ), ++ ], ++) + def test_get_leapp_packages(major_version, component, result, monkeypatch): + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='8.9', dst_ver='9.3')) + +-- +2.52.0 + diff --git a/SOURCES/0094-lib-module-Remove-7-8-releasever-workaround.patch b/SOURCES/0094-lib-module-Remove-7-8-releasever-workaround.patch new file mode 100644 index 0000000..c58e819 --- /dev/null +++ b/SOURCES/0094-lib-module-Remove-7-8-releasever-workaround.patch @@ -0,0 +1,31 @@ +From 
5547c926b0c1bf5c2c8d943a178b878a8df50120 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:21:13 +0200 +Subject: [PATCH 094/111] lib/module: Remove 7->8 releasever workaround + +--- + repos/system_upgrade/common/libraries/module.py | 8 -------- + 1 file changed, 8 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/module.py b/repos/system_upgrade/common/libraries/module.py +index db725e71..ba7ecba9 100644 +--- a/repos/system_upgrade/common/libraries/module.py ++++ b/repos/system_upgrade/common/libraries/module.py +@@ -26,14 +26,6 @@ def _create_or_get_dnf_base(base=None): + # preload releasever from what we know, this will be our fallback + conf.substitutions['releasever'] = get_source_major_version() + +- # dnf on EL7 doesn't load vars from /etc/yum, so we need to help it a bit +- if get_source_major_version() == '7': +- try: +- with open('/etc/yum/vars/releasever') as releasever_file: +- conf.substitutions['releasever'] = releasever_file.read().strip() +- except IOError: +- pass +- + # load all substitutions from etc + conf.substitutions.update_from_etc('/') + +-- +2.52.0 + diff --git a/SOURCES/0095-lib-dnfplugin-Remove-RHEL-7-bind-mount-code-path.patch b/SOURCES/0095-lib-dnfplugin-Remove-RHEL-7-bind-mount-code-path.patch new file mode 100644 index 0000000..dbe23bd --- /dev/null +++ b/SOURCES/0095-lib-dnfplugin-Remove-RHEL-7-bind-mount-code-path.patch @@ -0,0 +1,41 @@ +From 1ec6ea8f8081c6895ed42696df9de51343e6c8ba Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:28:06 +0200 +Subject: [PATCH 095/111] lib/dnfplugin: Remove RHEL 7 bind mount code path + +--- + repos/system_upgrade/common/libraries/dnfplugin.py | 11 +++-------- + 1 file changed, 3 insertions(+), 8 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/dnfplugin.py b/repos/system_upgrade/common/libraries/dnfplugin.py +index 1af52dc5..7e1fd497 100644 +--- a/repos/system_upgrade/common/libraries/dnfplugin.py ++++ b/repos/system_upgrade/common/libraries/dnfplugin.py +@@ -19,7 +19,6 @@ _DEDICATED_URL = 'https://access.redhat.com/solutions/7011704' + + class _DnfPluginPathStr(str): + _PATHS = { +- "8": os.path.join('/lib/python3.6/site-packages/dnf-plugins', DNF_PLUGIN_NAME), + "9": os.path.join('/lib/python3.9/site-packages/dnf-plugins', DNF_PLUGIN_NAME), + "10": os.path.join('/lib/python3.12/site-packages/dnf-plugins', DNF_PLUGIN_NAME), + } +@@ -405,13 +404,9 @@ def perform_transaction_install(target_userspace_info, storage_info, used_repos, + '/run/udev:/installroot/run/udev', + ] + +- if get_target_major_version() == '8': +- bind_mounts.append('/sys:/installroot/sys') +- else: +- # the target major version is RHEL 9+ +- # we are bindmounting host's "/sys" to the intermediate "/hostsys" +- # in the upgrade initramdisk to avoid cgroups tree layout clash +- bind_mounts.append('/hostsys:/installroot/sys') ++ # we are bindmounting host's "/sys" to the intermediate "/hostsys" ++ # in the upgrade initramdisk to avoid cgroups tree layout clash ++ bind_mounts.append('/hostsys:/installroot/sys') + + already_mounted = {entry.split(':')[0] for entry in bind_mounts} + for entry in storage_info.fstab: +-- +2.52.0 + diff --git a/SOURCES/0096-lib-mounting-Remove-RHEL-7-nspawn-options.patch b/SOURCES/0096-lib-mounting-Remove-RHEL-7-nspawn-options.patch new file mode 100644 index 0000000..34af4a9 --- /dev/null +++ b/SOURCES/0096-lib-mounting-Remove-RHEL-7-nspawn-options.patch @@ -0,0 +1,45 @@ +From e2be1ed71d8985e836a3a0df2fc2d1a9b47c1b99 Mon Sep 17 00:00:00 2001 +From: 
Matej Matuska +Date: Thu, 21 Aug 2025 19:47:47 +0200 +Subject: [PATCH 096/111] lib/mounting: Remove RHEL 7 nspawn options + +--- + repos/system_upgrade/common/libraries/mounting.py | 15 ++++++++------- + 1 file changed, 8 insertions(+), 7 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/mounting.py b/repos/system_upgrade/common/libraries/mounting.py +index 279d31dc..ea59164c 100644 +--- a/repos/system_upgrade/common/libraries/mounting.py ++++ b/repos/system_upgrade/common/libraries/mounting.py +@@ -5,7 +5,7 @@ import shutil + from collections import namedtuple + + from leapp.libraries.common.config import get_all_envs +-from leapp.libraries.common.config.version import get_source_major_version, matches_source_version ++from leapp.libraries.common.config.version import matches_source_version + from leapp.libraries.stdlib import api, CalledProcessError, run + + # Using ALWAYS_BIND will crash the upgrade process if the file does not exist. +@@ -83,12 +83,13 @@ class IsolationType: + """ Transform the command to be executed with systemd-nspawn """ + binds = ['--bind={}'.format(bind) for bind in self.binds] + setenvs = ['--setenv={}={}'.format(env.name, env.value) for env in self.env_vars] +- final_cmd = ['systemd-nspawn', '--register=no', '--quiet'] +- if get_source_major_version() != '7': +- # TODO: check whether we could use the --keep unit on el7 too. +- # in such a case, just add line into the previous solution.. +- # TODO: the same about --capability=all +- final_cmd += ['--keep-unit', '--capability=all'] ++ final_cmd = [ ++ 'systemd-nspawn', ++ '--register=no', ++ '--quiet', ++ '--keep-unit', ++ '--capability=all', ++ ] + if matches_source_version('>= 9.0'): + # Disable pseudo-TTY in container + final_cmd += ['--pipe'] +-- +2.52.0 + diff --git a/SOURCES/0097-lib-version-Remove-RHEL-7-from-supported-version.patch b/SOURCES/0097-lib-version-Remove-RHEL-7-from-supported-version.patch new file mode 100644 index 0000000..e80750e --- /dev/null +++ b/SOURCES/0097-lib-version-Remove-RHEL-7-from-supported-version.patch @@ -0,0 +1,44 @@ +From 82fd5e9844ef0d7910959c601a9e5c25252e53cf Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:48:45 +0200 +Subject: [PATCH 097/111] lib/version: Remove RHEL 7 from supported version + +--- + .../common/actors/checkosrelease/tests/test_checkosrelease.py | 4 ++-- + repos/system_upgrade/common/libraries/config/version.py | 4 ---- + 2 files changed, 2 insertions(+), 6 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/checkosrelease/tests/test_checkosrelease.py b/repos/system_upgrade/common/actors/checkosrelease/tests/test_checkosrelease.py +index aa0fd636..1ca8a1d7 100644 +--- a/repos/system_upgrade/common/actors/checkosrelease/tests/test_checkosrelease.py ++++ b/repos/system_upgrade/common/actors/checkosrelease/tests/test_checkosrelease.py +@@ -27,8 +27,8 @@ def test_no_skip_check(monkeypatch): + + def test_not_supported_release(monkeypatch): + monkeypatch.setattr(version, "is_supported_version", lambda: False) +- monkeypatch.setattr(version, "get_source_major_version", lambda: '7') +- monkeypatch.setattr(version, "current_version", lambda: ('bad', '7')) ++ monkeypatch.setattr(version, "get_source_major_version", lambda: '8') ++ monkeypatch.setattr(version, "current_version", lambda: ('bad', '8')) + monkeypatch.setattr(reporting, "create_report", create_report_mocked()) + + checkosrelease.check_os_version() +diff --git a/repos/system_upgrade/common/libraries/config/version.py 
b/repos/system_upgrade/common/libraries/config/version.py +index 84cbd753..c9bc3fb2 100644 +--- a/repos/system_upgrade/common/libraries/config/version.py ++++ b/repos/system_upgrade/common/libraries/config/version.py +@@ -14,11 +14,7 @@ OP_MAP = { + '<=': operator.le + } + +-# TODO(pstodulk): drop 9.4 & 9.5 before May 2025 release +-# These will not be supported fo IPU 9 -> 10 + _SUPPORTED_VERSIONS = { +- # Note: 'rhel-alt' is detected when on 'rhel' with kernel 4.x +- '7': {'rhel': ['7.9'], 'rhel-alt': [], 'rhel-saphana': ['7.9']}, + '8': {'rhel': ['8.10'], 'rhel-saphana': ['8.10']}, + '9': {'rhel': ['9.6'], 'rhel-saphana': ['9.6']}, + } +-- +2.52.0 + diff --git a/SOURCES/0098-checkfips-Drop-RHEL-7-inhibitor-and-update-tests.patch b/SOURCES/0098-checkfips-Drop-RHEL-7-inhibitor-and-update-tests.patch new file mode 100644 index 0000000..961660d --- /dev/null +++ b/SOURCES/0098-checkfips-Drop-RHEL-7-inhibitor-and-update-tests.patch @@ -0,0 +1,137 @@ +From d327f568a9ecb5de67e219c9174f547dadfbc8bd Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 18:00:53 +0200 +Subject: [PATCH 098/111] checkfips: Drop RHEL 7 inhibitor and update tests + +The tests never covered the part where UpgradeInitramfsTasks have to get +produced by the actor on 8->9. +--- + .../common/actors/checkfips/actor.py | 46 ++++++++----------- + .../actors/checkfips/tests/test_checkfips.py | 37 +++++++++++---- + 2 files changed, 45 insertions(+), 38 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/checkfips/actor.py b/repos/system_upgrade/common/actors/checkfips/actor.py +index 73408655..8c379bfd 100644 +--- a/repos/system_upgrade/common/actors/checkfips/actor.py ++++ b/repos/system_upgrade/common/actors/checkfips/actor.py +@@ -1,4 +1,3 @@ +-from leapp import reporting + from leapp.actors import Actor + from leapp.exceptions import StopActorExecutionError + from leapp.libraries.common.config import version +@@ -20,39 +19,30 @@ class CheckFips(Actor): + fips_info = next(self.consume(FIPSInfo), None) + + if not fips_info: +- raise StopActorExecutionError('Cannot check FIPS state due to not receiving necessary FIPSInfo message', +- details={'Problem': 'Did not receive a message with information about FIPS ' +- 'usage'}) +- +- if version.get_target_major_version() == '8': +- if fips_info.is_enabled: +- title = 'Automated upgrades from RHEL 7 to RHEL 8 in FIPS mode are not supported' +- summary = ('Leapp has detected that FIPS is enabled on this system. ' +- 'Automated in-place upgrade of RHEL 7 systems in FIPS mode is currently unsupported ' +- 'and manual intervention is required.') +- +- fips_7to8_steps_docs_url = 'https://red.ht/planning-upgrade-to-rhel8' +- +- reporting.create_report([ +- reporting.Title(title), +- reporting.Summary(summary), +- reporting.Severity(reporting.Severity.HIGH), +- reporting.Groups([reporting.Groups.SECURITY, reporting.Groups.INHIBITOR]), +- reporting.ExternalLink(url=fips_7to8_steps_docs_url, +- title='Planning an upgrade from RHEL 7 to RHEL 8') +- ]) +- elif version.get_target_major_version() == '9': +- # FIXME(mhecko): We include these files manually as they are not included automatically when the fips +- # module is used due to a bug in dracut. This code should be removed, once the dracut bug is resolved. 
+- # See https://bugzilla.redhat.com/show_bug.cgi?id=2176560 ++ raise StopActorExecutionError( ++ 'Cannot check FIPS state due to not receiving necessary FIPSInfo message', ++ details={ ++ 'Problem': 'Did not receive a message with information about FIPS usage' ++ }, ++ ) ++ ++ if version.get_target_major_version() == '9': ++ # FIXME(mhecko): We include these files manually as they are not ++ # included automatically when the fips module is used due to a bug ++ # in dracut. This code should be removed, once the dracut bug is ++ # resolved. See https://bugzilla.redhat.com/show_bug.cgi?id=2176560 + if fips_info.is_enabled: + fips_required_initramfs_files = [ + '/etc/crypto-policies/back-ends/opensslcnf.config', + '/etc/pki/tls/openssl.cnf', + '/usr/lib64/ossl-modules/fips.so', + ] +- self.produce(UpgradeInitramfsTasks(include_files=fips_required_initramfs_files, +- include_dracut_modules=[DracutModule(name='fips')])) ++ self.produce( ++ UpgradeInitramfsTasks( ++ include_files=fips_required_initramfs_files, ++ include_dracut_modules=[DracutModule(name='fips')], ++ ) ++ ) + elif version.get_target_major_version() == '10': + # TODO(mmatuska): What to do with FIPS on 9to10? OAMG-11431 + pass +diff --git a/repos/system_upgrade/common/actors/checkfips/tests/test_checkfips.py b/repos/system_upgrade/common/actors/checkfips/tests/test_checkfips.py +index 5498bf23..8057bc0d 100644 +--- a/repos/system_upgrade/common/actors/checkfips/tests/test_checkfips.py ++++ b/repos/system_upgrade/common/actors/checkfips/tests/test_checkfips.py +@@ -1,23 +1,40 @@ + import pytest + + from leapp.libraries.common.config import version +-from leapp.models import FIPSInfo, Report ++from leapp.models import DracutModule, FIPSInfo, Report, UpgradeInitramfsTasks + from leapp.utils.report import is_inhibitor + + +-@pytest.mark.parametrize(('fips_info', 'target_major_version', 'should_inhibit'), [ +- (FIPSInfo(is_enabled=True), '8', True), +- (FIPSInfo(is_enabled=True), '9', False), +- (FIPSInfo(is_enabled=False), '8', False), ++@pytest.mark.parametrize(('fips_info', 'target_major_version', 'should_produce'), [ + (FIPSInfo(is_enabled=False), '9', False), ++ (FIPSInfo(is_enabled=True), '9', True), ++ (FIPSInfo(is_enabled=False), '10', False), ++ (FIPSInfo(is_enabled=True), '10', False), + ]) +-def test_check_fips(monkeypatch, current_actor_context, fips_info, target_major_version, should_inhibit): ++def test_check_fips(monkeypatch, current_actor_context, fips_info, target_major_version, should_produce): + monkeypatch.setattr(version, 'get_target_major_version', lambda: target_major_version) ++ + current_actor_context.feed(fips_info) + current_actor_context.run() +- if should_inhibit: +- output = current_actor_context.consume(Report) ++ ++ # no inhibitor in any case ++ assert not any(is_inhibitor(msg.report) for msg in current_actor_context.consume(Report)) ++ ++ output = current_actor_context.consume(UpgradeInitramfsTasks) ++ if should_produce: + assert len(output) == 1 +- assert is_inhibitor(output[0].report) ++ ++ expected_initramfs_files = [ ++ '/etc/crypto-policies/back-ends/opensslcnf.config', ++ '/etc/pki/tls/openssl.cnf', ++ '/usr/lib64/ossl-modules/fips.so', ++ ] ++ ++ assert output[0].include_files == expected_initramfs_files ++ ++ assert len(output[0].include_dracut_modules) == 1 ++ mod = output[0].include_dracut_modules[0] ++ assert isinstance(mod, DracutModule) ++ assert mod.name == "fips" + else: +- assert not any(is_inhibitor(msg.report) for msg in current_actor_context.consume(Report)) ++ assert not output +-- 
+2.52.0 + diff --git a/SOURCES/0099-scangrubconfig-Comment-out-RHEL-7-config-error-detec.patch b/SOURCES/0099-scangrubconfig-Comment-out-RHEL-7-config-error-detec.patch new file mode 100644 index 0000000..07e1113 --- /dev/null +++ b/SOURCES/0099-scangrubconfig-Comment-out-RHEL-7-config-error-detec.patch @@ -0,0 +1,86 @@ +From 418773c5ea5b6c47468d33f273ef0777fbbd0cef Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:52:40 +0200 +Subject: [PATCH 099/111] scangrubconfig: Comment out RHEL 7 config error + detection + +The comment left there by ivasilev suggest that this could possibly be +used on newer RHEL versions too. Let's leave commented out until it's +confirmed it's not needed. +--- + .../actors/scangrubconfig/libraries/scanner.py | 16 ++++++++-------- + .../scangrubconfig/tests/test_scangrubconfig.py | 10 ++++------ + 2 files changed, 12 insertions(+), 14 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/scangrubconfig/libraries/scanner.py b/repos/system_upgrade/common/actors/scangrubconfig/libraries/scanner.py +index 86bba22b..ac4591ed 100644 +--- a/repos/system_upgrade/common/actors/scangrubconfig/libraries/scanner.py ++++ b/repos/system_upgrade/common/actors/scangrubconfig/libraries/scanner.py +@@ -1,7 +1,7 @@ + import os + import re + +-from leapp.libraries.common.config import architecture, version ++from leapp.libraries.common.config import architecture + from leapp.models import GrubConfigError + + +@@ -57,13 +57,13 @@ def scan(): + config = '/etc/default/grub' + # Check for GRUB_CMDLINE_LINUX syntax errors + # XXX FIXME(ivasilev) Can we make this check a common one? For now let's limit it to rhel7->rhel8 only +- if version.get_source_major_version() == '7': +- if not architecture.matches_architecture(architecture.ARCH_S390X): +- # For now, skip just s390x, that's only one that is failing now +- # because ZIPL is used there +- if detect_config_error(config): +- errors.append(GrubConfigError(error_detected=True, files=[config], +- error_type=GrubConfigError.ERROR_GRUB_CMDLINE_LINUX_SYNTAX)) ++ # if version.get_source_major_version() == '7': ++ # if not architecture.matches_architecture(architecture.ARCH_S390X): ++ # # For now, skip just s390x, that's only one that is failing now ++ # # because ZIPL is used there ++ # if detect_config_error(config): ++ # errors.append(GrubConfigError(error_detected=True, files=[config], ++ # error_type=GrubConfigError.ERROR_GRUB_CMDLINE_LINUX_SYNTAX)) + + # Check for missing newline errors + if is_grub_config_missing_final_newline(config): +diff --git a/repos/system_upgrade/common/actors/scangrubconfig/tests/test_scangrubconfig.py b/repos/system_upgrade/common/actors/scangrubconfig/tests/test_scangrubconfig.py +index 926f0f27..be1b2cc6 100644 +--- a/repos/system_upgrade/common/actors/scangrubconfig/tests/test_scangrubconfig.py ++++ b/repos/system_upgrade/common/actors/scangrubconfig/tests/test_scangrubconfig.py +@@ -4,7 +4,7 @@ import pytest + + from leapp.libraries.actor import scanner + from leapp.libraries.common.config import architecture, version +-from leapp.models import GrubConfigError, Report ++from leapp.models import GrubConfigError + + CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + +@@ -24,18 +24,16 @@ def test_wrong_config_error_detection(): + def test_all_errors_produced(current_actor_context, monkeypatch): + # Tell the actor we are not running on s390x + monkeypatch.setattr(architecture, 'matches_architecture', lambda _: False) +- monkeypatch.setattr(version, 'get_source_version', lambda: 
'7.9') + # Set that all checks failed + monkeypatch.setattr(scanner, 'is_grub_config_missing_final_newline', lambda _: True) + monkeypatch.setattr(scanner, 'is_grubenv_corrupted', lambda _: True) + monkeypatch.setattr(scanner, 'detect_config_error', lambda _: True) + # Run the actor + current_actor_context.run() +- # Check that exactly 3 messages of different types are produced ++ # Check that exactly 2 messages of different types are produced + errors = current_actor_context.consume(GrubConfigError) +- assert len(errors) == 3 +- for err_type in [GrubConfigError.ERROR_MISSING_NEWLINE, GrubConfigError.ERROR_CORRUPTED_GRUBENV, +- GrubConfigError.ERROR_GRUB_CMDLINE_LINUX_SYNTAX]: ++ assert len(errors) == 2 ++ for err_type in [GrubConfigError.ERROR_MISSING_NEWLINE, GrubConfigError.ERROR_CORRUPTED_GRUBENV]: + distinct_error = next((e for e in errors if e.error_type == err_type), None) + assert distinct_error + assert distinct_error.files +-- +2.52.0 + diff --git a/SOURCES/0100-checkipaserver-Remove-RHEL-7-article-link.patch b/SOURCES/0100-checkipaserver-Remove-RHEL-7-article-link.patch new file mode 100644 index 0000000..5e7a4fe --- /dev/null +++ b/SOURCES/0100-checkipaserver-Remove-RHEL-7-article-link.patch @@ -0,0 +1,30 @@ +From 1ded7bc6b5107852e44db078ad0d27a75d1acb20 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:54:22 +0200 +Subject: [PATCH 100/111] checkipaserver: Remove RHEL 7 article link + +--- + .../common/actors/checkipaserver/libraries/checkipaserver.py | 3 --- + 1 file changed, 3 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/checkipaserver/libraries/checkipaserver.py b/repos/system_upgrade/common/actors/checkipaserver/libraries/checkipaserver.py +index 6a1c887c..60d4db86 100644 +--- a/repos/system_upgrade/common/actors/checkipaserver/libraries/checkipaserver.py ++++ b/repos/system_upgrade/common/actors/checkipaserver/libraries/checkipaserver.py +@@ -1,13 +1,10 @@ + from leapp import reporting + from leapp.libraries.common.config.version import get_source_major_version + +-MIGRATION_GUIDE_7 = "https://red.ht/IdM-upgrading-RHEL-7-to-RHEL-8" +- + # TBD: update the doc url when migration guide 8->9 becomes available + MIGRATION_GUIDE_8 = "https://red.ht/IdM-upgrading-RHEL-8-to-RHEL-9" + MIGRATION_GUIDE_9 = "https://red.ht/IdM-upgrading-RHEL-9-to-RHEL-10" + MIGRATION_GUIDES = { +- '7': MIGRATION_GUIDE_7, + '8': MIGRATION_GUIDE_8, + '9': MIGRATION_GUIDE_9 + } +-- +2.52.0 + diff --git a/SOURCES/0101-checkluks-Remove-RHEL-7-inhibitor-and-related-code.patch b/SOURCES/0101-checkluks-Remove-RHEL-7-inhibitor-and-related-code.patch new file mode 100644 index 0000000..e4b4730 --- /dev/null +++ b/SOURCES/0101-checkluks-Remove-RHEL-7-inhibitor-and-related-code.patch @@ -0,0 +1,126 @@ +From 082863d904b4e0c3cc5f160a28f41a02758fab63 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:06:16 +0200 +Subject: [PATCH 101/111] checkluks: Remove RHEL 7 inhibitor and related code + +--- + .../common/actors/checkluks/actor.py | 3 +- + .../actors/checkluks/libraries/checkluks.py | 21 ------------ + .../actors/checkluks/tests/test_checkluks.py | 32 ------------------- + 3 files changed, 1 insertion(+), 55 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/checkluks/actor.py b/repos/system_upgrade/common/actors/checkluks/actor.py +index 607fd040..2ea16985 100644 +--- a/repos/system_upgrade/common/actors/checkluks/actor.py ++++ b/repos/system_upgrade/common/actors/checkluks/actor.py +@@ -9,9 +9,8 @@ class CheckLuks(Actor): + """ + Check 
if any encrypted partitions are in use and whether they are supported for the upgrade. + +- Upgrading EL7 system with encrypted partition is not supported (but ceph OSDs). + For EL8+ it's ok if the discovered used encrypted storage has LUKS2 format +- and it's bounded to clevis-tpm2 token (so it can be automatically unlocked ++ and it's bound to clevis-tpm2 token (so it can be automatically unlocked + during the process). + """ + +diff --git a/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py b/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py +index 84e8e61f..4626cf63 100644 +--- a/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py ++++ b/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py +@@ -6,7 +6,6 @@ from leapp.models import ( + CopyFile, + DracutModule, + LuksDumps, +- StorageInfo, + TargetUserSpaceUpgradeTasks, + UpgradeInitramfsTasks + ) +@@ -35,21 +34,6 @@ def _get_ceph_volumes(): + return ceph_info.encrypted_volumes[:] if ceph_info else [] + + +-def apply_obsoleted_check_ipu_7_8(): +- ceph_vol = _get_ceph_volumes() +- for storage_info in api.consume(StorageInfo): +- for blk in storage_info.lsblk: +- if blk.tp == 'crypt' and blk.name not in ceph_vol: +- create_report([ +- reporting.Title('LUKS encrypted partition detected'), +- reporting.Summary('Upgrading system with encrypted partitions is not supported'), +- reporting.Severity(reporting.Severity.HIGH), +- reporting.Groups([reporting.Groups.BOOT, reporting.Groups.ENCRYPTION]), +- reporting.Groups([reporting.Groups.INHIBITOR]), +- ]) +- break +- +- + def report_inhibitor(luks1_partitions, no_tpm2_partitions): + source_major_version = get_source_major_version() + clevis_doc_url = CLEVIS_DOC_URL_FMT.format(source_major_version) +@@ -119,11 +103,6 @@ def report_inhibitor(luks1_partitions, no_tpm2_partitions): + + + def check_invalid_luks_devices(): +- if get_source_major_version() == '7': +- # NOTE: keeping unchanged behaviour for IPU 7 -> 8 +- apply_obsoleted_check_ipu_7_8() +- return +- + luks_dumps = next(api.consume(LuksDumps), None) + if not luks_dumps: + api.current_logger().debug('No LUKS volumes detected. Skipping.') +diff --git a/repos/system_upgrade/common/actors/checkluks/tests/test_checkluks.py b/repos/system_upgrade/common/actors/checkluks/tests/test_checkluks.py +index d559b54c..13b8bc55 100644 +--- a/repos/system_upgrade/common/actors/checkluks/tests/test_checkluks.py ++++ b/repos/system_upgrade/common/actors/checkluks/tests/test_checkluks.py +@@ -1,11 +1,3 @@ +-""" +-Unit tests for inhibitwhenluks actor +- +-Skip isort as it's kind of broken when mixing grid import and one line imports +- +-isort:skip_file +-""" +- + from leapp.libraries.common.config import version + from leapp.models import ( + CephInfo, +@@ -13,7 +5,6 @@ from leapp.models import ( + LuksDump, + LuksDumps, + LuksToken, +- StorageInfo, + TargetUserSpaceUpgradeTasks, + UpgradeInitramfsTasks + ) +@@ -148,26 +139,3 @@ LSBLK_ENTRY = LsblkEntry( + parent_name="", + parent_path="" + ) +- +- +-def test_inhibitor_on_el7(monkeypatch, current_actor_context): +- # NOTE(pstodulk): consider it good enough as el7 stuff is going to be removed +- # soon. 
+- monkeypatch.setattr(version, 'get_source_major_version', lambda: '7') +- +- luks_dump = LuksDump( +- version=2, +- uuid='83050bd9-61c6-4ff0-846f-bfd3ac9bfc67', +- device_path='/dev/sda', +- device_name='sda', +- tokens=[LuksToken(token_id=0, keyslot=1, token_type='clevis-tpm2')]) +- current_actor_context.feed(LuksDumps(dumps=[luks_dump])) +- current_actor_context.feed(CephInfo(encrypted_volumes=[])) +- +- current_actor_context.feed(StorageInfo(lsblk=[LSBLK_ENTRY])) +- current_actor_context.run() +- assert current_actor_context.consume(Report) +- +- report_fields = current_actor_context.consume(Report)[0].report +- assert is_inhibitor(report_fields) +- assert report_fields['title'] == 'LUKS encrypted partition detected' +-- +2.52.0 + diff --git a/SOURCES/0102-scankernel-Remove-RHEL-7-kernel-names.patch b/SOURCES/0102-scankernel-Remove-RHEL-7-kernel-names.patch new file mode 100644 index 0000000..b799c39 --- /dev/null +++ b/SOURCES/0102-scankernel-Remove-RHEL-7-kernel-names.patch @@ -0,0 +1,37 @@ +From ece256fcc5f5e9e952a2a17377c1ea386135ae5b Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 20:00:01 +0200 +Subject: [PATCH 102/111] scankernel: Remove RHEL 7 kernel names + +--- + .../libraries/scankernel.py | 14 ++++---------- + 1 file changed, 4 insertions(+), 10 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py +index 35683cca..76f13caf 100644 +--- a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py ++++ b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py +@@ -20,16 +20,10 @@ def get_kernel_pkg_name(rhel_major_version, kernel_type): + :returns: Kernel package name + :rtype: str + """ +- if rhel_major_version == '7': +- kernel_pkg_name_table = { +- kernel_lib.KernelType.ORDINARY: 'kernel', +- kernel_lib.KernelType.REALTIME: 'kernel-rt' +- } +- else: +- kernel_pkg_name_table = { +- kernel_lib.KernelType.ORDINARY: 'kernel-core', +- kernel_lib.KernelType.REALTIME: 'kernel-rt-core' +- } ++ kernel_pkg_name_table = { ++ kernel_lib.KernelType.ORDINARY: 'kernel-core', ++ kernel_lib.KernelType.REALTIME: 'kernel-rt-core' ++ } + return kernel_pkg_name_table[kernel_type] + + +-- +2.52.0 + diff --git a/SOURCES/0103-repomap-lib-Drop-RHEL-7-default-PESID.patch b/SOURCES/0103-repomap-lib-Drop-RHEL-7-default-PESID.patch new file mode 100644 index 0000000..a036f47 --- /dev/null +++ b/SOURCES/0103-repomap-lib-Drop-RHEL-7-default-PESID.patch @@ -0,0 +1,175 @@ +From a83309de2b633ccb35a16b2c0ccca6d1f317c6ae Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:59:07 +0200 +Subject: [PATCH 103/111] repomap lib: Drop RHEL 7 default PESID + +--- + .../libraries/peseventsscanner_repomap.py | 1 - + .../tests/test_pes_event_scanner.py | 44 +++++++++++-------- + .../libraries/setuptargetrepos_repomap.py | 1 - + .../tests/test_repomapping.py | 20 ++++----- + 4 files changed, 35 insertions(+), 31 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py +index abd35e0b..b1e46903 100644 +--- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py ++++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py +@@ -3,7 
+3,6 @@ from leapp.libraries.common.config.version import get_source_major_version, get_ + from leapp.libraries.stdlib import api + + DEFAULT_PESID = { +- '7': 'rhel7-base', + '8': 'rhel8-BaseOS', + '9': 'rhel9-BaseOS', + '10': 'rhel10-BaseOS' +diff --git a/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py +index f67f3840..c8c14528 100644 +--- a/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py ++++ b/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py +@@ -214,22 +214,22 @@ def test_actor_performs(monkeypatch): + + events = [ + Event(1, Action.SPLIT, +- {Pkg('split-in', 'rhel7-base')}, +- {Pkg('split-out0', 'rhel8-BaseOS'), Pkg('split-out1', 'rhel8-BaseOS')}, +- (7, 9), (8, 0), []), ++ {Pkg('split-in', 'rhel8-BaseOS')}, ++ {Pkg('split-out0', 'rhel9-Baseos'), Pkg('split-out1', 'rhel9-Baseos')}, ++ (8, 10), (9, 0), []), + Event(2, Action.MERGED, +- {Pkg('split-out0', 'rhel8-BaseOS'), Pkg('split-out1', 'rhel8-BaseOS')}, +- {Pkg('merged-out', 'rhel8-BaseOS')}, +- (8, 0), (8, 1), []), ++ {Pkg('split-out0', 'rhel9-Baseos'), Pkg('split-out1', 'rhel9-Baseos')}, ++ {Pkg('merged-out', 'rhel9-Baseos')}, ++ (9, 0), (9, 1), []), + Event(3, Action.MOVED, +- {Pkg('moved-in', 'rhel7-base')}, {Pkg('moved-out', 'rhel8-BaseOS')}, +- (7, 9), (8, 0), []), ++ {Pkg('moved-in', 'rhel8-BaseOS')}, {Pkg('moved-out', 'rhel9-Baseos')}, ++ (8, 10), (9, 0), []), + Event(4, Action.REMOVED, +- {Pkg('removed', 'rhel7-base')}, set(), +- (8, 0), (8, 1), []), ++ {Pkg('removed', 'rhel8-BaseOS')}, set(), ++ (9, 0), (9, 1), []), + Event(5, Action.DEPRECATED, +- {Pkg('irrelevant', 'rhel7-base')}, set(), +- (8, 0), (8, 1), []), ++ {Pkg('irrelevant', 'rhel8-BaseOS')}, set(), ++ (9, 0), (9, 1), []), + ] + + monkeypatch.setattr(pes_events_scanner, 'get_pes_events', lambda data_folder, json_filename: events) +@@ -242,23 +242,29 @@ def test_actor_performs(monkeypatch): + + repositories_mapping = RepositoriesMapping( + mapping=[ +- RepoMapEntry(source='rhel7-base', target=['rhel8-BaseOS'], ), ++ RepoMapEntry(source='rhel8-BaseOS', target=['rhel9-Baseos'], ), + ], + repositories=[ +- PESIDRepositoryEntry(pesid='rhel7-base', major_version='7', repoid='rhel7-repo', arch='x86_64', +- repo_type='rpm', channel='ga', rhui='', distro='rhel'), + PESIDRepositoryEntry(pesid='rhel8-BaseOS', major_version='8', repoid='rhel8-repo', arch='x86_64', ++ repo_type='rpm', channel='ga', rhui='', distro='rhel'), ++ PESIDRepositoryEntry(pesid='rhel9-Baseos', major_version='9', repoid='rhel9-repo', arch='x86_64', + repo_type='rpm', channel='ga', rhui='', distro='rhel')] + ) + + enabled_modules = EnabledModules(modules=[]) + repo_facts = RepositoriesFacts( +- repositories=[RepositoryFile(file='', data=[RepositoryData(repoid='rhel7-repo', name='RHEL7 repo')])] ++ repositories=[RepositoryFile(file='', data=[RepositoryData(repoid='rhel8-repo', name='RHEL8 repo')])] + ) + +- monkeypatch.setattr(api, 'current_actor', +- CurrentActorMocked(msgs=[installed_pkgs, repositories_mapping, enabled_modules, repo_facts], +- src_ver='7.9', dst_ver='8.1')) ++ monkeypatch.setattr( ++ api, ++ "current_actor", ++ CurrentActorMocked( ++ msgs=[installed_pkgs, repositories_mapping, enabled_modules, repo_facts], ++ src_ver="8.10", ++ dst_ver="9.1", ++ ), ++ ) + + produced_messages = produce_mocked() + created_report = create_report_mocked() +diff --git 
a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py +index 3286609d..763eddc6 100644 +--- a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py ++++ b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py +@@ -3,7 +3,6 @@ from leapp.libraries.common.config.version import get_source_major_version, get_ + from leapp.libraries.stdlib import api + + DEFAULT_PESID = { +- '7': 'rhel7-base', + '8': 'rhel8-BaseOS', + '9': 'rhel9-BaseOS', + '10': 'rhel10-BaseOS' +diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py +index 30c415c0..32af8609 100644 +--- a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py ++++ b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py +@@ -689,14 +689,14 @@ def test_get_default_repository_channels_simple(monkeypatch): + where there is only one repository enabled from the pesid family in which are + the default repositories searched in. + """ +- monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4')) ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='8.10', dst_ver='9.6')) + repository_mapping = RepositoriesMapping( + mapping=[], +- repositories=[make_pesid_repo('rhel7-base', '7', 'rhel7-repoid-ga', channel='ga')] ++ repositories=[make_pesid_repo('rhel8-BaseOS', '8', 'rhel8-repoid-ga', channel='ga')] + ) + handler = RepoMapDataHandler(repository_mapping) + +- assert ['ga'] == get_default_repository_channels(handler, ['rhel7-repoid-ga']) ++ assert ['ga'] == get_default_repository_channels(handler, ['rhel8-repoid-ga']) + + + def test_get_default_repository_channels_highest_priority_channel(monkeypatch): +@@ -706,17 +706,17 @@ def test_get_default_repository_channels_highest_priority_channel(monkeypatch): + Verifies that the returned list contains the highest priority channel if there is a repository + with the channel enabled on the source system. + """ +- monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4')) ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='8.10', dst_ver='9.6')) + repository_mapping = RepositoriesMapping( + mapping=[], + repositories=[ +- make_pesid_repo('rhel7-base', '7', 'rhel7-repoid-ga', channel='ga'), +- make_pesid_repo('rhel7-base', '7', 'rhel7-repoid-eus', channel='eus'), ++ make_pesid_repo('rhel8-BaseOS', '8', 'rhel8-repoid-ga', channel='ga'), ++ make_pesid_repo('rhel8-BaseOS', '8', 'rhel8-repoid-eus', channel='eus'), + ] + ) + handler = RepoMapDataHandler(repository_mapping) + +- assert ['eus', 'ga'] == get_default_repository_channels(handler, ['rhel7-repoid-ga', 'rhel7-repoid-eus']) ++ assert ['eus', 'ga'] == get_default_repository_channels(handler, ['rhel8-repoid-ga', 'rhel8-repoid-eus']) + + + def test_get_default_repository_channels_no_default_pesid_repo(monkeypatch): +@@ -726,12 +726,12 @@ def test_get_default_repository_channels_no_default_pesid_repo(monkeypatch): + Verifies that the returned list contains some fallback channel even if no repository from the default + pesid family in which are the channels searched is enabled. 
+ """ +- monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4')) ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(arch='x86_64', src_ver='8.10', dst_ver='9.6')) + repository_mapping = RepositoriesMapping( + mapping=[], + repositories=[ +- make_pesid_repo('rhel7-base', '7', 'rhel7-repoid-ga', channel='ga'), +- make_pesid_repo('rhel7-base', '7', 'rhel7-repoid-eus', channel='eus'), ++ make_pesid_repo('rhel8-BaseOS', '8', 'rhel8-repoid-ga', channel='ga'), ++ make_pesid_repo('rhel8-BaseOS', '8', 'rhel8-repoid-eus', channel='eus'), + ] + ) + handler = RepoMapDataHandler(repository_mapping) +-- +2.52.0 + diff --git a/SOURCES/0104-scanpkgmanager-Remove-yum-related-code.patch b/SOURCES/0104-scanpkgmanager-Remove-yum-related-code.patch new file mode 100644 index 0000000..a205b28 --- /dev/null +++ b/SOURCES/0104-scanpkgmanager-Remove-yum-related-code.patch @@ -0,0 +1,186 @@ +From 656e301fe7749a77f7f4a5c652610ac27105dd33 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:50:32 +0200 +Subject: [PATCH 104/111] scanpkgmanager: Remove yum-related code + +--- + .../scanpkgmanager/libraries/pluginscanner.py | 21 ++++++------------- + .../libraries/scanpkgmanager.py | 13 ++++-------- + .../tests/test_pluginscanner.py | 16 ++------------ + .../tests/test_scanpkgmanager.py | 11 +++------- + 4 files changed, 15 insertions(+), 46 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/scanpkgmanager/libraries/pluginscanner.py b/repos/system_upgrade/common/actors/scanpkgmanager/libraries/pluginscanner.py +index 7bb03996..f83050ee 100644 +--- a/repos/system_upgrade/common/actors/scanpkgmanager/libraries/pluginscanner.py ++++ b/repos/system_upgrade/common/actors/scanpkgmanager/libraries/pluginscanner.py +@@ -1,6 +1,5 @@ + import re + +-from leapp.libraries.common.config.version import get_source_major_version + from leapp.libraries.stdlib import run + + # When the output spans multiple lines, each of the lines after the first one +@@ -50,7 +49,7 @@ def _parse_loaded_plugins(package_manager_output): + + def scan_enabled_package_manager_plugins(): + """ +- Runs package manager (yum/dnf) command and parses its output for enabled/loaded plugins. ++ Runs package manager (dnf) command and parses its output for enabled/loaded plugins. + + :return: A list of enabled plugins. + :rtype: List +@@ -60,16 +59,8 @@ def scan_enabled_package_manager_plugins(): + # An alternative approach would be to check the install path for package manager plugins + # and parse corresponding plugin configuration files. + +- if get_source_major_version() == '7': +- # in case of yum, set debuglevel=2 to be sure the output is always +- # same. The format of data is different for various debuglevels +- cmd = ['yum', '--setopt=debuglevel=2'] +- else: +- # the verbose mode in dnf always set particular debuglevel, so the +- # output is not affected by the default debug level set on the +- # system +- cmd = ['dnf', '-v'] # On RHEL8 we need to supply an extra switch +- +- pkg_manager_output = run(cmd, split=True, checked=False) # The command will certainly fail (does not matter). +- +- return _parse_loaded_plugins(pkg_manager_output) ++ # the verbose mode in dnf always set particular debuglevel, so the ++ # output is not affected by the default debug level set on the ++ # system ++ output = run(['dnf', '-v'], split=True, checked=False) # The command will certainly fail (does not matter). 
++ return _parse_loaded_plugins(output) +diff --git a/repos/system_upgrade/common/actors/scanpkgmanager/libraries/scanpkgmanager.py b/repos/system_upgrade/common/actors/scanpkgmanager/libraries/scanpkgmanager.py +index bf7ec0be..2fcac423 100644 +--- a/repos/system_upgrade/common/actors/scanpkgmanager/libraries/scanpkgmanager.py ++++ b/repos/system_upgrade/common/actors/scanpkgmanager/libraries/scanpkgmanager.py +@@ -2,17 +2,13 @@ import os + import re + + from leapp.libraries.actor import pluginscanner +-from leapp.libraries.common.config.version import get_source_major_version + from leapp.libraries.stdlib import api + from leapp.models import PkgManagerInfo + + YUM_CONFIG_PATH = '/etc/yum.conf' + DNF_CONFIG_PATH = '/etc/dnf/dnf.conf' + +- +-def _get_releasever_path(): +- default_manager = 'yum' if get_source_major_version() == '7' else 'dnf' +- return '/etc/{}/vars/releasever'.format(default_manager) ++RELEASEVER_PATH = '/etc/dnf/vars/releasever' + + + def _releasever_exists(releasever_path): +@@ -20,13 +16,12 @@ def _releasever_exists(releasever_path): + + + def get_etc_releasever(): +- """ Get release version from "/etc/{yum,dnf}/vars/releasever" file """ ++ """ Get release version from "/etc/dnf/vars/releasever" file """ + +- releasever_path = _get_releasever_path() +- if not _releasever_exists(releasever_path): ++ if not _releasever_exists(RELEASEVER_PATH): + return None + +- with open(releasever_path, 'r') as fo: ++ with open(RELEASEVER_PATH, 'r') as fo: + # we care about the first line only + releasever = fo.readline().strip() + +diff --git a/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_pluginscanner.py b/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_pluginscanner.py +index f0260e54..0b2bd5b7 100644 +--- a/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_pluginscanner.py ++++ b/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_pluginscanner.py +@@ -21,18 +21,11 @@ def assert_plugins_identified_as_enabled(expected_plugins, identified_plugins): + assert expected_enabled_plugin in identified_plugins, fail_description + + +-@pytest.mark.parametrize( +- ('source_major_version', 'command'), +- [ +- ('7', ['yum', '--setopt=debuglevel=2']), +- ('8', ['dnf', '-v']), +- ] +-) +-def test_scan_enabled_plugins(monkeypatch, source_major_version, command): ++def test_scan_enabled_plugins(monkeypatch): + """Tests whether the enabled plugins are correctly retrieved from the package manager output.""" + + def run_mocked(cmd, **kwargs): +- if cmd == command: ++ if cmd == ['dnf', '-v']: + return { + 'stdout': CMD_YUM_OUTPUT.split('\n'), + 'stderr': 'You need to give some command', +@@ -40,13 +33,9 @@ def test_scan_enabled_plugins(monkeypatch, source_major_version, command): + } + raise ValueError('Tried to run an unexpected command.') + +- def get_source_major_version_mocked(): +- return source_major_version +- + # The library imports `run` all the way into its namespace (from ...stdlib import run), + # we must overwrite it there then: + monkeypatch.setattr(pluginscanner, 'run', run_mocked) +- monkeypatch.setattr(pluginscanner, 'get_source_major_version', get_source_major_version_mocked) + + enabled_plugins = pluginscanner.scan_enabled_package_manager_plugins() + assert_plugins_identified_as_enabled( +@@ -72,7 +61,6 @@ def test_yum_loaded_plugins_multiline_output(yum_output, monkeypatch): + } + + monkeypatch.setattr(pluginscanner, 'run', run_mocked) +- monkeypatch.setattr(pluginscanner, 'get_source_major_version', lambda: '7') + + enabled_plugins = 
pluginscanner.scan_enabled_package_manager_plugins() + +diff --git a/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_scanpkgmanager.py b/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_scanpkgmanager.py +index 75c5c5ba..dc94060a 100644 +--- a/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_scanpkgmanager.py ++++ b/repos/system_upgrade/common/actors/scanpkgmanager/tests/test_scanpkgmanager.py +@@ -2,15 +2,12 @@ import os + + import pytest + +-from leapp.libraries import stdlib + from leapp.libraries.actor import pluginscanner, scanpkgmanager +-from leapp.libraries.common import testutils + from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked + from leapp.libraries.stdlib import api + + CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + PROXY_ADDRESS = 'https://192.168.121.123:3128' +-YUM_CONFIG_PATH = '/etc/yum.conf' + DNF_CONFIG_PATH = '/etc/dnf/dnf.conf' + + +@@ -22,10 +19,6 @@ def mock_releasever_exists(overrides): + return mocked_releasever_exists + + +-def mocked_get_releasever_path(): +- return os.path.join(CUR_DIR, 'files/releasever') +- +- + @pytest.mark.parametrize('etcrelease_exists', [True, False]) + def test_get_etcreleasever(monkeypatch, etcrelease_exists): + monkeypatch.setattr( +@@ -38,7 +31,9 @@ def test_get_etcreleasever(monkeypatch, etcrelease_exists): + ) + monkeypatch.setattr(scanpkgmanager.api, 'produce', produce_mocked()) + monkeypatch.setattr(scanpkgmanager.api, 'current_actor', CurrentActorMocked()) +- monkeypatch.setattr(scanpkgmanager, '_get_releasever_path', mocked_get_releasever_path) ++ monkeypatch.setattr( ++ scanpkgmanager, 'RELEASEVER_PATH', os.path.join(CUR_DIR, 'files/releasever') ++ ) + monkeypatch.setattr(scanpkgmanager, '_get_proxy_if_set', lambda x: None) + monkeypatch.setattr(pluginscanner, 'scan_enabled_package_manager_plugins', lambda: []) + +-- +2.52.0 + diff --git a/SOURCES/0105-checkyumpluginsenabled-Drop-yum-related-code.patch b/SOURCES/0105-checkyumpluginsenabled-Drop-yum-related-code.patch new file mode 100644 index 0000000..ab412ac --- /dev/null +++ b/SOURCES/0105-checkyumpluginsenabled-Drop-yum-related-code.patch @@ -0,0 +1,185 @@ +From c746806784c06fccac28a3e92578fa9abf9e9a1a Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 18:59:01 +0200 +Subject: [PATCH 105/111] checkyumpluginsenabled: Drop yum-related code + +The name of the actor is kept for backwards compatibility. 
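
For illustration, a minimal standalone sketch of the check this actor performs after the change, assuming a plain list of plugin names and a boolean standing in for skip_rhsm(); the function and parameter names below are illustrative and not taken from the actor itself:

REQUIRED_DNF_PLUGINS = {'subscription-manager', 'product-id'}

def missing_required_plugins(enabled_plugins, rhsm_skipped=False):
    # Required plugins that were not reported as loaded by `dnf -v`.
    missing = REQUIRED_DNF_PLUGINS - set(enabled_plugins)
    if rhsm_skipped:
        # When RHSM handling is skipped (LEAPP_NO_RHSM), the subscription
        # related plugins are not required.
        missing -= {'subscription-manager', 'product-id'}
    return missing

# A report is warranted only when a required plugin is missing:
assert missing_required_plugins(['product-id', 'some-user-plugin']) == {'subscription-manager'}
assert not missing_required_plugins([], rhsm_skipped=True)
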
+--- + .../actors/checkyumpluginsenabled/actor.py | 7 +-- + .../libraries/checkyumpluginsenabled.py | 51 ++++++++----------- + .../tests/test_checkyumpluginsenabled.py | 14 +++-- + 3 files changed, 32 insertions(+), 40 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/checkyumpluginsenabled/actor.py b/repos/system_upgrade/common/actors/checkyumpluginsenabled/actor.py +index fbc2f8bc..c5a4853a 100644 +--- a/repos/system_upgrade/common/actors/checkyumpluginsenabled/actor.py ++++ b/repos/system_upgrade/common/actors/checkyumpluginsenabled/actor.py +@@ -1,13 +1,14 @@ + from leapp.actors import Actor +-from leapp.libraries.actor.checkyumpluginsenabled import check_required_yum_plugins_enabled ++from leapp.libraries.actor.checkyumpluginsenabled import check_required_dnf_plugins_enabled + from leapp.models import PkgManagerInfo + from leapp.reporting import Report + from leapp.tags import ChecksPhaseTag, IPUWorkflowTag + + ++# NOTE: the name is kept for backwards compatibility, even though this scans only DNF now + class CheckYumPluginsEnabled(Actor): + """ +- Checks that the required yum plugins are enabled. ++ Checks that the required DNF plugins are enabled. + """ + + name = 'check_yum_plugins_enabled' +@@ -17,4 +18,4 @@ class CheckYumPluginsEnabled(Actor): + + def process(self): + pkg_manager_info = next(self.consume(PkgManagerInfo)) +- check_required_yum_plugins_enabled(pkg_manager_info) ++ check_required_dnf_plugins_enabled(pkg_manager_info) +diff --git a/repos/system_upgrade/common/actors/checkyumpluginsenabled/libraries/checkyumpluginsenabled.py b/repos/system_upgrade/common/actors/checkyumpluginsenabled/libraries/checkyumpluginsenabled.py +index 5522af9c..87ff6511 100644 +--- a/repos/system_upgrade/common/actors/checkyumpluginsenabled/libraries/checkyumpluginsenabled.py ++++ b/repos/system_upgrade/common/actors/checkyumpluginsenabled/libraries/checkyumpluginsenabled.py +@@ -1,25 +1,24 @@ + import os + + from leapp import reporting +-from leapp.libraries.common.config.version import get_source_major_version + from leapp.libraries.common.rhsm import skip_rhsm + + # If LEAPP_NO_RHSM is set, subscription-manager and product-id will not be + # considered as required when checking whether the required plugins are enabled. +-REQUIRED_YUM_PLUGINS = {'subscription-manager', 'product-id'} ++REQUIRED_DNF_PLUGINS = {'subscription-manager', 'product-id'} + FMT_LIST_SEPARATOR = '\n - ' + + +-def check_required_yum_plugins_enabled(pkg_manager_info): ++def check_required_dnf_plugins_enabled(pkg_manager_info): + """ +- Checks whether the yum plugins required by the IPU are enabled. ++ Checks whether the DNF plugins required by the IPU are enabled. + + If they are not enabled, a report is produced informing the user about it. 
+ + :param pkg_manager_info: PkgManagerInfo + """ + +- missing_required_plugins = REQUIRED_YUM_PLUGINS - set(pkg_manager_info.enabled_plugins) ++ missing_required_plugins = REQUIRED_DNF_PLUGINS - set(pkg_manager_info.enabled_plugins) + + if skip_rhsm(): + missing_required_plugins -= {'subscription-manager', 'product-id'} +@@ -29,37 +28,30 @@ def check_required_yum_plugins_enabled(pkg_manager_info): + for missing_plugin in missing_required_plugins: + missing_required_plugins_text += '{0}{1}'.format(FMT_LIST_SEPARATOR, missing_plugin) + +- if get_source_major_version() == '7': +- pkg_manager = 'YUM' +- pkg_manager_config_path = '/etc/yum.conf' +- plugin_configs_dir = '/etc/yum/pluginconf.d' +- else: +- # On RHEL8+ the yum package might not be installed +- pkg_manager = 'DNF' +- pkg_manager_config_path = '/etc/dnf/dnf.conf' +- plugin_configs_dir = '/etc/dnf/plugins' +- +- # pkg_manager_config_path - enable/disable plugins globally +- # subscription_manager_plugin_conf, product_id_plugin_conf - plugins can be disabled individually +- subscription_manager_plugin_conf = os.path.join(plugin_configs_dir, 'subscription-manager.conf') ++ # dnf_conf_path - enable/disable plugins globally ++ # rhsm_plugin_conf, product_id_plugin_conf - plugins can be disabled individually ++ dnf_conf_path = '/etc/dnf/dnf.conf' ++ plugin_configs_dir = '/etc/dnf/plugins' ++ rhsm_plugin_conf = os.path.join(plugin_configs_dir, 'subscription-manager.conf') + product_id_plugin_conf = os.path.join(plugin_configs_dir, 'product-id.conf') + + remediation_commands = [ +- 'sed -i \'s/^plugins=0/plugins=1/\' \'{0}\''.format(pkg_manager_config_path), +- 'sed -i \'s/^enabled=0/enabled=1/\' \'{0}\''.format(subscription_manager_plugin_conf), +- 'sed -i \'s/^enabled=0/enabled=1/\' \'{0}\''.format(product_id_plugin_conf) ++ f"sed -i 's/^plugins=0/plugins=1/' '{dnf_conf_path}'" ++ f"sed -i 's/^enabled=0/enabled=1/' '{rhsm_plugin_conf}'" ++ f"sed -i 's/^enabled=0/enabled=1/' '{product_id_plugin_conf}'" + ] + + reporting.create_report([ +- reporting.Title('Required {0} plugins are not being loaded.'.format(pkg_manager)), ++ reporting.Title('Required DNF plugins are not being loaded.'), + reporting.Summary( +- 'The following {0} plugins are not being loaded: {1}'.format(pkg_manager, +- missing_required_plugins_text) ++ 'The following DNF plugins are not being loaded: {}'.format(missing_required_plugins_text) + ), + reporting.Remediation( +- hint='If you have yum plugins globally disabled, please enable them by editing the {0}. ' +- 'Individually, the {1} plugins can be enabled in their corresponding configurations found at: {2}' +- .format(pkg_manager_config_path, pkg_manager, plugin_configs_dir), ++ hint=( ++ 'If you have DNF plugins globally disabled, please enable them by editing the {0}. 
' ++ 'Individually, the DNF plugins can be enabled in their corresponding configurations found at: {1}' ++ .format(dnf_conf_path, plugin_configs_dir) ++ ), + # Provide all commands as one due to problems with satellites + commands=[['bash', '-c', '"{0}"'.format('; '.join(remediation_commands))]] + ), +@@ -67,10 +59,11 @@ def check_required_yum_plugins_enabled(pkg_manager_info): + url='https://access.redhat.com/solutions/7028063', + title='Why is Leapp preupgrade generating "Inhibitor: Required YUM plugins are not being loaded."' + ), +- reporting.RelatedResource('file', pkg_manager_config_path), +- reporting.RelatedResource('file', subscription_manager_plugin_conf), ++ reporting.RelatedResource('file', dnf_conf_path), ++ reporting.RelatedResource('file', rhsm_plugin_conf), + reporting.RelatedResource('file', product_id_plugin_conf), + reporting.Severity(reporting.Severity.HIGH), + reporting.Groups([reporting.Groups.INHIBITOR]), + reporting.Groups([reporting.Groups.REPOSITORY]), ++ reporting.Key("2a0ff91bea885cfe9d763cf3a379789848a501b9"), + ]) +diff --git a/repos/system_upgrade/common/actors/checkyumpluginsenabled/tests/test_checkyumpluginsenabled.py b/repos/system_upgrade/common/actors/checkyumpluginsenabled/tests/test_checkyumpluginsenabled.py +index 9bf9a3ba..1f7e916c 100644 +--- a/repos/system_upgrade/common/actors/checkyumpluginsenabled/tests/test_checkyumpluginsenabled.py ++++ b/repos/system_upgrade/common/actors/checkyumpluginsenabled/tests/test_checkyumpluginsenabled.py +@@ -1,7 +1,5 @@ +-import pytest +- + from leapp import reporting +-from leapp.libraries.actor.checkyumpluginsenabled import check_required_yum_plugins_enabled ++from leapp.libraries.actor.checkyumpluginsenabled import check_required_dnf_plugins_enabled + from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked + from leapp.libraries.stdlib import api + from leapp.models import PkgManagerInfo +@@ -37,15 +35,15 @@ def test__create_report_mocked(monkeypatch): + + + def test_report_when_missing_required_plugins(monkeypatch): +- """Test whether a report entry is created when any of the required YUM plugins are missing.""" +- yum_config = PkgManagerInfo(enabled_plugins=['product-id', 'some-user-plugin']) ++ """Test whether a report entry is created when any of the required DNF plugins are missing.""" ++ dnf_config = PkgManagerInfo(enabled_plugins=['product-id', 'some-user-plugin']) + + actor_reports = create_report_mocked() + + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) + monkeypatch.setattr(reporting, 'create_report', actor_reports) + +- check_required_yum_plugins_enabled(yum_config) ++ check_required_dnf_plugins_enabled(dnf_config) + + assert actor_reports.called, "Report wasn't created when required a plugin is missing." 
+ +@@ -62,7 +60,7 @@ def test_nothing_is_reported_when_rhsm_disabled(monkeypatch): + monkeypatch.setattr(api, 'current_actor', actor_mocked) + monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) + +- yum_config = PkgManagerInfo(enabled_plugins=[]) +- check_required_yum_plugins_enabled(yum_config) ++ dnf_config = PkgManagerInfo(enabled_plugins=[]) ++ check_required_dnf_plugins_enabled(dnf_config) + + assert not reporting.create_report.called, 'Report was created even if LEAPP_NO_RHSM was set' +-- +2.52.0 + diff --git a/SOURCES/0106-scancryptopolicies-Remove-RHEL-7-early-return.patch b/SOURCES/0106-scancryptopolicies-Remove-RHEL-7-early-return.patch new file mode 100644 index 0000000..52dd9c2 --- /dev/null +++ b/SOURCES/0106-scancryptopolicies-Remove-RHEL-7-early-return.patch @@ -0,0 +1,82 @@ +From f543432d31e3c84ea98fff348f23641a855acbc3 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:49:42 +0200 +Subject: [PATCH 106/111] scancryptopolicies: Remove RHEL 7 early return + +--- + .../common/actors/scancryptopolicies/actor.py | 5 ---- + .../component_test_scancryptopolicies.py | 25 +++++-------------- + 2 files changed, 6 insertions(+), 24 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/scancryptopolicies/actor.py b/repos/system_upgrade/common/actors/scancryptopolicies/actor.py +index 6f871243..dc695bc3 100644 +--- a/repos/system_upgrade/common/actors/scancryptopolicies/actor.py ++++ b/repos/system_upgrade/common/actors/scancryptopolicies/actor.py +@@ -1,6 +1,5 @@ + from leapp.actors import Actor + from leapp.libraries.actor import scancryptopolicies +-from leapp.libraries.common.config import version + from leapp.models import CryptoPolicyInfo + from leapp.tags import FactsPhaseTag, IPUWorkflowTag + +@@ -24,8 +23,4 @@ class ScanCryptoPolicies(Actor): + tags = (IPUWorkflowTag, FactsPhaseTag) + + def process(self): +- if version.get_source_major_version() == '7': +- # there are no crypto policies in EL 7 +- return +- + scancryptopolicies.process() +diff --git a/repos/system_upgrade/common/actors/scancryptopolicies/tests/component_test_scancryptopolicies.py b/repos/system_upgrade/common/actors/scancryptopolicies/tests/component_test_scancryptopolicies.py +index 06029734..1f745574 100644 +--- a/repos/system_upgrade/common/actors/scancryptopolicies/tests/component_test_scancryptopolicies.py ++++ b/repos/system_upgrade/common/actors/scancryptopolicies/tests/component_test_scancryptopolicies.py +@@ -1,18 +1,10 @@ + import os + +-import pytest +- + from leapp.libraries.actor import scancryptopolicies +-from leapp.libraries.common.config import version + from leapp.models import CryptoPolicyInfo + + +-@pytest.mark.parametrize(('source_version', 'should_run'), [ +- ('7', False), +- ('8', True), +- ('9', True), +-]) +-def test_actor_execution(monkeypatch, current_actor_context, source_version, should_run): ++def test_actor_execution(monkeypatch, current_actor_context): + def read_current_policy_mock(filename): + return "DEFAULT_XXX" + +@@ -28,19 +20,14 @@ def test_actor_execution(monkeypatch, current_actor_context, source_version, sho + return _original_listdir(path) + + def isfile_mock(filename): +- if filename.endswith('/modules'): +- return False +- return True ++ return not filename.endswith('/modules') + +- monkeypatch.setattr(version, 'get_source_major_version', lambda: source_version) + monkeypatch.setattr(scancryptopolicies, 'read_current_policy', read_current_policy_mock) + _original_listdir = os.listdir + monkeypatch.setattr(os, 
'listdir', listdir_mock) + monkeypatch.setattr(os.path, 'isfile', isfile_mock) + current_actor_context.run() +- if should_run: +- cpi = current_actor_context.consume(CryptoPolicyInfo) +- assert cpi +- assert cpi[0].current_policy == 'DEFAULT_XXX' +- else: +- assert not current_actor_context.consume(CryptoPolicyInfo) ++ ++ cpi = current_actor_context.consume(CryptoPolicyInfo) ++ assert cpi ++ assert cpi[0].current_policy == 'DEFAULT_XXX' +-- +2.52.0 + diff --git a/SOURCES/0107-cryptopoliciescheck-Remove-RHEL-7-early-return-and-u.patch b/SOURCES/0107-cryptopoliciescheck-Remove-RHEL-7-early-return-and-u.patch new file mode 100644 index 0000000..ea31641 --- /dev/null +++ b/SOURCES/0107-cryptopoliciescheck-Remove-RHEL-7-early-return-and-u.patch @@ -0,0 +1,129 @@ +From 1886ca234e03a12ad9496de3172a5cfdd5eb16ee Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:51:46 +0200 +Subject: [PATCH 107/111] cryptopoliciescheck: Remove RHEL 7 early return and + update tests + +--- + .../actors/cryptopoliciescheck/actor.py | 5 -- + .../component_test_cryptopoliciescheck.py | 58 +++++-------------- + 2 files changed, 16 insertions(+), 47 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/cryptopoliciescheck/actor.py b/repos/system_upgrade/common/actors/cryptopoliciescheck/actor.py +index e5f67644..41a90d5d 100644 +--- a/repos/system_upgrade/common/actors/cryptopoliciescheck/actor.py ++++ b/repos/system_upgrade/common/actors/cryptopoliciescheck/actor.py +@@ -1,6 +1,5 @@ + from leapp.actors import Actor + from leapp.libraries.actor import cryptopoliciescheck +-from leapp.libraries.common.config import version + from leapp.models import CryptoPolicyInfo, Report, TargetUserSpacePreupgradeTasks + from leapp.tags import ChecksPhaseTag, IPUWorkflowTag + +@@ -22,8 +21,4 @@ class CryptoPoliciesCheck(Actor): + tags = (IPUWorkflowTag, ChecksPhaseTag,) + + def process(self): +- if version.get_source_major_version() == '7': +- # there are no crypto policies in EL 7 +- return +- + cryptopoliciescheck.process(self.consume(CryptoPolicyInfo)) +diff --git a/repos/system_upgrade/common/actors/cryptopoliciescheck/tests/component_test_cryptopoliciescheck.py b/repos/system_upgrade/common/actors/cryptopoliciescheck/tests/component_test_cryptopoliciescheck.py +index 0eb58ef6..3ce1e1ae 100644 +--- a/repos/system_upgrade/common/actors/cryptopoliciescheck/tests/component_test_cryptopoliciescheck.py ++++ b/repos/system_upgrade/common/actors/cryptopoliciescheck/tests/component_test_cryptopoliciescheck.py +@@ -1,5 +1,3 @@ +-import pytest +- + from leapp.libraries.common.config import version + from leapp.models import ( + CopyFile, +@@ -11,13 +9,7 @@ from leapp.models import ( + ) + + +-@pytest.mark.parametrize(('source_version'), [ +- ('7'), +- ('8'), +- ('9'), +-]) +-def test_actor_execution_default(monkeypatch, current_actor_context, source_version): +- monkeypatch.setattr(version, 'get_source_major_version', lambda: source_version) ++def test_actor_execution_default(current_actor_context): + current_actor_context.feed( + CryptoPolicyInfo( + current_policy="DEFAULT", +@@ -29,13 +21,7 @@ def test_actor_execution_default(monkeypatch, current_actor_context, source_vers + assert not current_actor_context.consume(TargetUserSpacePreupgradeTasks) + + +-@pytest.mark.parametrize(('source_version', 'should_run'), [ +- ('7', False), +- ('8', True), +- ('9', True), +-]) +-def test_actor_execution_legacy(monkeypatch, current_actor_context, source_version, should_run): +- monkeypatch.setattr(version, 
'get_source_major_version', lambda: source_version) ++def test_actor_execution_legacy(current_actor_context): + current_actor_context.feed( + CryptoPolicyInfo( + current_policy="LEGACY", +@@ -45,24 +31,15 @@ def test_actor_execution_legacy(monkeypatch, current_actor_context, source_versi + ) + current_actor_context.run() + +- if should_run: +- assert current_actor_context.consume(TargetUserSpacePreupgradeTasks) +- u = current_actor_context.consume(TargetUserSpacePreupgradeTasks)[0] +- assert u.install_rpms == ['crypto-policies-scripts'] +- assert u.copy_files == [] ++ assert current_actor_context.consume(TargetUserSpacePreupgradeTasks) ++ u = current_actor_context.consume(TargetUserSpacePreupgradeTasks)[0] ++ assert u.install_rpms == ['crypto-policies-scripts'] ++ assert u.copy_files == [] + +- assert current_actor_context.consume(Report) +- else: +- assert not current_actor_context.consume(TargetUserSpacePreupgradeTasks) ++ assert current_actor_context.consume(Report) + + +-@pytest.mark.parametrize(('source_version', 'should_run'), [ +- ('7', False), +- ('8', True), +- ('9', True), +-]) +-def test_actor_execution_custom(monkeypatch, current_actor_context, source_version, should_run): +- monkeypatch.setattr(version, 'get_source_major_version', lambda: source_version) ++def test_actor_execution_custom(current_actor_context): + current_actor_context.feed( + CryptoPolicyInfo( + current_policy="CUSTOM:SHA2", +@@ -76,15 +53,12 @@ def test_actor_execution_custom(monkeypatch, current_actor_context, source_versi + ) + current_actor_context.run() + +- if should_run: +- assert current_actor_context.consume(TargetUserSpacePreupgradeTasks) +- u = current_actor_context.consume(TargetUserSpacePreupgradeTasks)[0] +- assert u.install_rpms == ['crypto-policies-scripts'] +- assert u.copy_files == [ +- CopyFile(src='/etc/crypto-policies/policies/CUSTOM.pol'), +- CopyFile(src='/etc/crypto-policies/policies/modules/SHA2.pmod'), +- ] ++ assert current_actor_context.consume(TargetUserSpacePreupgradeTasks) ++ u = current_actor_context.consume(TargetUserSpacePreupgradeTasks)[0] ++ assert u.install_rpms == ['crypto-policies-scripts'] ++ assert u.copy_files == [ ++ CopyFile(src='/etc/crypto-policies/policies/CUSTOM.pol'), ++ CopyFile(src='/etc/crypto-policies/policies/modules/SHA2.pmod'), ++ ] + +- assert current_actor_context.consume(Report) +- else: +- assert not current_actor_context.consume(TargetUserSpacePreupgradeTasks) ++ assert current_actor_context.consume(Report) +-- +2.52.0 + diff --git a/SOURCES/0108-checkopensslconf-Always-use-IBMCA-provider-wording-i.patch b/SOURCES/0108-checkopensslconf-Always-use-IBMCA-provider-wording-i.patch new file mode 100644 index 0000000..63fed3c --- /dev/null +++ b/SOURCES/0108-checkopensslconf-Always-use-IBMCA-provider-wording-i.patch @@ -0,0 +1,53 @@ +From e588f34744192d80d01bcda286b1013b7d7afffc Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:55:32 +0200 +Subject: [PATCH 108/111] checkopensslconf: Always use IBMCA provider wording + instead of IBMCA engine + +On RHEL 7 it's called engine, drop that as RHEL 7 upgrades repo has been +dropped. 
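
The remediation hint kept by this actor relies on the standard OPENSSL_CONF environment variable. Purely as an illustration (not part of the patch), one way to point a single application at a copied configuration that has the IBMCA providers enabled, using a hypothetical path for the copy:

import os
import subprocess

# Hypothetical path to a copy of the default OpenSSL config with the IBMCA
# providers enabled; the system-wide default is left untouched.
env = dict(os.environ, OPENSSL_CONF='/etc/pki/tls/openssl-ibmca.cnf')
subprocess.run(['openssl', 'version', '-a'], env=env, check=False)
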
+--- + .../checkopensslconf/libraries/checkopensslconf.py | 10 ++++------ + 1 file changed, 4 insertions(+), 6 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/openssl/checkopensslconf/libraries/checkopensslconf.py b/repos/system_upgrade/common/actors/openssl/checkopensslconf/libraries/checkopensslconf.py +index 53e803b2..d005e205 100644 +--- a/repos/system_upgrade/common/actors/openssl/checkopensslconf/libraries/checkopensslconf.py ++++ b/repos/system_upgrade/common/actors/openssl/checkopensslconf/libraries/checkopensslconf.py +@@ -20,29 +20,27 @@ def check_ibmca(): + return + # In RHEL 9 has been introduced new technology: openssl providers. The engine + # is deprecated, so keep proper teminology to not confuse users. +- dst_tech = 'engine' if version.get_target_major_version() == '8' else 'providers' + summary = ( + 'The presence of openssl-ibmca package suggests that the system may be configured' + ' to use the IBMCA OpenSSL engine.' + ' Due to major changes in OpenSSL and libica between RHEL {source} and RHEL {target} it is not' + ' possible to migrate OpenSSL configuration files automatically. Therefore,' +- ' it is necessary to enable IBMCA {tech} in the OpenSSL config file manually' ++ ' it is necessary to enable IBMCA providers in the OpenSSL config file manually' + ' after the system upgrade.' + .format( + source=version.get_source_major_version(), + target=version.get_target_major_version(), +- tech=dst_tech + ) + ) + + hint = ( +- 'Configure the IBMCA {tech} manually after the upgrade.' ++ 'Configure the IBMCA providers manually after the upgrade.' + ' Please, be aware that it is not recommended to configure the system default' + ' {fpath}. Instead, it is recommended to configure a copy of' + ' that file and use this copy only for particular applications that are supposed' +- ' to utilize the IBMCA {tech}. The location of the OpenSSL configuration file' ++ ' to utilize the IBMCA providers. The location of the OpenSSL configuration file' + ' can be specified using the OPENSSL_CONF environment variable.' 
+- .format(tech=dst_tech, fpath=DEFAULT_OPENSSL_CONF) ++ .format(fpath=DEFAULT_OPENSSL_CONF) + ) + + reporting.create_report([ +-- +2.52.0 + diff --git a/SOURCES/0109-opensslconfigscanner-Drop-early-return-on-el7.patch b/SOURCES/0109-opensslconfigscanner-Drop-early-return-on-el7.patch new file mode 100644 index 0000000..97e2371 --- /dev/null +++ b/SOURCES/0109-opensslconfigscanner-Drop-early-return-on-el7.patch @@ -0,0 +1,58 @@ +From 2748e9920f9626973b7d1cf38c6a61445a13660a Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Thu, 21 Aug 2025 19:57:33 +0200 +Subject: [PATCH 109/111] opensslconfigscanner: Drop early return on el7 + +--- + .../opensslconfigscanner/libraries/readconf.py | 6 ------ + .../tests/test_opensslconfigscanner.py | 13 ++----------- + 2 files changed, 2 insertions(+), 17 deletions(-) + +diff --git a/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/libraries/readconf.py b/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/libraries/readconf.py +index 678cc7aa..e1037033 100644 +--- a/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/libraries/readconf.py ++++ b/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/libraries/readconf.py +@@ -1,6 +1,5 @@ + import errno + +-from leapp.libraries.common.config import version + from leapp.libraries.common.rpms import check_file_modification + from leapp.libraries.stdlib import api + from leapp.models import OpenSslConfig, OpenSslConfigBlock, OpenSslConfigPair +@@ -88,11 +87,6 @@ def scan_config(producer): + Parse openssl.cnf file to create OpenSslConfig message. + """ + +- if version.get_source_major_version() == '7': +- # Apply this only for EL 8+ as we are not interested about this +- # on EL 7 anymore (moved from el8toel9) +- return +- + # direct access to configuration file + output = read_config() + config = parse_config(output) +diff --git a/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/tests/test_opensslconfigscanner.py b/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/tests/test_opensslconfigscanner.py +index 8978e133..dedc82f2 100644 +--- a/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/tests/test_opensslconfigscanner.py ++++ b/repos/system_upgrade/common/actors/openssl/opensslconfigscanner/tests/test_opensslconfigscanner.py +@@ -142,15 +142,6 @@ def test_produce_config(): + assert cfg.blocks[2].pairs[0].value == "/etc/crypto-policies/back-ends/opensslcnf.config" + + +-@pytest.mark.parametrize(('source_version', 'should_run'), [ +- ('7', False), +- ('8', True), +- ('9', True), +-]) +-def test_actor_execution(monkeypatch, current_actor_context, source_version, should_run): +- monkeypatch.setattr(version, 'get_source_major_version', lambda: source_version) ++def test_actor_execution(current_actor_context): + current_actor_context.run() +- if should_run: +- assert current_actor_context.consume(OpenSslConfig) +- else: +- assert not current_actor_context.consume(OpenSslConfig) ++ assert current_actor_context.consume(OpenSslConfig) +-- +2.52.0 + diff --git a/SOURCES/0110-lib-overlaygen-Remove-the-legacy-OVL-solution.patch b/SOURCES/0110-lib-overlaygen-Remove-the-legacy-OVL-solution.patch new file mode 100644 index 0000000..ffb8bc5 --- /dev/null +++ b/SOURCES/0110-lib-overlaygen-Remove-the-legacy-OVL-solution.patch @@ -0,0 +1,163 @@ +From 93429f7b3ee08ac8763d5ed8b5cf49d30a3ecfb5 Mon Sep 17 00:00:00 2001 +From: Matej Matuska +Date: Tue, 16 Dec 2025 15:35:35 +0100 +Subject: [PATCH 110/111] lib/overlaygen: Remove the legacy 
OVL solution + +This solution has been kept primarily for 7->8 upgrades, that is no +longer relevant and the solution is buggy, remove it. + +No docs update - the env var was not documented to begin with. +--- + .../common/libraries/overlaygen.py | 125 +----------------- + 1 file changed, 2 insertions(+), 123 deletions(-) + +diff --git a/repos/system_upgrade/common/libraries/overlaygen.py b/repos/system_upgrade/common/libraries/overlaygen.py +index 81342557..f0d0ba1d 100644 +--- a/repos/system_upgrade/common/libraries/overlaygen.py ++++ b/repos/system_upgrade/common/libraries/overlaygen.py +@@ -608,7 +608,7 @@ def create_source_overlay( + :type mounts_dir: str + :param scratch_dir: Absolute path to the directory in which all disk and OVL images are stored. + :type scratch_dir: str +- :param xfs_info: The XFSPresence message. ++ :param xfs_info: The XFSPresence message (this is currently unused, but kept for compatibility). + :type xfs_info: leapp.models.XFSPresence + :param storage_info: The StorageInfo message. + :type storage_info: leapp.models.StorageInfo +@@ -626,11 +626,7 @@ def create_source_overlay( + scratch_dir=scratch_dir, mounts_dir=mounts_dir)) + try: + _create_mounts_dir(scratch_dir, mounts_dir) +- if get_env('LEAPP_OVL_LEGACY', '0') != '1': +- mounts = _prepare_required_mounts(scratch_dir, mounts_dir, storage_info, scratch_reserve) +- else: +- # fallback to the deprecated OVL solution +- mounts = _prepare_required_mounts_old(scratch_dir, mounts_dir, _get_mountpoints(storage_info), xfs_info) ++ mounts = _prepare_required_mounts(scratch_dir, mounts_dir, storage_info, scratch_reserve) + with mounts.pop('/') as root_mount: + with mounting.OverlayMount(name='system_overlay', source='/', workdir=root_mount.target) as root_overlay: + if mount_target: +@@ -643,120 +639,3 @@ def create_source_overlay( + yield overlay + finally: + cleanup_scratch(scratch_dir, mounts_dir) +- +- +-# ############################################################################# +-# Deprecated OVL solution ... +-# This is going to be removed in future as the whole functionality is going to +-# be replaced by new one. The problem is that the new solution can potentially +-# negatively affect systems with many loop mountpoints, so let's keep this +-# as a workaround for now. I am separating the old and new code in this way +-# to make the future removal easy. +-# The code below is triggered when LEAPP_OVL_LEGACY=1 envar is set. +-# IMPORTANT: Before an update of functions above, ensure the functionality of +-# the code below is not affected, otherwise copy the function below with the +-# "_old" suffix. +-# ############################################################################# +-def _ensure_enough_diskimage_space_old(space_needed, directory): +- stat = os.statvfs(directory) +- if (stat.f_frsize * stat.f_bavail) < (space_needed * 1024 * 1024): +- message = ('Not enough space available for creating required disk images in {directory}. ' + +- 'Needed: {space_needed} MiB').format(space_needed=space_needed, directory=directory) +- api.current_logger().error(message) +- raise StopActorExecutionError(message) +- +- +-def _overlay_disk_size_old(): +- """ +- Convenient function to retrieve the overlay disk size +- """ +- env_size = get_env('LEAPP_OVL_SIZE', '2048') +- try: +- disk_size = int(env_size) +- except ValueError: +- disk_size = 2048 +- api.current_logger().warning( +- 'Invalid "LEAPP_OVL_SIZE" environment variable "%s". 
Setting default "%d" value', env_size, disk_size +- ) +- return disk_size +- +- +-def _create_diskimages_dir_old(scratch_dir, diskimages_dir): +- """ +- Prepares directories for disk images +- """ +- api.current_logger().debug('Creating disk images directory.') +- try: +- utils.makedirs(diskimages_dir) +- api.current_logger().debug('Done creating disk images directory.') +- except OSError: +- api.current_logger().error('Failed to create disk images directory %s', diskimages_dir, exc_info=True) +- +- # This is an attempt for giving the user a chance to resolve it on their own +- raise StopActorExecutionError( +- message='Failed to prepare environment for package download while creating directories.', +- details={ +- 'hint': 'Please ensure that {scratch_dir} is empty and modifiable.'.format(scratch_dir=scratch_dir) +- } +- ) +- +- +-def _create_mount_disk_image_old(disk_images_directory, path): +- """ +- Creates the mount disk image, for cases when we hit XFS with ftype=0 +- """ +- diskimage_path = os.path.join(disk_images_directory, _mount_name(path)) +- disk_size = _overlay_disk_size_old() +- +- api.current_logger().debug('Attempting to create disk image with size %d MiB at %s', disk_size, diskimage_path) +- utils.call_with_failure_hint( +- cmd=['/bin/dd', 'if=/dev/zero', 'of={}'.format(diskimage_path), 'bs=1M', 'count={}'.format(disk_size)], +- hint='Please ensure that there is enough diskspace in {} at least {} MiB are needed'.format( +- diskimage_path, disk_size) +- ) +- +- api.current_logger().debug('Creating ext4 filesystem in disk image at %s', diskimage_path) +- try: +- utils.call_with_oserror_handled(cmd=['/sbin/mkfs.ext4', '-F', diskimage_path]) +- except CalledProcessError as e: +- api.current_logger().error('Failed to create ext4 filesystem in %s', diskimage_path, exc_info=True) +- raise StopActorExecutionError( +- message=str(e) +- ) +- +- return diskimage_path +- +- +-def _prepare_required_mounts_old(scratch_dir, mounts_dir, mount_points, xfs_info): +- result = { +- mount_point.fs_file: mounting.NullMount( +- _mount_dir(mounts_dir, mount_point.fs_file)) for mount_point in mount_points +- } +- +- if not xfs_info.mountpoints_without_ftype: +- return result +- +- space_needed = _overlay_disk_size_old() * len(xfs_info.mountpoints_without_ftype) +- disk_images_directory = os.path.join(scratch_dir, 'diskimages') +- +- # Ensure we cleanup old disk images before we check for space constraints. +- run(['rm', '-rf', disk_images_directory]) +- _create_diskimages_dir_old(scratch_dir, disk_images_directory) +- _ensure_enough_diskimage_space_old(space_needed, scratch_dir) +- +- mount_names = [mount_point.fs_file for mount_point in mount_points] +- +- # TODO(pstodulk): this (adding rootfs into the set always) is hotfix for +- # bz #1911802 (not ideal one..). The problem occurs one rootfs is ext4 fs, +- # but /var/lib/leapp/... is under XFS without ftype; In such a case we can +- # see still the very same problems as before. But letting you know that +- # probably this is not the final solution, as we could possibly see the +- # same problems on another partitions too (needs to be tested...). 
However, +- # it could fit for now until we provide the complete solution around XFS +- # workarounds (including management of required spaces for virtual FSs per +- # mountpoints - without that, we cannot fix this properly) +- for mountpoint in set(xfs_info.mountpoints_without_ftype + ['/']): +- if mountpoint in mount_names: +- image = _create_mount_disk_image_old(disk_images_directory, mountpoint) +- result[mountpoint] = mounting.LoopMount(source=image, target=_mount_dir(mounts_dir, mountpoint)) +- return result +-- +2.52.0 + diff --git a/SOURCES/0111-data-update-data-files-pes_evets.json-1451.patch b/SOURCES/0111-data-update-data-files-pes_evets.json-1451.patch new file mode 100644 index 0000000..cd27213 --- /dev/null +++ b/SOURCES/0111-data-update-data-files-pes_evets.json-1451.patch @@ -0,0 +1,548 @@ +From 284494d7cb03f0aae71ec0065b494805fe33ea7b Mon Sep 17 00:00:00 2001 +From: Leapp BOT <37839841+leapp-bot@users.noreply.github.com> +Date: Wed, 17 Dec 2025 14:09:32 +0100 +Subject: [PATCH 111/111] data: update data files - pes_evets.json (#1451) + +Regulard update of PES data, containing some fixes of original data and new events as well. +--- + etc/leapp/files/pes-events.json | 491 +++++++++++++++++++++++++++++--- + 1 file changed, 451 insertions(+), 40 deletions(-) + +diff --git a/etc/leapp/files/pes-events.json b/etc/leapp/files/pes-events.json +index fec9a900..964b7117 100644 +--- a/etc/leapp/files/pes-events.json ++++ b/etc/leapp/files/pes-events.json +@@ -1,5 +1,5 @@ + { +-"timestamp": "202511121106Z", ++"timestamp": "202512021706Z", + "provided_data_streams": [ + "4.1" + ], +@@ -250419,9 +250419,6 @@ null + { + "action": 6, + "architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", + "x86_64" + ], + "id": 6802, +@@ -315101,7 +315098,7 @@ null + }, + "initial_release": { + "major_version": 8, +-"minor_version": 5, ++"minor_version": 10, + "os_name": "RHEL" + }, + "modulestream_maps": [ +@@ -315124,7 +315121,7 @@ null + null + ], + "name": "openexr-devel", +-"repository": "rhel9-AppStream" ++"repository": "rhel9-CRB" + } + ], + "set_id": 12502 +@@ -583248,40 +583245,6 @@ null + "s390x", + "x86_64" + ], +-"id": 16179, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "openexr-devel", +-"repository": "rhel9-CRB" +-} +-], +-"set_id": 22334 +-}, +-"initial_release": { +-"major_version": 8, +-"minor_version": 10, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [], +-"out_packageset": null, +-"release": { +-"major_version": 9, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 0, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], + "id": 16180, + "in_packageset": { + "package": [ +@@ -708770,6 +708733,454 @@ null + "minor_version": 2, + "os_name": "RHEL" + } ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19909, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rhc-playbook-verifier", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26586 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19910, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rhc-playbook-verifier", ++"repository": "rhel9-AppStream" 
++} ++], ++"set_id": 26587 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19911, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3-zstandard", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26588 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19912, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "unbound-utils", ++"repository": "rhel10-BaseOS" ++} ++], ++"set_id": 26589 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 5, ++"architectures": [ ++"x86_64" ++], ++"id": 19913, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel10-NFV" ++}, ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt-kvm", ++"repository": "rhel10-NFV" ++} ++], ++"set_id": 26593 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 0, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [ ++{ ++"in_modulestream": null, ++"out_modulestream": null ++} ++], ++"out_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel10-NFV" ++} ++], ++"set_id": 26600 ++}, ++"release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 5, ++"architectures": [ ++"x86_64" ++], ++"id": 19915, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel9-NFV" ++}, ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt-kvm", ++"repository": "rhel9-NFV" ++} ++], ++"set_id": 26595 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 6, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [ ++{ ++"in_modulestream": null, ++"out_modulestream": null ++} ++], ++"out_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel9-NFV" ++} ++], ++"set_id": 26601 ++}, ++"release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19916, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "gnome-autoar-devel", ++"repository": "rhel8-CRB" ++} ++], ++"set_id": 26596 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19917, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rest-devel", ++"repository": "rhel8-CRB" ++} ++], 
++"set_id": 26597 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19918, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "plymouth-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26598 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19919, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "plymouth-devel", ++"repository": "rhel8-AppStream" ++} ++], ++"set_id": 26599 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19920, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "libdmx-devel", ++"repository": "rhel8-CRB" ++} ++], ++"set_id": 26602 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19921, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "libdmx-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26603 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} + } + ] + } +-- +2.52.0 + diff --git a/SOURCES/leapp-repository-0.23.0-elevate.patch b/SOURCES/leapp-repository-0.23.0-elevate.patch index 1bc906e..59d70da 100644 --- a/SOURCES/leapp-repository-0.23.0-elevate.patch +++ b/SOURCES/leapp-repository-0.23.0-elevate.patch @@ -1,16 +1,56 @@ -diff --git a/.github/workflows/pr-welcome-msg.yml b/.github/workflows/pr-welcome-msg.yml -index f056fb79..4c12ab2a 100644 ---- a/.github/workflows/pr-welcome-msg.yml -+++ b/.github/workflows/pr-welcome-msg.yml +diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml +index 4b07e4b3..2ac322e5 100644 +--- a/.github/workflows/codespell.yml ++++ b/.github/workflows/codespell.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest + steps: - - name: Create comment -- uses: peter-evans/create-or-update-comment@v4 -+ uses: peter-evans/create-or-update-comment@v5 +- - uses: actions/checkout@v5 ++ - uses: actions/checkout@v6 + - uses: codespell-project/actions-codespell@v2 with: - issue-number: ${{ github.event.pull_request.number }} - body: | + ignore_words_list: ro,fo,couldn,repositor,zeor,bootup +diff --git a/.github/workflows/differential-shellcheck.yml b/.github/workflows/differential-shellcheck.yml +index 6c81713c..3b92d771 100644 +--- 
a/.github/workflows/differential-shellcheck.yml ++++ b/.github/workflows/differential-shellcheck.yml +@@ -19,7 +19,7 @@ jobs: + + steps: + - name: Repository checkout +- uses: actions/checkout@v5 ++ uses: actions/checkout@v6 + with: + fetch-depth: 0 + +diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml +index d1b8fb2a..f855baa2 100644 +--- a/.github/workflows/unit-tests.yml ++++ b/.github/workflows/unit-tests.yml +@@ -19,11 +19,11 @@ jobs: + - name: 'Unit tests (python:3.12; repos:el9toel10,common)' + python: python3.12 + repos: 'el9toel10,common' +- container: el9 ++ container: el10 + - name: 'Linters (python:3.12; repos:el9toel10,common)' + python: python3.12 + repos: 'el9toel10,common' +- container: el9-lint ++ container: el10-lint + - name: 'Unit tests (python:3.9; repos:el9toel10,common)' + python: python3.9 + repos: 'el9toel10,common' +@@ -52,7 +52,7 @@ jobs: + + steps: + - name: Checkout code +- uses: actions/checkout@v5 ++ uses: actions/checkout@v6 + with: + # NOTE(ivasilev) fetch-depth 0 is critical here as leapp deps discovery depends on specific substring in + # commit message and default 1 option will get us just merge commit which has an unrelevant message. diff --git a/.gitignore b/.gitignore index 0bb92d3d..a04c7ded 100644 --- a/.gitignore @@ -23,6 +63,273 @@ index 0bb92d3d..a04c7ded 100644 # pycharm .idea +diff --git a/.packit.yaml b/.packit.yaml +index e158c7e4..37fa7849 100644 +--- a/.packit.yaml ++++ b/.packit.yaml +@@ -123,15 +123,6 @@ jobs: + tmt: + plan_filter: 'tag:8to9' + environments: +- - &tmt-env-settings-810to94 +- tmt: +- context: &tmt-context-810to94 +- distro: "rhel-8.10" +- distro_target: "rhel-9.4" +- settings: +- provisioning: +- tags: +- BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-810to96 + tmt: + context: &tmt-context-810to96 +@@ -141,15 +132,6 @@ jobs: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test +- - &tmt-env-settings-810to97 +- tmt: +- context: &tmt-context-810to97 +- distro: "rhel-8.10" +- distro_target: "rhel-9.7" +- settings: +- provisioning: +- tags: +- BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-810to98 + tmt: + context: &tmt-context-810to98 +@@ -190,67 +172,6 @@ jobs: + # ######################### Individual tests ########################### # + # ###################################################################### # + +-# ###################################################################### # +-# ############################# 8.10 > 9.4 ############################# # +-# ###################################################################### # +- +-- &sanity-810to94 +- <<: *sanity-abstract-8to9 +- trigger: pull_request +- identifier: sanity-8.10to9.4 +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:tier0 & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-810to94 +- env: &env-810to94 +- SOURCE_RELEASE: "8.10" +- TARGET_RELEASE: "9.4" +- LEAPP_TARGET_PRODUCT_CHANNEL: "EUS" +- +-# On-demand minimal beaker tests +-- &beaker-minimal-810to94 +- <<: *beaker-minimal-8to9-abstract-ondemand +- trigger: pull_request +- labels: +- - beaker-minimal +- - beaker-minimal-8.10to9.4 +- - 8.10to9.4 +- identifier: sanity-8.10to9.4-beaker-minimal-ondemand +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-810to94 +- env: +- <<: *env-810to94 +- +-# On-demand kernel-rt tests +-- &kernel-rt-810to94 +- <<: 
*kernel-rt-abstract-8to9-ondemand +- trigger: pull_request +- labels: +- - kernel-rt +- - kernel-rt-8.10to9.4 +- - 8.10to9.4 +- identifier: sanity-8.10to9.4-kernel-rt-ondemand +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' +- environments: +- - tmt: +- context: *tmt-context-810to94 +- settings: +- provisioning: +- tags: +- BusinessUnit: sst_upgrades@leapp_upstream_test +- env: +- <<: *env-810to94 +- +- + # ###################################################################### # + # ############################# 8.10 > 9.6 ############################# # + # ###################################################################### # +@@ -327,60 +248,6 @@ jobs: + <<: *env-810to96 + + +-# ###################################################################### # +-# ############################# 8.10 > 9.7 ############################# # +-# ###################################################################### # +- +-- &sanity-810to97 +- <<: *sanity-abstract-8to9 +- trigger: pull_request +- identifier: sanity-8.10to9.7 +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:tier0 & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-810to97 +- env: &env-810to97 +- SOURCE_RELEASE: "8.10" +- TARGET_RELEASE: "9.7" +- +-# On-demand minimal beaker tests +-- &beaker-minimal-810to97 +- <<: *beaker-minimal-8to9-abstract-ondemand +- trigger: pull_request +- labels: +- - beaker-minimal +- - beaker-minimal-8.10to9.7 +- - 8.10to9.7 +- identifier: sanity-8.10to9.7-beaker-minimal-ondemand +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-810to97 +- env: +- <<: *env-810to97 +- +-# On-demand kernel-rt tests +-- &kernel-rt-810to97 +- <<: *kernel-rt-abstract-8to9-ondemand +- trigger: pull_request +- labels: +- - kernel-rt +- - kernel-rt-8.10to9.7 +- - 8.10to9.7 +- identifier: sanity-8.10to9.7-kernel-rt-ondemand +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-810to97 +- env: +- <<: *env-810to97 +- + # ###################################################################### # + # ############################# 8.10 > 9.8 ############################# # + # ###################################################################### # +@@ -478,15 +345,6 @@ jobs: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test +- - &tmt-env-settings-97to101 +- tmt: +- context: &tmt-context-97to101 +- distro: "rhel-9.7" +- distro_target: "rhel-10.1" +- settings: +- provisioning: +- tags: +- BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-centos9torhel101 + tmt: + context: &tmt-context-centos9torhel101 +@@ -605,70 +463,6 @@ jobs: + env: + <<: *env-96to100 + +-# ###################################################################### # +-# ############################# 9.7 > 10.1 ############################# # +-# ###################################################################### # +- +-- &sanity-97to101 +- <<: *sanity-abstract-9to10 +- trigger: pull_request +- identifier: sanity-9.7to10.1 +- targets: +- epel-9-x86_64: +- distros: [RHEL-9.7.0-Nightly] +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-97to101 +- env: &env-97to101 +- SOURCE_RELEASE: "9.7" +- TARGET_RELEASE: "10.1" +- +-# On-demand minimal beaker tests +-- 
&beaker-minimal-97to101 +- <<: *beaker-minimal-9to10-abstract-ondemand +- trigger: pull_request +- labels: +- - beaker-minimal +- - beaker-minimal-9.7to10.1 +- - 9.7to10.1 +- identifier: sanity-9.7to10.1-beaker-minimal-ondemand +- targets: +- epel-9-x86_64: +- distros: [RHEL-9.7-Nightly] +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-97to101 +- env: +- <<: *env-97to101 +- +-# On-demand kernel-rt tests +-- &kernel-rt-97to101 +- <<: *kernel-rt-abstract-9to10-ondemand +- trigger: pull_request +- labels: +- - kernel-rt +- - kernel-rt-9.7to10.1 +- - 9.7to10.1 +- identifier: sanity-9.7to10.1-kernel-rt-ondemand +- targets: +- epel-9-x86_64: +- distros: [RHEL-9.7-Nightly] +- tf_extra_params: +- test: +- tmt: +- plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' +- environments: +- - *tmt-env-settings-97to101 +- env: +- <<: *env-97to101 +- +- + # ###################################################################### # + # ############################# 9.8 > 10.2 ############################# # + # ###################################################################### # +diff --git a/.pylintrc b/.pylintrc +index a82f8818..bd365788 100644 +--- a/.pylintrc ++++ b/.pylintrc +@@ -42,7 +42,9 @@ disable= + unnecessary-pass, + raise-missing-from, # no 'raise from' in python 2 + consider-using-f-string, # sorry, not gonna happen, still have to support py2 +- logging-format-interpolation ++ logging-format-interpolation, ++# problem betwee Python 3.6 and 3.8+ pylint ++ useless-option-value + + [FORMAT] + # Maximum number of characters on a single line. diff --git a/ci/.gitignore b/ci/.gitignore new file mode 100644 index 00000000..e6f97f0f @@ -3434,210 +3741,11736 @@ index 00000000..370758e6 + end + end +end -diff --git a/docs/source/contrib-and-devel-guidelines.md b/docs/source/contributing/coding-guidelines.md -similarity index 68% -rename from docs/source/contrib-and-devel-guidelines.md -rename to docs/source/contributing/coding-guidelines.md -index 3229c8a4..d06d0200 100644 ---- a/docs/source/contrib-and-devel-guidelines.md -+++ b/docs/source/contributing/coding-guidelines.md -@@ -1,5 +1,4 @@ --# Contribution and development guidelines --## Code guidelines -+# Coding guidelines +diff --git a/docs/source/libraries-and-api/deprecations-list.md b/docs/source/libraries-and-api/deprecations-list.md +index e620d70d..817b63c5 100644 +--- a/docs/source/libraries-and-api/deprecations-list.md ++++ b/docs/source/libraries-and-api/deprecations-list.md +@@ -15,6 +15,16 @@ Only the versions in which a deprecation has been made are listed. + ## Next release (till TODO date) + - Shared libraries + - **`leapp.libraries.common.config.get_distro_id()`** - The function has been replaced by variants for source and target distros - `leapp.libraries.common.config.get_source_distro_id()` and `leapp.libraries.common.config.get_target_distro_id()`. 
++ - Following UEFI related functions and classes have been moved from `leapp.libraries.common.grub` into `leapp.libraries.common.efi`: ++ - **`EFIBootInfo`** - raises `leapp.libraries.common.efi.EFIError` instead of `leapp.exceptions.StopActorExecutionError` ++ - **`EFIBootLoaderEntry`** ++ - **`canonical_path_to_efi_format()`** ++ - **`get_efi_device()`** - raises `leapp.libraries.common.efi.EFIError` instead of `leapp.exceptions.StopActorExecutionError` ++ - **`get_efi_partition()`** - raises `leapp.libraries.common.efi.EFIError` instead of `leapp.exceptions.StopActorExecutionError` ++ - **`is_efi()`** ++ - Functions related to manipulation of devices and partitions were moved from `leapp.libraries.common.grub` into `leapp.libraries.common.partitions`: ++ - **`get_device_number()`** - replaced by **`get_partition_number()`** ++ - **`blk_dev_from_partition()`** - Your code should follow the [Python Coding Guidelines](https://leapp.readthedocs.io/en/latest/contributing.html#follow-python-coding-guidelines) used for the leapp project. On top of these rules follow instructions - below. -@@ -84,53 +83,3 @@ guaranteed to exist and executable. - The use of the {py:mod}`subprocess` library is forbidden in leapp repositories. - Use of the library would require very good reasoning, why the - {py:func}`~leapp.libraries.stdlib.run` function cannot be used. -- --## Commits and pull requests (PRs) --### PR description --The description should contain information about all introduced changes: --* What has been changed --* How it has been changed --* The reason for the change --* How could people try/test the PR --* Reference to a Jira ticket, Github issue, ... if applicable -- --Good description provides all information for readers without the need to --read the code. Note that reviewers can decline to review the PR with a poor --description. -- --### Commit messages --When your pull-request is ready to be reviewed, every commit needs to include --a title and a body continuing a description of the change --- what problem is --being solved and how. The end of the commit body should contain Jira issue --number (if applicable), GitHub issue that is being fixed, etc.: --``` -- Commit title -- -- Commit message body on multiple lines -- -- Jira-ref: --``` -- --Note that good commit message should provide information in similar way like --the PR description. Poorly written commit messages can block the merge of PR --or proper review. -- --### Granularity of commits --The granularity of commits depends strongly on the problem being solved. However, --a large number of small commits is typically undesired. If possible, aim a --Git history such that commits can be reverted individually, without requiring reverting --numerous other dependent commits in order to get the `main` branch into a working state. -- --Note that commits fixing problems of other commits in the PR are expected to be --squashed before the final review and merge of the PR. Using of `git commit --fixup ...` --and `git commit --squash ...` commands can help you to prepare such commits --properly in advance and make the rebase later easier using `git rebase -i --autosquash`. --We suggest you to get familiar with these commands as it can make your work really --easier. Note that when you are starting to get higher number of such fixing commits --in your PR, it's good practice to use the rebase more often. High numbers of such --commits could make the final rebase more tricky in the end. 
So your PR should not --have more than 15 commits at any time. -- --### Create a separate git branch for your changes --TBD -- -diff --git a/docs/source/contributing/community-upgrades.md b/docs/source/contributing/community-upgrades.md -new file mode 100644 -index 00000000..cbec0a24 ---- /dev/null -+++ b/docs/source/contributing/community-upgrades.md -@@ -0,0 +1,39 @@ -+# Community upgrades for Centos-like distros -+ -+In the past, this project was solely focused on Red Hat Enterprise Linux upgrades. Recently, we've been extending and refactoring the `leapp-repository` codebase to allow upgrades of other distributions, such as CentOS Stream and also upgrades + conversions between different distributions in one step. -+ -+This document outlines the state of support for upgrades of distributions other than RHEL. Note that support in this case doesn't mean what the codebase allows, but what the core leapp team supports in terms of issues, bugfixes, feature requests, testing, etc. -+ -+RHEL upgrades and upgrades + conversions *to* RHEL are the only officially supported upgrade paths and are the primary focus of leapp developers. However, we are open to and welcome contributions from the community, allowing other upgrade (and conversion) paths in the codebase. For example, we've already integrated a contribution introducing upgrade paths for Alma Linux upgrades. -+ -+This does not mean that we won't offer help outside of the outlined scope, but it is primarily up to the contributors contributing a particular upgrade path to maintain and test it. Also, it can take us some time to get to such PRs, so be patient please. -+ -+Upon agreement we can also update the upgrade paths (in `upgrade_paths.json`) when there is a new release of the particular distribution. However note that we might include some upgrade paths required for conversions *to* RHEL on top of that. -+ -+Contributions improving the overall upgrade experience are also welcome, as they always have been. -+ -+```{note} -+By default, upgrade + conversion paths are automatically derived from upgrade paths. If this is not desired or other paths are required, feel free to open a pull request or open a [discussion](https://github.com/oamg/leapp-repository/discussions) on that topic. -+``` -+ -+## How to contribute -+ -+Currently, the process for enabling upgrades and conversions for other distributions is not fully documented. In the meantime you can use the [pull request introducing Alma Linux upgrades](https://github.com/oamg/leapp-repository/pull/1391/) as reference. However, note that the leapp upgrade data files have special rules for updates, described below. -+ -+### Leapp data files -+ -+#### repomap.json -+ -+To use correct target repositories during the upgrade automatically, the `repomap.json` data file needs to be updated to cover repositories of the newly added distribution. However, the file cannot be updated manually as its content is generated, hence any manual changes would be overwritten with the next update. Currently there is not straightforward way for the community to update our generators, but you can -+ -+- submit a separate PR of how the resulting `repomap.json` file should look like, for an example you can take a look at [this PR](https://github.com/oamg/leapp-repository/pull/1395) -+- or provide the list of repositories (possibly also architectures) present on the distribution -+ -+and we will update the generators accordingly, asking you to review the result then. 
We are discussing an improvement to make this more community friendly. -+ -+#### pes-events.json and device_driver_deprecation_data.json -+ -+Both PES events and device driver deprecation data only contain data for RHEL in the upstream `leapp-repository` and we will not include any data unrelated to RHEL. If you find a bug in the data, you can open a bug in the [RHEL Jira](https://issues.redhat.com/) for the `leapp-repository` component. -+ -+Before contributing, make sure your PR conforms to our {doc}`Coding guidelines` -+ and {doc}`PR guidelines`. -diff --git a/docs/source/contributing/index.rst b/docs/source/contributing/index.rst -new file mode 100644 -index 00000000..ebdc9151 ---- /dev/null -+++ b/docs/source/contributing/index.rst -@@ -0,0 +1,18 @@ -+Contributing -+======================================================== -+ -+.. toctree:: -+ :maxdepth: 4 -+ :caption: Contents: -+ :glob: -+ -+ coding-guidelines -+ pr-guidelines -+ community-upgrades -+ -+.. Indices and tables -+.. ================== -+.. -+.. * :ref:`genindex` -+.. * :ref:`modindex` -+.. * :ref:`search` -diff --git a/docs/source/contributing/pr-guidelines.md b/docs/source/contributing/pr-guidelines.md -new file mode 100644 -index 00000000..4f6ee4fe ---- /dev/null -+++ b/docs/source/contributing/pr-guidelines.md -@@ -0,0 +1,48 @@ -+# Commits and pull requests (PRs) -+## PR description -+The description should contain information about all introduced changes: -+* What has been changed -+* How it has been changed -+* The reason for the change -+* How could people try/test the PR -+* Reference to a Jira ticket, Github issue, ... if applicable -+ -+Good description provides all information for readers without the need to -+read the code. Note that reviewers can decline to review the PR with a poor -+description. -+ -+## Commit messages -+When your pull-request is ready to be reviewed, every commit needs to include -+a title and a body continuing a description of the change --- what problem is -+being solved and how. The end of the commit body should contain Jira issue -+number (if applicable), GitHub issue that is being fixed, etc.: -+``` -+ Commit title -+ -+ Commit message body on multiple lines -+ -+ Jira-ref: -+``` -+ -+Note that good commit message should provide information in similar way like -+the PR description. Poorly written commit messages can block the merge of PR -+or proper review. -+ -+## Granularity of commits -+The granularity of commits depends strongly on the problem being solved. However, -+a large number of small commits is typically undesired. If possible, aim a -+Git history such that commits can be reverted individually, without requiring reverting -+numerous other dependent commits in order to get the `main` branch into a working state. -+ -+Note that commits fixing problems of other commits in the PR are expected to be -+squashed before the final review and merge of the PR. Using of `git commit --fixup ...` -+and `git commit --squash ...` commands can help you to prepare such commits -+properly in advance and make the rebase later easier using `git rebase -i --autosquash`. -+We suggest you to get familiar with these commands as it can make your work really -+easier. Note that when you are starting to get higher number of such fixing commits -+in your PR, it's good practice to use the rebase more often. High numbers of such -+commits could make the final rebase more tricky in the end. So your PR should not -+have more than 15 commits at any time. 
-+ -+## Create a separate git branch for your changes -+TBD -diff --git a/docs/source/index.rst b/docs/source/index.rst -index 27537ca4..ed68f751 100644 ---- a/docs/source/index.rst -+++ b/docs/source/index.rst -@@ -21,7 +21,7 @@ providing Red Hat Enterprise Linux in-place upgrade functionality. - upgrade-architecture-and-workflow/index - configuring-ipu/index - libraries-and-api/index -- contrib-and-devel-guidelines -+ contributing/index - faq + ## v0.23.0 (till March 2026) - .. Indices and tables +diff --git a/etc/leapp/files/device_driver_deprecation_data.json b/etc/leapp/files/device_driver_deprecation_data.json +index a9c06956..c38c2840 100644 +--- a/etc/leapp/files/device_driver_deprecation_data.json ++++ b/etc/leapp/files/device_driver_deprecation_data.json +@@ -1,6 +1,6 @@ + { + "provided_data_streams": [ +- "4.1" ++ "4.2" + ], + "data": [ + { +diff --git a/etc/leapp/files/pes-events.json b/etc/leapp/files/pes-events.json +index 964b7117..07a716f0 100644 +--- a/etc/leapp/files/pes-events.json ++++ b/etc/leapp/files/pes-events.json +@@ -1,7 +1,7 @@ + { +-"timestamp": "202512021706Z", ++"timestamp": "202602051305Z", + "provided_data_streams": [ +-"4.1" ++"4.2" + ], + "packageinfo": [ + { +@@ -269457,6 +269457,10 @@ null + { + "name": "ruby", + "stream": "3.1" ++}, ++{ ++"name": "ruby", ++"stream": "3.3" + } + ], + "name": "rubygem-abrt", +@@ -269467,7 +269471,7 @@ null + }, + "initial_release": { + "major_version": 8, +-"minor_version": 7, ++"minor_version": 10, + "os_name": "RHEL" + }, + "modulestream_maps": [], +@@ -269510,6 +269514,10 @@ null + { + "name": "ruby", + "stream": "3.1" ++}, ++{ ++"name": "ruby", ++"stream": "3.3" + } + ], + "name": "rubygem-abrt", +@@ -269520,14 +269528,14 @@ null + }, + "initial_release": { + "major_version": 8, +-"minor_version": 6, ++"minor_version": 9, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { + "major_version": 8, +-"minor_version": 7, ++"minor_version": 10, + "os_name": "RHEL" + } + }, +@@ -269563,6 +269571,10 @@ null + { + "name": "ruby", + "stream": "3.1" ++}, ++{ ++"name": "ruby", ++"stream": "3.3" + } + ], + "name": "rubygem-abrt-doc", +@@ -269573,7 +269585,7 @@ null + }, + "initial_release": { + "major_version": 8, +-"minor_version": 7, ++"minor_version": 10, + "os_name": "RHEL" + }, + "modulestream_maps": [], +@@ -269616,6 +269628,10 @@ null + { + "name": "ruby", + "stream": "3.1" ++}, ++{ ++"name": "ruby", ++"stream": "3.3" + } + ], + "name": "rubygem-abrt-doc", +@@ -269626,14 +269642,14 @@ null + }, + "initial_release": { + "major_version": 8, +-"minor_version": 6, ++"minor_version": 9, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { + "major_version": 8, +-"minor_version": 7, ++"minor_version": 10, + "os_name": "RHEL" + } + }, +@@ -634585,7 +634601,7 @@ null + } + }, + { +-"action": 7, ++"action": 3, + "architectures": [ + "aarch64", + "ppc64le", +@@ -634607,7 +634623,7 @@ null + }, + "initial_release": { + "major_version": 9, +-"minor_version": 7, ++"minor_version": 8, + "os_name": "RHEL" + }, + "modulestream_maps": [ +@@ -634622,7 +634638,7 @@ null + "modulestreams": [ + null + ], +-"name": "tomcat9-el-3.0-api", ++"name": "tomcat-el-5.0-api", + "repository": "rhel10-AppStream" + } + ], +@@ -634635,7 +634651,7 @@ null + } + }, + { +-"action": 7, ++"action": 3, + "architectures": [ + "aarch64", + "ppc64le", +@@ -634657,7 +634673,7 @@ null + }, + "initial_release": { + "major_version": 9, +-"minor_version": 7, ++"minor_version": 8, + 
"os_name": "RHEL" + }, + "modulestream_maps": [ +@@ -634672,7 +634688,7 @@ null + "modulestreams": [ + null + ], +-"name": "tomcat9-servlet-4.0-api", ++"name": "tomcat-servlet-6.0-api", + "repository": "rhel10-AppStream" + } + ], +@@ -634685,7 +634701,7 @@ null + } + }, + { +-"action": 7, ++"action": 3, + "architectures": [ + "aarch64", + "ppc64le", +@@ -634707,7 +634723,7 @@ null + }, + "initial_release": { + "major_version": 9, +-"minor_version": 7, ++"minor_version": 8, + "os_name": "RHEL" + }, + "modulestream_maps": [ +@@ -634722,7 +634738,7 @@ null + "modulestreams": [ + null + ], +-"name": "tomcat9-jsp-2.3-api", ++"name": "tomcat-jsp-3.1-api", + "repository": "rhel10-AppStream" + } + ], +@@ -635344,14 +635360,14 @@ null + } + }, + { +-"action": 7, ++"action": 0, + "architectures": [ + "aarch64", + "ppc64le", + "s390x", + "x86_64" + ], +-"id": 17728, ++"id": 17733, + "in_packageset": { + "package": [ + { +@@ -635359,234 +635375,18 @@ null + null + ], + "name": "tomcat", +-"repository": "rhel9-AppStream" +-} +-], +-"set_id": 24313 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null +-} +-], +-"out_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat9", + "repository": "rhel10-AppStream" + } + ], +-"set_id": 24314 +-}, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 7, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17729, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-admin-webapps", +-"repository": "rhel9-AppStream" +-} +-], +-"set_id": 24315 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null +-} +-], +-"out_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat9-admin-webapps", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24316 +-}, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 7, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17730, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-docs-webapp", +-"repository": "rhel9-AppStream" +-} +-], +-"set_id": 24317 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null +-} +-], +-"out_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat9-docs-webapp", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24318 +-}, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 7, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17731, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-lib", +-"repository": "rhel9-AppStream" +-} +-], +-"set_id": 24323 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null +-} +-], +-"out_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat9-lib", +-"repository": "rhel10-AppStream" +-} +-], 
+-"set_id": 24324 +-}, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 7, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17732, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-webapps", +-"repository": "rhel9-AppStream" +-} +-], +-"set_id": 24327 ++"set_id": 24329 + }, + "initial_release": { + "major_version": 9, + "minor_version": 7, + "os_name": "RHEL" + }, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null +-} +-], +-"out_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat9-webapps", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24328 +-}, ++"modulestream_maps": [], ++"out_packageset": null, + "release": { + "major_version": 10, + "minor_version": 0, +@@ -635601,18 +635401,18 @@ null + "s390x", + "x86_64" + ], +-"id": 17733, ++"id": 17734, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "tomcat", ++"name": "tomcat-admin-webapps", + "repository": "rhel10-AppStream" + } + ], +-"set_id": 24329 ++"set_id": 24330 + }, + "initial_release": { + "major_version": 9, +@@ -635635,120 +635435,18 @@ null + "s390x", + "x86_64" + ], +-"id": 17734, ++"id": 17735, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "tomcat-admin-webapps", ++"name": "tomcat-docs-webapp", + "repository": "rhel10-AppStream" + } + ], +-"set_id": 24330 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [], +-"out_packageset": null, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 0, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17735, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-docs-webapp", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24331 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [], +-"out_packageset": null, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 0, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17736, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-el-5.0-api", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24332 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [], +-"out_packageset": null, +-"release": { +-"major_version": 10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 0, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], +-"id": 17737, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-jsp-3.1-api", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24333 ++"set_id": 24331 + }, + "initial_release": { + "major_version": 9, +@@ -635805,40 +635503,6 @@ null + "s390x", + "x86_64" + ], +-"id": 17739, +-"in_packageset": { +-"package": [ +-{ +-"modulestreams": [ +-null +-], +-"name": "tomcat-servlet-6.0-api", +-"repository": "rhel10-AppStream" +-} +-], +-"set_id": 24335 +-}, +-"initial_release": { +-"major_version": 9, +-"minor_version": 7, +-"os_name": "RHEL" +-}, +-"modulestream_maps": [], +-"out_packageset": null, +-"release": { +-"major_version": 
10, +-"minor_version": 0, +-"os_name": "RHEL" +-} +-}, +-{ +-"action": 0, +-"architectures": [ +-"aarch64", +-"ppc64le", +-"s390x", +-"x86_64" +-], + "id": 17740, + "in_packageset": { + "package": [ +@@ -708674,18 +708338,7515 @@ null + "s390x", + "x86_64" + ], +-"id": 19907, ++"id": 19907, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "capnproto-devel", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26584 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19908, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "capnproto-libs", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26585 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19909, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rhc-playbook-verifier", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26586 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19910, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rhc-playbook-verifier", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26587 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19911, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3-zstandard", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26588 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19912, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "unbound-utils", ++"repository": "rhel10-BaseOS" ++} ++], ++"set_id": 26589 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 5, ++"architectures": [ ++"x86_64" ++], ++"id": 19913, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel10-NFV" ++}, ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt-kvm", ++"repository": "rhel10-NFV" ++} ++], 
++"set_id": 26593 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 0, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [ ++{ ++"in_modulestream": null, ++"out_modulestream": null ++} ++], ++"out_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel10-NFV" ++} ++], ++"set_id": 26600 ++}, ++"release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 5, ++"architectures": [ ++"x86_64" ++], ++"id": 19915, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel9-NFV" ++}, ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt-kvm", ++"repository": "rhel9-NFV" ++} ++], ++"set_id": 26595 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 6, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [ ++{ ++"in_modulestream": null, ++"out_modulestream": null ++} ++], ++"out_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "kernel-rt", ++"repository": "rhel9-NFV" ++} ++], ++"set_id": 26601 ++}, ++"release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19916, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "gnome-autoar-devel", ++"repository": "rhel8-CRB" ++} ++], ++"set_id": 26596 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19917, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rest-devel", ++"repository": "rhel8-CRB" ++} ++], ++"set_id": 26597 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19918, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "plymouth-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26598 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19919, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "plymouth-devel", ++"repository": "rhel8-AppStream" ++} ++], ++"set_id": 26599 ++}, ++"initial_release": { ++"major_version": 8, ++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19920, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "libdmx-devel", ++"repository": "rhel8-CRB" ++} ++], ++"set_id": 26602 ++}, ++"initial_release": { ++"major_version": 8, 
++"minor_version": 9, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 8, ++"minor_version": 10, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19921, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "libdmx-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26603 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19922, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "v4l-utils", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26604 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19923, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "v4l-utils", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26605 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19924, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-postgis", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26606 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19925, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-postgis-client", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26607 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19926, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-postgis-docs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26608 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19927, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-postgis-upgrade", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26609 ++}, 
++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19928, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-postgis-utils", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26610 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19929, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-pgaudit", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26611 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19930, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-pgvector", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26612 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19931, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-pg_repack", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26613 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19932, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-decoderbufs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26614 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"x86_64" ++], ++"id": 19933, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "virt-firmware-rs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26615 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19934, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "libexttextcat-devel", ++"repository": 
"rhel9-CRB" ++} ++], ++"set_id": 26616 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19935, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "rest-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26617 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19936, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "gnome-autoar-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26618 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19937, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26619 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19938, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-contrib", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26620 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19939, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-docs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26621 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19940, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-plperl", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26622 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19941, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-plpython3", 
++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26623 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19942, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-private-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26624 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19943, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-private-libs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26625 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19944, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-server", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26626 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19945, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-server-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26627 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19946, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-static", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26628 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19947, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-test", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26629 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19948, ++"in_packageset": { 
++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-upgrade", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26630 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19949, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "postgresql18-upgrade-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26631 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19950, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26633 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19951, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "pgaudit", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26634 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19952, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "pg_repack", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26635 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19953, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "pgvector", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26636 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19954, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgis", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26637 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, 
++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19955, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgis-client", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26638 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19956, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgis-docs", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26639 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19957, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgis-upgrade", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26640 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19958, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgis-utils", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26641 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19959, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgres-decoderbufs", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26642 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19960, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26643 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19961, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", 
++"stream": "18" ++} ++], ++"name": "postgresql-contrib", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26644 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19962, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-docs", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26645 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19963, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-plperl", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26646 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19964, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-plpython3", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26647 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19965, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-private-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26648 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19966, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-private-libs", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26649 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19967, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-server", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26650 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, 
++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19968, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-server-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26651 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19969, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-static", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26652 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19970, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-test", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26653 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19971, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-test-rpm-macros", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26654 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19972, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-upgrade", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26655 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19973, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "postgresql", ++"stream": "18" ++} ++], ++"name": "postgresql-upgrade-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26656 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", 
++"s390x", ++"x86_64" ++], ++"id": 19974, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mesa-compat", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26661 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19975, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mesa-compat", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26662 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19977, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26660 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19978, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-cffi", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26663 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19979, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-charset-normalizer", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26664 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19980, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-cryptography", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26665 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19981, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-Cython", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26666 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, 
++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19982, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-debug", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26667 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19983, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26668 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19984, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-flit-core", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26669 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19985, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-debug", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26670 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19986, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-devel", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26671 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19987, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-idle", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26672 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19988, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26673 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, 
++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19989, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-libs", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26674 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19990, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-test", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26675 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19991, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-tkinter", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26676 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19992, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-idle", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26677 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19993, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-idna", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26678 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19994, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-iniconfig", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26679 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19995, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-libs", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26680 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], 
++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19996, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-lxml", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26681 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19997, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-meson-python", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26682 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19998, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-mod_wsgi", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26683 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 19999, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-numpy-f2py", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26684 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20000, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-numpy", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26685 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20001, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-packaging", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26686 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20002, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pip", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26687 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, 
++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20003, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pip-wheel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26688 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20004, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pluggy", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26689 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20005, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-ply", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26690 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20006, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-psycopg2", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26691 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20007, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-psycopg2-tests", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26692 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20008, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pybind11-devel", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26693 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20009, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pybind11", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26694 ++}, ++"initial_release": { 
++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20010, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pycparser", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26695 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20011, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-PyMySQL", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26696 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20012, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-PyMySQL+rsa", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26697 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20013, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pyproject-metadata", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26698 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20014, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pytest", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26699 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20015, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pyyaml", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26700 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20016, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-requests", ++"repository": "rhel10-AppStream" 
++} ++], ++"set_id": 26701 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20017, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-scipy", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26702 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20018, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-semantic_version", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26703 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20019, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26704 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20020, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools-rust", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26705 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20021, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools_scm", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26706 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20022, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools_scm+toml", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26707 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20023, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": 
"python3.14-setuptools-wheel", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26708 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20024, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-test", ++"repository": "rhel10-CRB" ++} ++], ++"set_id": 26709 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20025, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-tkinter", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26710 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20026, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-urllib3", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26711 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20028, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-cffi", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26713 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20029, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-charset-normalizer", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26714 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20030, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-cryptography", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26715 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20031, ++"in_packageset": { ++"package": 
[ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-Cython", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26716 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20032, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-debug", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26717 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20033, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26718 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20034, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-flit-core", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26719 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20035, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-debug", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26720 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20036, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26721 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20037, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-idle", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26722 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20038, 
++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26723 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20039, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-libs", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26724 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20040, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-test", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26725 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20041, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-freethreading-tkinter", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26726 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20042, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-idle", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26727 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20043, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-idna", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26728 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20044, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-iniconfig", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26729 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", 
++"x86_64" ++], ++"id": 20045, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26730 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20046, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-libs", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26731 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20047, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-lxml", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26732 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20048, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-meson-python", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26733 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20049, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-mod_wsgi", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26734 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20050, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-numpy-f2py", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26735 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20051, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-numpy", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26736 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", 
++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20052, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-packaging", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26737 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20053, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pip", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26738 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20054, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pip-wheel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26739 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20055, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pluggy", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26740 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20056, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-ply", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26741 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20057, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-psycopg2", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26742 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20058, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-psycopg2-tests", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26743 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": 
[ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20059, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pybind11-devel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26744 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20060, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pybind11", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26745 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20061, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pycparser", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26746 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20062, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-PyMySQL", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26747 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20063, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-PyMySQL+rsa", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26748 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20064, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pyproject-metadata", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26749 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20065, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pytest", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26750 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ 
++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20066, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-pyyaml", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26751 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20067, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-requests", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26752 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20068, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-scipy", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26753 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20069, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-semantic_version", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26754 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20070, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26755 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20071, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools-rust", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26756 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20072, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools_scm", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26757 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, 
++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20073, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools_scm+toml", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26758 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20074, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-setuptools-wheel", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26759 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20075, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-test", ++"repository": "rhel9-CRB" ++} ++], ++"set_id": 26760 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20076, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-tkinter", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26761 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20077, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "python3.14-urllib3", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26762 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20078, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "frr10", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26763 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"x86_64" ++], ++"id": 20084, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "greenboot", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26786 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, 
++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"x86_64" ++], ++"id": 20085, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "greenboot", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26787 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20086, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "ansible-collection-redhat-leapp", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26788 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20087, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "clevis-pin-trustee", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26789 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20088, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26790 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20089, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-backup", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26791 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20090, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-client-utils", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26792 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20091, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-common", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26793 ++}, ++"initial_release": { 
++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20092, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26794 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20093, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-embedded", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26795 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20094, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-embedded-devel", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26796 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20095, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-errmsg", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26797 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20096, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-gssapi-server", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26798 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20097, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-oqgraph-engine", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26799 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, 
++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20098, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-pam", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26800 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20099, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-server", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26801 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20100, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-server-galera", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26802 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20101, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-server-utils", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26803 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20102, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "mariadb-test", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26804 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20103, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++{ ++"name": "mariadb", ++"stream": "11.8" ++} ++], ++"name": "galera", ++"repository": "rhel9-AppStream" ++} ++], ++"set_id": 26805 ++}, ++"initial_release": { ++"major_version": 9, ++"minor_version": 7, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 9, ++"minor_version": 8, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20104, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-backup", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26806 ++}, 
++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20105, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-client-utils", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26807 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20106, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-common", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26808 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20107, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26809 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20108, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-embedded", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26810 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20109, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-embedded-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26811 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20110, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-errmsg", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26812 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20111, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": 
"mariadb11.8-gssapi-server", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26813 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20112, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-oqgraph-engine", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26814 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20113, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-pam", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26815 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20114, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-server", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26816 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20115, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-server-galera", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26817 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20116, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-server-utils", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26818 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20117, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "mariadb11.8-test", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26819 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20118, 
++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26820 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20119, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-bcmath", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26821 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20120, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-cli", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26822 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20121, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-common", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26823 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20122, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-dba", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26824 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20123, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-dbg", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26825 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20124, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-devel", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26826 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20125, 
++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-embedded", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26827 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20126, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-enchant", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26828 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20127, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-ffi", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26829 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20128, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-fpm", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26830 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20129, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-gd", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26831 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20130, ++"in_packageset": { ++"package": [ ++{ ++"modulestreams": [ ++null ++], ++"name": "php8.4-gmp", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26832 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20131, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "capnproto-devel", +-"repository": "rhel10-CRB" ++"name": "php8.4-intl", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26584 ++"set_id": 26833 + }, + "initial_release": { + "major_version": 10, +@@ -708708,18 +715869,18 @@ null + "s390x", + "x86_64" + ], +-"id": 19908, ++"id": 20132, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "capnproto-libs", +-"repository": 
"rhel10-CRB" ++"name": "php8.4-ldap", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26585 ++"set_id": 26834 + }, + "initial_release": { + "major_version": 10, +@@ -708742,18 +715903,18 @@ null + "s390x", + "x86_64" + ], +-"id": 19909, ++"id": 20133, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "rhc-playbook-verifier", ++"name": "php8.4-mbstring", + "repository": "rhel10-AppStream" + } + ], +-"set_id": 26586 ++"set_id": 26835 + }, + "initial_release": { + "major_version": 10, +@@ -708776,29 +715937,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19910, ++"id": 20134, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "rhc-playbook-verifier", +-"repository": "rhel9-AppStream" ++"name": "php8.4-mysqlnd", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26587 ++"set_id": 26836 + }, + "initial_release": { +-"major_version": 9, +-"minor_version": 7, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 9, +-"minor_version": 8, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -708810,18 +715971,18 @@ null + "s390x", + "x86_64" + ], +-"id": 19911, ++"id": 20135, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "python3-zstandard", ++"name": "php8.4-odbc", + "repository": "rhel10-AppStream" + } + ], +-"set_id": 26588 ++"set_id": 26837 + }, + "initial_release": { + "major_version": 10, +@@ -708844,18 +716005,18 @@ null + "s390x", + "x86_64" + ], +-"id": 19912, ++"id": 20136, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "unbound-utils", +-"repository": "rhel10-BaseOS" ++"name": "php8.4-opcache", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26589 ++"set_id": 26838 + }, + "initial_release": { + "major_version": 10, +@@ -708871,110 +716032,206 @@ null + } + }, + { +-"action": 5, ++"action": 0, + "architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", + "x86_64" + ], +-"id": 19913, ++"id": 20137, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "kernel-rt", +-"repository": "rhel10-NFV" ++"name": "php8.4-pdo", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26839 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} + }, + { ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20138, ++"in_packageset": { ++"package": [ ++{ + "modulestreams": [ + null + ], +-"name": "kernel-rt-kvm", +-"repository": "rhel10-NFV" ++"name": "php8.4-pgsql", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26593 ++"set_id": 26840 + }, + "initial_release": { + "major_version": 10, +-"minor_version": 0, ++"minor_version": 1, + "os_name": "RHEL" + }, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" + } ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" + ], +-"out_packageset": { ++"id": 20139, ++"in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "kernel-rt", +-"repository": "rhel10-NFV" ++"name": "php8.4-process", ++"repository": 
"rhel10-AppStream" + } + ], +-"set_id": 26600 ++"set_id": 26841 + }, +-"release": { ++"initial_release": { + "major_version": 10, + "minor_version": 1, + "os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" + } + }, + { +-"action": 5, ++"action": 0, + "architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", + "x86_64" + ], +-"id": 19915, ++"id": 20140, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "kernel-rt", +-"repository": "rhel9-NFV" ++"name": "php8.4-snmp", ++"repository": "rhel10-AppStream" ++} ++], ++"set_id": 26842 ++}, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" ++} + }, + { ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" ++], ++"id": 20141, ++"in_packageset": { ++"package": [ ++{ + "modulestreams": [ + null + ], +-"name": "kernel-rt-kvm", +-"repository": "rhel9-NFV" ++"name": "php8.4-soap", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26595 ++"set_id": 26843 + }, + "initial_release": { +-"major_version": 9, +-"minor_version": 6, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, +-"modulestream_maps": [ +-{ +-"in_modulestream": null, +-"out_modulestream": null ++"modulestream_maps": [], ++"out_packageset": null, ++"release": { ++"major_version": 10, ++"minor_version": 2, ++"os_name": "RHEL" + } ++}, ++{ ++"action": 0, ++"architectures": [ ++"aarch64", ++"ppc64le", ++"s390x", ++"x86_64" + ], +-"out_packageset": { ++"id": 20142, ++"in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "kernel-rt", +-"repository": "rhel9-NFV" ++"name": "php8.4-xml", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26601 ++"set_id": 26844 + }, ++"initial_release": { ++"major_version": 10, ++"minor_version": 1, ++"os_name": "RHEL" ++}, ++"modulestream_maps": [], ++"out_packageset": null, + "release": { +-"major_version": 9, +-"minor_version": 7, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -708986,29 +716243,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19916, ++"id": 20143, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "gnome-autoar-devel", +-"repository": "rhel8-CRB" ++"name": "php8.4-pecl-xdebug3", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26596 ++"set_id": 26845 + }, + "initial_release": { +-"major_version": 8, +-"minor_version": 9, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 8, +-"minor_version": 10, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -709020,29 +716277,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19917, ++"id": 20144, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "rest-devel", +-"repository": "rhel8-CRB" ++"name": "php8.4-pecl-rrd", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26597 ++"set_id": 26846 + }, + "initial_release": { +-"major_version": 8, +-"minor_version": 9, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 8, +-"minor_version": 10, ++"major_version": 10, ++"minor_version": 2, + 
"os_name": "RHEL" + } + }, +@@ -709054,29 +716311,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19918, ++"id": 20145, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "plymouth-devel", +-"repository": "rhel9-AppStream" ++"name": "php8.4-pecl-zip", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26598 ++"set_id": 26847 + }, + "initial_release": { +-"major_version": 9, +-"minor_version": 7, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 9, +-"minor_version": 8, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -709088,29 +716345,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19919, ++"id": 20146, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "plymouth-devel", +-"repository": "rhel8-AppStream" ++"name": "php8.4-pecl-apcu", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26599 ++"set_id": 26848 + }, + "initial_release": { +-"major_version": 8, +-"minor_version": 9, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 8, +-"minor_version": 10, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -709122,29 +716379,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19920, ++"id": 20147, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "libdmx-devel", +-"repository": "rhel8-CRB" ++"name": "php8.4-pecl-apcu-devel", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26602 ++"set_id": 26849 + }, + "initial_release": { +-"major_version": 8, +-"minor_version": 9, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 8, +-"minor_version": 10, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + }, +@@ -709156,29 +716413,29 @@ null + "s390x", + "x86_64" + ], +-"id": 19921, ++"id": 20148, + "in_packageset": { + "package": [ + { + "modulestreams": [ + null + ], +-"name": "libdmx-devel", +-"repository": "rhel9-CRB" ++"name": "php8.4-pecl-redis6", ++"repository": "rhel10-AppStream" + } + ], +-"set_id": 26603 ++"set_id": 26850 + }, + "initial_release": { +-"major_version": 9, +-"minor_version": 7, ++"major_version": 10, ++"minor_version": 1, + "os_name": "RHEL" + }, + "modulestream_maps": [], + "out_packageset": null, + "release": { +-"major_version": 9, +-"minor_version": 8, ++"major_version": 10, ++"minor_version": 2, + "os_name": "RHEL" + } + } +diff --git a/etc/leapp/files/repomap.json b/etc/leapp/files/repomap.json +index c4ae9038..a57a04e4 100644 +--- a/etc/leapp/files/repomap.json ++++ b/etc/leapp/files/repomap.json +@@ -1,202 +1,138 @@ + { +- "datetime": "202511131423Z", ++ "datetime": "202602061951Z", + "version_format": "1.3.0", + "provided_data_streams": [ +- "4.1" ++ "4.2" + ], + "mapping": [ + { +- "source_major_version": "7", +- "target_major_version": "8", ++ "source_major_version": "8", ++ "target_major_version": "9", + "entries": [ + { +- "source": "rhel7-base", +- "target": [ +- "rhel8-AppStream", +- "rhel8-BaseOS" +- ] +- }, +- { +- "source": "rhel7-optional", +- "target": [ +- "rhel8-CRB" +- ] +- }, +- { +- "source": "rhel7-supplementary", +- "target": [ +- "rhel8-Supplementary" +- ] +- }, +- { +- "source": "rhel7-extras", +- "target": [ +- "rhel8-AppStream", +- "rhel8-BaseOS" +- ] +- }, +- 
{ +- "source": "rhel7-rt", +- "target": [ +- "rhel8-RT" +- ] +- }, +- { +- "source": "rhel7-nfv", +- "target": [ +- "rhel8-NFV" +- ] +- }, +- { +- "source": "rhel7-sap", ++ "source": "rhel8-BaseOS", + "target": [ +- "rhel8-SAP-NetWeaver" ++ "rhel9-BaseOS" + ] + }, + { +- "source": "rhel7-sap-hana", ++ "source": "rhel8-AppStream", + "target": [ +- "rhel8-SAP-Solutions" ++ "rhel9-AppStream" + ] + }, + { +- "source": "rhel7-rhui-microsoft-azure-sap-apps", ++ "source": "rhel8-CRB", + "target": [ +- "rhel8-SAP-Solutions", +- "rhel8-SAP-NetWeaver", +- "rhel8-rhui-microsoft-azure-sap-apps" ++ "rhel9-CRB" + ] + }, + { +- "source": "rhel7-highavailability", ++ "source": "rhel8-rhui-client-config-server-8-ha", + "target": [ +- "rhel8-HighAvailability" ++ "rhel9-rhui-client-config-server-9" + ] + }, + { +- "source": "rhel7-ansible-2", ++ "source": "rhel8-rhui-client-config-server-8", + "target": [ +- "rhel8-ansible-2" ++ "rhel9-rhui-client-config-server-9" + ] + }, + { +- "source": "rhel7-rhui-client-config-server-7", ++ "source": "rhel8-Supplementary", + "target": [ +- "rhel8-rhui-client-config-server-8" ++ "rhel9-Supplementary" + ] + }, + { +- "source": "rhel7-rhui-client-config-server-7-sap", ++ "source": "rhel8-RT", + "target": [ +- "rhel8-rhui-client-config-server-8-sap" ++ "rhel9-RT" + ] + }, + { +- "source": "rhel7-rhui-microsoft-azure-rhel7", ++ "source": "rhel8-NFV", + "target": [ +- "rhel8-rhui-microsoft-azure-rhel8" ++ "rhel9-NFV" + ] + }, + { +- "source": "rhel7-rhui-microsoft-sap-ha", ++ "source": "rhel8-SAP-NetWeaver", + "target": [ +- "rhel8-rhui-microsoft-sap-ha" ++ "rhel9-SAP-NetWeaver" + ] + }, + { +- "source": "rhel7-rhui-google-compute-engine", ++ "source": "rhel8-SAP-Solutions", + "target": [ +- "rhel8-rhui-google-compute-engine-leapp" ++ "rhel9-SAP-Solutions" + ] + }, + { +- "source": "rhel7-rhui-custom-client-at-alibaba", +- "target": [ +- "rhel8-rhui-custom-client-at-alibaba" +- ] +- } +- ] +- }, +- { +- "source_major_version": "8", +- "target_major_version": "9", +- "entries": [ +- { +- "source": "rhel8-BaseOS", ++ "source": "rhel8-HighAvailability", + "target": [ +- "rhel9-BaseOS" ++ "rhel9-HighAvailability" + ] + }, + { +- "source": "rhel8-AppStream", ++ "source": "rhel8-Advanced-Virt", + "target": [ + "rhel9-AppStream" + ] + }, + { +- "source": "rhel8-CRB", ++ "source": "rhel8-Advanced-Virt-CRB", + "target": [ + "rhel9-CRB" + ] + }, + { +- "source": "rhel8-rhui-client-config-server-8-ha", +- "target": [ +- "rhel9-rhui-client-config-server-9" +- ] +- }, +- { +- "source": "rhel8-rhui-client-config-server-8", +- "target": [ +- "rhel9-rhui-client-config-server-9" +- ] +- }, +- { +- "source": "rhel8-Supplementary", ++ "source": "rhel8-jbeap-7.4", + "target": [ +- "rhel9-Supplementary" ++ "rhel9-jbeap-7.4" + ] + }, + { +- "source": "rhel8-RT", ++ "source": "rhel8-jbeap-8.0", + "target": [ +- "rhel9-RT" ++ "rhel9-jbeap-8.0" + ] + }, + { +- "source": "rhel8-NFV", ++ "source": "rhel8-jbeap-8.1", + "target": [ +- "rhel9-NFV" ++ "rhel9-jbeap-8.1" + ] + }, + { +- "source": "rhel8-SAP-NetWeaver", ++ "source": "rhel8-satellite-6.16", + "target": [ +- "rhel9-SAP-NetWeaver" ++ "rhel9-satellite-6.16" + ] + }, + { +- "source": "rhel8-SAP-Solutions", ++ "source": "rhel8-satellite-capsule-6.16", + "target": [ +- "rhel9-SAP-Solutions" ++ "rhel9-satellite-capsule-6.16" + ] + }, + { +- "source": "rhel8-HighAvailability", ++ "source": "rhel8-satellite-maintenance-6.16", + "target": [ +- "rhel9-HighAvailability" ++ "rhel9-satellite-maintenance-6.16" + ] + }, + { +- "source": "rhel8-Advanced-Virt", 
++ "source": "rhel8-satellite-utils-6.16", + "target": [ +- "rhel9-AppStream" ++ "rhel9-satellite-utils-6.16" + ] + }, + { +- "source": "rhel8-Advanced-Virt-CRB", ++ "source": "rhel8-satellite-client-6", + "target": [ +- "rhel9-CRB" ++ "rhel9-satellite-client-6" + ] + }, + { +@@ -234,24 +170,6 @@ + "target": [ + "rhel9-rhui-custom-client-at-alibaba" + ] +- }, +- { +- "source": "rhel8-jbeap-7.4", +- "target": [ +- "rhel9-jbeap-7.4" +- ] +- }, +- { +- "source": "rhel8-jbeap-8.0", +- "target": [ +- "rhel9-jbeap-8.0" +- ] +- }, +- { +- "source": "rhel8-jbeap-8.1", +- "target": [ +- "rhel9-jbeap-8.1" +- ] + } + ] + }, +@@ -313,6 +231,12 @@ + "rhel10-HighAvailability" + ] + }, ++ { ++ "source": "rhel9-satellite-client-6", ++ "target": [ ++ "rhel10-satellite-client-6" ++ ] ++ }, + { + "source": "rhel9-rhui-client-config-server-9", + "target": [ +@@ -356,6 +280,38 @@ + { + "pesid": "rhel10-BaseOS", + "entries": [ ++ { ++ "major_version": "10", ++ "repoid": "baseos", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "baseos", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "baseos", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "baseos", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "baseos", +@@ -602,6 +558,38 @@ + { + "pesid": "rhel10-AppStream", + "entries": [ ++ { ++ "major_version": "10", ++ "repoid": "appstream", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "appstream", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "appstream", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "appstream", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "appstream", +@@ -980,6 +968,38 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "10", ++ "repoid": "crb", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "crb", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "crb", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "crb", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "crb", +@@ -1196,6 +1216,22 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-ppc64le-rt-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-s390x-rt-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "10", + "repoid": "rhel-10-for-x86_64-rt-beta-rpms", +@@ -1220,6 +1256,14 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ 
"major_version": "10", ++ "repoid": "rt", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "rt", +@@ -1233,6 +1277,14 @@ + { + "pesid": "rhel10-NFV", + "entries": [ ++ { ++ "major_version": "10", ++ "repoid": "nfv", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "nfv", +@@ -1265,6 +1317,22 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-ppc64le-nfv-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-s390x-nfv-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "10", + "repoid": "rhel-10-for-x86_64-nfv-beta-rpms", +@@ -1294,6 +1362,14 @@ + { + "pesid": "rhel10-SAP-NetWeaver", + "entries": [ ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-aarch64-sap-netweaver-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "10", + "repoid": "rhel-10-for-ppc64le-sap-netweaver-beta-rpms", +@@ -1424,9 +1500,17 @@ + "entries": [ + { + "major_version": "10", +- "repoid": "rhel-10-for-ppc64le-sap-solutions-e4s-rpms", +- "arch": "ppc64le", +- "channel": "e4s", ++ "repoid": "rhel-10-for-aarch64-sap-solutions-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-ppc64le-sap-solutions-e4s-rpms", ++ "arch": "ppc64le", ++ "channel": "e4s", + "repo_type": "rpm", + "distro": "rhel" + }, +@@ -1438,6 +1522,14 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "10", ++ "repoid": "rhel-10-for-s390x-sap-solutions-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "10", + "repoid": "rhel-10-for-x86_64-sap-solutions-e4s-rhui-rpms", +@@ -1477,6 +1569,38 @@ + { + "pesid": "rhel10-HighAvailability", + "entries": [ ++ { ++ "major_version": "10", ++ "repoid": "highavailability", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "highavailability", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "highavailability", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "10", ++ "repoid": "highavailability", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "10", + "repoid": "highavailability", +@@ -1637,1323 +1761,126 @@ + "arch": "x86_64", + "channel": "e4s", + "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "10", +- "repoid": "rhel-10-for-x86_64-highavailability-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "10", +- "repoid": "rhel-10-for-x86_64-highavailability-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "10", +- "repoid": "rhui-rhel-10-for-x86_64-highavailability-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" 
+- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-microsoft-azure-rhel10", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-microsoft-azure-rhel10", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-client-config-server-10", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-client-config-server-10", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "10", +- "repoid": "rhui-client-config-server-10", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-custom-client-at-alibaba", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-custom-rhui_client_at_alibaba-rhel-10", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- }, +- { +- "major_version": "10", +- "repoid": "rhui-custom-rhui_client_at_alibaba-rhel-10", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-client-config-server-10-sap", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-client-config-server-10-sap-bundle", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-microsoft-azure-sap-apps", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-microsoft-azure-rhel10-sapapps", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- } +- ] +- }, +- { +- "pesid": "rhel10-rhui-microsoft-sap-ha", +- "entries": [ +- { +- "major_version": "10", +- "repoid": "rhui-microsoft-azure-rhel10-sap-ha", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- } +- ] +- }, +- { +- "pesid": "rhel7-base", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-rhui-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-9-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-beta-rpms", +- "arch": "ppc64le", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-e4s-rpms", +- "arch": "ppc64le", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-els-rpms", +- "arch": "ppc64le", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-eus-rpms", +- "arch": "ppc64le", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-a-rpms", +- "arch": "s390x", +- "channel": "ga", +- 
"repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-beta-rpms", +- "arch": "s390x", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-els-rpms", +- "arch": "s390x", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-eus-rpms", +- "arch": "s390x", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-aus-rpms", +- "arch": "x86_64", +- "channel": "aus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-e4s-rhui-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-els-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-releases-rhui-beta", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-e4s-rhui-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-els-rhui-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- } +- ] +- }, +- { +- "pesid": 
"rhel7-optional", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-optional-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-rhui-optional-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-9-optional-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-e4s-optional-rpms", +- "arch": "ppc64le", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-els-optional-rpms", +- "arch": "ppc64le", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-eus-optional-rpms", +- "arch": "ppc64le", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-optional-beta-rpms", +- "arch": "ppc64le", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-optional-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-a-optional-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-els-optional-rpms", +- "arch": "s390x", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-eus-optional-rpms", +- "arch": "s390x", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-optional-beta-rpms", +- "arch": "s390x", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-optional-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-aus-optional-rpms", +- "arch": "x86_64", +- "channel": "aus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-e4s-optional-rhui-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-e4s-optional-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-els-optional-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-eus-optional-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-optional-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-optional-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": 
"rhel-7-server-releases-rhui-optional-beta", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-eus-optional-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-optional-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-optional-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-e4s-optional-rhui-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-els-optional-rhui-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-optional-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-optional-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- } +- ] +- }, +- { +- "pesid": "rhel7-supplementary", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-9-supplementary-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-eus-supplementary-rpms", +- "arch": "ppc64le", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-supplementary-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-eus-supplementary-rpms", +- "arch": "s390x", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-supplementary-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-eus-supplementary-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-eus-supplementary-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-supplementary-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-supplementary-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-supplementary-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-supplementary-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- 
"distro": "rhel", +- "rhui": "google" +- } +- ] +- }, +- { +- "pesid": "rhel7-extras", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-extras-beta-rpms", +- "arch": "aarch64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-extras-rhui-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-arm-64-extras-rpms", +- "arch": "aarch64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-9-extras-beta-rpms", +- "arch": "ppc64le", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-9-extras-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-extras-beta-rpms", +- "arch": "ppc64le", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-power-le-extras-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-a-extras-beta-rpms", +- "arch": "s390x", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-a-extras-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-extras-beta-rpms", +- "arch": "s390x", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-for-system-z-extras-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-extras-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-extras-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-extras-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rhui-extras-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-extras-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-7-server-rhui-extras-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- } +- ] +- }, +- { +- "pesid": "rhel7-rt", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-server-eus-rt-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rt-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rt-els-rpms", 
+- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-7-server-rt-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- } +- ] +- }, +- { +- "pesid": "rhel7-nfv", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-7-server-nfv-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- } +- ] +- }, +- { +- "pesid": "rhel7-sap", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-power-le-e4s-rpms", +- "arch": "ppc64le", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-power-le-els-rpms", +- "arch": "ppc64le", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-power-le-eus-rpms", +- "arch": "ppc64le", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-power-le-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-system-z-beta-rpms", +- "arch": "s390x", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-system-z-els-rpms", +- "arch": "s390x", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-system-z-eus-rpms", +- "arch": "s390x", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-for-system-z-rpms", +- "arch": "s390x", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-els-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-eus-rhui-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-rhui-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-for-rhel-7-server-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": 
"rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-sap-for-rhel-7-server-rhui-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-rhel-sap-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" +- } +- ] +- }, +- { +- "pesid": "rhel7-sap-hana", +- "entries": [ +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-for-power-le-e4s-rpms", +- "arch": "ppc64le", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-for-power-le-els-rpms", +- "arch": "ppc64le", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-for-power-le-eus-rpms", +- "arch": "ppc64le", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-for-power-le-rpms", +- "arch": "ppc64le", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-els-rpms", +- "arch": "x86_64", +- "channel": "els", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-eus-rhui-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-eus-rpms", +- "arch": "x86_64", +- "channel": "eus", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-rhui-e4s-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" ++ "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-sap-hana-for-rhel-7-server-rpms", ++ "major_version": "10", ++ "repoid": "rhel-10-for-x86_64-highavailability-eus-rpms", + "arch": "x86_64", +- "channel": "ga", ++ "channel": "eus", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhui-rhel-sap-hana-for-rhel-7-server-rhui-e4s-rpms", ++ "major_version": "10", ++ "repoid": "rhel-10-for-x86_64-highavailability-rpms", + "arch": "x86_64", +- "channel": "e4s", ++ "channel": "ga", + "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "google" ++ "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhui-rhel-sap-hana-for-rhel-7-server-rhui-rpms", ++ "major_version": "10", ++ "repoid": "rhui-rhel-10-for-x86_64-highavailability-rhui-rpms", + 
"arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel", +- "rhui": "google" ++ "rhui": "alibaba" + } + ] + }, + { +- "pesid": "rhel7-highavailability", ++ "pesid": "rhel10-satellite-client-6", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-for-system-z-beta-rpms", +- "arch": "s390x", +- "channel": "beta", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-aarch64-eus-rpms", ++ "arch": "aarch64", ++ "channel": "eus", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-for-system-z-rpms", +- "arch": "s390x", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-aarch64-rpms", ++ "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-beta-rpms", +- "arch": "x86_64", +- "channel": "beta", +- "repo_type": "rpm", +- "distro": "rhel" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-e4s-rhui-rpms", +- "arch": "x86_64", +- "channel": "e4s", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-e4s-rpms", +- "arch": "x86_64", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-ppc64le-e4s-rpms", ++ "arch": "ppc64le", + "channel": "e4s", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-els-rpms", +- "arch": "x86_64", +- "channel": "els", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-ppc64le-eus-rpms", ++ "arch": "ppc64le", ++ "channel": "eus", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-eus-rhui-rpms", +- "arch": "x86_64", +- "channel": "eus", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-ppc64le-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", + "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" ++ "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-eus-rpms", +- "arch": "x86_64", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-s390x-eus-rpms", ++ "arch": "s390x", + "channel": "eus", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-rhui-rpms", +- "arch": "x86_64", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-s390x-rpms", ++ "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" ++ "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-rhui-rpms", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-x86_64-e4s-rpms", + "arch": "x86_64", +- "channel": "ga", ++ "channel": "e4s", + "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "azure" ++ "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhel-ha-for-rhel-7-server-rpms", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-x86_64-eus-rpms", + "arch": "x86_64", +- "channel": "ga", ++ "channel": "eus", + "repo_type": "rpm", + "distro": "rhel" + }, + { +- "major_version": "7", +- "repoid": "rhui-rhel-ha-for-rhel-7-server-e4s-rhui-rpms", ++ "major_version": "10", ++ "repoid": "satellite-client-6-for-rhel-10-x86_64-rpms", + "arch": "x86_64", +- "channel": "e4s", ++ "channel": "ga", + "repo_type": "rpm", +- "distro": 
"rhel", +- "rhui": "google" ++ "distro": "rhel" + } + ] + }, + { +- "pesid": "rhel7-ansible-2", ++ "pesid": "rhel10-rhui-microsoft-azure-rhel10", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhel-7-server-ansible-2-rhui-rpms", ++ "major_version": "10", ++ "repoid": "rhui-microsoft-azure-rhel10", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", +@@ -2963,34 +1890,20 @@ + ] + }, + { +- "pesid": "rhel7-rhui-client-config-server-7", ++ "pesid": "rhel10-rhui-client-config-server-10", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhui-client-config-server-7", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "aws" +- }, +- { +- "major_version": "7", +- "repoid": "rhui-client-config-server-7-arm", ++ "major_version": "10", ++ "repoid": "rhui-client-config-server-10", + "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel", + "rhui": "aws" +- } +- ] +- }, +- { +- "pesid": "rhel7-rhui-client-config-server-7-sap", +- "entries": [ ++ }, + { +- "major_version": "7", +- "repoid": "rhui-client-config-server-7-sap-bundle", ++ "major_version": "10", ++ "repoid": "rhui-client-config-server-10", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", +@@ -3000,64 +1913,64 @@ + ] + }, + { +- "pesid": "rhel7-rhui-microsoft-azure-rhel7", ++ "pesid": "rhel10-rhui-custom-client-at-alibaba", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhui-microsoft-azure-rhel7", +- "arch": "x86_64", ++ "major_version": "10", ++ "repoid": "rhui-custom-rhui_client_at_alibaba-rhel-10", ++ "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel", +- "rhui": "azure" ++ "rhui": "alibaba" + }, + { +- "major_version": "7", +- "repoid": "rhui-microsoft-azure-rhel7-eus", ++ "major_version": "10", ++ "repoid": "rhui-custom-rhui_client_at_alibaba-rhel-10", + "arch": "x86_64", +- "channel": "eus", ++ "channel": "ga", + "repo_type": "rpm", + "distro": "rhel", +- "rhui": "azure" ++ "rhui": "alibaba" + } + ] + }, + { +- "pesid": "rhel7-rhui-microsoft-sap-ha", ++ "pesid": "rhel10-rhui-client-config-server-10-sap", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhui-microsoft-azure-rhel7-base-sap-ha", ++ "major_version": "10", ++ "repoid": "rhui-client-config-server-10-sap-bundle", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel", +- "rhui": "azure" ++ "rhui": "aws" + } + ] + }, + { +- "pesid": "rhel7-rhui-google-compute-engine", ++ "pesid": "rhel10-rhui-microsoft-azure-sap-apps", + "entries": [ + { +- "major_version": "7", +- "repoid": "google-compute-engine", ++ "major_version": "10", ++ "repoid": "rhui-microsoft-azure-rhel10-sapapps", + "arch": "x86_64", +- "channel": "ga", ++ "channel": "eus", + "repo_type": "rpm", + "distro": "rhel", +- "rhui": "google" ++ "rhui": "azure" + } + ] + }, + { +- "pesid": "rhel7-rhui-microsoft-azure-sap-apps", ++ "pesid": "rhel10-rhui-microsoft-sap-ha", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhui-microsoft-azure-rhel7-base-sap-apps", ++ "major_version": "10", ++ "repoid": "rhui-microsoft-azure-rhel10-sap-ha", + "arch": "x86_64", +- "channel": "ga", ++ "channel": "e4s", + "repo_type": "rpm", + "distro": "rhel", + "rhui": "azure" +@@ -3065,22 +1978,40 @@ + ] + }, + { +- "pesid": "rhel7-rhui-custom-client-at-alibaba", ++ "pesid": "rhel8-BaseOS", + "entries": [ + { +- "major_version": "7", +- "repoid": "rhui-custom-rhui_client_at_alibaba", ++ "major_version": "8", ++ "repoid": "baseos", ++ "arch": "aarch64", ++ 
"channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "baseos", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "baseos", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "baseos", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", +- "distro": "rhel", +- "rhui": "alibaba" +- } +- ] +- }, +- { +- "pesid": "rhel8-BaseOS", +- "entries": [ ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "baseos", +@@ -3380,6 +2311,38 @@ + { + "pesid": "rhel8-AppStream", + "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "appstream", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "appstream", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "appstream", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "appstream", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "appstream", +@@ -3811,6 +2774,38 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "8", ++ "repoid": "powertools", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "powertools", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "powertools", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "powertools", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "powertools", +@@ -4055,6 +3050,14 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "8", ++ "repoid": "rt", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "rt", +@@ -4068,6 +3071,14 @@ + { + "pesid": "rhel8-NFV", + "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "nfv", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "nfv", +@@ -4348,6 +3359,38 @@ + { + "pesid": "rhel8-HighAvailability", + "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "ha", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "ha", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "ha", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "ha", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "8", + "repoid": "ha", +@@ -4618,6 +3661,151 @@ + } + ] + }, ++ { ++ "pesid": "rhel8-satellite-6.16", ++ "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "satellite-6.16-for-rhel-8-x86_64-rpms", ++ "arch": "x86_64", 
++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel8-satellite-capsule-6.16", ++ "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "satellite-capsule-6.16-for-rhel-8-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel8-satellite-maintenance-6.16", ++ "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "satellite-maintenance-6.16-for-rhel-8-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel8-satellite-utils-6.16", ++ "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "satellite-utils-6.16-for-rhel-8-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel8-satellite-client-6", ++ "entries": [ ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-aarch64-eus-rpms", ++ "arch": "aarch64", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-aarch64-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-ppc64le-e4s-rpms", ++ "arch": "ppc64le", ++ "channel": "e4s", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-ppc64le-eus-rpms", ++ "arch": "ppc64le", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-ppc64le-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-s390x-eus-rpms", ++ "arch": "s390x", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-s390x-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-x86_64-aus-rpms", ++ "arch": "x86_64", ++ "channel": "aus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-x86_64-e4s-rpms", ++ "arch": "x86_64", ++ "channel": "e4s", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-x86_64-eus-rpms", ++ "arch": "x86_64", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "8", ++ "repoid": "satellite-client-6-for-rhel-8-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, + { + "pesid": "rhel8-rhui-client-config-server-8", + "entries": [ +@@ -4872,6 +4060,38 @@ + { + "pesid": "rhel9-BaseOS", + "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "baseos", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "baseos", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "baseos", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": 
"baseos", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "baseos", +@@ -5153,6 +4373,38 @@ + { + "pesid": "rhel9-AppStream", + "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "appstream", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "appstream", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "appstream", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "appstream", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "appstream", +@@ -5557,6 +4809,38 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "9", ++ "repoid": "crb", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "crb", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "crb", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "crb", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "crb", +@@ -5791,6 +5075,22 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-ppc64le-rt-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-s390x-rt-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "9", + "repoid": "rhel-9-for-x86_64-rt-beta-rpms", +@@ -5815,6 +5115,14 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "9", ++ "repoid": "rt", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "rt", +@@ -5828,6 +5136,14 @@ + { + "pesid": "rhel9-NFV", + "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "nfv", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "nfv", +@@ -5854,8 +5170,24 @@ + }, + { + "major_version": "9", +- "repoid": "rhel-9-for-aarch64-nfv-rpms", +- "arch": "aarch64", ++ "repoid": "rhel-9-for-aarch64-nfv-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-ppc64le-nfv-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-s390x-nfv-rpms", ++ "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" +@@ -5889,6 +5221,14 @@ + { + "pesid": "rhel9-SAP-NetWeaver", + "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-aarch64-sap-netweaver-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "9", + "repoid": "rhel-9-for-ppc64le-sap-netweaver-beta-rpms", +@@ -6026,6 +5366,14 @@ + { + "pesid": "rhel9-SAP-Solutions", + 
"entries": [ ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-aarch64-sap-solutions-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "9", + "repoid": "rhel-9-for-ppc64le-sap-solutions-e4s-rpms", +@@ -6042,6 +5390,14 @@ + "repo_type": "rpm", + "distro": "rhel" + }, ++ { ++ "major_version": "9", ++ "repoid": "rhel-9-for-s390x-sap-solutions-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, + { + "major_version": "9", + "repoid": "rhel-9-for-x86_64-sap-solutions-e4s-rhui-rpms", +@@ -6090,6 +5446,38 @@ + { + "pesid": "rhel9-HighAvailability", + "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "highavailability", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "highavailability", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "highavailability", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "highavailability", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "almalinux" ++ }, + { + "major_version": "9", + "repoid": "highavailability", +@@ -6305,6 +5693,190 @@ + } + ] + }, ++ { ++ "pesid": "rhel9-jbeap-7.4", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "jb-eap-7.4-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-jbeap-8.0", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "jb-eap-8.0-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-jbeap-8.1", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "jb-eap-8.1-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-satellite-6.16", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "satellite-6.16-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-satellite-capsule-6.16", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "satellite-capsule-6.16-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-satellite-maintenance-6.16", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "satellite-maintenance-6.16-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-satellite-utils-6.16", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "satellite-utils-6.16-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, ++ { ++ "pesid": "rhel9-satellite-client-6", ++ "entries": [ ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-aarch64-eus-rpms", ++ "arch": "aarch64", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-aarch64-rpms", ++ "arch": "aarch64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ 
{ ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-ppc64le-e4s-rpms", ++ "arch": "ppc64le", ++ "channel": "e4s", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-ppc64le-eus-rpms", ++ "arch": "ppc64le", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-ppc64le-rpms", ++ "arch": "ppc64le", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-s390x-eus-rpms", ++ "arch": "s390x", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-s390x-rpms", ++ "arch": "s390x", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-x86_64-aus-rpms", ++ "arch": "x86_64", ++ "channel": "aus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-x86_64-e4s-rpms", ++ "arch": "x86_64", ++ "channel": "e4s", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-x86_64-eus-rpms", ++ "arch": "x86_64", ++ "channel": "eus", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ }, ++ { ++ "major_version": "9", ++ "repoid": "satellite-client-6-for-rhel-9-x86_64-rpms", ++ "arch": "x86_64", ++ "channel": "ga", ++ "repo_type": "rpm", ++ "distro": "rhel" ++ } ++ ] ++ }, + { + "pesid": "rhel9-rhui-client-config-server-9", + "entries": [ +@@ -6434,45 +6006,6 @@ + "rhui": "alibaba" + } + ] +- }, +- { +- "pesid": "rhel9-jbeap-7.4", +- "entries": [ +- { +- "major_version": "9", +- "repoid": "jb-eap-7.4-for-rhel-9-x86_64-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- } +- ] +- }, +- { +- "pesid": "rhel9-jbeap-8.0", +- "entries": [ +- { +- "major_version": "9", +- "repoid": "jb-eap-8.0-for-rhel-9-x86_64-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- } +- ] +- }, +- { +- "pesid": "rhel9-jbeap-8.1", +- "entries": [ +- { +- "major_version": "9", +- "repoid": "jb-eap-8.1-for-rhel-9-x86_64-rpms", +- "arch": "x86_64", +- "channel": "ga", +- "repo_type": "rpm", +- "distro": "rhel" +- } +- ] + } + ] + } diff --git a/etc/leapp/transaction/to_reinstall b/etc/leapp/transaction/to_reinstall new file mode 100644 index 00000000..c6694a8e @@ -3732,15 +15565,1869 @@ index 00000000..52f5af9d + api.produce(ActiveVendorList(data=list(active_vendors))) + else: + self.log.info("No active vendors found, vendor list not generated") +diff --git a/repos/system_upgrade/common/actors/checknvme/actor.py b/repos/system_upgrade/common/actors/checknvme/actor.py +new file mode 100644 +index 00000000..dc82c4ad +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checknvme/actor.py +@@ -0,0 +1,54 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import checknvme ++from leapp.models import ( ++ KernelCmdline, ++ LiveModeConfig, ++ NVMEInfo, ++ StorageInfo, ++ TargetKernelCmdlineArgTasks, ++ TargetUserSpacePreupgradeTasks, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks ++) ++from leapp.reporting import Report ++from leapp.tags import ChecksPhaseTag, IPUWorkflowTag ++ ++ ++class CheckNVME(Actor): ++ """ ++ Check if NVMe devices are used 
and possibly register additional actions. ++ ++ Check whether the system uses NVMe devices. These can be connected using ++ different transport technologies, e.g., PCIe, TCP, FC, etc. Transports ++ handled by the current implementation: ++ * PCIe (no special actions are required) ++ * Fibre Channel (FC) ++ ++ When NVMe-FC devices are detected, the following actions are taken: ++ * dracut, dracut-network, nvme-cli, and some others packages are installed into initramfs ++ * /etc/nvme is copied into target userspace ++ * the nvmf dracut module is included into upgrade initramfs ++ * rd.nvmf.discover=fc,auto is added to the upgrade boot entry ++ * nvme_core.multipath is added to the upgrade and target boot entry ++ ++ Conditions causing the upgrade to be inhibited: ++ * detecting a NVMe device using a transport technology different than PCIe or FC ++ that is used in /etc/fstab ++ * missing /etc/nvme/hostnqn or /etc/nvme/hostid when NVMe-FC device is present ++ * source system is RHEL 9+ and it has disabled native multipath ++ """ ++ name = 'check_nvme' ++ consumes = (LiveModeConfig, KernelCmdline, NVMEInfo, StorageInfo) ++ produces = ( ++ Report, ++ TargetKernelCmdlineArgTasks, ++ TargetUserSpacePreupgradeTasks, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks ++ ) ++ tags = (ChecksPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ checknvme.process() +diff --git a/repos/system_upgrade/common/actors/checknvme/libraries/checknvme.py b/repos/system_upgrade/common/actors/checknvme/libraries/checknvme.py +new file mode 100644 +index 00000000..cce11f43 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checknvme/libraries/checknvme.py +@@ -0,0 +1,352 @@ ++import os ++from collections import defaultdict ++from typing import List ++ ++from leapp import reporting ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.common.config.version import get_source_major_version ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ CopyFile, ++ DracutModule, ++ KernelCmdline, ++ KernelCmdlineArg, ++ LiveModeConfig, ++ NVMEDevice, ++ NVMEInfo, ++ StorageInfo, ++ TargetKernelCmdlineArgTasks, ++ TargetUserSpacePreupgradeTasks, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks ++) ++ ++FMT_LIST_SEPARATOR = '\n - ' ++FABRICS_TRANSPORT_TYPES = ['fc', 'tcp', 'rdma'] ++BROKEN_TRANSPORT_TYPES = ['tcp', 'rdma'] ++SAFE_TRANSPORT_TYPES = ['pcie', 'fc'] ++RQ_RPMS_CONTAINER = [ ++ 'iproute', ++ 'jq', ++ 'nvme-cli', ++ 'sed', ++] ++ ++# We need this packages early (when setting up container) as we will be modifying some ++# of their files ++EARLY_CONTAINER_RPMS = [ ++ 'dracut', ++ 'dracut-network', # Adds dracut-nvmf module ++] ++ ++ ++class NVMEDeviceCollection: ++ def __init__(self): ++ self.device_by_transport = defaultdict(list) ++ ++ def add_device(self, device: NVMEDevice): ++ self.device_by_transport[device.transport].append(device) ++ ++ def add_devices(self, devices: List[NVMEDevice]): ++ for device in devices: ++ self.add_device(device) ++ ++ def get_devices_by_transport(self, transport: str) -> List[NVMEDevice]: ++ return self.device_by_transport[transport] ++ ++ @property ++ def handled_transport_types(self) -> List[str]: ++ return SAFE_TRANSPORT_TYPES ++ ++ @property ++ def unhandled_devices(self) -> List[NVMEDevice]: ++ unhandled_devices = [] ++ for transport, devices in self.device_by_transport.items(): ++ if transport not in self.handled_transport_types: ++ 
unhandled_devices.extend(devices) ++ return unhandled_devices ++ ++ @property ++ def fabrics_devices(self) -> List[NVMEDevice]: ++ fabrics_devices = [] ++ for transport in FABRICS_TRANSPORT_TYPES: ++ fabrics_devices.extend(self.device_by_transport[transport]) ++ ++ return fabrics_devices ++ ++ ++def _format_list(data, sep=FMT_LIST_SEPARATOR, callback_sort=sorted, limit=0): ++ # NOTE(pstodulk): Teaser O:-> https://issues.redhat.com/browse/RHEL-126447 ++ ++ def identity(values): ++ return values ++ ++ if callback_sort is None: ++ callback_sort = identity ++ res = ['{}{}'.format(sep, item) for item in callback_sort(data)] ++ if limit: ++ return ''.join(res[:limit]) ++ return ''.join(res) ++ ++ ++def is_livemode_enabled() -> bool: ++ livemode_config = next(api.consume(LiveModeConfig), None) ++ if livemode_config and livemode_config.is_enabled: ++ return True ++ return False ++ ++ ++def get_current_cmdline_arg_value(arg_name: str): ++ cmdline = next(api.consume(KernelCmdline), None) ++ ++ if not cmdline: ++ raise StopActorExecutionError( ++ 'Failed to obtain message with information about current kernel cmdline' ++ ) ++ ++ for arg in cmdline.parameters: ++ if arg.key == arg_name: ++ return arg.value ++ ++ return None ++ ++ ++def _report_native_multipath_required(): ++ """Report that NVMe native multipath must be enabled on RHEL 9 before the upgrade.""" ++ reporting.create_report([ ++ reporting.Title('NVMe native multipath must be enabled on the target system'), ++ reporting.Summary( ++ 'The system is booted with "nvme_core.multipath=N" kernel command line argument, ' ++ 'disabling native multipath for NVMe devices. However, native multipath ' ++ 'is required to be used for NVMe over Fabrics (NVMeoF) on the target system. ' ++ 'Regarding that it is required to update the system setup to use ' ++ 'the native multipath before the in-place upgrade.' ++ ), ++ reporting.Remediation(hint=( ++ 'Enable native multipath for NVMe devices following the official ' ++ 'documentation and reboot your system - see the attached link.' ++ )), ++ reporting.ExternalLink( ++ url='https://red.ht/rhel-9-enabling-multipathing-on-nvme-devices', ++ title='Enabling native multipathing on NVMe devices.' ++ ), ++ reporting.Severity(reporting.Severity.HIGH), ++ reporting.Groups([reporting.Groups.INHIBITOR, reporting.Groups.FILESYSTEM]), ++ ]) ++ ++ ++def _report_system_should_migrate_to_native_multipath(): ++ """ ++ Report that since RHEL 9, native NVMe multipath is the recommended multipath solution for NVMe. ++ """ ++ reporting.create_report([ ++ reporting.Title('Native NVMe multipath is recommended on the target system.'), ++ reporting.Summary( ++ 'In the case that the system is using dm-multipath on NVMe devices, ' ++ 'it is recommended to use the native NVMe multipath instead. ' ++ 'We recommend to update the system configuration after the in-place ' ++ 'upgrade following the official documentation - see the attached link.' ++ ), ++ reporting.ExternalLink( ++ url='https://red.ht/rhel-9-enabling-multipathing-on-nvme-devices', ++ title='Enabling native multipathing on NVMe devices.' ++ ), ++ reporting.Severity(reporting.Severity.INFO), ++ reporting.Groups([reporting.Groups.FILESYSTEM, reporting.Groups.POST]), ++ ]) ++ ++ ++def _report_kernel_cmdline_might_be_modified_unnecessarily(): ++ """ ++ Report that we introduced nvme_core.multipath=N, which might not be necessary. ++ ++ We introduce nvme_core.multipath=N (unconditionally) during 8>9 upgrade. 
However,
++    the introduction of the argument might not always be necessary, but we currently lack
++    an implementation that would precisely identify when the argument is truly needed.
++    """
++    reporting.create_report([
++        reporting.Title('Native NVMe multipath will be disabled on the target system.'),
++        reporting.Summary(
++            'To ensure system\'s storage layout remains consistent during the upgrade, native '
++            'NVMe multipath will be disabled by adding nvme_core.multipath=N to the default boot entry. '
++            'In the case that the system does not use multipath, the nvme_core.multipath=N should be manually '
++            'removed from the target system\'s boot entry after the upgrade.'
++        ),
++        reporting.ExternalLink(
++            url='https://red.ht/rhel-9-enabling-multipathing-on-nvme-devices',
++            title='Enabling native multipathing on NVMe devices.'
++        ),
++        reporting.Severity(reporting.Severity.INFO),
++        reporting.Groups([reporting.Groups.FILESYSTEM, reporting.Groups.POST]),
++    ])
++
++
++def _tasks_copy_files_into_container(nvme_device_collection: NVMEDeviceCollection):
++    """
++    Tasks needed to modify the target userspace container and the upgrade initramfs.
++    """
++    # NOTE: prepared for future extension, as it's possible that we will need
++    # to copy more files when starting to look at NVMe-(RDMA|TCP)
++    copy_files = []
++
++    if nvme_device_collection.fabrics_devices:
++        # /etc/nvme/ is required only in case of NVMe-oF (PCIe drives are safe)
++        copy_files.append(CopyFile(src='/etc/nvme/'))
++
++    api.produce(TargetUserSpaceUpgradeTasks(
++        copy_files=copy_files,
++        install_rpms=RQ_RPMS_CONTAINER)
++    )
++
++
++def _tasks_for_kernel_cmdline(nvme_device_collection: NVMEDeviceCollection):
++    upgrade_cmdline_args = []
++    target_cmdline_args = []
++
++    if not is_livemode_enabled():
++        upgrade_cmdline_args.append(KernelCmdlineArg(key='rd.nvmf.discover', value='fc,auto'))
++
++    # The nvme_core.multipath argument is used to disable native multipath for NVMeoF devices.
++    nvme_core_mpath_arg_val = get_current_cmdline_arg_value('nvme_core.multipath')
++
++    # FIXME(pstodulk): handle multi-controller NVMe-PCIe drives WITH multipath used by, e.g., Intel SSD DC P4500.
++    # Essentially, we always append nvme_core.multipath=N to the kernel command line during an 8>9 upgrade. This also
++    # includes basic setups where a simple NVMe drive is attached over PCIe without any multipath capabilities (think
++    # of ordinary laptops). When the user attempts to later perform a 9>10 upgrade, an inhibitor will be raised with
++    # instructions to remove nvme_core.multipath=N introduced by us during the previous upgrade, which might be
++    # confusing as they might never even have heard of multipath. Right now, we just emit a report for the user to remove
++    # nvme_core.multipath=N from the boot entry if multipath is not used. We should improve this behaviour in the
++    # future so that we can precisely target when to introduce the argument.
++
++    if get_source_major_version() == '8':
++        # NOTE: it's kind of expected that NVMeoF users always use multipath
++
++        # If the system is already booted with nvme_core.multipath=?, do not change it
++        # The value will be copied from the default boot entry.
++        # On the other hand, on 8>9 we want to always add this as native multipath was unsupported
++        # on RHEL 8, therefore, we should not need it (hence the value N).
++ if not nvme_core_mpath_arg_val: ++ upgrade_cmdline_args.append(KernelCmdlineArg(key='nvme_core.multipath', value='N')) ++ target_cmdline_args.append(KernelCmdlineArg(key='nvme_core.multipath', value='N')) ++ ++ if nvme_core_mpath_arg_val != 'Y': ++ # Print the report only if NVMeoF is detected and ++ _report_system_should_migrate_to_native_multipath() ++ _report_kernel_cmdline_might_be_modified_unnecessarily() ++ ++ if get_source_major_version() == '9': ++ # NOTE(pstodulk): Check this always, does not matter whether we detect ++ # NVMeoF or whether just PCIe is used. In any case, we will require user ++ # to fix it. ++ if nvme_core_mpath_arg_val == 'N': ++ _report_native_multipath_required() ++ return ++ ++ api.produce(UpgradeKernelCmdlineArgTasks(to_add=upgrade_cmdline_args)) ++ api.produce(TargetKernelCmdlineArgTasks(to_add=target_cmdline_args)) ++ ++ ++def register_upgrade_tasks(nvme_device_collection: NVMEDeviceCollection): ++ """ ++ Register tasks that should happen during IPU to handle NVMe devices ++ successfully. ++ ++ Args: ++ nvme_fc_devices (list): List of NVMe-FC devices ++ """ ++ _tasks_copy_files_into_container(nvme_device_collection) ++ _tasks_for_kernel_cmdline(nvme_device_collection) ++ ++ api.produce(TargetUserSpacePreupgradeTasks(install_rpms=EARLY_CONTAINER_RPMS)) ++ api.produce(UpgradeInitramfsTasks(include_dracut_modules=[DracutModule(name='nvmf')])) ++ ++ ++def report_missing_configs_for_fabrics_devices(nvme_info: NVMEInfo, ++ nvme_device_collection: NVMEDeviceCollection, ++ max_devices_in_report: int = 3) -> bool: ++ missing_configs = [] ++ if not nvme_info.hostid: ++ missing_configs.append('/etc/nvme/hostid') ++ if not nvme_info.hostnqn: ++ missing_configs.append('/etc/nvme/hostnqn') ++ ++ # NOTE(pstodulk): hostid and hostnqn are mandatory for NVMe-oF devices. ++ # That means practically FC, RDMA, TCP. Let's inform user the upgrade ++ # is blocked and they must configure the system properly to be able to ++ # upgrade ++ if not nvme_device_collection.fabrics_devices or not missing_configs: ++ return # We either have no fabrics devices or we have both hostid and hostnqn ++ ++ files_str = ', '.join(missing_configs) if missing_configs else 'required configuration files' ++ ++ device_names = [dev.name for dev in nvme_device_collection.fabrics_devices[:max_devices_in_report]] ++ if len(nvme_device_collection.fabrics_devices) > max_devices_in_report: ++ device_names.append('...') ++ device_list_str = ', '.join(device_names) ++ ++ reporting.create_report([ ++ reporting.Title('Missing NVMe configuration files required for the upgrade'), ++ reporting.Summary( ++ 'The system has NVMe-oF devices detected ({}), but {} are missing. ' ++ 'Both /etc/nvme/hostid and /etc/nvme/hostnqn must be present and configured for NVMe-oF usage. ' ++ 'Upgrade cannot continue until these files are provided.'.format(device_list_str, files_str) ++ ), ++ reporting.Severity(reporting.Severity.HIGH), ++ reporting.Groups([reporting.Groups.INHIBITOR, reporting.Groups.FILESYSTEM]), ++ reporting.Remediation( ++ hint='Ensure the files /etc/nvme/hostid and /etc/nvme/hostnqn are present and properly configured.' ++ ), ++ ]) ++ ++ ++def get_devices_present_in_fstab() -> List[str]: ++ storage_info = next(api.consume(StorageInfo), None) ++ ++ if not storage_info: ++ raise StopActorExecutionError('Failed to obtain message with information about fstab entries') ++ ++ # Call realpath to get the *canonical* path to the device (user might use disk UUIDs, etc. 
in fstab) ++ return {os.path.realpath(entry.fs_spec) for entry in storage_info.fstab} ++ ++ ++def check_unhandled_devices_present_in_fstab(nvme_device_collection: NVMEDeviceCollection) -> bool: ++ """Check if any unhandled NVMe devices are present in fstab. ++ ++ Args: ++ nvme_device_collection: NVMEDeviceCollection instance ++ ++ Returns: ++ True if any unhandled NVMe devices are present in fstab, False otherwise ++ """ ++ unhandled_dev_nodes = {os.path.join('/dev', device.name) for device in nvme_device_collection.unhandled_devices} ++ fstab_listed_dev_nodes = set(get_devices_present_in_fstab()) ++ ++ required_unhandled_dev_nodes = unhandled_dev_nodes.intersection(fstab_listed_dev_nodes) ++ if required_unhandled_dev_nodes: ++ summary = ( ++ 'The system has NVMe devices with a transport type that is currently ' ++ 'not handled during the upgrade process present in fstab. Problematic devices: {0}' ++ ).format(_format_list(required_unhandled_dev_nodes)) ++ ++ reporting.create_report([ ++ reporting.Title('NVMe devices with unhandled transport type present in fstab'), ++ reporting.Summary(summary), ++ reporting.Severity(reporting.Severity.HIGH), ++ reporting.Groups([reporting.Groups.INHIBITOR, reporting.Groups.FILESYSTEM]), ++ ]) ++ return True ++ return False ++ ++ ++def process(): ++ nvmeinfo = next(api.consume(NVMEInfo), None) ++ if not nvmeinfo or not nvmeinfo.devices: ++ return # Nothing to do ++ ++ nvme_device_collection = NVMEDeviceCollection() ++ nvme_device_collection.add_devices(nvmeinfo.devices) ++ ++ check_unhandled_devices_present_in_fstab(nvme_device_collection) ++ report_missing_configs_for_fabrics_devices(nvmeinfo, nvme_device_collection) ++ register_upgrade_tasks(nvme_device_collection) +diff --git a/repos/system_upgrade/common/actors/checknvme/tests/test_checknvme.py b/repos/system_upgrade/common/actors/checknvme/tests/test_checknvme.py +new file mode 100644 +index 00000000..7f18e7b4 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/checknvme/tests/test_checknvme.py +@@ -0,0 +1,431 @@ ++import os ++ ++from leapp import reporting ++from leapp.libraries.actor import checknvme ++from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import ( ++ FstabEntry, ++ KernelCmdline, ++ NVMEDevice, ++ NVMEInfo, ++ StorageInfo, ++ TargetKernelCmdlineArgTasks, ++ TargetUserSpacePreupgradeTasks, ++ TargetUserSpaceUpgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks ++) ++from leapp.utils.report import is_inhibitor ++ ++ ++def _make_storage_info(fstab_entries=None): ++ """Helper to create StorageInfo with fstab entries.""" ++ if fstab_entries is None: ++ fstab_entries = [] ++ return StorageInfo(fstab=fstab_entries) ++ ++ ++def test_no_nvme_devices(monkeypatch): ++ """Test when no NVMe devices are present.""" ++ msgs = [KernelCmdline(parameters=[])] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ ++ checknvme.process() ++ ++ # No messages should be produced when no NVMe devices are present ++ assert api.produce.called == 0 ++ ++ ++def test_nvme_pcie_devices_only(monkeypatch): ++ """Test with only NVMe PCIe devices (no FC devices).""" ++ nvme_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='pcie' ++ ) ++ nvme_info = NVMEInfo( ++ devices=[nvme_device], ++ hostid='test-hostid', ++ hostnqn='test-hostnqn' ++ ) ++ ++ msgs = 
[KernelCmdline(parameters=[]), nvme_info, _make_storage_info()] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.process() ++ ++ def _get_produced_msg(msg_type): ++ """Get a single produced message of the given type.""" ++ for msg in api.produce.model_instances: ++ # We cannot use isinstance due to problems with inheritance ++ if type(msg) is msg_type: # pylint: disable=unidiomatic-typecheck ++ return msg ++ return None ++ ++ # Check TargetUserSpaceUpgradeTasks - no copy_files for PCIe-only ++ userspace_tasks = _get_produced_msg(TargetUserSpaceUpgradeTasks) ++ assert userspace_tasks.copy_files == [] ++ assert set(userspace_tasks.install_rpms) == {'iproute', 'jq', 'nvme-cli', 'sed'} ++ ++ # Check TargetUserSpacePreupgradeTasks ++ preupgrade_tasks = _get_produced_msg(TargetUserSpacePreupgradeTasks) ++ assert set(preupgrade_tasks.install_rpms) == {'dracut', 'dracut-network'} ++ ++ # Check UpgradeInitramfsTasks ++ initramfs_tasks = _get_produced_msg(UpgradeInitramfsTasks) ++ assert len(initramfs_tasks.include_dracut_modules) == 1 ++ assert initramfs_tasks.include_dracut_modules[0].name == 'nvmf' ++ ++ # Check UpgradeKernelCmdlineArgTasks ++ upgrade_cmdline_tasks = _get_produced_msg(UpgradeKernelCmdlineArgTasks) ++ upgrade_cmdline_args = {(arg.key, arg.value) for arg in upgrade_cmdline_tasks.to_add} ++ assert ('rd.nvmf.discover', 'fc,auto') in upgrade_cmdline_args ++ ++ # Check TargetKernelCmdlineArgTasks ++ target_cmdline_tasks = _get_produced_msg(TargetKernelCmdlineArgTasks) ++ # For PCIe-only, no nvme_core.multipath arg is added (no fabrics devices) ++ target_cmdline_args = {(arg.key, arg.value) for arg in target_cmdline_tasks.to_add} ++ assert target_cmdline_args == set() or ('nvme_core.multipath', 'N') in target_cmdline_args ++ ++ ++def test_nvme_fc_devices_present(monkeypatch): ++ """Test with NVMe-FC devices present.""" ++ nvme_fc_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ nvme_info = NVMEInfo( ++ devices=[nvme_fc_device], ++ hostid='test-hostid', ++ hostnqn='test-hostnqn' ++ ) ++ ++ msgs = [KernelCmdline(parameters=[]), nvme_info, _make_storage_info()] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.process() ++ ++ assert api.produce.called == 5 ++ ++ produced_msgs = api.produce.model_instances ++ assert any(isinstance(msg, TargetUserSpacePreupgradeTasks) for msg in produced_msgs) ++ assert any(isinstance(msg, TargetUserSpaceUpgradeTasks) for msg in produced_msgs) ++ assert any(isinstance(msg, UpgradeInitramfsTasks) for msg in produced_msgs) ++ ++ # Check that UpgradeKernelCmdlineArgTasks was produced with correct argument ++ kernel_cmdline_msgs = [msg for msg in produced_msgs if isinstance(msg, UpgradeKernelCmdlineArgTasks)] ++ assert len(kernel_cmdline_msgs) == 1 ++ ++ cmdline_args = {(c_arg.key, c_arg.value) for c_arg in kernel_cmdline_msgs[0].to_add} ++ expected_cmdline_args = { ++ ('rd.nvmf.discover', 'fc,auto'), ++ 
('nvme_core.multipath', 'N') ++ } ++ assert expected_cmdline_args == cmdline_args ++ ++ ++def test_mixed_nvme_devices(monkeypatch): ++ """Test with mixed NVMe devices (PCIe and FC).""" ++ nvme_pcie_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='pcie' ++ ) ++ nvme_fc_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme1', ++ name='nvme1', ++ transport='fc' ++ ) ++ nvme_info = NVMEInfo( ++ devices=[nvme_pcie_device, nvme_fc_device], ++ hostid='test-hostid', ++ hostnqn='test-hostnqn' ++ ) ++ ++ msgs = [KernelCmdline(parameters=[]), nvme_info, _make_storage_info()] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.process() ++ ++ assert api.produce.called == 5 ++ ++ produced_msgs = api.produce.model_instances ++ ++ # Check that UpgradeKernelCmdlineArgTasks was produced ++ kernel_cmdline_msgs = [msg for msg in produced_msgs if isinstance(msg, UpgradeKernelCmdlineArgTasks)] ++ assert len(kernel_cmdline_msgs) == 1 ++ ++ cmdline_args = {(c_arg.key, c_arg.value) for c_arg in kernel_cmdline_msgs[0].to_add} ++ expected_cmdline_args = { ++ ('rd.nvmf.discover', 'fc,auto'), ++ ('nvme_core.multipath', 'N') ++ } ++ assert expected_cmdline_args == cmdline_args ++ ++ ++def test_multiple_nvme_fc_devices(monkeypatch): ++ """Test with multiple NVMe-FC devices.""" ++ nvme_fc_device1 = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ nvme_fc_device2 = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme1', ++ name='nvme1', ++ transport='fc' ++ ) ++ nvme_info = NVMEInfo( ++ devices=[nvme_fc_device1, nvme_fc_device2], ++ hostid='test-hostid', ++ hostnqn='test-hostnqn' ++ ) ++ ++ msgs = [KernelCmdline(parameters=[]), nvme_info, _make_storage_info()] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.process() ++ ++ # Should still produce only one UpgradeKernelCmdlineArgTasks message ++ kernel_cmdline_msgs = [msg for msg in api.produce.model_instances ++ if isinstance(msg, UpgradeKernelCmdlineArgTasks)] ++ assert len(kernel_cmdline_msgs) == 1 ++ ++ # Should still have only two kernel arguments ++ assert len(kernel_cmdline_msgs[0].to_add) == 2 ++ ++ ++def test_nvme_missing_hostid_hostnqn_creates_inhibitor(monkeypatch): ++ """Test that missing hostid/hostnqn creates an inhibitor report for NVMe-oF devices.""" ++ nvme_fc_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ # Missing hostid and hostnqn ++ nvme_info = NVMEInfo( ++ devices=[nvme_fc_device], ++ hostid=None, ++ hostnqn=None ++ ) ++ ++ msgs = [KernelCmdline(parameters=[]), nvme_info, _make_storage_info()] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: 
None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.process() ++ ++ # Should create an inhibitor report for missing configs ++ assert reporting.create_report.called == 1 ++ assert is_inhibitor(reporting.create_report.report_fields) ++ ++ ++def test_nvme_device_collection_categorization(): ++ """Test NVMEDeviceCollection categorizes devices correctly.""" ++ nvme_pcie_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='pcie' ++ ) ++ nvme_fc_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme1', ++ name='nvme1', ++ transport='fc' ++ ) ++ nvme_tcp_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme2', ++ name='nvme2', ++ transport='tcp' ++ ) ++ ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_devices([nvme_pcie_device, nvme_fc_device, nvme_tcp_device]) ++ ++ assert nvme_pcie_device in collection.get_devices_by_transport('pcie') ++ assert nvme_fc_device in collection.get_devices_by_transport('fc') ++ assert nvme_tcp_device in collection.get_devices_by_transport('tcp') ++ ++ # FC and TCP are fabrics devices ++ assert nvme_fc_device in collection.fabrics_devices ++ assert nvme_tcp_device in collection.fabrics_devices ++ assert nvme_pcie_device not in collection.fabrics_devices ++ ++ # TCP is unhandled (not in SAFE_TRANSPORT_TYPES) ++ assert nvme_tcp_device in collection.unhandled_devices ++ assert nvme_pcie_device not in collection.unhandled_devices ++ assert nvme_fc_device not in collection.unhandled_devices ++ ++ ++def test_register_upgrade_tasks_without_fabrics_devices(monkeypatch): ++ """Test register_upgrade_tasks without fabrics devices.""" ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ ++ kernel_cmdline_tasks = KernelCmdline(parameters=[]) ++ msgs = [kernel_cmdline_tasks] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, '_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ nvme_pcie_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='pcie' ++ ) ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_device(nvme_pcie_device) ++ ++ checknvme.register_upgrade_tasks(collection) ++ ++ produced_msgs = api.produce.model_instances ++ expected_msg_types = { ++ TargetUserSpaceUpgradeTasks, ++ TargetUserSpacePreupgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks, ++ TargetKernelCmdlineArgTasks, ++ } ++ assert set(type(msg) for msg in produced_msgs) == expected_msg_types ++ ++ ++def test_register_upgrade_tasks_with_fabrics_devices(monkeypatch): ++ """Test register_upgrade_tasks with fabrics devices.""" ++ nvme_fc_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_device(nvme_fc_device) ++ ++ msgs = [KernelCmdline(parameters=[])] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(checknvme, '_report_system_should_migrate_to_native_multipath', lambda: None) ++ monkeypatch.setattr(checknvme, 
'_report_kernel_cmdline_might_be_modified_unnecessarily', lambda: None) ++ ++ checknvme.register_upgrade_tasks(collection) ++ ++ produced_msgs = api.produce.model_instances ++ expected_msg_types = { ++ TargetUserSpaceUpgradeTasks, ++ TargetUserSpacePreupgradeTasks, ++ UpgradeInitramfsTasks, ++ UpgradeKernelCmdlineArgTasks, ++ TargetKernelCmdlineArgTasks, ++ } ++ assert set(type(msg) for msg in produced_msgs) == expected_msg_types ++ ++ kernel_cmdline_msgs = [msg for msg in produced_msgs if isinstance(msg, UpgradeKernelCmdlineArgTasks)] ++ assert len(kernel_cmdline_msgs) == 1 ++ ++ cmdline_args = {(c_arg.key, c_arg.value) for c_arg in kernel_cmdline_msgs[0].to_add} ++ expected_cmdline_args = { ++ ('rd.nvmf.discover', 'fc,auto'), ++ ('nvme_core.multipath', 'N') ++ } ++ assert expected_cmdline_args == cmdline_args ++ ++ ++def test_check_unhandled_devices_not_in_fstab(monkeypatch): ++ """Test that no inhibitor is created when unhandled devices are not in fstab.""" ++ nvme_tcp_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='tcp' # tcp is unhandled ++ ) ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_device(nvme_tcp_device) ++ ++ # fstab contains a different device ++ fstab_entries = [ ++ FstabEntry(fs_spec='/dev/sda1', fs_file='/', fs_vfstype='ext4', ++ fs_mntops='defaults', fs_freq='1', fs_passno='1') ++ ] ++ storage_info = _make_storage_info(fstab_entries) ++ ++ msgs = [storage_info] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) ++ monkeypatch.setattr('os.path.realpath', lambda path: path) ++ ++ result = checknvme.check_unhandled_devices_present_in_fstab(collection) ++ ++ assert result is False ++ assert reporting.create_report.called == 0 ++ ++ ++def test_check_unhandled_devices_in_fstab_creates_inhibitor(monkeypatch): ++ """Test that an inhibitor is created when unhandled devices are in fstab.""" ++ nvme_tcp_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='tcp' # tcp is unhandled ++ ) ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_device(nvme_tcp_device) ++ ++ # fstab contains the unhandled device ++ fstab_entries = [ ++ FstabEntry(fs_spec='/dev/nvme0', fs_file='/', fs_vfstype='ext4', ++ fs_mntops='defaults', fs_freq='1', fs_passno='1') ++ ] ++ storage_info = _make_storage_info(fstab_entries) ++ ++ msgs = [storage_info] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) ++ monkeypatch.setattr(os.path, 'realpath', lambda path: path) ++ ++ result = checknvme.check_unhandled_devices_present_in_fstab(collection) ++ ++ assert result is True ++ assert reporting.create_report.called == 1 ++ assert is_inhibitor(reporting.create_report.report_fields) ++ ++ ++def test_check_unhandled_devices_handled_device_in_fstab_no_inhibitor(monkeypatch): ++ """Test that no inhibitor is created when only handled devices are in fstab.""" ++ nvme_pcie_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='pcie' # pcie is handled ++ ) ++ collection = checknvme.NVMEDeviceCollection() ++ collection.add_device(nvme_pcie_device) ++ ++ # fstab contains the handled device ++ fstab_entries = [ ++ FstabEntry(fs_spec='/dev/nvme0n1p1', fs_file='/', fs_vfstype='ext4', ++ fs_mntops='defaults', fs_freq='1', fs_passno='1') ++ ] ++ storage_info = 
_make_storage_info(fstab_entries) ++ ++ msgs = [storage_info] ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) ++ monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) ++ monkeypatch.setattr('os.path.realpath', lambda path: path) ++ ++ result = checknvme.check_unhandled_devices_present_in_fstab(collection) ++ ++ assert result is False ++ assert reporting.create_report.called == 0 +diff --git a/repos/system_upgrade/common/actors/checkpersistentmounts/libraries/checkpersistentmounts.py b/repos/system_upgrade/common/actors/checkpersistentmounts/libraries/checkpersistentmounts.py +index 2a35f4c5..79b431bb 100644 +--- a/repos/system_upgrade/common/actors/checkpersistentmounts/libraries/checkpersistentmounts.py ++++ b/repos/system_upgrade/common/actors/checkpersistentmounts/libraries/checkpersistentmounts.py +@@ -31,7 +31,7 @@ def check_mount_is_persistent(storage_info, mountpoint): + """Check if mountpoint is mounted in persistent fashion""" + + mount_entry_exists = any(me.mount == mountpoint for me in storage_info.mount) +- fstab_entry_exists = any(fe.fs_file == mountpoint for fe in storage_info.fstab) ++ fstab_entry_exists = any(fe.fs_file.rstrip('/') == mountpoint for fe in storage_info.fstab) + + if mount_entry_exists and not fstab_entry_exists: + inhibit_upgrade_due_non_persistent_mount(mountpoint) +diff --git a/repos/system_upgrade/common/actors/checkpersistentmounts/tests/test_checkpersistentmounts.py b/repos/system_upgrade/common/actors/checkpersistentmounts/tests/test_checkpersistentmounts.py +index fd6b3da3..14ce4e97 100644 +--- a/repos/system_upgrade/common/actors/checkpersistentmounts/tests/test_checkpersistentmounts.py ++++ b/repos/system_upgrade/common/actors/checkpersistentmounts/tests/test_checkpersistentmounts.py +@@ -11,6 +11,9 @@ MOUNT_ENTRY = MountEntry(name='/dev/sdaX', tp='ext4', mount='/var/lib/leapp', op + FSTAB_ENTRY = FstabEntry(fs_spec='', fs_file='/var/lib/leapp', fs_vfstype='', + fs_mntops='defaults', fs_freq='0', fs_passno='0') + ++FSTAB_ENTRY_TRAIL_SLASH = FstabEntry(fs_spec='', fs_file='/var/lib/leapp/', fs_vfstype='', ++ fs_mntops='defaults', fs_freq='0', fs_passno='0') ++ + + @pytest.mark.parametrize( + ('storage_info', 'should_inhibit'), +@@ -27,6 +30,10 @@ FSTAB_ENTRY = FstabEntry(fs_spec='', fs_file='/var/lib/leapp', fs_vfstype='', + StorageInfo(mount=[MOUNT_ENTRY], fstab=[FSTAB_ENTRY]), + False + ), ++ ( ++ StorageInfo(mount=[MOUNT_ENTRY], fstab=[FSTAB_ENTRY_TRAIL_SLASH]), ++ False ++ ), + ] + ) + def test_var_lib_leapp_non_persistent_is_detected(monkeypatch, storage_info, should_inhibit): diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh -index 56a94b5d..46c5d9b6 100755 +index 56a94b5d..758e1dfa 100755 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh +@@ -282,7 +282,7 @@ do_upgrade() { + local dirname + dirname="$("$NEWROOT/bin/dirname" "$NEWROOT$LEAPP_FAILED_FLAG_FILE")" + [ -d "$dirname" ] || mkdir "$dirname" +- ++ + echo >&2 "Creating file $NEWROOT$LEAPP_FAILED_FLAG_FILE" + echo >&2 "Warning: Leapp upgrade failed and there is an issue blocking the upgrade." 
+ echo >&2 "Please file a support case with /var/log/leapp/leapp-upgrade.log attached" @@ -390,4 +390,3 @@ getarg 'rd.break=leapp-logs' 'rd.upgrade.break=leapp-finish' && { sync mount -o "remount,$old_opts" "$NEWROOT" exit $result - +diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh +index 45f98148..3f656d63 100755 +--- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh ++++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh +@@ -104,6 +104,12 @@ install() { + # script to actually run the upgrader binary + inst_hook upgrade 50 "$_moddir/do-upgrade.sh" + ++ # The initqueue checkscript to ensure all requested devices are mounted. ++ # The initqueue is usually left when rootfs (eventually /usr) is mounted ++ # but we require in this case whole fstab mounted under /sysroot. Without ++ # the script, the initqueue is left too early. ++ inst_hook initqueue/finished 99 "$moddir/upgrade-mount-wait-check.sh" ++ + #NOTE: some clean up?.. ideally, everything should be inside the leapp* + #NOTE: current *.service is changed so in case we would like to use the + # hook, we will have to modify it back +diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/upgrade-mount-wait-check.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/upgrade-mount-wait-check.sh +new file mode 100755 +index 00000000..5e21fa12 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/upgrade-mount-wait-check.sh +@@ -0,0 +1,50 @@ ++#!/bin/bash ++ ++# shellcheck disable=SC1091 # The file must be always present to boot the system ++type getarg >/dev/null 2>&1 || . /lib/dracut-lib.sh ++ ++log_debug() { ++ # TODO(pstodulk): The arg is probably not needed ++ getarg 'rd.upgrade.debug' && echo >&2 "Upgrade Initqueue Debug: $1" ++} ++ ++ ++check_reqs_in_dir() { ++ log_debug "Check resources from: $1" ++ result=0 ++ # shellcheck disable=SC2045 # Iterating over ls should be fine (there should be no whitespaces) ++ for fname in $(ls -1 "$1"); do ++ # We grep for What=/dev explicitly to exclude bind mounting units ++ resource_path=$(grep "^What=/dev/" "$1/$fname" | cut -d "=" -f2-) ++ if [ -z "$resource_path" ]; then ++ # Grep found no match, meaning that the unit is mounting something different than a block device ++ continue ++ fi ++ ++ grep -E "^Options=.*bind.*" "$1/$fname" &>/dev/null ++ is_bindmount=$? ++ if [ $is_bindmount -eq 0 ]; then ++ # The unit contains Options=...,bind,..., or Options=...,rbind,... so it is a bind mount -> skip ++ continue ++ fi ++ ++ grep -E "^Options=.*nofail.*" "$1/$fname" &>/dev/null ++ is_nofail=$? ++ if [ $is_nofail -eq 0 ]; then ++ # The unit contains Options=...,nofail,... so it is a nofail mount -> skip ++ continue ++ fi ++ ++ if [ ! 
-e "$resource_path" ]; then ++ log_debug "Waiting for missing resource: '$resource_path'" ++ result=1 ++ fi ++ done ++ ++ return $result ++} ++ ++SYSTEMD_DIR="/usr/lib/systemd/system" ++LOCAL_FS_MOUNT_DIR="$SYSTEMD_DIR/local-fs.target.requires" ++ ++check_reqs_in_dir "$LOCAL_FS_MOUNT_DIR" +diff --git a/repos/system_upgrade/common/actors/convert/securebootinhibit/actor.py b/repos/system_upgrade/common/actors/convert/securebootinhibit/actor.py +new file mode 100644 +index 00000000..53f41e71 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/securebootinhibit/actor.py +@@ -0,0 +1,19 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import securebootinhibit ++from leapp.models import FirmwareFacts ++from leapp.reporting import Report ++from leapp.tags import ChecksPhaseTag, IPUWorkflowTag ++ ++ ++class SecureBootInhibit(Actor): ++ """ ++ Inhibit the conversion if SecureBoot is enabled. ++ """ ++ ++ name = 'secure_boot_inhibit' ++ consumes = (FirmwareFacts,) ++ produces = (Report,) ++ tags = (IPUWorkflowTag, ChecksPhaseTag) ++ ++ def process(self): ++ securebootinhibit.process() +diff --git a/repos/system_upgrade/common/actors/convert/securebootinhibit/libraries/securebootinhibit.py b/repos/system_upgrade/common/actors/convert/securebootinhibit/libraries/securebootinhibit.py +new file mode 100644 +index 00000000..5edb9fa2 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/securebootinhibit/libraries/securebootinhibit.py +@@ -0,0 +1,42 @@ ++from leapp import reporting ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.common.config import is_conversion ++from leapp.libraries.stdlib import api ++from leapp.models import FirmwareFacts ++ ++ ++def process(): ++ if not is_conversion(): ++ return ++ ++ ff = next(api.consume(FirmwareFacts), None) ++ if not ff: ++ raise StopActorExecutionError( ++ "Could not identify system firmware", ++ details={"details": "Actor did not receive FirmwareFacts message."}, ++ ) ++ ++ if ff.firmware == "efi" and ff.secureboot_enabled: ++ report = [ ++ reporting.Title( ++ "Detected enabled Secure Boot when trying to convert the system" ++ ), ++ reporting.Summary( ++ "Conversion to a different Linux distribution is not possible" ++ " when the Secure Boot is enabled. Artifacts of the target" ++ " Linux distribution are signed by keys that are not accepted" ++ " by the source Linux distribution." ++ ), ++ reporting.Severity(reporting.Severity.HIGH), ++ reporting.Groups([reporting.Groups.INHIBITOR, reporting.Groups.BOOT]), ++ # TODO some link ++ reporting.Remediation( ++ hint="Disable Secure Boot to be able to convert the system to" ++ " a different Linux distribution. Then re-enable Secure Boot" ++ " again after the upgrade process is finished successfully." ++ " Check instructions for your current OS, or hypervisor in" ++ " case of virtual machines, for more information how to" ++ " disable Secure Boot." 
++ ), ++ ] ++ reporting.create_report(report) +diff --git a/repos/system_upgrade/common/actors/convert/securebootinhibit/tests/test_securebootinhibit.py b/repos/system_upgrade/common/actors/convert/securebootinhibit/tests/test_securebootinhibit.py +new file mode 100644 +index 00000000..340e6b16 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/securebootinhibit/tests/test_securebootinhibit.py +@@ -0,0 +1,58 @@ ++import pytest ++ ++from leapp import reporting ++from leapp.libraries.actor import securebootinhibit ++from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked ++from leapp.libraries.stdlib import api ++from leapp.models import FirmwareFacts ++ ++ ++@pytest.mark.parametrize( ++ 'ff,is_conversion,should_inhibit', [ ++ # conversion, secureboot enabled = inhibit ++ ( ++ FirmwareFacts(firmware='efi', ppc64le_opal=None, secureboot_enabled=True), ++ True, ++ True ++ ), ++ ( ++ FirmwareFacts(firmware='efi', ppc64le_opal=None, secureboot_enabled=True), ++ False, ++ False ++ ), ++ # bios is ok ++ ( ++ FirmwareFacts(firmware='bios', ppc64le_opal=None, secureboot_enabled=False), ++ False, ++ False ++ ), ++ # bios is ok during conversion too ++ ( ++ FirmwareFacts(firmware='bios', ppc64le_opal=None, secureboot_enabled=False), ++ True, ++ False ++ ), ++ ( ++ FirmwareFacts(firmware='efi', ppc64le_opal=None, secureboot_enabled=False), ++ True, ++ False ++ ), ++ ( ++ FirmwareFacts(firmware='efi', ppc64le_opal=None, secureboot_enabled=False), ++ False, ++ False ++ ), ++ ] ++) ++def test_process(monkeypatch, ff, is_conversion, should_inhibit): ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[ff])) ++ monkeypatch.setattr(reporting, "create_report", create_report_mocked()) ++ monkeypatch.setattr(securebootinhibit, "is_conversion", lambda: is_conversion) ++ ++ securebootinhibit.process() ++ ++ if should_inhibit: ++ assert reporting.create_report.called == 1 ++ assert reporting.Groups.INHIBITOR in reporting.create_report.report_fields['groups'] ++ else: ++ assert not reporting.create_report.called +diff --git a/repos/system_upgrade/common/actors/convert/updateefi/actor.py b/repos/system_upgrade/common/actors/convert/updateefi/actor.py +new file mode 100644 +index 00000000..4c97ebd7 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/updateefi/actor.py +@@ -0,0 +1,25 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import updateefi ++from leapp.models import FirmwareFacts ++from leapp.reporting import Report ++from leapp.tags import ApplicationsPhaseTag, IPUWorkflowTag ++ ++ ++class UpdateEfiEntry(Actor): ++ """ ++ Update EFI directory and entry during conversion. ++ ++ During conversion, removes leftover source distro EFI directory on the ESP ++ (EFI System Partition) and it's EFI boot entry. It also adds a new boot ++ entry for the target distro. ++ ++ This actor does nothing when not converting. 
++ """ ++ ++ name = "update_efi" ++ consumes = (FirmwareFacts,) ++ produces = (Report,) ++ tags = (ApplicationsPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ updateefi.process() +diff --git a/repos/system_upgrade/common/actors/convert/updateefi/libraries/updateefi.py b/repos/system_upgrade/common/actors/convert/updateefi/libraries/updateefi.py +new file mode 100644 +index 00000000..1f300125 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/updateefi/libraries/updateefi.py +@@ -0,0 +1,230 @@ ++import errno ++import os ++ ++from leapp import reporting ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.common import efi ++from leapp.libraries.common.config import architecture, get_source_distro_id, get_target_distro_id, is_conversion ++from leapp.libraries.common.distro import distro_id_to_pretty_name, get_distro_efidir_canon_path ++from leapp.libraries.stdlib import api ++ ++ ++def _get_target_efi_bin_path(): ++ # Sorted by priority. ++ # NOTE: The shim-x64 package providing the shimx64.efi binary can be removed when ++ # not using secure boot, grubx64.efi should always be present (provided by ++ # grub-efi-x64). ++ # WARN: However it is expected to have the shim installed on the system to comply ++ # with the official guidelines. ++ # ++ # TODO: There are usually 2 more shim* files which appear unused on a fresh system: ++ # - shim.efi - seems like it's the same as shimx64.efi ++ # - shim64-.efi - ??? ++ # What about them? ++ efibins_by_arch = { ++ architecture.ARCH_X86_64: ("shimx64.efi", "grubx64.efi"), ++ architecture.ARCH_ARM64: ("shimaa64.efi", "grubaa64.efi"), ++ } ++ ++ arch = api.current_actor().configuration.architecture ++ for filename in efibins_by_arch[arch]: ++ efi_dir = get_distro_efidir_canon_path(get_target_distro_id()) ++ canon_path = os.path.join(efi_dir, filename) ++ if os.path.exists(canon_path): ++ return efi.canonical_path_to_efi_format(canon_path) ++ ++ return None ++ ++ ++def _add_boot_entry_for_target(efibootinfo): ++ """ ++ Create a new UEFI bootloader entry for the target system. ++ ++ Return the newly created bootloader entry. ++ """ ++ efi_bin_path = _get_target_efi_bin_path() ++ if not efi_bin_path: ++ # this is a fatal error as at least one of the possible EFI binaries ++ # should be present ++ raise efi.EFIError("Unable to detect any UEFI binary file.") ++ ++ label = distro_id_to_pretty_name(get_target_distro_id()) ++ ++ existing_entry = efi.get_boot_entry(efibootinfo, label, efi_bin_path) ++ if existing_entry: ++ api.current_logger().debug( ++ "The '{}' UEFI bootloader entry is already present.".format(label) ++ ) ++ return existing_entry ++ ++ return efi.add_boot_entry(label, efi_bin_path) ++ ++ ++def _remove_boot_entry_for_source(efibootinfo): ++ efibootinfo_fresh = efi.EFIBootInfo() ++ source_entry = efibootinfo_fresh.entries.get(efibootinfo.current_bootnum, None) ++ ++ if not source_entry: ++ api.current_logger().debug( ++ "The currently booted source distro EFI boot entry has been already" ++ " removed since the target entry has been added, skipping removal." ++ ) ++ return ++ ++ original_source_entry = efibootinfo.entries[source_entry.boot_number] ++ ++ if source_entry != original_source_entry: ++ api.current_logger().debug( ++ "The boot entry with current bootnum has changed since the target" ++ " distro entry has been added, skipping removal." 
++ ) ++ return ++ ++ efi.remove_boot_entry(source_entry.boot_number) ++ ++ ++def _try_remove_source_efi_dir(): ++ """ ++ Try to remove the source distro EFI directory ++ ++ The directory is not reported if it's not empty to preserve potential ++ custom files. In such a case a post upgrade report is produced informing ++ user to handle the leftover files. ++ """ ++ efi_dir_source = get_distro_efidir_canon_path(get_source_distro_id()) ++ if not os.path.exists(efi_dir_source): ++ api.current_logger().debug( ++ "Source distro EFI directory at {} does not exist, skipping removal.".format(efi_dir_source) ++ ) ++ return ++ ++ target_efi_dir = get_distro_efidir_canon_path(get_target_distro_id()) ++ if efi_dir_source == target_efi_dir: ++ api.current_logger().debug( ++ "Source and target distros use the same '{}' EFI directory.".format(efi_dir_source) ++ ) ++ return ++ ++ try: ++ os.rmdir(efi_dir_source) ++ api.current_logger().debug( ++ "Deleted source system EFI directory at {}".format(efi_dir_source) ++ ) ++ except FileNotFoundError: ++ api.current_logger().debug( ++ "Couldn't remove the source system EFI directory at {}: the directory no longer exists".format( ++ efi_dir_source ++ ) ++ ) ++ except OSError as e: ++ if e.errno == errno.ENOTEMPTY: ++ api.current_logger().debug( ++ "Didn't remove the source EFI directory {}, it does not exist".format( ++ efi_dir_source ++ ) ++ ) ++ summary = ( ++ "During the upgrade, the EFI binaries and grub configuration files" ++ f" were migrated from the source OS EFI directory {efi_dir_source}" ++ f" to the target OS EFI directory {target_efi_dir}." ++ f" Leftover files were detected in {target_efi_dir}, review them" ++ " and migrate them manually." ++ ) ++ reporting.create_report([ ++ reporting.Title("Review leftover files in the source OS EFI directory"), ++ reporting.Summary(summary), ++ reporting.Groups([ ++ reporting.Groups.BOOT, ++ reporting.Groups.POST, ++ ]), ++ reporting.Severity(reporting.Severity.LOW), ++ ]) ++ else: ++ api.current_logger().error( ++ "Failed to remove the source system EFI directory at {}: {}".format( ++ efi_dir_source, e ++ ) ++ ) ++ summary = ( ++ f"Removal of the source system EFI directory at {efi_dir_source} failed." ++ " Remove the directory manually if present." ++ ) ++ reporting.create_report([ ++ reporting.Title("Failed to remove source system EFI directory"), ++ reporting.Summary(summary), ++ reporting.Groups([ ++ reporting.Groups.BOOT, ++ reporting.Groups.FAILURE, ++ reporting.Groups.POST, ++ ]), ++ reporting.Severity(reporting.Severity.LOW), ++ ]) ++ ++ ++def _replace_boot_entries(): ++ try: ++ efibootinfo = efi.EFIBootInfo() ++ target_entry = _add_boot_entry_for_target(efibootinfo) ++ # NOTE: this isn't strictly necessary as UEFI should set the next entry ++ # to be the first in the BootOrder. This is a workaround to make sure ++ # the "efi_finalization_fix" actor doesn't attempt to set BootNext to ++ # the original entry which will be deleted below. ++ efi.set_bootnext(target_entry.boot_number) ++ except efi.EFIError as e: ++ raise StopActorExecutionError( ++ "Failed to add UEFI boot entry for the target system", ++ details={"details": str(e)}, ++ ) ++ ++ # NOTE: Some UEFI implementations, such as OVMF used in qemu, automatically ++ # add entries for EFI directories. Though the entry is named after the EFI ++ # directory (so "redhat" on RHEL). However if the UEFI doesn't add an entry ++ # after we fail to do so, it might render the OS "unbootable". 
++ # Let's keep the source entry and directory if we can't add the target entry as a ++ # backup. ++ ++ _try_remove_source_efi_dir() ++ ++ try: ++ # doesn't matter if the removal of source EFI dir failed, we don't want ++ # the source entry, we have the new one for target ++ _remove_boot_entry_for_source(efibootinfo) ++ except efi.EFIError as e: ++ api.current_logger().error("Failed to remove source distro EFI boot entry: {}".format(e)) ++ ++ # This is low severity, some UEFIs will automatically remove an entry ++ # whose EFI binary no longer exists at least OVMF, used by qemu, does. ++ summary = ( ++ "Removal of the source system UEFI boot entry failed." ++ " Check UEFI boot entries and manually remove it if it's still present." ++ ) ++ reporting.create_report( ++ [ ++ reporting.Title("Failed to remove source system EFI boot entry"), ++ reporting.Summary(summary), ++ reporting.Groups( ++ [ ++ reporting.Groups.BOOT, ++ reporting.Groups.FAILURE, ++ reporting.Groups.POST, ++ ] ++ ), ++ reporting.Severity(reporting.Severity.LOW), ++ ] ++ ) ++ ++ ++def process(): ++ if not is_conversion(): ++ return ++ ++ if not architecture.matches_architecture(architecture.ARCH_X86_64, architecture.ARCH_ARM64): ++ return ++ ++ if not efi.is_efi(): ++ return ++ ++ # NOTE no need to check whether we have the efibootmgr binary, the ++ # efi_check_boot actor does ++ ++ _replace_boot_entries() +diff --git a/repos/system_upgrade/common/actors/convert/updateefi/tests/test_updateefi.py b/repos/system_upgrade/common/actors/convert/updateefi/tests/test_updateefi.py +new file mode 100644 +index 00000000..0ad31cc5 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/convert/updateefi/tests/test_updateefi.py +@@ -0,0 +1,469 @@ ++import copy ++import errno ++import os ++import types ++from unittest import mock ++ ++import pytest ++ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.actor import updateefi ++from leapp.libraries.common import efi ++from leapp.libraries.common.config import architecture ++from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked, logger_mocked ++from leapp.libraries.stdlib import api ++ ++ ++@pytest.fixture ++def mock_logger(): ++ with mock.patch( ++ "leapp.libraries.stdlib.api.current_logger", new_callable=logger_mocked ++ ) as mock_logger: ++ yield mock_logger ++ ++ ++@pytest.fixture ++def mock_create_report(): ++ with mock.patch( ++ "leapp.reporting.create_report", new_callable=create_report_mocked ++ ) as mock_create_report: ++ yield mock_create_report ++ ++ ++@pytest.mark.parametrize( ++ "arch, exist, expect", ++ [ ++ (architecture.ARCH_X86_64, ["shimx64.efi", "grubx64.efi"], r"\EFI\redhat\shimx64.efi"), ++ (architecture.ARCH_X86_64, ["shimx64.efi"], r"\EFI\redhat\shimx64.efi"), ++ (architecture.ARCH_X86_64, ["grubx64.efi"], r"\EFI\redhat\grubx64.efi"), ++ (architecture.ARCH_X86_64, [], None), ++ ++ (architecture.ARCH_ARM64, ["shimaa64.efi", "grubaa64.efi"], r"\EFI\redhat\shimaa64.efi"), ++ (architecture.ARCH_ARM64, ["shimaa64.efi"], r"\EFI\redhat\shimaa64.efi"), ++ (architecture.ARCH_ARM64, ["grubaa64.efi"], r"\EFI\redhat\grubaa64.efi"), ++ (architecture.ARCH_ARM64, [], None), ++ ] ++) ++def test__get_target_efi_bin_path(monkeypatch, arch, exist, expect): ++ # distro is not important, just make it look like conversion ++ curr_actor = CurrentActorMocked(arch=arch, src_distro="centos", dst_distro="rhel") ++ monkeypatch.setattr(api, "current_actor", curr_actor) ++ ++ def mock_exists(path): ++ efidir = "/boot/efi/EFI/redhat" ++ 
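++        # Only the binaries listed in `exist` are treated as present under the
++        # /boot/efi/EFI/redhat directory; everything else appears missing.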
return path in [os.path.join(efidir, p) for p in exist] ++ ++ monkeypatch.setattr(os.path, "exists", mock_exists) ++ ++ actual = updateefi._get_target_efi_bin_path() ++ assert actual == expect ++ ++ ++TEST_ADD_ENTRY_INPUTS = [ ++ ("Red Hat Enterprise Linux", r"\EFI\redhat\shimx64.efi"), ++ ("Red Hat Enterprise Linux", r"\EFI\redhat\grubx64.efi"), ++ ("Centos Stream", r"\EFI\centos\grubx64.efi"), ++] ++ ++ ++@pytest.mark.parametrize("label, efi_bin_path", TEST_ADD_ENTRY_INPUTS) ++@mock.patch("leapp.libraries.common.efi.get_boot_entry") ++@mock.patch("leapp.libraries.common.efi.add_boot_entry") ++def test__add_boot_entry_for_target( ++ mock_add_boot_entry, mock_get_boot_entry, monkeypatch, label, efi_bin_path ++): ++ # need to mock this but it's unused because distro_id_to_pretty_name is mocked ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_distro="whatever")) ++ monkeypatch.setattr(updateefi, "distro_id_to_pretty_name", lambda _distro: label) ++ monkeypatch.setattr(updateefi, "_get_target_efi_bin_path", lambda: efi_bin_path) ++ ++ mock_efibootinfo = mock.MagicMock(name="EFIBootInfo_instance") ++ entry = efi.EFIBootLoaderEntry("0003", label, True, efi_bin_path) ++ mock_get_boot_entry.return_value = None ++ mock_add_boot_entry.return_value = entry ++ ++ assert entry == updateefi._add_boot_entry_for_target(mock_efibootinfo) ++ ++ mock_get_boot_entry.assert_called_once_with(mock_efibootinfo, label, efi_bin_path) ++ mock_add_boot_entry.assert_called_once_with(label, efi_bin_path) ++ ++ ++@pytest.mark.parametrize("label, efi_bin_path", TEST_ADD_ENTRY_INPUTS) ++@mock.patch("leapp.libraries.common.efi.get_boot_entry") ++@mock.patch("leapp.libraries.common.efi.add_boot_entry") ++def test__add_boot_entry_for_target_already_exists( ++ mock_add_boot_entry, mock_get_boot_entry, monkeypatch, label, efi_bin_path ++): ++ # need to mock this but it's unused because distro_id_to_pretty_name is mocked ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_distro="whatever")) ++ monkeypatch.setattr(updateefi, "distro_id_to_pretty_name", lambda _distro: label) ++ monkeypatch.setattr(updateefi, "_get_target_efi_bin_path", lambda: efi_bin_path) ++ ++ mock_efibootinfo = mock.MagicMock(name="EFIBootInfo_instance") ++ entry = efi.EFIBootLoaderEntry("0003", label, True, efi_bin_path) ++ mock_get_boot_entry.return_value = entry ++ ++ out = updateefi._add_boot_entry_for_target(mock_efibootinfo) ++ ++ assert out == entry ++ mock_get_boot_entry.assert_called_once_with(mock_efibootinfo, label, efi_bin_path) ++ mock_add_boot_entry.assert_not_called() ++ ++ ++def test__add_boot_entry_for_target_no_efi_bin(monkeypatch): ++ monkeypatch.setattr(updateefi, "_get_target_efi_bin_path", lambda: None) ++ ++ with pytest.raises(efi.EFIError, match="Unable to detect any UEFI binary file."): ++ mock_efibootinfo = mock.MagicMock(name="EFIBootInfo_instance") ++ updateefi._add_boot_entry_for_target(mock_efibootinfo) ++ ++ ++class MockEFIBootInfo: ++ ++ def __init__(self, entries, current_bootnum=None): ++ # just to have some entries even when we don't need the entries ++ other_entry = efi.EFIBootLoaderEntry( ++ "0001", ++ "UEFI: Built-in EFI Shell", ++ True, ++ "VenMedia(5023b95c-db26-429b-a648-bd47664c8012)..BO", ++ ) ++ entries = entries + [other_entry] ++ ++ self.boot_order = tuple(entry.boot_number for entry in entries) ++ self.current_bootnum = current_bootnum or self.boot_order[0] ++ self.next_bootnum = None ++ self.entries = {entry.boot_number: entry for entry in entries} ++ ++ ++TEST_SOURCE_ENTRY 
= efi.EFIBootLoaderEntry( ++ "0002", "Centos Stream", True, r"File(\EFI\centos\shimx64.efi)" ++) ++TEST_TARGET_ENTRY = efi.EFIBootLoaderEntry( ++ "0003", "Red Hat Enterprise Linux", True, r"File(\EFI\redhat\shimx64.efi)" ++) ++ ++ ++@mock.patch("leapp.libraries.common.efi.remove_boot_entry") ++@mock.patch("leapp.libraries.common.efi.EFIBootInfo") ++def test__remove_boot_entry_for_source( ++ mock_efibootinfo, ++ mock_remove_boot_entry, ++): ++ efibootinfo = MockEFIBootInfo([TEST_SOURCE_ENTRY], current_bootnum="0002") ++ mock_efibootinfo.return_value = MockEFIBootInfo( ++ [TEST_TARGET_ENTRY, TEST_SOURCE_ENTRY], current_bootnum="0002" ++ ) ++ ++ updateefi._remove_boot_entry_for_source(efibootinfo) ++ ++ mock_efibootinfo.assert_called_once() ++ mock_remove_boot_entry.assert_called_once_with("0002") ++ ++ ++@mock.patch("leapp.libraries.common.efi.remove_boot_entry") ++@mock.patch("leapp.libraries.common.efi.EFIBootInfo") ++def test__remove_boot_entry_for_source_no_longer_exists( ++ mock_efibootinfo, mock_remove_boot_entry, mock_logger ++): ++ efibootinfo = MockEFIBootInfo([TEST_SOURCE_ENTRY], current_bootnum="0002") ++ mock_efibootinfo.return_value = MockEFIBootInfo( ++ [TEST_TARGET_ENTRY], current_bootnum="0002" ++ ) ++ ++ updateefi._remove_boot_entry_for_source(efibootinfo) ++ ++ msg = ( ++ "The currently booted source distro EFI boot entry has been already" ++ " removed since the target entry has been added, skipping removal." ++ ) ++ assert msg in mock_logger.dbgmsg ++ mock_efibootinfo.assert_called_once() ++ mock_remove_boot_entry.assert_not_called() ++ ++ ++@mock.patch("leapp.libraries.common.efi.remove_boot_entry") ++@mock.patch("leapp.libraries.common.efi.EFIBootInfo") ++def test__remove_boot_entry_for_source_has_changed( ++ mock_efibootinfo, mock_remove_boot_entry, mock_logger ++): ++ efibootinfo = MockEFIBootInfo([TEST_SOURCE_ENTRY], current_bootnum="0002") ++ modified_source_entry = copy.copy(TEST_SOURCE_ENTRY) ++ modified_source_entry.efi_bin_source = r"File(\EFI\centos\grubx64.efi)" ++ mock_efibootinfo.return_value = MockEFIBootInfo( ++ [TEST_TARGET_ENTRY, modified_source_entry], current_bootnum="0002" ++ ) ++ ++ updateefi._remove_boot_entry_for_source(efibootinfo) ++ ++ msg = ( ++ "The boot entry with current bootnum has changed since the target" ++ " distro entry has been added, skipping removal." 
++ ) ++ assert msg in mock_logger.dbgmsg ++ mock_efibootinfo.assert_called_once() ++ mock_remove_boot_entry.assert_not_called() ++ ++ ++class TestRemoveSourceEFIDir: ++ SOURCE_EFIDIR = "/boot/efi/EFI/centos" ++ TARGET_EFIDIR = "/boot/efi/EFI/redhat" ++ ++ @pytest.fixture(autouse=True) ++ def mock_current_actor(self): # pylint:disable=no-self-use ++ with mock.patch("leapp.libraries.stdlib.api.current_actor") as mock_current_actor: ++ mock_current_actor.return_value = CurrentActorMocked( ++ src_distro="centos", dst_distro="redhat" ++ ) ++ yield ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test_success( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger ++ ): ++ mock_efidir_path.side_effect = [self.SOURCE_EFIDIR, self.TARGET_EFIDIR] ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_exists.assert_called_once_with(self.SOURCE_EFIDIR) ++ mock_rmdir.assert_called_once_with(self.SOURCE_EFIDIR) ++ msg = f"Deleted source system EFI directory at {self.SOURCE_EFIDIR}" ++ assert msg in mock_logger.dbgmsg ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test__efi_dir_does_not_exist( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger ++ ): ++ mock_efidir_path.return_value = self.SOURCE_EFIDIR ++ mock_exists.return_value = False ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_exists.assert_called_once_with(self.SOURCE_EFIDIR) ++ mock_rmdir.assert_not_called() ++ msg = f"Source distro EFI directory at {self.SOURCE_EFIDIR} does not exist, skipping removal." ++ assert msg in mock_logger.dbgmsg ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test_source_efi_dir_same_as_target( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger ++ ): ++ """ ++ Source and target dirs use the same directory ++ """ ++ mock_efidir_path.side_effect = [self.TARGET_EFIDIR, self.TARGET_EFIDIR] ++ mock_exists.return_value = True ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_exists.assert_called_once_with(self.TARGET_EFIDIR) ++ mock_rmdir.assert_not_called() ++ msg = f"Source and target distros use the same '{self.TARGET_EFIDIR}' EFI directory." 
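++        # The expected text must match the library's debug message verbatim,
++        # otherwise the membership check below fails.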
++ assert msg in mock_logger.dbgmsg ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test_rmdir_fail( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger, mock_create_report ++ ): ++ """ ++ Test removal failures ++ """ ++ mock_efidir_path.side_effect = [self.SOURCE_EFIDIR, self.TARGET_EFIDIR] ++ mock_rmdir.side_effect = OSError ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_exists.assert_called_once_with(self.SOURCE_EFIDIR) ++ mock_rmdir.assert_called_once_with(self.SOURCE_EFIDIR) ++ msg = f"Failed to remove the source system EFI directory at {self.SOURCE_EFIDIR}" ++ assert msg in mock_logger.errmsg[0] ++ assert mock_create_report.called == 1 ++ title = "Failed to remove source system EFI directory" ++ assert mock_create_report.report_fields["title"] == title ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test_dir_no_longer_exists_failed_rmdir( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger ++ ): ++ mock_efidir_path.side_effect = [self.SOURCE_EFIDIR, self.TARGET_EFIDIR] ++ mock_rmdir.side_effect = FileNotFoundError( ++ 2, "No such file or directory", self.SOURCE_EFIDIR ++ ) ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_exists.assert_called_once_with(self.SOURCE_EFIDIR) ++ mock_rmdir.assert_called_once_with(self.SOURCE_EFIDIR) ++ msg = ( ++ "Couldn't remove the source system EFI directory at" ++ f" {self.SOURCE_EFIDIR}: the directory no longer exists" ++ ) ++ assert msg in mock_logger.dbgmsg[0] ++ ++ @mock.patch("os.path.exists") ++ @mock.patch("leapp.libraries.actor.updateefi.get_distro_efidir_canon_path") ++ @mock.patch("os.rmdir") ++ def test_dir_not_empty( ++ self, mock_rmdir, mock_efidir_path, mock_exists, mock_logger, mock_create_report ++ ): ++ """ ++ Test that the directory is not removed if there are any leftover files ++ ++ The distro provided files in the efi dir are usually removed during the RPM ++ upgrade transaction (shim and grub own them). If there are any leftover ++ files, such as custom user files, the directory should be preserved and ++ report created. 
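++        The ENOTEMPTY failure is simulated via the os.rmdir side_effect below.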
++ """ ++ mock_efidir_path.side_effect = [self.SOURCE_EFIDIR, self.TARGET_EFIDIR] ++ mock_rmdir.side_effect = OSError( ++ errno.ENOTEMPTY, os.strerror(errno.ENOTEMPTY), self.SOURCE_EFIDIR ++ ) ++ ++ updateefi._try_remove_source_efi_dir() ++ ++ mock_rmdir.assert_called_once_with(self.SOURCE_EFIDIR) ++ mock_exists.assert_called_once_with(self.SOURCE_EFIDIR) ++ msg = "Didn't remove the source EFI directory {}, it does not exist".format( ++ self.SOURCE_EFIDIR ++ ) ++ assert msg in mock_logger.dbgmsg[0] ++ assert mock_create_report.called == 1 ++ title = "Review leftover files in the source OS EFI directory" ++ assert mock_create_report.report_fields["title"] == title ++ ++ ++@pytest.mark.parametrize( ++ "is_conversion, arch, is_efi, should_skip", ++ [ ++ # conversion, is efi ++ (True, architecture.ARCH_X86_64, True, False), ++ (True, architecture.ARCH_ARM64, True, False), ++ (True, architecture.ARCH_PPC64LE, True, True), ++ (True, architecture.ARCH_S390X, True, True), ++ # conversion, not efi ++ (True, architecture.ARCH_X86_64, False, True), ++ (True, architecture.ARCH_ARM64, False, True), ++ (True, architecture.ARCH_PPC64LE, False, True), ++ (True, architecture.ARCH_S390X, False, True), ++ # not conversion, is efi ++ (False, architecture.ARCH_X86_64, True, True), ++ (False, architecture.ARCH_ARM64, True, True), ++ (False, architecture.ARCH_PPC64LE, True, True), ++ (False, architecture.ARCH_S390X, True, True), ++ # not conversion, not efi ++ (False, architecture.ARCH_X86_64, False, True), ++ (False, architecture.ARCH_ARM64, False, True), ++ (False, architecture.ARCH_PPC64LE, False, True), ++ (False, architecture.ARCH_S390X, False, True), ++ ], ++) ++@mock.patch("leapp.libraries.actor.updateefi._replace_boot_entries") ++def test_process_skip( ++ mock_replace_boot_entries, monkeypatch, is_conversion, arch, is_efi, should_skip ++): ++ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(arch=arch)) ++ monkeypatch.setattr(updateefi, "is_conversion", lambda: is_conversion) ++ monkeypatch.setattr(efi, "is_efi", lambda: is_efi) ++ ++ updateefi.process() ++ ++ if should_skip: ++ mock_replace_boot_entries.assert_not_called() ++ else: ++ mock_replace_boot_entries.assert_called_once() ++ ++ ++class TestReplaceBootEntries: ++ ++ @pytest.fixture ++ def mocks(self): # pylint:disable=no-self-use ++ UPDATE_EFI = 'leapp.libraries.actor.updateefi' ++ EFI_LIB = 'leapp.libraries.common.efi' ++ with mock.patch(f'{UPDATE_EFI}._try_remove_source_efi_dir') as remove_source_dir, \ ++ mock.patch(f'{UPDATE_EFI}._remove_boot_entry_for_source') as remove_source_entry, \ ++ mock.patch(f'{UPDATE_EFI}._add_boot_entry_for_target') as add_target_entry, \ ++ mock.patch(f'{EFI_LIB}.set_bootnext') as set_bootnext, \ ++ mock.patch(f'{EFI_LIB}.EFIBootInfo') as efibootinfo: ++ ++ # default for happy path ++ efibootinfo_obj = mock.MagicMock(name="EFIBootInfo_instance") ++ efibootinfo.return_value = efibootinfo_obj ++ ++ entry = mock.MagicMock(name="target_entry") ++ entry.boot_number = "0003" ++ add_target_entry.return_value = entry ++ ++ yield types.SimpleNamespace( ++ EFIBootInfo=efibootinfo, ++ set_bootnext=set_bootnext, ++ add_boot_entry_for_target=add_target_entry, ++ try_remove_source_efi_dir=remove_source_dir, ++ remove_boot_entry_for_source=remove_source_entry, ++ logger=mock_logger, ++ ) ++ ++ def test__fail_remove_source_entry( # pylint:disable=no-self-use ++ self, mocks, mock_logger, mock_create_report ++ ): ++ mocks.remove_boot_entry_for_source.side_effect = efi.EFIError ++ ++ updateefi._replace_boot_entries() ++ 
++ msg = "Failed to remove source distro EFI boot entry" ++ assert msg in mock_logger.errmsg[0] ++ ++ assert mock_create_report.called == 1 ++ title = "Failed to remove source system EFI boot entry" ++ assert mock_create_report.report_fields["title"] == title ++ ++ @pytest.mark.parametrize( ++ "which_fail", ["EFIBootInfo", "add_target", "set_bootnext"] ++ ) ++ def test__fail_add_target_entry( # pylint:disable=no-self-use ++ self, mocks, mock_logger, mock_create_report, which_fail ++ ): ++ if which_fail == "EFIBootInfo": ++ mocks.EFIBootInfo.side_effect = efi.EFIError ++ elif which_fail == "add_target": ++ mocks.add_boot_entry_for_target.side_effect = efi.EFIError ++ elif which_fail == "set_bootnext": ++ mocks.set_bootnext.side_effect = efi.EFIError ++ ++ with pytest.raises(StopActorExecutionError): ++ updateefi._replace_boot_entries() ++ ++ mocks.try_remove_source_efi_dir.assert_not_called() ++ mocks.remove_boot_entry_for_source.assert_not_called() ++ assert not mock_create_report.called ++ ++ def test__replace_boot_entries_success( # pylint:disable=no-self-use ++ self, mocks, mock_logger ++ ): ++ """Test that operations are carried out in the right order""" ++ mgr = mock.MagicMock() ++ mgr.attach_mock(mocks.EFIBootInfo, "EFIBootInfo") ++ mgr.attach_mock(mocks.set_bootnext, "set_bootnext") ++ mgr.attach_mock(mocks.add_boot_entry_for_target, "add_target_entry") ++ mgr.attach_mock(mocks.remove_boot_entry_for_source, "remove_source_entry") ++ mgr.attach_mock(mocks.try_remove_source_efi_dir, "remove_source_efidir") ++ ++ updateefi._replace_boot_entries() ++ ++ expected_sequence = [ ++ mock.call.EFIBootInfo(), ++ mock.call.add_target_entry(efi.EFIBootInfo.return_value), ++ mock.call.set_bootnext(mocks.add_boot_entry_for_target.return_value.boot_number), ++ mock.call.remove_source_efidir(), ++ mock.call.remove_source_entry(efi.EFIBootInfo.return_value), ++ ] ++ assert mgr.mock_calls == expected_sequence diff --git a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py index 003f3fc5..9e7bbf4a 100644 --- a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py @@ -3910,6 +17597,134 @@ index f42909f0..6383a56f 100644 + + if not has_grub_cfg: + run(['/sbin/grub2-mkconfig', '-o', grub_cfg_path]) +diff --git a/repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/actor.py b/repos/system_upgrade/common/actors/emit_net_naming_scheme/actor.py +similarity index 100% +rename from repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/actor.py +rename to repos/system_upgrade/common/actors/emit_net_naming_scheme/actor.py +diff --git a/repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/libraries/emit_net_naming.py b/repos/system_upgrade/common/actors/emit_net_naming_scheme/libraries/emit_net_naming.py +similarity index 79% +rename from repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/libraries/emit_net_naming.py +rename to repos/system_upgrade/common/actors/emit_net_naming_scheme/libraries/emit_net_naming.py +index bab62a56..7b112ff0 100644 +--- a/repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/libraries/emit_net_naming.py ++++ b/repos/system_upgrade/common/actors/emit_net_naming_scheme/libraries/emit_net_naming.py +@@ -1,5 +1,5 @@ + from leapp.exceptions import StopActorExecutionError +-from leapp.libraries.common.config import get_env, version ++from leapp.libraries.common.config import get_env, get_target_distro_id, version + from 
leapp.libraries.stdlib import api + from leapp.models import ( + KernelCmdline, +@@ -10,7 +10,10 @@ from leapp.models import ( + UpgradeKernelCmdlineArgTasks + ) + +-NET_NAMING_SYSATTRS_RPM_NAME = 'rhel-net-naming-sysattrs' ++NET_NAMING_SYSATTRS_RPM_NAME = { ++ '9': 'rhel-net-naming-sysattrs', ++ '10': 'net-naming-sysattrs', ++} + + + def is_net_scheme_compatible_with_current_cmdline(): +@@ -45,15 +48,21 @@ def is_net_scheme_compatible_with_current_cmdline(): + + def emit_msgs_to_use_net_naming_schemes(): + is_feature_enabled = get_env('LEAPP_DISABLE_NET_NAMING_SCHEMES', '0') != '1' +- is_upgrade_8to9 = version.get_target_major_version() == '9' +- is_net_naming_enabled_and_permitted = is_feature_enabled and is_upgrade_8to9 +- if not is_net_naming_enabled_and_permitted: ++ ++ is_net_naming_available = version.get_target_major_version() == "9" or ( ++ version.matches_target_version(">= 10.2") ++ # TODO the net-naming-sysattrs pkg is not yet available on CS10, remove ++ # this when it becomes ++ and not get_target_distro_id() == "centos" ++ ) ++ ++ if not (is_feature_enabled and is_net_naming_available): + return + + # The package should be installed regardless of whether we will modify the cmdline - + # if the cmdline already contains net.naming-scheme, then the package will be useful + # in both, the upgrade environment and on the target system. +- pkgs_to_install = [NET_NAMING_SYSATTRS_RPM_NAME] ++ pkgs_to_install = [NET_NAMING_SYSATTRS_RPM_NAME[version.get_target_major_version()]] + api.produce(TargetUserSpaceUpgradeTasks(install_rpms=pkgs_to_install)) + api.produce(RpmTransactionTasks(to_install=pkgs_to_install)) + +diff --git a/repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py b/repos/system_upgrade/common/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py +similarity index 74% +rename from repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py +rename to repos/system_upgrade/common/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py +index acf72241..9c1f91bb 100644 +--- a/repos/system_upgrade/el8toel9/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py ++++ b/repos/system_upgrade/common/actors/emit_net_naming_scheme/tests/test_emit_net_naming_scheme.py +@@ -50,12 +50,32 @@ def test_is_net_scheme_compatible_with_current_cmdline(monkeypatch, kernel_args, + (False, True) + ] + ) +-def test_emit_msgs_to_use_net_naming_schemes(monkeypatch, is_net_scheme_enabled, is_current_cmdline_compatible): +- envvar_value = '0' if is_net_scheme_enabled else '1' +- +- mocked_actor = CurrentActorMocked(src_ver='8.10', +- dst_ver='9.5', +- envars={'LEAPP_DISABLE_NET_NAMING_SCHEMES': envvar_value}) ++@pytest.mark.parametrize( ++ 'source_ver, target_ver', ++ [ ++ ('8.10', '9.5'), ++ ('8.10', '9.8'), ++ ('9.6', '10.0'), ++ ('9.8', '10.2'), ++ ] ++) ++@pytest.mark.parametrize('target_distro', ['rhel', 'centos']) ++def test_emit_msgs_to_use_net_naming_schemes( ++ monkeypatch, ++ is_net_scheme_enabled, ++ is_current_cmdline_compatible, ++ source_ver, ++ target_ver, ++ target_distro, ++): ++ mocked_actor = CurrentActorMocked( ++ src_ver=source_ver, ++ dst_ver=target_ver, ++ dst_distro=target_distro, ++ envars={ ++ "LEAPP_DISABLE_NET_NAMING_SCHEMES": "0" if is_net_scheme_enabled else "1" ++ }, ++ ) + monkeypatch.setattr(api, 'current_actor', mocked_actor) + + monkeypatch.setattr(api, 'produce', produce_mocked()) +@@ -71,13 +91,22 @@ def 
test_emit_msgs_to_use_net_naming_schemes(monkeypatch, is_net_scheme_enabled, + assert not next(msgs, None), 'More than one message of type {type} produced'.format(type=type) + return msg + ++ target_major = target_ver.split(".")[0] ++ pkg_name = emit_net_naming_lib.NET_NAMING_SYSATTRS_RPM_NAME[target_major] + produced_messages = api.produce.model_instances +- if is_net_scheme_enabled: ++ ++ if ( ++ is_net_scheme_enabled ++ # the package is available since 10.2 ++ and target_ver != "10.0" ++ # TODO not yet available in CS 10, remove this when it is ++ and not (target_distro == "centos" and target_major == "10") ++ ): + userspace_tasks = ensure_one_msg_of_type_produced(produced_messages, TargetUserSpaceUpgradeTasks) +- assert userspace_tasks.install_rpms == [emit_net_naming_lib.NET_NAMING_SYSATTRS_RPM_NAME] ++ assert userspace_tasks.install_rpms == [pkg_name] + + rpm_tasks = ensure_one_msg_of_type_produced(produced_messages, RpmTransactionTasks) +- assert rpm_tasks.to_install == [emit_net_naming_lib.NET_NAMING_SYSATTRS_RPM_NAME] ++ assert rpm_tasks.to_install == [pkg_name] + else: + assert not api.produce.called + return diff --git a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py b/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py index 582a5821..18f2c33f 100644 --- a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py @@ -3945,6 +17760,378 @@ index 582a5821..18f2c33f 100644 + to_reinstall=list(to_reinstall), modules_to_reset=list(modules_to_reset.values()), modules_to_enable=list(modules_to_enable.values()))) +diff --git a/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/actor.py b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/actor.py +new file mode 100644 +index 00000000..4a3cd85d +--- /dev/null ++++ b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/actor.py +@@ -0,0 +1,22 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import fix_nvmf_initqueue_rules as fix_nvmf_initqueue_rules_lib ++from leapp.models import LiveModeConfig, NVMEInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks ++from leapp.tags import InterimPreparationPhaseTag, IPUWorkflowTag ++ ++ ++class FixNvmfInitqueueRules(Actor): ++ """ ++ Replace nvmf dracut module's initqueue rules with a our own version. ++ ++ The original 95-nvmf-initqueue.rules file in the nvmf dracut module ++ calls initqueue, which might not be running when the udev event lands. ++ Therefore, we call `nvme connect-all` directly when when the udev event is triggered. ++ """ ++ ++ name = 'fix_nvmf_initqueue_rules' ++ consumes = (LiveModeConfig, NVMEInfo, TargetUserSpaceInfo) ++ produces = (UpgradeInitramfsTasks,) ++ tags = (IPUWorkflowTag, InterimPreparationPhaseTag) ++ ++ def process(self): ++ fix_nvmf_initqueue_rules_lib.replace_nvmf_initqueue_rules() +diff --git a/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/files/95-nvmf-initqueue.rules b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/files/95-nvmf-initqueue.rules +new file mode 100644 +index 00000000..52a77fef +--- /dev/null ++++ b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/files/95-nvmf-initqueue.rules +@@ -0,0 +1,7 @@ ++# ++# Original nvmf-initqueue rules called initqueue, which might not be running when the udev event lands. ++# Therefore, we call it directly. 
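# Note (editorial, not part of the shipped rules file): the rule below matches "change"
# uevents on the "fc" subsystem that carry FC_EVENT=nvmediscovery and any
# NVMEFC_TRADDR / NVMEFC_HOST_TRADDR values, and runs `nvme connect-all` directly with
# the transport address and host transport address taken from those uevent environment
# variables, instead of scheduling the command through dracut's initqueue.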
++ ++ACTION=="change", SUBSYSTEM=="fc", ENV{FC_EVENT}=="nvmediscovery", \ ++ ENV{NVMEFC_HOST_TRADDR}=="*", ENV{NVMEFC_TRADDR}=="*", \ ++ RUN+="/usr/sbin/nvme connect-all --transport=fc --traddr=$env{NVMEFC_TRADDR} --host-traddr=$env{NVMEFC_HOST_TRADDR}" +diff --git a/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/libraries/fix_nvmf_initqueue_rules.py b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/libraries/fix_nvmf_initqueue_rules.py +new file mode 100644 +index 00000000..9fd74ea9 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/libraries/fix_nvmf_initqueue_rules.py +@@ -0,0 +1,66 @@ ++import os ++import shutil ++ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.stdlib import api ++from leapp.models import LiveModeConfig, NVMEInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks ++ ++NVMF_DRACUT_MODULE_DIR = '/usr/lib/dracut/modules.d/95nvmf' ++NVMF_INITQUEUE_RULES_FILENAME = '95-nvmf-initqueue.rules' ++NVMF_INITQUEUE_RULES_PATH = os.path.join(NVMF_DRACUT_MODULE_DIR, NVMF_INITQUEUE_RULES_FILENAME) ++ ++ ++def _get_rules_file_path(): ++ """ ++ Get the path to the fixed 95-nvmf-initqueue.rules file bundled with this actor. ++ """ ++ return api.get_actor_file_path(NVMF_INITQUEUE_RULES_FILENAME) ++ ++ ++def is_livemode_enabled() -> bool: ++ livemode_config = next(api.consume(LiveModeConfig), None) ++ if livemode_config and livemode_config.is_enabled: ++ return True ++ return False ++ ++ ++def replace_nvmf_initqueue_rules(): ++ """ ++ Replace the nvmf dracut module's initqueue rules in the target userspace. ++ """ ++ nvme_info = next(api.consume(NVMEInfo), None) ++ if not nvme_info or not nvme_info.devices: ++ api.current_logger().debug('No NVMe devices detected, skipping nvmf initqueue rules replacement.') ++ return ++ ++ if is_livemode_enabled(): ++ api.current_logger().debug('LiveMode is enabled. Modifying initqueue stop condition is not required.') ++ return ++ ++ userspace_info = next(api.consume(TargetUserSpaceInfo), None) ++ source_rules_path = _get_rules_file_path() ++ ++ target_rules_path = os.path.join(userspace_info.path, NVMF_INITQUEUE_RULES_PATH.lstrip('/')) ++ target_dir = os.path.dirname(target_rules_path) ++ ++ # Check if the nvmf dracut module directory exists in the target userspace ++ if not os.path.isdir(target_dir): ++ api.current_logger().debug( ++ 'The nvmf dracut module directory {} does not exist in target userspace. 
' ++ 'Skipping rules replacement.'.format(target_dir) ++ ) ++ return ++ ++ api.current_logger().info( ++ 'Replacing {} in target userspace with fixed version.'.format(NVMF_INITQUEUE_RULES_PATH) ++ ) ++ ++ try: ++ shutil.copy2(source_rules_path, target_rules_path) ++ api.current_logger().debug( ++ 'Successfully copied {} to {}'.format(source_rules_path, target_rules_path) ++ ) ++ except (IOError, OSError) as e: ++ raise StopActorExecutionError('Failed to copy nvmf initqueue rules to target userspace: {}'.format(e)) ++ ++ api.produce(UpgradeInitramfsTasks()) # To enforce ordering of actors +diff --git a/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/tests/test_fix_nvmf_initqueue_rules.py b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/tests/test_fix_nvmf_initqueue_rules.py +new file mode 100644 +index 00000000..93bc0285 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/initramfs/fix_nvmf_initqueue_rules/tests/test_fix_nvmf_initqueue_rules.py +@@ -0,0 +1,92 @@ ++import os ++import tempfile ++ ++from leapp.libraries.actor import fix_nvmf_initqueue_rules ++from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked, produce_mocked ++from leapp.libraries.stdlib import api ++from leapp.models import LiveModeConfig, NVMEDevice, NVMEInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks ++ ++ ++def test_replace_nvmf_initqueue_rules_no_nvme_devices(monkeypatch): ++ """Test that replacement is skipped when no NVMe devices are detected.""" ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ monkeypatch.setattr(api, 'current_logger', logger_mocked()) ++ ++ fix_nvmf_initqueue_rules.replace_nvmf_initqueue_rules() ++ ++ assert any('No NVMe devices detected' in msg for msg in api.current_logger.dbgmsg) ++ ++ ++def test_replace_nvmf_initqueue_rules_livemode_enabled(monkeypatch): ++ """Test that replacement is skipped when no LiveMode is enabled.""" ++ livemode_info = LiveModeConfig( ++ is_enabled=True, ++ squashfs_fullpath='' ++ ) ++ ++ nvme_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ nvme_info = NVMEInfo(devices=[nvme_device], hostid='test-hostid', hostnqn='test-hostnqn') ++ ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[livemode_info, nvme_info])) ++ monkeypatch.setattr(api, 'current_logger', logger_mocked()) ++ ++ fix_nvmf_initqueue_rules.replace_nvmf_initqueue_rules() ++ ++ assert any('LiveMode is enabled.' 
in msg for msg in api.current_logger.dbgmsg) ++ ++ ++def test_replace_nvmf_initqueue_rules_empty_nvme_devices(monkeypatch): ++ """Test that replacement is skipped when NVMEInfo has no devices.""" ++ nvme_info = NVMEInfo(devices=[], hostid=None, hostnqn=None) ++ ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[nvme_info])) ++ monkeypatch.setattr(api, 'current_logger', logger_mocked()) ++ ++ fix_nvmf_initqueue_rules.replace_nvmf_initqueue_rules() ++ ++ assert any('No NVMe devices detected' in msg for msg in api.current_logger.dbgmsg) ++ ++ ++def test_replace_nvmf_initqueue_rules_success(monkeypatch): ++ """Test successful replacement of nvmf initqueue rules.""" ++ with tempfile.TemporaryDirectory(prefix='leapp_test_') as tmpdir: ++ nvmf_dir = os.path.join(tmpdir, 'usr/lib/dracut/modules.d/95nvmf') ++ os.makedirs(nvmf_dir) ++ ++ target_rules_path = os.path.join(nvmf_dir, '95-nvmf-initqueue.rules') ++ with open(target_rules_path, 'w') as f: ++ f.write('# original rules') ++ ++ source_file = os.path.join(tmpdir, 'source_rules') ++ with open(source_file, 'w') as f: ++ f.write('# fixed rules content') ++ ++ nvme_device = NVMEDevice( ++ sys_class_path='/sys/class/nvme/nvme0', ++ name='nvme0', ++ transport='fc' ++ ) ++ nvme_info = NVMEInfo(devices=[nvme_device], hostid='test-hostid', hostnqn='test-hostnqn') ++ userspace_info = TargetUserSpaceInfo(path=tmpdir, scratch='/tmp/scratch', mounts='/tmp/mounts') ++ ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[nvme_info, userspace_info])) ++ monkeypatch.setattr(api, 'current_logger', logger_mocked()) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(api, 'get_actor_file_path', lambda x: source_file) ++ ++ fix_nvmf_initqueue_rules.replace_nvmf_initqueue_rules() ++ ++ # Verify the file was replaced ++ with open(target_rules_path) as f: ++ content = f.read() ++ ++ assert content == '# fixed rules content' ++ ++ # Verify UpgradeInitramfsTasks was produced ++ assert api.produce.called == 1 ++ produced_msg = api.produce.model_instances[0] ++ assert isinstance(produced_msg, UpgradeInitramfsTasks) +diff --git a/repos/system_upgrade/common/actors/livemode/emit_livemode_userspace_requirements/libraries/emit_livemode_userspace_requirements.py b/repos/system_upgrade/common/actors/livemode/emit_livemode_userspace_requirements/libraries/emit_livemode_userspace_requirements.py +index 4ecf682b..80d38cb0 100644 +--- a/repos/system_upgrade/common/actors/livemode/emit_livemode_userspace_requirements/libraries/emit_livemode_userspace_requirements.py ++++ b/repos/system_upgrade/common/actors/livemode/emit_livemode_userspace_requirements/libraries/emit_livemode_userspace_requirements.py +@@ -11,16 +11,10 @@ _REQUIRED_PACKAGES_FOR_LIVE_MODE = [ + 'util-linux', + 'dracut-live', + 'dracut-squash', +- 'dmidecode', +- 'pciutils', +- 'lsscsi', + 'passwd', + 'kexec-tools', +- 'vi', + 'less', + 'openssh-clients', +- 'strace', +- 'tcpdump', + ] + + +@@ -33,6 +27,9 @@ def emit_livemode_userspace_requirements(): + if livemode_config.setup_opensshd_with_auth_keys: + packages += ['openssh-server', 'crypto-policies'] + ++ if livemode_config.capture_upgrade_strace_into: ++ packages += ['strace'] ++ + packages = sorted(set(packages)) + + api.produce(TargetUserSpaceUpgradeTasks(install_rpms=packages)) +diff --git a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py 
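The hunk just above (emit_livemode_userspace_requirements) trims the unconditional
live-mode package set and now pulls in strace only when the configuration asks to
capture an strace of the upgrade. A minimal sketch of that pattern, with a hypothetical
helper name (build_livemode_pkgs) and the config field names taken from the hunk:

    def build_livemode_pkgs(config, base_pkgs):
        # Start from the unconditional package set and extend it per configuration.
        pkgs = list(base_pkgs)
        if config.setup_opensshd_with_auth_keys:
            pkgs += ['openssh-server', 'crypto-policies']
        if config.capture_upgrade_strace_into:
            pkgs += ['strace']
        # De-duplicate and keep the order stable so the produced message is reproducible.
        return sorted(set(pkgs))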
+index 7d72204c..2daf0285 100644 +--- a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py ++++ b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py +@@ -1,6 +1,6 @@ + from leapp.configs.actor import livemode as livemode_config_lib + from leapp.exceptions import StopActorExecutionError +-from leapp.libraries.common.config import architecture, get_env ++from leapp.libraries.common.config import get_env + from leapp.libraries.common.rpms import has_package + from leapp.libraries.stdlib import api + from leapp.models import InstalledRPM, LiveModeConfig +@@ -15,14 +15,6 @@ def should_scan_config(): + api.current_logger().debug('Will not scan livemode config - the upgrade is not unsupported.') + return False + +- if not architecture.matches_architecture(architecture.ARCH_X86_64): +- api.current_logger().debug('Will not scan livemode config - livemode is currently limited to x86_64.') +- details = 'Live upgrades are currently limited to x86_64 only.' +- raise StopActorExecutionError( +- 'CPU architecture does not meet requirements for live upgrades', +- details={'Problem': details} +- ) +- + if not has_package(InstalledRPM, 'squashfs-tools'): + # This feature is not to be used by standard users, so stopping the upgrade and providing + # the developer a speedy feedback is OK. +diff --git a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/tests/test_config_scanner.py b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/tests/test_config_scanner.py +index e24aa366..6eb71fee 100644 +--- a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/tests/test_config_scanner.py ++++ b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/tests/test_config_scanner.py +@@ -37,7 +37,7 @@ EnablementTestCase = namedtuple('EnablementTestCase', ('env_vars', 'arch', 'pkgs + result=EnablementResult.DO_NOTHING), + EnablementTestCase(env_vars={'LEAPP_UNSUPPORTED': '1'}, + arch=architecture.ARCH_ARM64, pkgs=('squashfs-tools', ), +- result=EnablementResult.RAISE), ++ result=EnablementResult.SCAN_CONFIG), + EnablementTestCase(env_vars={'LEAPP_UNSUPPORTED': '1'}, + arch=architecture.ARCH_ARM64, pkgs=tuple(), + result=EnablementResult.RAISE), +diff --git a/repos/system_upgrade/common/actors/livemode/removeliveimage/libraries/remove_live_image.py b/repos/system_upgrade/common/actors/livemode/removeliveimage/libraries/remove_live_image.py +index 5bb7e40f..a3718dcf 100644 +--- a/repos/system_upgrade/common/actors/livemode/removeliveimage/libraries/remove_live_image.py ++++ b/repos/system_upgrade/common/actors/livemode/removeliveimage/libraries/remove_live_image.py +@@ -21,5 +21,11 @@ def remove_live_image(): + + try: + os.unlink(artifacts.squashfs_path) ++ except FileNotFoundError: ++ api.current_logger().debug( ++ 'The %s file does not exist. Most likely it has been removed before. 
Usually happens with "leapp rerun".', ++ artifacts.squashfs_path ++ ) ++ return + except OSError as error: +- api.current_logger().warning('Failed to remove %s with error: %s', artifacts.squashfs, error) ++ api.current_logger().warning('Failed to remove %s with error: %s', artifacts.squashfs_path, error) +diff --git a/repos/system_upgrade/common/actors/livemode/removeliveimage/tests/test_remove_live_image.py b/repos/system_upgrade/common/actors/livemode/removeliveimage/tests/test_remove_live_image.py +index 4d6aa821..21a5fb93 100644 +--- a/repos/system_upgrade/common/actors/livemode/removeliveimage/tests/test_remove_live_image.py ++++ b/repos/system_upgrade/common/actors/livemode/removeliveimage/tests/test_remove_live_image.py +@@ -1,10 +1,11 @@ ++import errno + import functools + import os + + import pytest + + from leapp.libraries.actor import remove_live_image as remove_live_image_lib +-from leapp.libraries.common.testutils import CurrentActorMocked ++from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked + from leapp.libraries.stdlib import api + from leapp.models import LiveModeArtifacts, LiveModeConfig + +@@ -22,23 +23,51 @@ _LiveModeConfig = functools.partial(LiveModeConfig, squashfs_fullpath='configure + ) + ) + def test_remove_live_image(monkeypatch, livemode_config, squashfs_path, should_unlink_be_called): +- """ Test whether live-mode image (as found in LiveModeArtifacts) is removed. """ +- ++ """ ++ Test whether live-mode image (as found in LiveModeArtifacts) is removed. ++ """ + messages = [] + if livemode_config: + messages.append(livemode_config) + if squashfs_path: + messages.append(LiveModeArtifacts(squashfs_path=squashfs_path)) + +- monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=messages)) +- + def unlink_mock(path): + if should_unlink_be_called: + assert path == squashfs_path + return + assert False # If we should not call unlink and we call it then fail the test ++ + monkeypatch.setattr(os, 'unlink', unlink_mock) ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=messages)) ++ ++ remove_live_image_lib.remove_live_image() ++ ++ ++@pytest.mark.parametrize('do_file_exists', (True, False)) ++def test_remove_live_image_oserror(monkeypatch, do_file_exists): ++ """ ++ Test that errors are properly handled when trying to unlink the file. 
++ """ ++ messages = [ ++ _LiveModeConfig(is_enabled=True), ++ LiveModeArtifacts(squashfs_path='/var/lib/leapp/upgrade.img') ++ ] ++ ++ def unlink_mock(dummyPath): ++ if do_file_exists: ++ raise OSError('OSError happened :)') ++ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), '/squashfs') + + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=messages)) ++ monkeypatch.setattr(api, 'current_logger', logger_mocked()) ++ monkeypatch.setattr(os, 'unlink', unlink_mock) + + remove_live_image_lib.remove_live_image() ++ ++ if do_file_exists: ++ assert api.current_logger.warnmsg ++ assert not api.current_logger.dbgmsg ++ else: ++ assert not api.current_logger.warnmsg ++ assert api.current_logger.dbgmsg diff --git a/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py b/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py index 32e4527b..1e595e9a 100644 --- a/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py @@ -4041,7 +18228,7 @@ index f24dda68..7ee5d016 100644 all_events = list(chain(*[parse_entry(entry) for entry in events_data['packageinfo']])) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py -index 67e517d1..ec7d001a 100644 +index 02107314..de4b6945 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -1,5 +1,6 @@ @@ -4097,7 +18284,7 @@ index 67e517d1..ec7d001a 100644 return transaction_configuration -@@ -133,6 +138,7 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, +@@ -133,25 +138,44 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, logger = api.current_logger() # Start with the installed packages and modify the set according to release events target_pkgs = set(source_installed_pkgs) @@ -4105,7 +18292,65 @@ index 67e517d1..ec7d001a 100644 release_events = [e for e in events if e.to_release == release] -@@ -176,9 +182,12 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, ++ def log_replaced_pkgs(removed, added): ++ removed_pkgs_str = ', '.join(str(pkg) for pkg in removed) or '[]' ++ added_pkgs_str = ', '.join(str(pkg) for pkg in added) or '[]' ++ logger.debug('Applying event %d (%s): replacing packages %s with %s', ++ event.id, event.action.name, removed_pkgs_str, added_pkgs_str) ++ + for event in release_events: + # PRESENCE events have a different semantics than the other events - they add a package to a target state + # only if it had been seen (installed) during the course of the overall target packages + if event.action == Action.PRESENT: +- for pkg in event.in_pkgs: +- if pkg in seen_pkgs: +- # First remove the package with the old repository and add it back, but now with the new +- # repository. As the Package class has a custom __hash__ and __eq__ comparing only name +- # and modulestream, the pkg.repository field is ignore and therefore the add() call +- # does not update the entry. 
+- if pkg in target_pkgs: +- target_pkgs.remove(pkg) +- target_pkgs.add(pkg) ++ # explicitly take the common pkgs from the event.in_pkgs, ++ # intersection cannot be used as it isn't defined from which set an ++ # element is taken if two elements have the same hash and are equal ++ # (there can be optimalizations such as always iterating the ++ # smaller set). ++ seen_in_pkgs = {pkg for pkg in event.in_pkgs if pkg in seen_pkgs} ++ if seen_in_pkgs: ++ removed_pkgs = target_pkgs.intersection(seen_in_pkgs) ++ log_replaced_pkgs(removed_pkgs, seen_in_pkgs) ++ ++ # First, remove the packages with the old repositories and add them ++ # back, but now with the new repositories. As the Package class has ++ # a custom __hash__ and __eq__ comparing only name and ++ # modulestream, the pkg.repository field is ignored and therefore ++ # the union() call does not update the entries. ++ target_pkgs = target_pkgs.difference(seen_in_pkgs) ++ target_pkgs = seen_in_pkgs.union(target_pkgs) ++ + elif event.action == Action.DEPRECATED: + if event.in_pkgs.intersection(source_installed_pkgs): + # Remove packages with old repositories add packages with the new one ++ removed_pkgs = target_pkgs.intersection(event.in_pkgs) ++ log_replaced_pkgs(removed_pkgs, event.in_pkgs) ++ + target_pkgs = target_pkgs.difference(event.in_pkgs) + target_pkgs = target_pkgs.union(event.in_pkgs) + else: +@@ -163,10 +187,7 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, + # For MERGE to be relevant it is sufficient for only one of its in_pkgs to be installed + if are_all_in_pkgs_present or (event.action == Action.MERGED and is_any_in_pkg_present): + removed_pkgs = target_pkgs.intersection(event.in_pkgs) +- removed_pkgs_str = ', '.join(str(pkg) for pkg in removed_pkgs) if removed_pkgs else '[]' +- added_pkgs_str = ', '.join(str(pkg) for pkg in event.out_pkgs) if event.out_pkgs else '[]' +- logger.debug('Applying event %d (%s): replacing packages %s with %s', +- event.id, event.action, removed_pkgs_str, added_pkgs_str) ++ log_replaced_pkgs(removed_pkgs, event.out_pkgs) + + # In pkgs are present, event can be applied + # Note: We do a .difference(event.out_packages) followed by an .union(event.out_packages) to overwrite +@@ -176,9 +197,12 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, target_pkgs = target_pkgs.difference(event.out_pkgs) target_pkgs = target_pkgs.union(event.out_pkgs) @@ -4119,7 +18364,7 @@ index 67e517d1..ec7d001a 100644 def remove_undesired_events(events, relevant_to_releases): -@@ -244,15 +253,17 @@ def compute_packages_on_target_system(source_pkgs, events, releases): +@@ -244,15 +268,17 @@ def compute_packages_on_target_system(source_pkgs, events, releases): did_processing_cross_major_version = True pkgs_to_demodularize = {pkg for pkg in target_pkgs if pkg.modulestream} @@ -4141,7 +18386,7 @@ index 67e517d1..ec7d001a 100644 def compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize): -@@ -356,15 +367,13 @@ def get_pesid_to_repoid_map(target_pesids): +@@ -356,15 +382,13 @@ def get_pesid_to_repoid_map(target_pesids): :return: Dictionary mapping the target_pesids to their corresponding repoid """ @@ -4160,7 +18405,7 @@ index 67e517d1..ec7d001a 100644 rhui_info = next(api.consume(RHUIInfo), None) cloud_provider = rhui_info.provider if rhui_info else '' -@@ -554,6 +563,19 @@ def process(): +@@ -555,6 +579,19 @@ def process(): if not events: return @@ -4180,7 +18425,7 @@ index 67e517d1..ec7d001a 100644 releases = 
get_relevant_releases(events) installed_pkgs = get_installed_pkgs() transaction_configuration = get_transaction_configuration() -@@ -567,7 +589,7 @@ def process(): +@@ -568,7 +605,7 @@ def process(): events = remove_undesired_events(events, releases) # Apply events - compute what packages should the target system have @@ -4189,431 +18434,12 @@ index 67e517d1..ec7d001a 100644 events, releases) # Packages coming out of the events have PESID as their repository, however, we need real repoid -@@ -587,4 +609,5 @@ def process(): +@@ -588,4 +625,5 @@ def process(): rpm_tasks = include_instructions_from_transaction_configuration(rpm_tasks, transaction_configuration, installed_pkgs) if rpm_tasks: + rpm_tasks.to_reinstall = sorted(pkgs_to_reinstall) api.produce(rpm_tasks) -diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py -index 5674ee3f..58b15a84 100644 ---- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py -+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/actor.py -@@ -8,9 +8,14 @@ class RemoveObsoleteGpgKeys(Actor): - """ - Remove obsoleted RPM GPG keys. - -- New version might make existing RPM GPG keys obsolete. This might be caused -- for example by the hashing algorithm becoming deprecated or by the key -- getting replaced. -+ The definition of what keys are considered obsolete depends on whether the -+ upgrade also does a conversion: -+ - If not converting, the obsolete keys are those that are no longer valid -+ on the target version. This might be caused for example by the hashing -+ algorithm becoming deprecated or by the key getting replaced. Note that -+ only keys provided by the vendor of the OS are handled. -+ - If converting, the obsolete keys are all of the keys provided by the -+ vendor of the source distribution. - - A DNFWorkaround is registered to actually remove the keys. - """ -diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py -index df08e6fa..7d047395 100644 ---- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py -+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py -@@ -1,3 +1,5 @@ -+import itertools -+ - from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id - from leapp.libraries.common.config.version import get_target_major_version - from leapp.libraries.common.distro import get_distribution_data -@@ -6,18 +8,25 @@ from leapp.libraries.stdlib import api - from leapp.models import DNFWorkaround, InstalledRPM - - -+def _is_key_installed(key): -+ """ -+ :param key: The NVR of the gpg key RPM (e.g. 
gpg-pubkey-1d997668-61bae63b) -+ """ -+ name, version, release = key.rsplit("-", 2) -+ return has_package(InstalledRPM, name, version=version, release=release) -+ -+ - def _get_obsolete_keys(): - """ -- Return keys obsoleted in target and previous versions -+ Get keys obsoleted in target and previous major versions - """ - distribution = get_target_distro_id() -- obsoleted_keys_map = get_distribution_data(distribution).get('obsoleted-keys', {}) -+ obsoleted_keys_map = get_distribution_data(distribution).get("obsoleted-keys", {}) - keys = [] - for version in range(7, int(get_target_major_version()) + 1): - try: - for key in obsoleted_keys_map[str(version)]: -- name, version, release = key.rsplit("-", 2) -- if has_package(InstalledRPM, name, version=version, release=release): -+ if _is_key_installed(key): - keys.append(key) - except KeyError: - pass -@@ -25,6 +34,22 @@ def _get_obsolete_keys(): - return keys - - -+def _get_source_distro_keys(): -+ """ -+ Get all known keys of the source distro -+ -+ This includes keys from all relevant previous OS versions as all of those -+ might be present on the system. -+ """ -+ distribution = get_source_distro_id() -+ keys = get_distribution_data(distribution).get("keys", {}) -+ return [ -+ key -+ for key in itertools.chain.from_iterable(keys.values()) -+ if _is_key_installed(key) -+ ] -+ -+ - def register_dnfworkaround(keys): - api.produce( - DNFWorkaround( -@@ -36,13 +61,12 @@ def register_dnfworkaround(keys): - - - def process(): -- if get_source_distro_id() != get_target_distro_id(): -- # TODO adjust for conversions, in the current state it would not have -- # any effect, just skip it -- return -- -- keys = _get_obsolete_keys() -- if not keys: -- return -+ if get_source_distro_id() == get_target_distro_id(): -+ # only upgrading - remove keys obsoleted in previous versions -+ keys = _get_obsolete_keys() -+ else: -+ # also converting - we need to remove all keys from the source distro -+ keys = _get_source_distro_keys() - -- register_dnfworkaround(keys) -+ if keys: -+ register_dnfworkaround(keys) -diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py -index b78174cc..8b9b842b 100644 ---- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py -+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py -@@ -1,77 +1,79 @@ - import os -+import unittest.mock as mock - - import pytest - - from leapp.libraries.actor import removeobsoleterpmgpgkeys --from leapp.libraries.common.config.version import get_target_major_version --from leapp.libraries.common.rpms import has_package - from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked - from leapp.libraries.stdlib import api --from leapp.models import DNFWorkaround, InstalledRPM, RPM -+from leapp.models import InstalledRPM, RPM - -+_CUR_DIR = os.path.dirname(os.path.abspath(__file__)) - --def _get_test_installedrpm(): -- return InstalledRPM( -+ -+def common_folder_path_mocked(folder): -+ return os.path.join(_CUR_DIR, "../../../files/", folder) -+ -+ -+def test_is_key_installed(monkeypatch): -+ installed_rpms = InstalledRPM( - items=[ - RPM( -- name='gpg-pubkey', -- version='d4082792', -- release='5b32db75', -- epoch='0', -- packager='Red Hat, Inc. 
(auxiliary key 2) ', -- arch='noarch', -- pgpsig='' -+ name="gpg-pubkey", -+ version="d4082792", -+ release="5b32db75", -+ epoch="0", -+ packager="Red Hat, Inc. (auxiliary key 2) ", -+ arch="noarch", -+ pgpsig="", - ), - RPM( -- name='gpg-pubkey', -- version='2fa658e0', -- release='45700c69', -- epoch='0', -- packager='Red Hat, Inc. (auxiliary key) ', -- arch='noarch', -- pgpsig='' -+ name="gpg-pubkey", -+ version="2fa658e0", -+ release="45700c69", -+ epoch="0", -+ packager="Red Hat, Inc. (auxiliary key) ", -+ arch="noarch", -+ pgpsig="", - ), - RPM( -- name='gpg-pubkey', -- version='12345678', -- release='abcdefgh', -- epoch='0', -- packager='made up', -- arch='noarch', -- pgpsig='' -+ name="gpg-pubkey", -+ version="12345678", -+ release="abcdefgh", -+ epoch="0", -+ packager="made up", -+ arch="noarch", -+ pgpsig="", - ), - ] - ) - -+ monkeypatch.setattr( -+ api, "current_actor", CurrentActorMocked(msgs=[installed_rpms]) -+ ) -+ -+ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-d4082792-5b32db75") -+ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-2fa658e0-45700c69") -+ assert removeobsoleterpmgpgkeys._is_key_installed("gpg-pubkey-12345678-abcdefgh") -+ assert not removeobsoleterpmgpgkeys._is_key_installed( -+ "gpg-pubkey-db42a60e-37ea5438" -+ ) -+ - - @pytest.mark.parametrize( - "version, expected", - [ -- (9, ["gpg-pubkey-d4082792-5b32db75", "gpg-pubkey-2fa658e0-45700c69"]), -- (8, ["gpg-pubkey-2fa658e0-45700c69"]) -+ ("9", ["gpg-pubkey-d4082792-5b32db75", "gpg-pubkey-2fa658e0-45700c69"]), -+ ("8", ["gpg-pubkey-2fa658e0-45700c69"]) - ] - ) - def test_get_obsolete_keys(monkeypatch, version, expected): -- def get_target_major_version_mocked(): -- return version -- -- monkeypatch.setattr( -- removeobsoleterpmgpgkeys, -- "get_target_major_version", -- get_target_major_version_mocked, -- ) -- -+ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_ver=version)) -+ monkeypatch.setattr(api, "get_common_folder_path", common_folder_path_mocked) - monkeypatch.setattr( -- api, -- "current_actor", -- CurrentActorMocked( -- msgs=[_get_test_installedrpm()] -- ), -+ removeobsoleterpmgpgkeys, "_is_key_installed", lambda key: key in expected - ) - -- cur_dir = os.path.dirname(os.path.abspath(__file__)) -- monkeypatch.setattr(api, 'get_common_folder_path', lambda folder: os.path.join(cur_dir, '../../../files/', folder)) -- - keys = removeobsoleterpmgpgkeys._get_obsolete_keys() - assert set(keys) == set(expected) - -@@ -79,50 +81,83 @@ def test_get_obsolete_keys(monkeypatch, version, expected): - @pytest.mark.parametrize( - "version, obsoleted_keys, expected", - [ -- (10, None, []), -- (10, {}, []), -- (10, {"8": ["gpg-pubkey-888-abc"], "10": ["gpg-pubkey-10-10"]}, ["gpg-pubkey-888-abc", "gpg-pubkey-10-10"]), -- (9, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-999-def", "gpg-pubkey-888-abc"]), -- (8, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-888-abc"]) -- ] -+ ("10", None, []), -+ ("10", {}, []), -+ ( -+ "10", -+ {"8": ["gpg-pubkey-888-abc"], "10": ["gpg-pubkey-10-10"]}, -+ ["gpg-pubkey-888-abc", "gpg-pubkey-10-10"], -+ ), -+ ( -+ "9", -+ {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, -+ ["gpg-pubkey-999-def", "gpg-pubkey-888-abc"], -+ ), -+ ( -+ "8", -+ {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, -+ ["gpg-pubkey-888-abc"], -+ ), -+ ], - ) --def test_get_obsolete_keys_incomplete_data(monkeypatch, version, obsoleted_keys, expected): -- def get_target_major_version_mocked(): -- 
return version -+def test_get_obsolete_keys_incomplete_data( -+ monkeypatch, version, obsoleted_keys, expected -+): -+ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(dst_ver=version)) -+ monkeypatch.setattr( -+ removeobsoleterpmgpgkeys, "_is_key_installed", lambda key: key in expected -+ ) - - def get_distribution_data_mocked(_distro): - if obsoleted_keys is None: - return {} -- return {'obsoleted-keys': obsoleted_keys} -- -- def has_package_mocked(*args, **kwargs): -- return True -+ return {"obsoleted-keys": obsoleted_keys} - - monkeypatch.setattr( -- removeobsoleterpmgpgkeys, -- "get_target_major_version", -- get_target_major_version_mocked, -+ removeobsoleterpmgpgkeys, "get_distribution_data", get_distribution_data_mocked - ) - -- monkeypatch.setattr( -- removeobsoleterpmgpgkeys, -- "get_distribution_data", -- get_distribution_data_mocked, -- ) -+ keys = removeobsoleterpmgpgkeys._get_obsolete_keys() -+ assert set(keys) == set(expected) - -- monkeypatch.setattr( -- removeobsoleterpmgpgkeys, -- "has_package", -- has_package_mocked, -- ) - -+@pytest.mark.parametrize( -+ "distro, expected", -+ [ -+ ( -+ "centos", -+ [ -+ "gpg-pubkey-8483c65d-5ccc5b19", -+ "gpg-pubkey-1d997668-621e3cac", -+ "gpg-pubkey-1d997668-61bae63b", -+ ], -+ ), -+ ( -+ "rhel", -+ [ -+ "gpg-pubkey-fd431d51-4ae0493b", -+ "gpg-pubkey-37017186-45761324", -+ "gpg-pubkey-f21541eb-4a5233e8", -+ "gpg-pubkey-897da07a-3c979a7f", -+ "gpg-pubkey-2fa658e0-45700c69", -+ "gpg-pubkey-d4082792-5b32db75", -+ "gpg-pubkey-5a6340b3-6229229e", -+ "gpg-pubkey-db42a60e-37ea5438", -+ ], -+ ), -+ ], -+) -+def test_get_source_distro_keys(monkeypatch, distro, expected): -+ """ -+ Test that the correct keys are returned for each distro. -+ """ -+ monkeypatch.setattr(api, "current_actor", CurrentActorMocked(src_distro=distro)) -+ monkeypatch.setattr(api, "get_common_folder_path", common_folder_path_mocked) - monkeypatch.setattr( -- api, -- "current_actor", -- CurrentActorMocked(), -+ removeobsoleterpmgpgkeys, "_is_key_installed", lambda _key: True - ) - -- keys = removeobsoleterpmgpgkeys._get_obsolete_keys() -+ keys = removeobsoleterpmgpgkeys._get_source_distro_keys() - assert set(keys) == set(expected) - - -@@ -134,16 +169,61 @@ def test_get_obsolete_keys_incomplete_data(monkeypatch, version, obsoleted_keys, - ] - ) - def test_workaround_should_register(monkeypatch, keys, should_register): -- def get_obsolete_keys_mocked(): -- return keys -- - monkeypatch.setattr( -- removeobsoleterpmgpgkeys, -- '_get_obsolete_keys', -- get_obsolete_keys_mocked -+ removeobsoleterpmgpgkeys, "_get_obsolete_keys", lambda: keys - ) -- monkeypatch.setattr(api, 'produce', produce_mocked()) -+ monkeypatch.setattr(api, "produce", produce_mocked()) - monkeypatch.setattr(api, "current_actor", CurrentActorMocked()) - - removeobsoleterpmgpgkeys.process() - assert api.produce.called == should_register -+ -+ -+def test_process(monkeypatch): -+ """ -+ Test that the correct path is taken depending on whether also converting -+ """ -+ obsolete = ["gpg-pubkey-12345678-abcdefgh"] -+ source_distro = ["gpg-pubkey-87654321-hgfedcba"] -+ -+ monkeypatch.setattr( -+ removeobsoleterpmgpgkeys, "_get_obsolete_keys", lambda: obsolete -+ ) -+ monkeypatch.setattr( -+ removeobsoleterpmgpgkeys, "_get_source_distro_keys", lambda: source_distro, -+ ) -+ -+ # upgrade only path -+ monkeypatch.setattr( -+ api, "current_actor", CurrentActorMocked(src_distro="rhel", dst_distro="rhel") -+ ) -+ with mock.patch( -+ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" -+ ): 
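        # Note (editorial, not part of the patch): this first block exercises the
        # upgrade-only path (src_distro == dst_distro == "rhel"), so process() is
        # expected to register the DNF workaround with the keys returned by
        # _get_obsolete_keys(). The two blocks below switch the mocked distros
        # (rhel -> centos and centos -> rhel) and expect the source-distro key list
        # instead, matching the conversion behaviour described in the updated actor
        # docstring.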
-+ removeobsoleterpmgpgkeys.process() -+ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( -+ obsolete -+ ) -+ -+ # upgrade + conversion paths -+ monkeypatch.setattr( -+ api, "current_actor", CurrentActorMocked(src_distro="rhel", dst_distro="centos") -+ ) -+ with mock.patch( -+ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" -+ ): -+ removeobsoleterpmgpgkeys.process() -+ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( -+ source_distro -+ ) -+ -+ monkeypatch.setattr( -+ api, "current_actor", CurrentActorMocked(src_distro="centos", dst_distro="rhel") -+ ) -+ with mock.patch( -+ "leapp.libraries.actor.removeobsoleterpmgpgkeys.register_dnfworkaround" -+ ): -+ removeobsoleterpmgpgkeys.process() -+ removeobsoleterpmgpgkeys.register_dnfworkaround.assert_called_once_with( -+ source_distro -+ ) diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py index 503e66a3..4ec1d6e0 100644 --- a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py @@ -4748,6 +18574,62 @@ index 503e66a3..4ec1d6e0 100644 def _inhibit_upgrade(msg): local_path = os.path.join('/etc/leapp/file', REPOMAP_FILE) hint = ( +diff --git a/repos/system_upgrade/common/actors/rpmscanner/libraries/rpmscanner.py b/repos/system_upgrade/common/actors/rpmscanner/libraries/rpmscanner.py +index 74c4b101..2a2ee03e 100644 +--- a/repos/system_upgrade/common/actors/rpmscanner/libraries/rpmscanner.py ++++ b/repos/system_upgrade/common/actors/rpmscanner/libraries/rpmscanner.py +@@ -53,6 +53,20 @@ def _get_package_repository_data_dnf(): + # NOTE: currently we do not initialize/load DNF plugins here as we are + # working just with the local stuff (load_system_repo=True) + dnf_base.fill_sack(load_system_repo=True, load_available_repos=False) ++ except dnf.exceptions.RepoError as e: ++ err_msg = str(e) ++ repoid = err_msg.split('repo:')[-1].strip() if 'repo:' in err_msg else 'unknown repo' ++ repoid = repoid.strip('"').strip("'").replace('\\"', '') ++ raise StopActorExecutionError( ++ message='DNF failed to load repositories: {}'.format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository {} definition is correct or remove it ' ++ 'if the repository is not needed anymore.' 
++ .format(repoid) ++ } ++ ) ++ ++ try: + for pkg in dnf_base.sack.query(): + pkg_repos[pkg.name] = pkg._from_repo.lstrip('@') + except ValueError as e: +diff --git a/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py b/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py +index e9455feb..151a1b2b 100644 +--- a/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py ++++ b/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py +@@ -2,6 +2,7 @@ import sys + + import pytest + ++from leapp.exceptions import StopActorExecutionError + from leapp.libraries.actor import rpmscanner + from leapp.libraries.common import module as module_lib + from leapp.libraries.common import rpms, testutils +@@ -192,3 +193,18 @@ def test_process(monkeypatch): + assert items['passwd'].arch == 'x86_64' + assert not items['passwd'].module + assert not items['passwd'].stream ++ ++ ++@pytest.mark.skipif(no_dnf, reason='dnf is unavailable') ++def test_get_package_repository_data_dnf_repo_error(monkeypatch): ++ # pylint: disable=bad-option-value,useless-option-value,no-self-use ++ class MockDnfBase: ++ def fill_sack(self, *args, **kwargs): ++ raise rpmscanner.dnf.exceptions.RepoError('Cannot find a valid baseurl for repo: broken-repo') ++ ++ monkeypatch.setattr(rpmscanner.dnf, 'Base', MockDnfBase) ++ ++ with pytest.raises(StopActorExecutionError) as exc_info: ++ rpmscanner._get_package_repository_data_dnf() ++ ++ assert 'Ensure the repository broken-repo definition' in exc_info.value.details['hint'] diff --git a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py b/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py index 84895f83..62aefaf4 100644 --- a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py @@ -4797,6 +18679,434 @@ index 84895f83..62aefaf4 100644 + to_reinstall=to_reinstall_filtered, to_keep=load_tasks_file(os.path.join(base_dir, 'to_keep'), logger), to_remove=load_tasks_file(os.path.join(base_dir, 'to_remove'), logger)) +diff --git a/repos/system_upgrade/common/actors/scancustomrepofile/libraries/scancustomrepofile.py b/repos/system_upgrade/common/actors/scancustomrepofile/libraries/scancustomrepofile.py +index 1b48689a..86486b5c 100644 +--- a/repos/system_upgrade/common/actors/scancustomrepofile/libraries/scancustomrepofile.py ++++ b/repos/system_upgrade/common/actors/scancustomrepofile/libraries/scancustomrepofile.py +@@ -1,5 +1,6 @@ + import os + ++from leapp.exceptions import StopActorExecutionError + from leapp.libraries.common import repofileutils + from leapp.libraries.stdlib import api + from leapp.models import CustomTargetRepository, CustomTargetRepositoryFile +@@ -21,7 +22,18 @@ def process(): + .format(CUSTOM_REPO_PATH)) + return + api.current_logger().info("The {} file exists.".format(CUSTOM_REPO_PATH)) +- repofile = repofileutils.parse_repofile(CUSTOM_REPO_PATH) ++ try: ++ repofile = repofileutils.parse_repofile(CUSTOM_REPO_PATH) ++ except repofileutils.InvalidRepoDefinition as e: ++ raise StopActorExecutionError( ++ message="Failed to parse custom repository definition: {}".format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository {} definition is correct or remove it ' ++ 'if the repository is not needed anymore. ' ++ 'This issue is typically caused by missing definition of the name field. 
' ++ 'For more information, see: https://access.redhat.com/solutions/6969001.' ++ .format(CUSTOM_REPO_PATH) ++ }) + if not repofile.data: + return + api.produce(CustomTargetRepositoryFile(file=CUSTOM_REPO_PATH)) +diff --git a/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py b/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py +index 772b33e6..95f664ad 100644 +--- a/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py ++++ b/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py +@@ -1,5 +1,8 @@ + import os + ++import pytest ++ ++from leapp.exceptions import StopActorExecutionError + from leapp.libraries.actor import scancustomrepofile + from leapp.libraries.common import repofileutils + from leapp.libraries.common.testutils import produce_mocked +@@ -75,3 +78,23 @@ def test_empty_repofile_exists(monkeypatch): + msg = "The {} file exists.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.infomsg == msg + assert not api.produce.called ++ ++ ++def test_invalid_repofile_raises_stop_actor_error(monkeypatch): ++ def _mocked_parse_repofile_raises(fpath): ++ raise repofileutils.InvalidRepoDefinition( ++ msg='mocked error', ++ repofile=fpath, ++ repoid='invalid-repo' ++ ) ++ ++ monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile_raises) ++ monkeypatch.setattr(api, 'current_logger', LoggerMocked()) ++ ++ with pytest.raises(StopActorExecutionError) as exc_info: ++ scancustomrepofile.process() ++ ++ assert 'Failed to parse custom repository definition' in str(exc_info.value) ++ assert scancustomrepofile.CUSTOM_REPO_PATH in exc_info.value.details['hint'] +diff --git a/repos/system_upgrade/common/actors/scangrubdevice/actor.py b/repos/system_upgrade/common/actors/scangrubdevice/actor.py +index cb6be7ea..e6f9bf8a 100644 +--- a/repos/system_upgrade/common/actors/scangrubdevice/actor.py ++++ b/repos/system_upgrade/common/actors/scangrubdevice/actor.py +@@ -1,6 +1,5 @@ + from leapp.actors import Actor +-from leapp.libraries.common import grub +-from leapp.libraries.common.config import architecture ++from leapp.libraries.actor import scangrubdevice + from leapp.models import GrubInfo + from leapp.tags import FactsPhaseTag, IPUWorkflowTag + +@@ -16,10 +15,4 @@ class ScanGrubDeviceName(Actor): + tags = (FactsPhaseTag, IPUWorkflowTag) + + def process(self): +- if architecture.matches_architecture(architecture.ARCH_S390X): +- return +- +- devices = grub.get_grub_devices() +- grub_info = GrubInfo(orig_devices=devices) +- grub_info.orig_device_name = devices[0] if len(devices) == 1 else None +- self.produce(grub_info) ++ scangrubdevice.process() +diff --git a/repos/system_upgrade/common/actors/scangrubdevice/libraries/scangrubdevice.py b/repos/system_upgrade/common/actors/scangrubdevice/libraries/scangrubdevice.py +new file mode 100644 +index 00000000..608c67e5 +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scangrubdevice/libraries/scangrubdevice.py +@@ -0,0 +1,22 @@ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.common import grub ++from leapp.libraries.common.config import architecture ++from leapp.libraries.stdlib import api ++from leapp.models import GrubInfo ++ ++ ++def process(): ++ if architecture.matches_architecture(architecture.ARCH_S390X): ++ return ++ ++ try: 
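        # Note (editorial, not part of the patch): get_grub_devices() is expected to
        # raise grub.GRUBDeviceError when the GRUB devices cannot be resolved; the
        # except clause below converts that into a StopActorExecutionError so the
        # upgrade stops with a clear "Cannot detect GRUB devices" message instead of
        # an unhandled traceback.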
++ devices = grub.get_grub_devices() ++ except grub.GRUBDeviceError as err: ++ raise StopActorExecutionError( ++ message='Cannot detect GRUB devices', ++ details={'details': str(err)} ++ ) ++ ++ grub_info = GrubInfo(orig_devices=devices) ++ grub_info.orig_device_name = devices[0] if len(devices) == 1 else None ++ api.produce(grub_info) +diff --git a/repos/system_upgrade/common/actors/scangrubdevice/tests/test_scangrubdevice.py b/repos/system_upgrade/common/actors/scangrubdevice/tests/test_scangrubdevice.py +index 0114d717..50c5ce8d 100644 +--- a/repos/system_upgrade/common/actors/scangrubdevice/tests/test_scangrubdevice.py ++++ b/repos/system_upgrade/common/actors/scangrubdevice/tests/test_scangrubdevice.py +@@ -1,35 +1,68 @@ ++import pytest ++ ++from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.actor import scangrubdevice + from leapp.libraries.common import grub +-from leapp.libraries.common.config import mock_configs ++from leapp.libraries.common.config import architecture ++from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked ++from leapp.libraries.stdlib import api + from leapp.models import GrubInfo + + +-def _get_grub_devices_mocked(): +- return ['/dev/vda', '/dev/vdb'] +- ++def test_process_one_dev(monkeypatch): ++ def _get_grub_devices_mocked(): ++ return ['/dev/vda'] + +-def test_actor_scan_grub_device(current_actor_context, monkeypatch): + monkeypatch.setattr(grub, 'get_grub_devices', _get_grub_devices_mocked) +- current_actor_context.run(config_model=mock_configs.CONFIG) +- info = current_actor_context.consume(GrubInfo) +- assert info and info[0].orig_devices == ['/dev/vda', '/dev/vdb'] +- assert len(info) == 1, 'Expected just one GrubInfo message' +- assert not info[0].orig_device_name ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) + ++ scangrubdevice.process() + +-def test_actor_scan_grub_device_one(current_actor_context, monkeypatch): ++ assert api.produce.called == 1 ++ assert len(api.produce.model_instances) == 1 ++ grubinfo = api.produce.model_instances[0] ++ assert isinstance(grubinfo, GrubInfo) ++ assert grubinfo.orig_devices == ['/dev/vda'] ++ assert grubinfo.orig_device_name == '/dev/vda' + ++ ++def test_process_multiple_devs(monkeypatch): + def _get_grub_devices_mocked(): +- return ['/dev/vda'] ++ return ['/dev/vda', '/dev/vdb'] + + monkeypatch.setattr(grub, 'get_grub_devices', _get_grub_devices_mocked) +- current_actor_context.run(config_model=mock_configs.CONFIG) +- info = current_actor_context.consume(GrubInfo) +- assert info and info[0].orig_devices == ['/dev/vda'] +- assert len(info) == 1, 'Expected just one GrubInfo message' +- assert info[0].orig_device_name == '/dev/vda' ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ ++ scangrubdevice.process() ++ ++ assert api.produce.called == 1 ++ assert len(api.produce.model_instances) == 1 ++ grubinfo = api.produce.model_instances[0] ++ assert isinstance(grubinfo, GrubInfo) ++ assert grubinfo.orig_devices == ['/dev/vda', '/dev/vdb'] ++ assert grubinfo.orig_device_name is None + + +-def test_actor_scan_grub_device_s390x(current_actor_context, monkeypatch): ++def test_process_no_produce_on_s390x(monkeypatch): ++ monkeypatch.setattr( ++ api, "current_actor", CurrentActorMocked(arch=architecture.ARCH_S390X) ++ ) ++ monkeypatch.setattr(api, 'produce', produce_mocked()) ++ ++ scangrubdevice.process() ++ ++ assert 
api.produce.called == 0 ++ ++ ++def test_process_fail_to_get_grubdevs(monkeypatch): ++ ++ def _get_grub_devices_mocked(): ++ raise grub.GRUBDeviceError() ++ + monkeypatch.setattr(grub, 'get_grub_devices', _get_grub_devices_mocked) +- current_actor_context.run(config_model=mock_configs.CONFIG_S390X) +- assert not current_actor_context.consume(GrubInfo) ++ monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) ++ ++ with pytest.raises(StopActorExecutionError, match='Cannot detect GRUB devices'): ++ scangrubdevice.process() +diff --git a/repos/system_upgrade/common/actors/scannvme/actor.py b/repos/system_upgrade/common/actors/scannvme/actor.py +new file mode 100644 +index 00000000..a4f7aefe +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scannvme/actor.py +@@ -0,0 +1,25 @@ ++from leapp.actors import Actor ++from leapp.libraries.actor import scannvme ++from leapp.models import NVMEInfo ++from leapp.tags import FactsPhaseTag, IPUWorkflowTag ++ ++ ++class ScanNVMe(Actor): ++ """ ++ Detect existing NVMe devices. ++ ++ The detection is performed by checking content under /sys/class/nvme/ ++ directory where all NVMe devices should be listed. Additional information ++ is collected from the present files under each specific device. ++ ++ Namely the NVMe transport type and the device name is collected at this ++ moment. ++ """ ++ ++ name = 'scan_nvme' ++ consumes = () ++ produces = (NVMEInfo,) ++ tags = (FactsPhaseTag, IPUWorkflowTag) ++ ++ def process(self): ++ scannvme.process() +diff --git a/repos/system_upgrade/common/actors/scannvme/libraries/scannvme.py b/repos/system_upgrade/common/actors/scannvme/libraries/scannvme.py +new file mode 100644 +index 00000000..ef77171d +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scannvme/libraries/scannvme.py +@@ -0,0 +1,88 @@ ++import os ++ ++from leapp.libraries.common.utils import read_file ++from leapp.libraries.stdlib import api ++from leapp.models import NVMEDevice, NVMEInfo ++ ++NVME_CLASS_DIR = '/sys/class/nvme' ++NVME_CONF_DIR = '/etc/nvme' ++NVME_CONF_HOSTID = '/etc/nvme/hostid' ++NVME_CONF_HOSTNQN = '/etc/nvme/hostnqn' ++ ++ ++class NVMEMissingTransport(Exception): ++ def __init__(self, message): ++ super().__init__(message) ++ self.message = message ++ ++ ++def _get_transport_type(device_path): ++ tpath = os.path.join(device_path, 'transport') ++ if not os.path.exists(tpath): ++ raise NVMEMissingTransport(f'The {tpath} file is missing.') ++ ++ transport = read_file(tpath).strip() ++ if not transport: ++ raise NVMEMissingTransport('The transport type is not defined.') ++ ++ return transport ++ ++ ++def scan_device(device_name): ++ device_path = os.path.join(NVME_CLASS_DIR, device_name) ++ if not os.path.isdir(device_path): ++ api.current_logger().warning( ++ 'Cannot scan NVMe device: Following path is not dir: {0}'.format(device_path) ++ ) ++ return None ++ ++ try: ++ transport = _get_transport_type(device_path) ++ except NVMEMissingTransport as e: ++ # unexpected; seatbelt - skipping tests ++ api.current_logger().warning( ++ 'Skipping {0} NVMe device: Cannot detect transport type: {1}'.format(device_name, e.message) ++ ) ++ return None ++ ++ return NVMEDevice( ++ sys_class_path=device_path, ++ name=device_name, ++ transport=transport ++ ) ++ ++ ++def get_hostid(fpath=NVME_CONF_HOSTID): ++ if not os.path.exists(fpath): ++ api.current_logger().debug('NVMe hostid config file is missing.') ++ return None ++ return read_file(fpath).strip() ++ ++ ++def get_hostnqn(fpath=NVME_CONF_HOSTNQN): ++ if not os.path.exists(fpath): ++ 
api.current_logger().debug('NVMe hostnqn config file is missing.') ++ return None ++ return read_file(fpath).strip() ++ ++ ++def process(): ++ if not os.path.isdir(NVME_CLASS_DIR): ++ api.current_logger().debug( ++ 'NVMe is not active: {0} does not exist.'.format(NVME_CLASS_DIR) ++ ) ++ return ++ ++ devices = [scan_device(device_name) for device_name in os.listdir(NVME_CLASS_DIR)] ++ # drop possible None values from the list ++ devices = [dev for dev in devices if dev is not None] ++ if not devices: ++ # NOTE(pstodulk): This could be suspicious possibly. ++ api.current_logger().warning('No NVMe device detected but NVMe seems active.') ++ return ++ ++ api.produce(NVMEInfo( ++ devices=devices, ++ hostnqn=get_hostnqn(), ++ hostid=get_hostid(), ++ )) +diff --git a/repos/system_upgrade/common/actors/scannvme/tests/test_scannvme.py b/repos/system_upgrade/common/actors/scannvme/tests/test_scannvme.py +new file mode 100644 +index 00000000..97b3980b +--- /dev/null ++++ b/repos/system_upgrade/common/actors/scannvme/tests/test_scannvme.py +@@ -0,0 +1,84 @@ ++import pytest ++ ++from leapp.libraries.actor import scannvme ++from leapp.models import NVMEDevice ++ ++ ++def test_get_transport_type_file_missing(monkeypatch): ++ """Test that NVMEMissingTransport is raised when transport file does not exist.""" ++ monkeypatch.setattr('os.path.join', lambda *args: '/sys/class/nvme/nvme0/transport') ++ monkeypatch.setattr('os.path.exists', lambda path: False) ++ ++ with pytest.raises(scannvme.NVMEMissingTransport): ++ scannvme._get_transport_type('/sys/class/nvme/nvme0') ++ ++ ++def test_get_transport_type_file_empty(monkeypatch): ++ """Test that NVMEMissingTransport is raised when transport file is empty.""" ++ monkeypatch.setattr('os.path.join', lambda *args: '/sys/class/nvme/nvme0/transport') ++ monkeypatch.setattr('os.path.exists', lambda path: True) ++ monkeypatch.setattr( ++ 'leapp.libraries.actor.scannvme.read_file', ++ lambda path: ' \n' ++ ) ++ ++ with pytest.raises(scannvme.NVMEMissingTransport): ++ scannvme._get_transport_type('/sys/class/nvme/nvme0') ++ ++ ++@pytest.mark.parametrize('transport_value', ['pcie', 'tcp', 'rdma', 'fc', 'loop']) ++def test_get_transport_type_valid(monkeypatch, transport_value): ++ """Test that transport type is correctly read from the file.""" ++ monkeypatch.setattr('os.path.join', lambda *args: '/sys/class/nvme/nvme0/transport') ++ monkeypatch.setattr('os.path.exists', lambda path: True) ++ monkeypatch.setattr(scannvme, 'read_file', lambda path: transport_value + '\n') ++ ++ result = scannvme._get_transport_type('/sys/class/nvme/nvme0') ++ assert result == transport_value ++ ++ ++def test_scan_device_transport_detection_fails(monkeypatch): ++ """Test that None is returned when transport detection fails.""" ++ monkeypatch.setattr('os.path.join', lambda *args: '/'.join(args)) ++ monkeypatch.setattr('os.path.isdir', lambda path: True) ++ monkeypatch.setattr('os.path.exists', lambda path: False) ++ ++ result = scannvme.scan_device('nvme0') ++ ++ assert result is None ++ ++ ++@pytest.mark.parametrize('device_name,transport', [ ++ ('nvme0', 'pcie'), ++ ('nvme1', 'tcp'), ++ ('nvme2', 'rdma'), ++]) ++def test_scan_device_successful(monkeypatch, device_name, transport): ++ """Test that NVMEDevice is returned for a valid device.""" ++ expected_device_path = '/sys/class/nvme/{}'.format(device_name) ++ expected_transport_path = '{}/transport'.format(expected_device_path) ++ ++ def mock_isdir(path): ++ assert path == expected_device_path ++ return True ++ ++ def mock_exists(path): ++ 
assert path == expected_transport_path ++ return True ++ ++ def mock_read_file(path): ++ assert path == expected_transport_path ++ return transport + '\n' ++ ++ monkeypatch.setattr('os.path.join', lambda *args: '/'.join(args)) ++ monkeypatch.setattr('os.path.isdir', mock_isdir) ++ monkeypatch.setattr('os.path.exists', mock_exists) ++ monkeypatch.setattr(scannvme, 'read_file', mock_read_file) ++ ++ result = scannvme.scan_device(device_name) ++ ++ assert result is not None ++ assert isinstance(result, NVMEDevice) ++ assert result.name == device_name ++ assert result.transport == transport ++ assert result.sys_class_path == expected_device_path diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py new file mode 100644 index 00000000..a5e481cb @@ -5165,6 +19475,114 @@ index df17a217..41e10247 100644 # produce message about skipped repositories enabled_repoids_with_mapping = _get_mapped_repoids(repomap, enabled_repoids) skipped_repoids = enabled_repoids & set(used_repoids_dict.keys()) - enabled_repoids_with_mapping +diff --git a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/libraries/transitionsystemdservicesstates.py b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/libraries/transitionsystemdservicesstates.py +index 53f53fb5..b21fe2b5 100644 +--- a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/libraries/transitionsystemdservicesstates.py ++++ b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/libraries/transitionsystemdservicesstates.py +@@ -163,7 +163,7 @@ def _report_kept_enabled(tasks): + "Systemd services which were enabled on the system before the upgrade" + " were kept enabled after the upgrade. 
" + ) +- if tasks: ++ if tasks.to_enable: + summary += ( + "The following services were originally disabled by preset on the" + " upgraded system and Leapp attempted to enable them:{}{}" +@@ -193,9 +193,12 @@ def _get_newly_enabled(services_source, desired_states): + + + def _report_newly_enabled(newly_enabled): ++ if not newly_enabled: ++ return ++ + summary = ( + "The following services were disabled before the upgrade and were set" +- "to enabled by a systemd preset after the upgrade:{}{}.".format( ++ " to enabled by a systemd preset after the upgrade:{}{}".format( + FMT_LIST_SEPARATOR, FMT_LIST_SEPARATOR.join(sorted(newly_enabled)) + ) + ) +diff --git a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py +index 488b37d4..90ab3917 100644 +--- a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py ++++ b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py +@@ -132,24 +132,28 @@ def test_tasks_produced_reports_created(monkeypatch): + services_source = [ + SystemdServiceFile(name="rsyncd.service", state="enabled"), + SystemdServiceFile(name="test.service", state="enabled"), ++ SystemdServiceFile(name="newly_enabled.service", state="disabled"), + ] + service_info_source = SystemdServicesInfoSource(service_files=services_source) + + presets_source = [ + SystemdServicePreset(service="rsyncd.service", state="enable"), + SystemdServicePreset(service="test.service", state="enable"), ++ SystemdServicePreset(service="newly_enabled.service", state="disable"), + ] + preset_info_source = SystemdServicesPresetInfoSource(presets=presets_source) + + services_target = [ + SystemdServiceFile(name="rsyncd.service", state="disabled"), + SystemdServiceFile(name="test.service", state="enabled"), ++ SystemdServiceFile(name="newly_enabled.service", state="enabled"), + ] + service_info_target = SystemdServicesInfoTarget(service_files=services_target) + + presets_target = [ + SystemdServicePreset(service="rsyncd.service", state="enable"), + SystemdServicePreset(service="test.service", state="enable"), ++ SystemdServicePreset(service="newly_enabled.service", state="enable"), + ] + preset_info_target = SystemdServicesPresetInfoTarget(presets=presets_target) + +@@ -188,7 +192,8 @@ def test_tasks_produced_reports_created(monkeypatch): + ), + True, + ), +- (None, False), ++ (SystemdServicesTasks(), False), ++ (SystemdServicesTasks(to_enable=[], to_disable=["some.service"]), False), + ), + ) + def test_report_kept_enabled(monkeypatch, tasks, expect_extended_summary): +@@ -202,7 +207,7 @@ def test_report_kept_enabled(monkeypatch, tasks, expect_extended_summary): + " upgraded system and Leapp attempted to enable them" + ) + +- assert created_reports.called ++ assert created_reports.called == 1 + if expect_extended_summary: + assert extended_summary_str in created_reports.report_fields["summary"] + all(s in created_reports.report_fields['summary'] for s in tasks.to_enable) +@@ -228,15 +233,24 @@ def test_get_newly_enabled(): + assert newly_enabled == ["test.service"] + + +-def test_report_newly_enabled(monkeypatch): ++@pytest.mark.parametrize( ++ "newly_enabled, should_report", ++ [ ++ (["test.service", "other.service"], True), ++ ([], False), ++ ] ++) ++def test_report_newly_enabled(monkeypatch, 
newly_enabled, should_report): + created_reports = create_report_mocked() + monkeypatch.setattr(reporting, "create_report", created_reports) + +- newly_enabled = ["test.service", "other.service"] + transitionsystemdservicesstates._report_newly_enabled(newly_enabled) + +- assert created_reports.called +- assert all(s in created_reports.report_fields["summary"] for s in newly_enabled) ++ if should_report: ++ assert created_reports.called == 1 ++ assert all(s in created_reports.report_fields["summary"] for s in newly_enabled) ++ else: ++ assert not created_reports.called + + + @pytest.mark.parametrize( diff --git a/repos/system_upgrade/common/actors/systemfacts/actor.py b/repos/system_upgrade/common/actors/systemfacts/actor.py index 59b12c87..85d4a09e 100644 --- a/repos/system_upgrade/common/actors/systemfacts/actor.py @@ -5178,11 +19596,245 @@ index 59b12c87..85d4a09e 100644 def process(self): self.produce(systemfacts.get_sysctls_status()) +diff --git a/repos/system_upgrade/common/actors/systemfacts/libraries/systemfacts.py b/repos/system_upgrade/common/actors/systemfacts/libraries/systemfacts.py +index f16cea1d..ba7bdb82 100644 +--- a/repos/system_upgrade/common/actors/systemfacts/libraries/systemfacts.py ++++ b/repos/system_upgrade/common/actors/systemfacts/libraries/systemfacts.py +@@ -221,14 +221,13 @@ def get_repositories_status(): + return RepositoriesFacts(repositories=repofileutils.get_parsed_repofiles()) + except repofileutils.InvalidRepoDefinition as e: + raise StopActorExecutionError( +- message=str(e), ++ message="Failed to parse repositories on the source system: {}".format(str(e)), + details={ +- 'hint': 'For more directions on how to resolve the issue, see: {url}.' +- .format( +- url='https://access.redhat.com/solutions/6969001' +- ) +- } +- ) ++ 'hint': 'Ensure the repository definition is correct or remove it ' ++ 'if the repository is not needed anymore. ' ++ 'This issue is typically caused by missing definition of the name field. ' ++ 'For more information, see: https://access.redhat.com/solutions/6969001.' 
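++                    # the adjacent string literals above are concatenated into a single
++                    # 'hint' string attached to the StopActorExecutionError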
++ }) + + + def get_selinux_status(): +@@ -295,12 +294,35 @@ def get_firewalls_status(): + ) + + ++def _get_secure_boot_state(): ++ try: ++ stdout = run(['mokutil', '--sb-state'])['stdout'] ++ return 'enabled' in stdout ++ except CalledProcessError as e: ++ if "doesn't support Secure Boot" in e.stderr: ++ return None ++ ++ raise StopActorExecutionError('Failed to determine SecureBoot state: {}'.format(e)) ++ except OSError as e: ++ # shim depends on mokutil, if it's not installed assume SecureBoot is disabled ++ api.current_logger().debug( ++ 'Failed to execute mokutil, assuming SecureBoot is disabled: {}'.format(e) ++ ) ++ return False ++ ++ + def get_firmware(): + firmware = 'efi' if os.path.isdir('/sys/firmware/efi') else 'bios' ++ ++ ppc64le_opal = None + if architecture.matches_architecture(architecture.ARCH_PPC64LE): +- ppc64le_opal = bool(os.path.isdir('/sys/firmware/opal/')) +- return FirmwareFacts(firmware=firmware, ppc64le_opal=ppc64le_opal) +- return FirmwareFacts(firmware=firmware) ++ ppc64le_opal = os.path.isdir('/sys/firmware/opal/') ++ ++ is_secureboot = None ++ if firmware == 'efi': ++ is_secureboot = _get_secure_boot_state() ++ ++ return FirmwareFacts(firmware=firmware, ppc64le_opal=ppc64le_opal, secureboot_enabled=is_secureboot) + + + @aslist +diff --git a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py +index 16405b15..22ee7b7b 100644 +--- a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py ++++ b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py +@@ -1,20 +1,27 @@ + import grp ++import os + import pwd ++from unittest import mock + + import pytest + + from leapp.exceptions import StopActorExecutionError ++from leapp.libraries.actor import systemfacts + from leapp.libraries.actor.systemfacts import ( ++ _get_secure_boot_state, + _get_system_groups, + _get_system_users, + anyendswith, + anyhasprefix, + aslist, ++ get_firmware, + get_repositories_status + ) + from leapp.libraries.common import repofileutils +-from leapp.libraries.common.testutils import logger_mocked +-from leapp.libraries.stdlib import api ++from leapp.libraries.common.config import architecture ++from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked ++from leapp.libraries.stdlib import api, CalledProcessError ++from leapp.models import FirmwareFacts + from leapp.snactor.fixture import current_actor_libraries + + +@@ -138,3 +145,114 @@ def test_failed_parsed_repofiles(monkeypatch): + + with pytest.raises(StopActorExecutionError): + get_repositories_status() ++ ++ ++@pytest.mark.parametrize('is_enabled', (True, False)) ++@mock.patch('leapp.libraries.actor.systemfacts.run') ++def test_get_secure_boot_state_ok(mocked_run: mock.MagicMock, is_enabled): ++ mocked_run.return_value = { ++ 'stdout': f'SecureBoot {"enabled" if is_enabled else "disabled"}' ++ } ++ ++ out = _get_secure_boot_state() ++ ++ assert out == is_enabled ++ mocked_run.assert_called_once_with(['mokutil', '--sb-state']) ++ ++ ++@mock.patch('leapp.libraries.actor.systemfacts.run') ++def test_get_secure_boot_state_no_mokutil(mocked_run: mock.MagicMock): ++ mocked_run.side_effect = OSError ++ ++ out = _get_secure_boot_state() ++ ++ assert out is False ++ mocked_run.assert_called_once_with(['mokutil', '--sb-state']) ++ ++ ++@mock.patch('leapp.libraries.actor.systemfacts.run') ++def test_get_secure_boot_state_not_supported(mocked_run: mock.MagicMock): ++ cmd = ['mokutil', 
'--sb-state'] ++ result = { ++ 'stderr': "This system doesn't support Secure Boot", ++ 'exit_code': 255, ++ } ++ mocked_run.side_effect = CalledProcessError( ++ "Command mokutil --sb-state failed with exit code 255.", ++ cmd, ++ result ++ ) ++ ++ out = _get_secure_boot_state() ++ ++ assert out is None ++ mocked_run.assert_called_once_with(cmd) ++ ++ ++@mock.patch('leapp.libraries.actor.systemfacts.run') ++def test_get_secure_boot_state_failed(mocked_run: mock.MagicMock): ++ cmd = ['mokutil', '--sb-state'] ++ result = { ++ 'stderr': 'EFI variables are not supported on this system', ++ 'exit_code': 1, ++ } ++ mocked_run.side_effect = CalledProcessError( ++ "Command mokutil --sb-state failed with exit code 1.", ++ cmd, ++ result ++ ) ++ ++ with pytest.raises( ++ StopActorExecutionError, ++ match='Failed to determine SecureBoot state' ++ ): ++ _get_secure_boot_state() ++ ++ mocked_run.assert_called_once_with(cmd) ++ ++ ++def _ff(firmware, ppc64le_opal, is_secureboot): ++ return FirmwareFacts( ++ firmware=firmware, ++ ppc64le_opal=ppc64le_opal, ++ secureboot_enabled=is_secureboot ++ ) ++ ++ ++@pytest.mark.parametrize( ++ "has_sys_efi, has_sys_opal, is_ppc, secboot_state, expect", ++ [ ++ # 1. Standard BIOS on x86 ++ (False, False, False, None, _ff("bios", None, None)), ++ # 2. EFI on x86 with Secure Boot Enabled ++ (True, False, False, True, _ff("efi", None, True)), ++ # 3. EFI on x86 with Secure Boot Disabled ++ (True, False, False, False, _ff("efi", None, False)), ++ # 4. PPC64LE with OPAL (No EFI) ++ (False, True, True, None, _ff("bios", True, None)), ++ # 5. PPC64LE without OPAL (No EFI) ++ (False, False, True, None, _ff("bios", False, None)), ++ # 6. EFI on PPC64LE with OPAL ++ (True, True, True, True, _ff("efi", True, True)), ++ ] ++) ++def test_get_firmware_logic( ++ has_sys_efi, has_sys_opal, is_ppc, secboot_state, expect ++): ++ with mock.patch('os.path.isdir') as mock_isdir, \ ++ mock.patch('leapp.libraries.stdlib.api.current_actor') as mock_curr_actor, \ ++ mock.patch('leapp.libraries.actor.systemfacts._get_secure_boot_state') as mock_get_sb_state: ++ ++ mock_isdir.side_effect = lambda path: { ++ '/sys/firmware/efi': has_sys_efi, ++ '/sys/firmware/opal/': has_sys_opal ++ }.get(path, False) ++ ++ mock_curr_actor.return_value = CurrentActorMocked( ++ arch=architecture.ARCH_PPC64LE if is_ppc else architecture.ARCH_X86_64 ++ ) ++ mock_get_sb_state.return_value = secboot_state ++ ++ result = get_firmware() ++ ++ assert result == expect diff --git a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py -index c825c731..62a84a85 100644 +index c825c731..ee8f45e3 100644 --- a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py +++ b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py -@@ -155,9 +155,10 @@ def _import_gpg_keys(context, install_root_dir, target_major_version): +@@ -7,8 +7,18 @@ from leapp import reporting + from leapp.exceptions import StopActorExecution, StopActorExecutionError + from leapp.libraries.actor import constants + from leapp.libraries.common import distro, dnfplugin, mounting, overlaygen, repofileutils, rhsm, utils +-from leapp.libraries.common.config import get_env, get_product_type, get_source_distro_id, get_target_distro_id +-from leapp.libraries.common.config.version import get_target_major_version, get_target_version ++from leapp.libraries.common.config import ( ++ get_env, ++ 
get_product_type, ++ get_source_distro_id, ++ get_target_distro_id, ++ is_conversion ++) ++from leapp.libraries.common.config.version import ( ++ get_source_major_version, ++ get_target_major_version, ++ get_target_version ++) + from leapp.libraries.common.gpg import get_path_to_gpg_certs, is_nogpgcheck_set + from leapp.libraries.stdlib import api, CalledProcessError, config, run + from leapp.models import RequiredTargetUserspacePackages # deprecated +@@ -155,9 +165,10 @@ def _import_gpg_keys(context, install_root_dir, target_major_version): # installation of initial packages try: # Import also any other keys provided by the customer in the same directory @@ -5196,7 +19848,7 @@ index c825c731..62a84a85 100644 except CalledProcessError as exc: raise StopActorExecutionError( message=( -@@ -660,6 +661,7 @@ def _prep_repository_access(context, target_userspace): +@@ -660,6 +671,7 @@ def _prep_repository_access(context, target_userspace): run(["chroot", target_userspace, "/bin/bash", "-c", "su - -c update-ca-trust"]) if not rhsm.skip_rhsm(): @@ -5204,6 +19856,136 @@ index c825c731..62a84a85 100644 run(['rm', '-rf', os.path.join(target_etc, 'rhsm')]) context.copytree_from('/etc/rhsm', os.path.join(target_etc, 'rhsm')) +@@ -820,7 +832,15 @@ def _inhibit_on_duplicate_repos(repofiles): + + + def _get_all_available_repoids(context): +- repofiles = repofileutils.get_parsed_repofiles(context) ++ try: ++ repofiles = repofileutils.get_parsed_repofiles(context) ++ except repofileutils.InvalidRepoDefinition as e: ++ raise StopActorExecutionError( ++ message="Failed to parse available repoids: {}".format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository definition is correct or remove it ' ++ 'if the repository is not required for the upgrade.' ++ }) + # TODO: this is not good solution, but keep it as it is now + # Issue: #486 + if rhsm.skip_rhsm(): +@@ -973,14 +993,29 @@ def _get_distro_available_repoids(context, indata): + provider has itw own rpm). + On other: Repositories are provided in specific repofiles (e.g. 
centos.repo + and centos-addons.repo on CS) ++ Exception: On CS8->CS9 and AL8->AL9 there are no distro-provided ++ repoids as the repofile layout and urls are different ++ Conversions: Only custom repos - no distro repoids (all distros) + + :return: A set of repoids provided by distribution + :rtype: set[str] + """ + distro_repoids = distro.get_target_distro_repoids(context) +- distro_id = get_target_distro_id() +- rhel_and_rhsm = distro_id == 'rhel' and not rhsm.skip_rhsm() +- if distro_id != 'rhel' or rhel_and_rhsm: ++ target_distro = get_target_distro_id() ++ rhel_and_rhsm = target_distro == 'rhel' and not rhsm.skip_rhsm() ++ is_source_cs8 = ( ++ get_source_distro_id() == "centos" and get_source_major_version() == '8' ++ ) ++ is_source_almalinux8 = ( ++ get_source_distro_id() == "almalinux" and get_source_major_version() == '8' ++ ) ++ ++ if ( ++ not is_conversion() # conversions only work with custom repos ++ and not (is_source_cs8 # there are no distro_repoids on CS8->CS9 ++ or is_source_almalinux8) # there are no distro_repoids on AL8->AL9 ++ and (target_distro != "rhel" or rhel_and_rhsm) ++ ): + _inhibit_if_no_base_repos(distro_repoids) + + if indata and indata.rhui_info: +@@ -1302,7 +1337,15 @@ def setup_target_rhui_access_if_needed(context, indata): + copied_repofiles = [copy.src for copy in copy_tasks if copy.src.endswith('.repo')] + copied_repoids = set() + for repofile in copied_repofiles: +- repofile_contents = repofileutils.parse_repofile(repofile) ++ try: ++ repofile_contents = repofileutils.parse_repofile(repofile) ++ except repofileutils.InvalidRepoDefinition as e: ++ raise StopActorExecutionError( ++ message="Failed to parse repositories for RHUI: {}".format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository definition is correct or remove it ' ++ 'if the repository is not required for the upgrade.' ++ }) + copied_repoids.update(entry.repoid for entry in repofile_contents.data) + + cmd += ['--disablerepo', '*'] +@@ -1403,7 +1446,17 @@ def perform(): + target_repoids = _gather_target_repositories(context, indata, prod_cert_path) + _create_target_userspace(context, indata, indata.packages, indata.files, target_repoids) + # TODO: this is tmp solution as proper one needs significant refactoring +- target_repo_facts = repofileutils.get_parsed_repofiles(context) ++ try: ++ target_repo_facts = repofileutils.get_parsed_repofiles(context) ++ except repofileutils.InvalidRepoDefinition as e: ++ raise StopActorExecutionError( ++ message="Failed to parse target system repofiles: {}".format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository definition is correct or remove it ' ++ 'if the repository is not needed anymore. ' ++ 'This issue is typically caused by missing definition of the name field. ' ++ 'For more information, see: https://access.redhat.com/solutions/6969001.' 
++ }) + api.produce(TMPTargetRepositoriesFacts(repositories=target_repo_facts)) + # ## TODO ends here + api.produce(UsedTargetRepositories( +diff --git a/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py b/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py +index d783843c..f4ce390f 100644 +--- a/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py ++++ b/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py +@@ -1216,17 +1216,23 @@ def test__get_distro_available_repoids_norhsm_norhui(monkeypatch): + assert repoids == set() + + ++@pytest.mark.parametrize("src_distro", ["rhel", "centos", "almalinux"]) + @pytest.mark.parametrize( +- "distro_id,skip_rhsm", [("rhel", False), ("centos", True), ("almalinux", True)] ++ "dst_distro, skip_rhsm", [("rhel", False), ("centos", True), ("almalinux", True)] + ) ++@pytest.mark.parametrize("src_ver, dst_ver", [("9.6", "10.2"), ("8.10", "9.6")]) + def test__get_distro_available_repoids_nobaserepos_inhibit( +- monkeypatch, distro_id, skip_rhsm ++ monkeypatch, src_distro, dst_distro, skip_rhsm, src_ver, dst_ver + ): + """ + Test that get_distro_available repoids reports and raises if there are no base repos. + """ + monkeypatch.setattr( +- userspacegen.api, "current_actor", CurrentActorMocked(dst_distro=distro_id) ++ userspacegen.api, ++ "current_actor", ++ CurrentActorMocked( ++ src_distro=src_distro, dst_distro=dst_distro, src_ver=src_ver, dst_ver=dst_ver ++ ), + ) + monkeypatch.setattr(userspacegen.api.current_actor(), 'produce', produce_mocked()) + monkeypatch.setattr(reporting, "create_report", create_report_mocked()) +@@ -1235,6 +1241,12 @@ def test__get_distro_available_repoids_nobaserepos_inhibit( + monkeypatch.setattr(distro, 'get_target_distro_repoids', lambda ctx: []) + + indata = testInData(_PACKAGES_MSGS, None, None, _XFS_MSG, _STORAGEINFO_MSG, None) ++ ++ if src_distro in ("centos", "almalinux") and src_ver == "8.10" or src_distro != dst_distro: ++ # should not raise on CS 8to9, AL 8to9, and when converting ++ userspacegen._get_distro_available_repoids(None, indata) ++ return ++ + with pytest.raises(StopActorExecution): + # NOTE: context is not used without rhsm, for simplicity setting to None + userspacegen._get_distro_available_repoids(None, indata) diff --git a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py b/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py index 6377f767..4c5420f6 100644 --- a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py @@ -5230,6 +20012,44 @@ index 6377f767..4c5420f6 100644 return pubkeys +diff --git a/repos/system_upgrade/common/actors/updategrubcore/libraries/updategrubcore.py b/repos/system_upgrade/common/actors/updategrubcore/libraries/updategrubcore.py +index 6a116db4..cc9bf280 100644 +--- a/repos/system_upgrade/common/actors/updategrubcore/libraries/updategrubcore.py ++++ b/repos/system_upgrade/common/actors/updategrubcore/libraries/updategrubcore.py +@@ -1,4 +1,5 @@ + from leapp import reporting ++from leapp.exceptions import StopActorExecution + from leapp.libraries.common import grub + from leapp.libraries.common.config import architecture + from leapp.libraries.stdlib import api, CalledProcessError, config, run +@@ -61,7 +62,11 @@ def process(): + return + ff = next(api.consume(FirmwareFacts), None) + if ff and 
ff.firmware == 'bios': +- grub_devs = grub.get_grub_devices() ++ try: ++ grub_devs = grub.get_grub_devices() ++ except grub.GRUBDeviceError as err: ++ api.current_logger().warning('Failed to detect GRUB devices: %s', err) ++ raise StopActorExecution() + if grub_devs: + update_grub_core(grub_devs) + else: +diff --git a/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py b/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py +index 93816103..2262e326 100644 +--- a/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py ++++ b/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py +@@ -107,9 +107,7 @@ def test_update_grub_nogrub_system_ibmz(monkeypatch): + + def test_update_grub_nogrub_system(monkeypatch): + def get_grub_devices_mocked(): +- # this is not very well documented, but the grub.get_grub_devices function raises a StopActorExecution on error +- # (whether that's caused by determining root partition or determining the block device a given partition is on +- raise StopActorExecution() ++ raise grub.GRUBDeviceError() + + monkeypatch.setattr(grub, 'get_grub_devices', get_grub_devices_mocked) + monkeypatch.setattr(reporting, 'create_report', testutils.create_report_mocked()) diff --git a/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py b/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py new file mode 100644 index 00000000..dbf86974 @@ -5432,25 +20252,17 @@ index 00000000..6a41d4e5 + # for repomap in vendor_repomap_collection.maps: + # self.produce(repomap) diff --git a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json -index 24bc93ba..3bd7376c 100644 +index b17e8a66..3bd7376c 100644 --- a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json -@@ -1,18 +1,26 @@ - { -- "keys": [ -- "51d6647ec21ad6ea", -- "d36cb86cb86b3716", -- "2ae81e8aced7258b", -- "429785e181b961a5", -- "d07bf2a08d50eb66" -- ], -+ "keys": { -+ "51d6647ec21ad6ea": ["gpg-pubkey-3abb34f8-5ffd890e"], -+ "d36cb86cb86b3716": ["gpg-pubkey-ced7258b-6525146f"], -+ "2ae81e8aced7258b": ["gpg-pubkey-b86b3716-61e69f29"], +@@ -3,15 +3,24 @@ + "51d6647ec21ad6ea": ["gpg-pubkey-3abb34f8-5ffd890e"], + "d36cb86cb86b3716": ["gpg-pubkey-ced7258b-6525146f"], + "2ae81e8aced7258b": ["gpg-pubkey-b86b3716-61e69f29"], +- "429785e181b961a5": ["gpg-pubkey-81b961a5-64106f70"] + "429785e181b961a5": ["gpg-pubkey-81b961a5-64106f70"], + "d07bf2a08d50eb66": [] -+ }, + }, "obsoleted-keys": { "7": [], - "8": [], @@ -5471,23 +20283,17 @@ index 24bc93ba..3bd7376c 100644 + } diff --git a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json -index fe85e03c..8056e825 100644 +index 1092ff58..8056e825 100644 --- a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json -@@ -1,10 +1,24 @@ - { -- "keys": [ -- "24c6a8a7f4a80eb5", -- "05b555b38483c65d", -- "4eb84e71f2ee9d55" -- ], -+ "keys": { -+ "24c6a8a7f4a80eb5": [], -+ "4eb84e71f2ee9d55": [], -+ "05b555b38483c65d": ["gpg-pubkey-8483c65d-5ccc5b19"], +@@ -3,9 +3,22 @@ + "24c6a8a7f4a80eb5": [], + "4eb84e71f2ee9d55": [], + "05b555b38483c65d": ["gpg-pubkey-8483c65d-5ccc5b19"], +- "1ff6a2171d997668": 
["gpg-pubkey-1d997668-621e3cac", "gpg-pubkey-1d997668-61bae63b"] + "1ff6a2171d997668": ["gpg-pubkey-1d997668-621e3cac", "gpg-pubkey-1d997668-61bae63b"], + "6c7cb6ef305d49d6": [] -+ }, + }, "obsoleted-keys": { + "7": [], + "8": [ @@ -5563,44 +20369,20 @@ index 00000000..a53775cf + } +} diff --git a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json -index 3cc67f82..0b989984 100644 +index d6c2328d..0b989984 100644 --- a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json -@@ -1,19 +1,28 @@ - { -- "keys": [ -- "199e2f91fd431d51", -- "5326810137017186", -- "938a80caf21541eb", -- "fd372689897da07a", -- "45689c882fa658e0" -- ], -+ "keys": { -+ "199e2f91fd431d51": ["gpg-pubkey-fd431d51-4ae0493b"], -+ "5326810137017186": ["gpg-pubkey-37017186-45761324"], -+ "938a80caf21541eb": ["gpg-pubkey-f21541eb-4a5233e8"], -+ "fd372689897da07a": ["gpg-pubkey-897da07a-3c979a7f"], -+ "45689c882fa658e0": ["gpg-pubkey-2fa658e0-45700c69"], -+ "f76f66c3d4082792": ["gpg-pubkey-d4082792-5b32db75"], -+ "5054e4a45a6340b3": ["gpg-pubkey-5a6340b3-6229229e"], -+ "219180cddb42a60e": ["gpg-pubkey-db42a60e-37ea5438"] -+ }, - "obsoleted-keys": { - "7": [], - "8": [ - "gpg-pubkey-2fa658e0-45700c69", - "gpg-pubkey-37017186-45761324", -- "gpg-pubkey-db42a60e-37ea5438" -+ "gpg-pubkey-db42a60e-37ea5438", -+ "gpg-pubkey-897da07a-3c979a7f" -+ ], +@@ -17,7 +17,12 @@ + "gpg-pubkey-db42a60e-37ea5438", + "gpg-pubkey-897da07a-3c979a7f" + ], +- "9": ["gpg-pubkey-d4082792-5b32db75"], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" - ], -- "9": ["gpg-pubkey-d4082792-5b32db75"], ++ ], "10": ["gpg-pubkey-fd431d51-4ae0493b"] } } @@ -5683,11 +20465,82 @@ index 63910fe0..4e8b380d 100644 self.base.distro_sync() if self.opts.tid[0] == 'check': +diff --git a/repos/system_upgrade/common/files/upgrade_paths.json b/repos/system_upgrade/common/files/upgrade_paths.json +index 39fbe6c1..ca0fe590 100644 +--- a/repos/system_upgrade/common/files/upgrade_paths.json ++++ b/repos/system_upgrade/common/files/upgrade_paths.json +@@ -2,19 +2,18 @@ + "rhel": { + "default": { + "7.9": ["8.10"], +- "8.10": ["9.4", "9.6", "9.7", "9.8"], ++ "8.10": ["9.6", "9.8"], + "9.6": ["10.0"], +- "9.7": ["10.1"], + "9.8": ["10.2"], + "7": ["8.10"], +- "8": ["9.4", "9.6"], ++ "8": ["9.6", "9.8"], + "9": ["10.0"] + }, + "saphana": { + "7.9": ["8.10"], + "7": ["8.10"], +- "8.10": ["9.6", "9.4"], +- "8": ["9.6", "9.4"], ++ "8.10": ["9.8", "9.6"], ++ "8": ["9.8", "9.6"], + "9.6": ["10.0"], + "9.8": ["10.2"], + "9": ["10.0"] +diff --git a/repos/system_upgrade/common/libraries/config/__init__.py b/repos/system_upgrade/common/libraries/config/__init__.py +index 396c524a..8a2b4e35 100644 +--- a/repos/system_upgrade/common/libraries/config/__init__.py ++++ b/repos/system_upgrade/common/libraries/config/__init__.py +@@ -141,3 +141,19 @@ def get_target_distro_id(): + :rtype: str + """ + return api.current_actor().configuration.distro.target ++ ++ ++def is_conversion(): ++ """ ++ Return whether a conversion is happening during the upgrade. ++ ++ Conversions in means that a target distro different from source distro was ++ specified. ++ ++ This is a wrapper which compares source and target distro IDs. This can also ++ be helpful for testing. 
++ ++ :return: True if converting False otherwise ++ :rtype: bool ++ """ ++ return get_source_distro_id() != get_target_distro_id() +diff --git a/repos/system_upgrade/common/libraries/config/version.py b/repos/system_upgrade/common/libraries/config/version.py +index c9bc3fb2..5c1e30c6 100644 +--- a/repos/system_upgrade/common/libraries/config/version.py ++++ b/repos/system_upgrade/common/libraries/config/version.py +@@ -16,7 +16,7 @@ OP_MAP = { + + _SUPPORTED_VERSIONS = { + '8': {'rhel': ['8.10'], 'rhel-saphana': ['8.10']}, +- '9': {'rhel': ['9.6'], 'rhel-saphana': ['9.6']}, ++ '9': {'rhel': ['9.6', '9.8'], 'rhel-saphana': ['9.6', '9.8']}, + } + + diff --git a/repos/system_upgrade/common/libraries/distro.py b/repos/system_upgrade/common/libraries/distro.py -index 04e553ac..b342d4fc 100644 +index 04e553ac..734d152b 100644 --- a/repos/system_upgrade/common/libraries/distro.py +++ b/repos/system_upgrade/common/libraries/distro.py -@@ -7,6 +7,7 @@ from leapp.libraries.common.config import get_target_distro_id +@@ -2,11 +2,12 @@ import json + import os + + from leapp.exceptions import StopActorExecutionError +-from leapp.libraries.common import repofileutils, rhsm ++from leapp.libraries.common import efi, repofileutils, rhsm + from leapp.libraries.common.config import get_target_distro_id from leapp.libraries.common.config.architecture import ARCH_ACCEPTED, ARCH_X86_64 from leapp.libraries.common.config.version import get_target_major_version from leapp.libraries.stdlib import api @@ -5716,19 +20569,67 @@ index 04e553ac..b342d4fc 100644 # distro -> major_version -> repofile -> tuple of architectures where it's present _DISTRO_REPOFILES_MAP = { -@@ -68,6 +76,7 @@ _DISTRO_REPOFILES_MAP = { - '/etc/yum.repos.d/almalinux.repo': ARCH_ACCEPTED, - }, - '9': { -+ '/etc/yum.repos.d/almalinux.repo': ARCH_ACCEPTED, - '/etc/yum.repos.d/almalinux-appstream.repo': ARCH_ACCEPTED, - '/etc/yum.repos.d/almalinux-baseos.repo': ARCH_ACCEPTED, - '/etc/yum.repos.d/almalinux-crb.repo': ARCH_ACCEPTED, +@@ -177,7 +185,18 @@ def get_distro_repoids(context, distro, major_version, arch): + # TODO: very similar thing should happens for all other repofiles in container + return rhsm.get_available_repo_ids(context) + +- repofiles = repofileutils.get_parsed_repofiles(context) ++ try: ++ repofiles = repofileutils.get_parsed_repofiles(context) ++ except repofileutils.InvalidRepoDefinition as e: ++ raise StopActorExecutionError( ++ message="Failed to get distro provided repositories: {}".format(str(e)), ++ details={ ++ 'hint': 'Ensure the repository definition is correct or remove it ' ++ 'if the repository is not needed anymore. ' ++ 'This issue is typically caused by missing definition of the name field. ' ++ 'For more information, see: https://access.redhat.com/solutions/6969001.' ++ }) ++ + distro_repofiles = _get_distro_repofiles(distro, major_version, arch) + if not distro_repofiles: + # TODO: a different way of signaling an error would be preferred (e.g. returning None), +@@ -208,3 +227,35 @@ def get_distro_repoids(context, distro, major_version, arch): + distro_repoids.extend([repo.repoid for repo in rfile.data]) + + return sorted(distro_repoids) ++ ++ ++def distro_id_to_pretty_name(distro_id): ++ """ ++ Get pretty name for the given distro id. ++ ++ The pretty name is what is found in the NAME field of /etc/os-release. 
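++    # e.g. distro_id_to_pretty_name("rhel") -> "Red Hat Enterprise Linux"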
++ """ ++ return { ++ "rhel": "Red Hat Enterprise Linux", ++ "centos": "CentOS Stream", ++ "almalinux": "AlmaLinux", ++ }[distro_id] ++ ++ ++def get_distro_efidir_canon_path(distro_id): ++ """ ++ Get canonical path to the distro EFI directory in the EFI mountpoint. ++ ++ NOTE: The path might be incorrect for distros not properly enabled for IPU, ++ when enabling new distros in the codebase, make sure the path is correct. ++ """ ++ if distro_id == "rhel": ++ return os.path.join(efi.EFI_MOUNTPOINT, "EFI", "redhat") ++ ++ if distro_id == "almalinux": ++ return os.path.join(efi.EFI_MOUNTPOINT, "EFI", "almalinux") ++ ++ if distro_id == "centos": ++ return os.path.join(efi.EFI_MOUNTPOINT, "EFI", "centos") ++ ++ return os.path.join(efi.EFI_MOUNTPOINT, "EFI", distro_id) diff --git a/repos/system_upgrade/common/libraries/dnfplugin.py b/repos/system_upgrade/common/libraries/dnfplugin.py -index 1af52dc5..66b89aed 100644 +index 7e1fd497..a42af5ca 100644 --- a/repos/system_upgrade/common/libraries/dnfplugin.py +++ b/repos/system_upgrade/common/libraries/dnfplugin.py -@@ -90,6 +90,7 @@ def build_plugin_data(target_repoids, debug, test, tasks, on_aws): +@@ -89,6 +89,7 @@ def build_plugin_data(target_repoids, debug, test, tasks, on_aws): 'to_install': sorted(tasks.to_install), 'to_remove': sorted(tasks.to_remove), 'to_upgrade': sorted(tasks.to_upgrade), @@ -5736,6 +20637,455 @@ index 1af52dc5..66b89aed 100644 'modules_to_enable': sorted(['{}:{}'.format(m.name, m.stream) for m in tasks.modules_to_enable]), }, 'dnf_conf': { +@@ -270,26 +271,24 @@ def _transaction(context, stage, target_repoids, tasks, plugin_info, xfs_info, + # allow handling new RHEL 9 syscalls by systemd-nspawn + env = {'SYSTEMD_SECCOMP': '0'} + +- # We need to reset modules twice, once before we check, and the second time before we actually perform +- # the upgrade. Not more often as the modules will be reset already. +- if stage in ('check', 'upgrade') and tasks.modules_to_reset: +- # We shall only reset modules that are not going to be enabled +- # This will make sure it is so +- modules_to_reset = {(module.name, module.stream) for module in tasks.modules_to_reset} +- modules_to_enable = {(module.name, module.stream) for module in tasks.modules_to_enable} +- module_reset_list = [module[0] for module in modules_to_reset - modules_to_enable] +- # Perform module reset +- cmd = ['/usr/bin/dnf', 'module', 'reset', '--enabled', ] + module_reset_list +- cmd += ['--disablerepo', '*', '-y', '--installroot', '/installroot'] +- try: +- context.call( +- cmd=cmd_prefix + cmd + common_params, +- callback_raw=utils.logging_handler, +- env=env +- ) +- except (CalledProcessError, OSError): +- api.current_logger().debug('Failed to reset modules via dnf with an error. 
Ignoring.', +- exc_info=True) ++ if tasks.modules_to_reset: ++ # We shall only reset modules that are not going to be enabled ++ # This will make sure it is so ++ modules_to_reset = {(module.name, module.stream) for module in tasks.modules_to_reset} ++ modules_to_enable = {(module.name, module.stream) for module in tasks.modules_to_enable} ++ module_reset_list = [module[0] for module in modules_to_reset - modules_to_enable] ++ # Perform module reset ++ cmd = ['/usr/bin/dnf', 'module', 'reset', '--enabled', ] + module_reset_list ++ cmd += ['--disablerepo', '*', '-y', '--installroot', '/installroot'] ++ try: ++ context.call( ++ cmd=cmd_prefix + cmd + common_params, ++ callback_raw=utils.logging_handler, ++ env=env ++ ) ++ except (CalledProcessError, OSError): ++ api.current_logger().debug('Failed to reset modules via dnf with an error. Ignoring.', ++ exc_info=True) + + cmd = [ + '/usr/bin/dnf', +diff --git a/repos/system_upgrade/common/libraries/efi.py b/repos/system_upgrade/common/libraries/efi.py +new file mode 100644 +index 00000000..c30d67e0 +--- /dev/null ++++ b/repos/system_upgrade/common/libraries/efi.py +@@ -0,0 +1,362 @@ ++import os ++import re ++ ++from leapp.libraries.common.partitions import ( ++ _get_partition_for_dir, ++ blk_dev_from_partition, ++ get_partition_number, ++ StorageScanError ++) ++from leapp.libraries.stdlib import api, CalledProcessError, run ++ ++EFI_MOUNTPOINT = '/boot/efi/' ++"""The path to the required mountpoint for ESP.""" ++ ++ ++class EFIError(Exception): ++ """ ++ Exception raised when EFI operation failed. ++ """ ++ ++ ++def canonical_path_to_efi_format(canonical_path): ++ r""" ++ Transform the canonical path to the UEFI format. ++ ++ e.g. /boot/efi/EFI/redhat/shimx64.efi -> \EFI\redhat\shimx64.efi ++ (just single backslash; so the string needs to be put into apostrophes ++ when used for /usr/sbin/efibootmgr cmd) ++ ++ The path has to start with /boot/efi otherwise the path is invalid for UEFI. ++ """ ++ ++ # We want to keep the last "/" of the EFI_MOUNTPOINT ++ return canonical_path.replace(EFI_MOUNTPOINT[:-1], "").replace("/", "\\") ++ ++ ++class EFIBootLoaderEntry: ++ """ ++ Representation of an UEFI boot loader entry. ++ """ ++ ++ def __init__(self, boot_number, label, active, efi_bin_source): ++ self.boot_number = boot_number ++ """Expected string, e.g. '0001'. """ ++ ++ self.label = label ++ """Label of the UEFI entry. E.g. 'Redhat'""" ++ ++ self.active = active ++ """True when the UEFI entry is active (asterisk is present next to the boot number)""" ++ ++ self.efi_bin_source = efi_bin_source ++ """Source of the UEFI binary. ++ ++ It could contain various values, e.g.: ++ FvVol(7cb8bdc9-f8eb-4f34-aaea-3ee4af6516a1)/FvFile(462caa21-7614-4503-836e-8ab6f4662331) ++ HD(1,GPT,28c77f6b-3cd0-4b22-985f-c99903835d79,0x800,0x12c000)/File(\\EFI\\redhat\\shimx64.efi) ++ PciRoot(0x0)/Pci(0x2,0x3)/Pci(0x0,0x0)N.....YM....R,Y. ++ """ ++ ++ def __eq__(self, other): ++ return all( ++ [ ++ self.boot_number == other.boot_number, ++ self.label == other.label, ++ self.active == other.active, ++ self.efi_bin_source == other.efi_bin_source, ++ ] ++ ) ++ ++ def __ne__(self, other): ++ return not self.__eq__(other) ++ ++ def __repr__(self): ++ return 'EFIBootLoaderEntry({boot_number}, {label}, {active}, {efi_bin_source})'.format( ++ boot_number=repr(self.boot_number), ++ label=repr(self.label), ++ active=repr(self.active), ++ efi_bin_source=repr(self.efi_bin_source) ++ ) ++ ++ def is_referring_to_file(self): ++ """Return True when the boot source is a file. 
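++        For example, an efi_bin_source ending with '/File(\\EFI\\redhat\\shimx64.efi)'
++        refers to a file, while a PXE-style 'PciRoot(...)' source does not.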
++ ++ Some sources could refer e.g. to PXE boot. Return true if the source ++ refers to a file ("ends with /File(...path...)") ++ ++ Does not matter whether the file exists or not. ++ """ ++ return '/File(\\' in self.efi_bin_source ++ ++ @staticmethod ++ def _efi_path_to_canonical(efi_path): ++ return os.path.join(EFI_MOUNTPOINT, efi_path.replace("\\", "/").lstrip("/")) ++ ++ def get_canonical_path(self): ++ """Return expected canonical path for the referred UEFI bin or None. ++ ++ Return None in case the entry is not referring to any UEFI bin ++ (e.g. when it refers to a PXE boot). ++ """ ++ if not self.is_referring_to_file(): ++ return None ++ match = re.search(r'/File\((?P\\.*)\)$', self.efi_bin_source) ++ return EFIBootLoaderEntry._efi_path_to_canonical(match.groups('path')[0]) ++ ++ ++class EFIBootInfo: ++ """ ++ Data about the current UEFI boot configuration. ++ ++ :raises EFIError: when unable to obtain info about the UEFI configuration, ++ BIOS is detected or ESP is not mounted where expected. ++ """ ++ ++ def __init__(self): ++ if not is_efi(): ++ raise EFIError('Unable to collect data about UEFI on a BIOS system.') ++ try: ++ result = run(['/usr/sbin/efibootmgr', '-v']) ++ except CalledProcessError: ++ raise EFIError('Unable to get information about UEFI boot entries.') ++ ++ bootmgr_output = result['stdout'] ++ ++ self.current_bootnum = None ++ """The boot number (str) of the current boot.""" ++ self.next_bootnum = None ++ """The boot number (str) of the next boot.""" ++ self.boot_order = tuple() ++ """The tuple of the UEFI boot loader entries in the boot order.""" ++ self.entries = {} ++ """The UEFI boot loader entries {'boot_number': EFIBootLoaderEntry}""" ++ ++ self._parse_efi_boot_entries(bootmgr_output) ++ self._parse_current_bootnum(bootmgr_output) ++ self._parse_next_bootnum(bootmgr_output) ++ self._parse_boot_order(bootmgr_output) ++ self._print_loaded_info() ++ ++ def _parse_efi_boot_entries(self, bootmgr_output): ++ """ ++ Return dict of UEFI boot loader entries: {"": EFIBootLoader} ++ """ ++ ++ self.entries = {} ++ regexp_entry = re.compile( ++ r"^Boot(?P[a-zA-Z0-9]+)(?P\*?)\s*(?P