diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 3e595e32..4b07e4b3 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: codespell-project/actions-codespell@v2 with: ignore_words_list: ro,fo,couldn,repositor,zeor,bootup diff --git a/.github/workflows/differential-shellcheck.yml b/.github/workflows/differential-shellcheck.yml index e1bafb93..6c81713c 100644 --- a/.github/workflows/differential-shellcheck.yml +++ b/.github/workflows/differential-shellcheck.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Repository checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index ed82e0e5..d1b8fb2a 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -19,40 +19,40 @@ jobs: - name: 'Unit tests (python:3.12; repos:el9toel10,common)' python: python3.12 repos: 'el9toel10,common' - container: ubi9 + container: el9 - name: 'Linters (python:3.12; repos:el9toel10,common)' python: python3.12 repos: 'el9toel10,common' - container: ubi9-lint + container: el9-lint - name: 'Unit tests (python:3.9; repos:el9toel10,common)' python: python3.9 repos: 'el9toel10,common' - container: ubi9 + container: el9 - name: 'Linters (python:3.9; repos:el9toel10,common)' python: python3.9 repos: 'el9toel10,common' - container: ubi9-lint + container: el9-lint # 8to9 - name: 'Unit tests (python:3.9; repos:el8toel9,common)' python: python3.9 repos: 'el8toel9,common' - container: ubi9 + container: el9 - name: 'Linters (python:3.9; repos:el8toel9,common)' python: python3.9 repos: 'el8toel9,common' - container: ubi9-lint + container: el9-lint - name: 'Unit tests (python:3.6; repos:el8toel9,common)' python: python3.6 repos: 'el8toel9,common' - container: ubi8 + container: el8 - name: 
'Linters (python:3.6; repos:el8toel9,common)' python: python3.6 repos: 'el8toel9,common' - container: ubi8-lint + container: el8-lint steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: # NOTE(ivasilev) fetch-depth 0 is critical here as leapp deps discovery depends on specific substring in # commit message and default 1 option will get us just merge commit which has an unrelevant message. @@ -63,4 +63,10 @@ jobs: run: | git branch -f main origin/main - name: ${{matrix.scenarios.name}} - run: script -e -c /bin/bash -c 'TERM=xterm podman build --security-opt=seccomp=unconfined -t leapp-tests -f utils/container-tests/Containerfile.${{matrix.scenarios.container}} utils/container-tests && PYTHON_VENV=${{matrix.scenarios.python}} REPOSITORIES=${{matrix.scenarios.repos}} podman run --security-opt=seccomp=unconfined --rm -ti -v ${PWD}:/payload --env=PYTHON_VENV --env=REPOSITORIES leapp-tests' + run: | + script -e -c /bin/bash -c \ + 'TERM=xterm \ + podman build -t leapp-tests -f utils/container-tests/ci/Containerfile.${{matrix.scenarios.container}} . && \ + PYTHON_VENV=${{matrix.scenarios.python}} \ + REPOSITORIES=${{matrix.scenarios.repos}} \ + podman run --rm -ti -v ${PWD}:/payload --env=PYTHON_VENV --env=REPOSITORIES leapp-tests' diff --git a/.gitignore b/.gitignore index 0bb92d3d..a04c7ded 100644 --- a/.gitignore +++ b/.gitignore @@ -115,6 +115,7 @@ ENV/ # visual studio code configuration .vscode +*.code-workspace # pycharm .idea diff --git a/.packit.yaml b/.packit.yaml index 607dff93..0c3f682a 100644 --- a/.packit.yaml +++ b/.packit.yaml @@ -104,6 +104,8 @@ jobs: # is the last RHEL 8 release and all new future tests will start from this # one release. 
+# This job is never triggered - we define abstract anchor that are reused in jobs that 'inherit' +# and have actionable triggers - &sanity-abstract-8to9 job: tests trigger: ignore @@ -116,6 +118,47 @@ jobs: epel-8-x86_64: distros: [RHEL-8.10.0-Nightly] identifier: sanity-abstract-8to9 + tf_extra_params: + test: + tmt: + plan_filter: 'tag:8to9' + environments: + - &tmt-env-settings-810to94 + tmt: + context: &tmt-context-810to94 + distro: "rhel-8.10" + distro_target: "rhel-9.4" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-810to96 + tmt: + context: &tmt-context-810to96 + distro: "rhel-8.10" + distro_target: "rhel-9.6" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-810to97 + tmt: + context: &tmt-context-810to97 + distro: "rhel-8.10" + distro_target: "rhel-9.7" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-810to98 + tmt: + context: &tmt-context-810to98 + distro: "rhel-8.10" + distro_target: "rhel-9.8" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test - &sanity-abstract-8to9-aws <<: *sanity-abstract-8to9 @@ -147,7 +190,10 @@ jobs: # ######################### Individual tests ########################### # # ###################################################################### # -# Tests: 8.10 -> 9.4 +# ###################################################################### # +# ############################# 8.10 > 9.4 ############################# # +# ###################################################################### # + - &sanity-810to94 <<: *sanity-abstract-8to9 trigger: pull_request @@ -155,17 +201,10 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:tier0 & enabled:true' + plan_filter: 'tag:8to9 & tag:tier0 & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.4" - settings: - 
provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test - env: + - *tmt-env-settings-810to94 + env: &env-810to94 SOURCE_RELEASE: "8.10" TARGET_RELEASE: "9.4" LEAPP_TARGET_PRODUCT_CHANNEL: "EUS" @@ -182,20 +221,11 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.4" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-810to94 env: - SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.4" - LEAPP_TARGET_PRODUCT_CHANNEL: "EUS" + <<: *env-810to94 # On-demand kernel-rt tests - &kernel-rt-810to94 @@ -209,22 +239,22 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' environments: - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.4" + context: *tmt-context-810to94 settings: provisioning: tags: BusinessUnit: sst_upgrades@leapp_upstream_test env: - SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.4" - LEAPP_TARGET_PRODUCT_CHANNEL: "EUS" + <<: *env-810to94 + + +# ###################################################################### # +# ############################# 8.10 > 9.6 ############################# # +# ###################################################################### # -# Tests: 8.10 -> 9.6 - &sanity-810to96 <<: *sanity-abstract-8to9 trigger: pull_request @@ -232,17 +262,10 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:tier0 & enabled:true' + plan_filter: 'tag:8to9 & tag:tier0 & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.6" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test - env: + - *tmt-env-settings-810to96 + env: &env-810to96 SOURCE_RELEASE: "8.10" TARGET_RELEASE: 
"9.6" @@ -258,19 +281,11 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.6" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-810to96 env: - SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.6" + <<: *env-810to96 # On-demand kernel-rt tests - &kernel-rt-810to96 @@ -284,21 +299,38 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' + environments: + - *tmt-env-settings-810to96 + env: + <<: *env-810to96 + +- &sanity-810to96-aws + <<: *sanity-abstract-8to9-aws + trigger: pull_request + targets: + epel-8-x86_64: + distros: [RHEL-8.10-rhui] + identifier: sanity-8.10to9.6-aws + tf_extra_params: + test: + tmt: + plan_filter: 'tag:8to9 & tag:rhui-aws-tier0 & enabled:true & tag:-rhsm' environments: - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.6" + context: *tmt-context-810to96 settings: provisioning: tags: BusinessUnit: sst_upgrades@leapp_upstream_test env: - SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.6" + <<: *env-810to96 + + +# ###################################################################### # +# ############################# 8.10 > 9.7 ############################# # +# ###################################################################### # -# Tests: 8.10 -> 9.7 - &sanity-810to97 <<: *sanity-abstract-8to9 trigger: pull_request @@ -306,17 +338,10 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:tier0 & enabled:true' + plan_filter: 'tag:8to9 & tag:tier0 & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.7" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test - env: + - 
*tmt-env-settings-810to97 + env: &env-810to97 SOURCE_RELEASE: "8.10" TARGET_RELEASE: "9.7" @@ -332,19 +357,11 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.7" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-810to97 env: - SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.7" + <<: *env-810to97 # On-demand kernel-rt tests - &kernel-rt-810to97 @@ -358,19 +375,65 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-8.10" - distro_target: "rhel-9.7" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-810to97 env: + <<: *env-810to97 + +# ###################################################################### # +# ############################# 8.10 > 9.8 ############################# # +# ###################################################################### # + +- &sanity-810to98 + <<: *sanity-abstract-8to9 + trigger: pull_request + identifier: sanity-8.10to9.8 + tf_extra_params: + test: + tmt: + plan_filter: 'tag:8to9 & tag:tier0 & enabled:true' + environments: + - *tmt-env-settings-810to98 + env: &env-810to98 SOURCE_RELEASE: "8.10" - TARGET_RELEASE: "9.7" + TARGET_RELEASE: "9.8" + +# On-demand minimal beaker tests +- &beaker-minimal-810to98 + <<: *beaker-minimal-8to9-abstract-ondemand + trigger: pull_request + labels: + - beaker-minimal + - beaker-minimal-8.10to9.8 + - 8.10to9.8 + identifier: sanity-8.10to9.8-beaker-minimal-ondemand + tf_extra_params: + test: + tmt: + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + environments: + - *tmt-env-settings-810to98 + env: + <<: *env-810to98 + +# 
On-demand kernel-rt tests +- &kernel-rt-810to98 + <<: *kernel-rt-abstract-8to9-ondemand + trigger: pull_request + labels: + - kernel-rt + - kernel-rt-8.10to9.8 + - 8.10to9.8 + identifier: sanity-8.10to9.8-kernel-rt-ondemand + tf_extra_params: + test: + tmt: + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + environments: + - *tmt-env-settings-810to98 + env: + <<: *env-810to98 # ###################################################################### # # ############################## 9 TO 10 ################################ # @@ -392,6 +455,38 @@ jobs: epel-9-x86_64: distros: [RHEL-9.6.0-Nightly] identifier: sanity-abstract-9to10 + tf_extra_params: + test: + tmt: + plan_filter: 'tag:9to10' + environments: + - &tmt-env-settings-96to100 + tmt: + context: &tmt-context-96to100 + distro: "rhel-9.6" + distro_target: "rhel-10.0" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-97to101 + tmt: + context: &tmt-context-97to101 + distro: "rhel-9.7" + distro_target: "rhel-10.1" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test + - &tmt-env-settings-98to102 + tmt: + context: &tmt-context-98to102 + distro: "rhel-9.8" + distro_target: "rhel-10.2" + settings: + provisioning: + tags: + BusinessUnit: sst_upgrades@leapp_upstream_test - &sanity-abstract-9to10-aws <<: *sanity-abstract-9to10 @@ -423,7 +518,10 @@ jobs: # ######################### Individual tests ########################### # # ###################################################################### # -# Tests: 9.6 -> 10.0 +# ###################################################################### # +# ############################# 9.6 > 10.0 ############################# # +# ###################################################################### # + - &sanity-96to100 <<: *sanity-abstract-9to10 trigger: pull_request @@ -434,17 +532,10 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:9to10 & tag:tier0 & enabled:true' + 
plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.6" - distro_target: "rhel-10.0" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test - env: + - *tmt-env-settings-96to100 + env: &env-96to100 SOURCE_RELEASE: "9.6" TARGET_RELEASE: "10.0" @@ -463,19 +554,11 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.6" - distro_target: "rhel-10.0" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-96to100 env: - SOURCE_RELEASE: "9.6" - TARGET_RELEASE: "10.0" + <<: *env-96to100 # On-demand kernel-rt tests - &kernel-rt-96to100 @@ -489,21 +572,16 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.6" - distro_target: "rhel-10.0" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-96to100 env: - SOURCE_RELEASE: "9.6" - TARGET_RELEASE: "10.0" + <<: *env-96to100 + +# ###################################################################### # +# ############################# 9.7 > 10.1 ############################# # +# ###################################################################### # -# Tests: 9.7 -> 10.1 - &sanity-97to101 <<: *sanity-abstract-9to10 trigger: pull_request @@ -514,17 +592,10 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:9to10 & tag:tier0 & enabled:true' + plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.7" - distro_target: "rhel-10.1" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test - env: + - *tmt-env-settings-97to101 
+ env: &env-97to101 SOURCE_RELEASE: "9.7" TARGET_RELEASE: "10.1" @@ -543,19 +614,11 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:partitioning & enabled:true' + plan_filter: 'tag:8to9 & tag:partitioning & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.7" - distro_target: "rhel-10.1" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-97to101 env: - SOURCE_RELEASE: "9.7" - TARGET_RELEASE: "10.1" + <<: *env-97to101 # On-demand kernel-rt tests - &kernel-rt-97to101 @@ -572,16 +635,73 @@ jobs: tf_extra_params: test: tmt: - plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true' + plan_filter: 'tag:8to9 & tag:kernel-rt & enabled:true & tag:-rhsm' environments: - - tmt: - context: - distro: "rhel-9.7" - distro_target: "rhel-10.1" - settings: - provisioning: - tags: - BusinessUnit: sst_upgrades@leapp_upstream_test + - *tmt-env-settings-97to101 env: - SOURCE_RELEASE: "9.7" - TARGET_RELEASE: "10.1" + <<: *env-97to101 + + +# ###################################################################### # +# ############################# 9.8 > 10.2 ############################# # +# ###################################################################### # + +- &sanity-98to102 + <<: *sanity-abstract-9to10 + trigger: pull_request + identifier: sanity-9.8to10.2 + targets: + epel-9-x86_64: + distros: [RHEL-9.8.0-Nightly] + tf_extra_params: + test: + tmt: + plan_filter: 'tag:9to10 & tag:tier0 & enabled:true & tag:-rhsm' + environments: + - *tmt-env-settings-98to102 + env: &env-98to102 + SOURCE_RELEASE: "9.8" + TARGET_RELEASE: "10.2" + +# On-demand minimal beaker tests +- &beaker-minimal-98to102 + <<: *beaker-minimal-9to10-abstract-ondemand + trigger: pull_request + labels: + - beaker-minimal + - beaker-minimal-9.8to10.2 + - 9.8to10.2 + identifier: sanity-9.8to10.2-beaker-minimal-ondemand + targets: + epel-9-x86_64: + distros: [RHEL-9.8-Nightly] + tf_extra_params: + test: + tmt: + 
plan_filter: 'tag:9to10 & tag:partitioning & enabled:true & tag:-rhsm' + environments: + - *tmt-env-settings-98to102 + env: + <<: *env-98to102 + +# On-demand kernel-rt tests +- &kernel-rt-98to102 + <<: *kernel-rt-abstract-9to10-ondemand + trigger: pull_request + labels: + - kernel-rt + - kernel-rt-9.8to10.2 + - 9.8to10.2 + identifier: sanity-9.8to10.2-kernel-rt-ondemand + targets: + epel-9-x86_64: + distros: [RHEL-9.8-Nightly] + tf_extra_params: + test: + tmt: + plan_filter: 'tag:9to10 & tag:kernel-rt & enabled:true & tag:-rhsm' + environments: + - *tmt-env-settings-98to102 + env: + <<: *env-98to102 + diff --git a/.pylintrc b/.pylintrc index 5d75df40..a82f8818 100644 --- a/.pylintrc +++ b/.pylintrc @@ -9,23 +9,19 @@ disable= raising-bad-type, redundant-keyword-arg, # it's one or the other, this one is not so bad at all # "W" Warnings for stylistic problems or minor programming issues - no-absolute-import, arguments-differ, cell-var-from-loop, fixme, lost-exception, - no-init, pointless-string-statement, protected-access, redefined-outer-name, - relative-import, undefined-loop-variable, unsubscriptable-object, unused-argument, unused-import, unspecified-encoding, # "C" Coding convention violations - bad-continuation, missing-docstring, wrong-import-order, use-maxsplit-arg, @@ -33,7 +29,6 @@ disable= consider-using-enumerate, # "R" Refactor recommendations duplicate-code, - no-self-use, too-few-public-methods, too-many-branches, too-many-locals, @@ -42,24 +37,12 @@ disable= use-list-literal, use-dict-literal, too-many-lines, # we do not want to take care about that one - too-many-positional-arguments, # we cannot set yet max-possitional-arguments unfortunately + too-many-positional-arguments, # new for python3 version of pylint - useless-object-inheritance, - consider-using-set-comprehension, # pylint3 force to use comprehension in place we don't want (py2 doesnt have these options, for inline skip) unnecessary-pass, - invalid-envvar-default, # pylint3 warnings envvar 
returns str/none by default - bad-option-value, # python 2 doesn't have import-outside-toplevel, but in some case we need to import outside toplevel - super-with-arguments, # required in python 2 raise-missing-from, # no 'raise from' in python 2 - use-a-generator, # cannot be modified because of Python2 support - consider-using-with, # on bunch spaces we cannot change that... - duplicate-string-formatting-argument, # TMP: will be fixed in close future consider-using-f-string, # sorry, not gonna happen, still have to support py2 - use-dict-literal, - redundant-u-string-prefix, # still have py2 to support - logging-format-interpolation, - logging-not-lazy, - use-yield-from # yield from cannot be used until we require python 3.3 or greater + logging-format-interpolation [FORMAT] # Maximum number of characters on a single line. diff --git a/Makefile b/Makefile index 81b16376..0db240a9 100644 --- a/Makefile +++ b/Makefile @@ -12,24 +12,29 @@ REPOS_PATH=repos _SYSUPG_REPOS="$(REPOS_PATH)/system_upgrade" LIBRARY_PATH= REPORT_ARG= -REPOSITORIES ?= $(shell ls $(_SYSUPG_REPOS) | xargs echo | tr " " ",") -SYSUPG_TEST_PATHS=$(shell echo $(REPOSITORIES) | sed -r "s|(,\\|^)| $(_SYSUPG_REPOS)/|g") -TEST_PATHS:=commands repos/common $(SYSUPG_TEST_PATHS) - -# Several commands can take arbitrary user supplied arguments from environment -# variables as well: -PYTEST_ARGS ?= -PYLINT_ARGS ?= -FLAKE8_ARGS ?= # python version to run test with _PYTHON_VENV=$${PYTHON_VENV:-python3.6} ifdef ACTOR - TEST_PATHS=`$(_PYTHON_VENV) utils/actor_path.py $(ACTOR)` + # If REPOSITORIES is set, the utils/actor_path.py script searches for the + # actor only in the specified repositories. + # if REPOSITORIES is not set i.e. it's empty, all repositories are searched + # - this is broken due to name collisions in repositories (FIXME) + TEST_PATHS = $(shell . 
$(VENVNAME)/bin/activate && $(_PYTHON_VENV) utils/actor_path.py $(ACTOR) $(REPOSITORIES)) APPROX_TEST_PATHS=$(shell $(_PYTHON_VENV) utils/find_actors.py -C repos $(ACTOR)) # Dev only +else + REPOSITORIES ?= $(shell ls $(_SYSUPG_REPOS) | xargs echo | tr " " ",") + SYSUPG_TEST_PATHS=$(shell echo $(REPOSITORIES) | sed -r "s|(,\\|^)| $(_SYSUPG_REPOS)/|g") + TEST_PATHS:=commands repos/common $(SYSUPG_TEST_PATHS) endif +# Several commands can take arbitrary user supplied arguments from environment +# variables as well: +PYTEST_ARGS ?= +PYLINT_ARGS ?= +FLAKE8_ARGS ?= + ifeq ($(TEST_LIBS),y) LIBRARY_PATH=`python utils/library_path.py` endif @@ -51,7 +56,7 @@ _COPR_CONFIG=$${COPR_CONFIG:-~/.config/copr_rh_oamg.conf} _CONTAINER_TOOL=$${CONTAINER_TOOL:-podman} # container to run tests in -_TEST_CONTAINER=$${TEST_CONTAINER:-rhel8} +_TEST_CONTAINER=$${TEST_CONTAINER:-el8} # In case just specific CHROOTs should be used for the COPR build, you can # set the multiple CHROOTs separated by comma in the COPR_CHROOT envar, e.g. 
@@ -129,7 +134,7 @@ help: @echo " test lint source code and run tests" @echo " test_no_lint run tests without linting the source code" @echo " test_container run lint and tests in container" - @echo " - default container is 'rhel8'" + @echo " - default container is 'el8'" @echo " - can be changed by setting TEST_CONTAINER env" @echo " test_container_all run lint and tests in all available containers" @echo " test_container_no_lint run tests without linting in container, see test_container" @@ -164,9 +169,9 @@ help: @echo " PR=7 SUFFIX='my_additional_suffix' make " @echo " MR=6 COPR_CONFIG='path/to/the/config/copr/file' make " @echo " ACTOR= TEST_LIBS=y make test" - @echo " BUILD_CONTAINER=rhel8 make build_container" - @echo " TEST_CONTAINER=f34 make test_container" - @echo " CONTAINER_TOOL=docker TEST_CONTAINER=rhel8 make test_container_no_lint" + @echo " BUILD_CONTAINER=el8 make build_container" + @echo " TEST_CONTAINER=f42 make test_container" + @echo " CONTAINER_TOOL=docker TEST_CONTAINER=el8 make test_container_no_lint" @echo "" clean: @@ -252,10 +257,10 @@ build_container: echo "--- Build RPM ${PKGNAME}-${VERSION}-${RELEASE}.el$(DIST_VERSION).rpm in container ---"; case "$(BUILD_CONTAINER)" in \ el8) \ - CONT_FILE="utils/container-builds/Containerfile.ubi8"; \ + CONT_FILE="utils/container-builds/Containerfile.el8"; \ ;; \ el9) \ - CONT_FILE="utils/container-builds/Containerfile.ubi9"; \ + CONT_FILE="utils/container-builds/Containerfile.el9"; \ ;; \ "") \ echo "BUILD_CONTAINER must be set"; \ @@ -334,7 +339,7 @@ install-deps-fedora: $(VENVNAME)/bin/pip install -I "git+https://github.com/oamg/leapp.git@refs/pull/$(REQ_LEAPP_PR)/head"; \ fi -lint: +lint: _warn_misssing_repos_if_using_actor . $(VENVNAME)/bin/activate; \ echo "--- Linting ... ---" && \ SEARCH_PATH="$(TEST_PATHS)" && \ @@ -370,15 +375,28 @@ lint_fix: git diff $(MASTER_BRANCH) --name-only --diff-filter AMR | grep -v "^docs/" | xargs isort && \ echo "--- isort inplace fixing done. 
---;" -test_no_lint: - @. $(VENVNAME)/bin/activate; \ +test_no_lint: _warn_misssing_repos_if_using_actor + @echo "============= snactor sanity-check ipu ===============" 2>&1 + . $(VENVNAME)/bin/activate; \ snactor repo find --path repos/; \ - for dir in repos/system_upgrade/*/; do \ - echo "Running sanity-check in $$dir"; \ - (cd $$dir && snactor workflow sanity-check ipu); \ - done; \ - $(_PYTHON_VENV) -m pytest $(REPORT_ARG) $(TEST_PATHS) $(LIBRARY_PATH) $(PYTEST_ARGS) + for dir in $$(echo $(REPOSITORIES) | tr "," " "); do \ + echo "Running sanity-check in $(_SYSUPG_REPOS)/$$dir"; \ + (cd $(_SYSUPG_REPOS)/$$dir && snactor workflow sanity-check ipu); \ + done + @echo "==================== unit tests ======================" 2>&1; +# the below commands need to be one shell invocation for the early exit to work; +# note: need to store the paths into separate var as it here as it's lazily +# evaluated on each use :), using ?= for the assignment does not help for +# some reason + @paths="$(TEST_PATHS)"; \ + if [[ $$(echo "$$paths" | grep 'ERROR:') && -n "$(ACTOR)" ]]; then \ + echo Failed to find the '$(ACTOR)' actor in the '$(REPOSITORIES)' repositories: $$paths; \ + printf "\033[0;33mSkipping unit tests, could not find the '$(ACTOR)' actor in $(REPOSITORIES) repositories\033[0m\n"; \ + exit 0; \ + fi; \ + . 
$(VENVNAME)/bin/activate; \ + $(_PYTHON_VENV) -m pytest $(REPORT_ARG) $$paths $(LIBRARY_PATH) $(PYTEST_ARGS) test: lint test_no_lint @@ -408,7 +426,7 @@ _test_container_ipu: ;; \ esac && \ $(_CONTAINER_TOOL) exec -w /repocopy $$_CONT_NAME make clean && \ - $(_CONTAINER_TOOL) exec -w /repocopy -e REPOSITORIES $$_CONT_NAME make $${_TEST_CONT_TARGET:-test} + $(_CONTAINER_TOOL) exec -w /repocopy -e ACTOR -e REPOSITORIES $$_CONT_NAME make $${_TEST_CONT_TARGET:-test} # Runs lint in a container @@ -416,7 +434,7 @@ lint_container: @_TEST_CONT_TARGET="lint" $(MAKE) test_container lint_container_all: - @for container in "f34" "rhel8" "rhel9"; do \ + @for container in f42 el{8,9}; do \ TEST_CONTAINER=$$container $(MAKE) lint_container || exit 1; \ done @@ -426,20 +444,20 @@ lint_container_all: # because e.g RHEL8 to RHEL9 IPU must work on python3.6 and python3.9. test_container: @case $(_TEST_CONTAINER) in \ - f34) \ - export CONT_FILE="utils/container-tests/Containerfile.f34"; \ - export _VENV="python3.9"; \ + f42) \ + export CONT_FILE="utils/container-tests/Containerfile.f42"; \ + export _VENV="python3.13"; \ ;; \ - rhel8) \ - export CONT_FILE="utils/container-tests/Containerfile.rhel8"; \ + el8) \ + export CONT_FILE="utils/container-tests/Containerfile.el8"; \ export _VENV="python3.6"; \ ;; \ - rhel9) \ - export CONT_FILE="utils/container-tests/Containerfile.rhel9"; \ + el9) \ + export CONT_FILE="utils/container-tests/Containerfile.el9"; \ export _VENV="python3.9"; \ ;; \ *) \ - echo "Error: Available containers are: f34, rhel8, rhel9"; exit 1; \ + echo "Error: Available containers are: f42, el8, el9"; exit 1; \ ;; \ esac; \ export TEST_IMAGE="leapp-repo-tests-$(_TEST_CONTAINER)"; \ @@ -448,7 +466,9 @@ test_container: export _CONT_NAME="leapp-repo-tests-$(_TEST_CONTAINER)-cont"; \ $(_CONTAINER_TOOL) ps -q -f name=$$_CONT_NAME && { $(_CONTAINER_TOOL) kill $$_CONT_NAME; $(_CONTAINER_TOOL) rm $$_CONT_NAME; }; \ $(_CONTAINER_TOOL) run -di --name $$_CONT_NAME -v "$$PWD":/repo:Z 
-e PYTHON_VENV=$$_VENV $$TEST_IMAGE && \ - $(_CONTAINER_TOOL) exec $$_CONT_NAME rsync -aur --delete --exclude "tut*" /repo/ /repocopy && \ + $(_CONTAINER_TOOL) exec $$_CONT_NAME rsync -aur --delete --exclude 'tut/' --exclude 'docs/' --exclude '**/__pycache__/' --exclude 'packaging/' --exclude '.git/' /repo/ /repocopy && \ + $(_CONTAINER_TOOL) exec $$_CONT_NAME rsync -aur --delete --exclude '**/__pycache__/' /repo/commands/ /repocopy/tut/lib/$$_VENV/site-packages/leapp/cli/commands/ && \ + $(_CONTAINER_TOOL) exec -w /repocopy $$_CONT_NAME bash -c '. $(VENVNAME)/bin/activate && snactor repo find --path repos' && \ export res=0; \ case $$_VENV in \ python3.6) \ @@ -471,7 +491,7 @@ test_container: exit $$res test_container_all: - @for container in "f34" "rhel8" "rhel9"; do \ + @for container in "f42" "el8" "el9"; do \ TEST_CONTAINER=$$container $(MAKE) test_container || exit 1; \ done @@ -479,14 +499,13 @@ test_container_no_lint: @_TEST_CONT_TARGET="test_no_lint" $(MAKE) test_container test_container_all_no_lint: - @for container in "f34" "rhel8" "rhel9"; do \ + @for container in f42 el{8,9}; do \ TEST_CONTAINER=$$container $(MAKE) test_container_no_lint || exit 1; \ done # clean all testing and building containers and their images clean_containers: - @for i in "leapp-repo-tests-f34" "leapp-repo-tests-rhel8" \ - "leapp-repo-tests-rhel9" "leapp-repo-build-el8"; do \ + @for i in leapp-repo-tests-f42 leapp-repo-tests-el{8,9} leapp-repo-build-el{8,9}; do \ $(_CONTAINER_TOOL) kill "$$i-cont" || :; \ $(_CONTAINER_TOOL) rm "$$i-cont" || :; \ $(_CONTAINER_TOOL) rmi "$$i" || :; \ @@ -521,5 +540,14 @@ dashboard_data: $(_PYTHON_VENV) ../../../utils/dashboard-json-dump.py > ../../../discover.json; \ popd -.PHONY: help build clean prepare source srpm copr_build _build_local build_container print_release register install-deps install-deps-fedora lint test_no_lint test dashboard_data fast_lint +_warn_misssing_repos_if_using_actor: + @if [ -z "$(REPOSITORIES)" -a -n "$(ACTOR)" ]; then 
\ + printf "\033[0;31mERROR\033[0m: Running linters/tests with ACTOR without"; \ + printf " specifying REPOSITORIES is currently broken.\n" 2>&1; \ + printf " Specify REPOSITORIES with only one elXtoelY repository"; \ + printf " (e.g. REPOSITORIES=common,el8toel9).\n" 2>&1; \ + exit 1; \ + fi + +.PHONY: help build clean prepare source srpm copr_build _build_local build_container print_release register install-deps install-deps-fedora lint test_no_lint test dashboard_data fast_lint _warn_misssing_repos_if_using_actor +.PHONY: test_container test_container_no_lint test_container_all test_container_all_no_lint clean_containers _build_container_image _test_container_ipu dev_test_no_lint diff --git a/ci/.gitignore b/ci/.gitignore new file mode 100644 index 00000000..e6f97f0f --- /dev/null +++ b/ci/.gitignore @@ -0,0 +1 @@ +**/.vagrant diff --git a/ci/ansible/ansible.cfg b/ci/ansible/ansible.cfg new file mode 100644 index 00000000..d5c13036 --- /dev/null +++ b/ci/ansible/ansible.cfg @@ -0,0 +1,4 @@ +[defaults] +callbacks_enabled=ansible.posix.profile_tasks +stdout_callback=community.general.yaml +pipelining=True diff --git a/ci/ansible/docker-ce.yaml b/ci/ansible/docker-ce.yaml new file mode 100644 index 00000000..bba5f3df --- /dev/null +++ b/ci/ansible/docker-ce.yaml @@ -0,0 +1,6 @@ +--- +- name: Docker CE configuration + hosts: all + become: yes + roles: + - docker-ce diff --git a/ci/ansible/minimal.yaml b/ci/ansible/minimal.yaml new file mode 100644 index 00000000..517cc81b --- /dev/null +++ b/ci/ansible/minimal.yaml @@ -0,0 +1,6 @@ +--- +- name: Minimal configuration + hosts: all + become: yes + roles: + - minimal diff --git a/ci/ansible/requirements.yaml b/ci/ansible/requirements.yaml new file mode 100644 index 00000000..13ca0224 --- /dev/null +++ b/ci/ansible/requirements.yaml @@ -0,0 +1,3 @@ +collections: + - name: community.general + - name: ansible.posix diff --git a/ci/ansible/roles/docker-ce/README.md b/ci/ansible/roles/docker-ce/README.md new file mode 100644 
index 00000000..860444b1 --- /dev/null +++ b/ci/ansible/roles/docker-ce/README.md @@ -0,0 +1,43 @@ +Docker CE Install and configuration +========= + +Install latest version of Docker CE Engine from upstream repository. Start and enable services after installation. + +Requirements +------------ + +Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required. + +Role Variables +-------------- + +`docker_ce_repo_checksum` in defaults/main.yaml. SHA512 Checksum of the docker-ce.repo file. +A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well. +
+Dependencies +------------ +
+A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+ +Example Playbook +---------------- + +Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too: + + - hosts: all + become: yes + roles: + - role: docker + vars: + docker_ce_repo_checksum: sha512:XXXX # You can provide a new checksum if the default one is out of date + + +License +------- + +GPL-3.0-or-later + +Author Information +------------------ + +AlmaLinux OS Foundation diff --git a/ci/ansible/roles/docker-ce/defaults/main.yaml b/ci/ansible/roles/docker-ce/defaults/main.yaml new file mode 100644 index 00000000..d0fd0c09 --- /dev/null +++ b/ci/ansible/roles/docker-ce/defaults/main.yaml @@ -0,0 +1,3 @@ +--- +# defaults file for docker-ce +docker_ce_repo_checksum: sha512:1de0b99cbb427e974144f226451711dc491caef6b1256cb599ff307a687ba2d7dd959a016d4e4cfdd4acbd83423ba1f78fa89db61bab35351e35f1152aedaf5c diff --git a/ci/ansible/roles/docker-ce/handlers/main.yaml b/ci/ansible/roles/docker-ce/handlers/main.yaml new file mode 100644 index 00000000..a7236219 --- /dev/null +++ b/ci/ansible/roles/docker-ce/handlers/main.yaml @@ -0,0 +1,2 @@ +--- +# handlers file for docker-ce diff --git a/ci/ansible/roles/docker-ce/meta/main.yaml b/ci/ansible/roles/docker-ce/meta/main.yaml new file mode 100644 index 00000000..aa67ded8 --- /dev/null +++ b/ci/ansible/roles/docker-ce/meta/main.yaml @@ -0,0 +1,25 @@ +galaxy_info: + author: AlmaLinux OS Community + description: Install and configure Docker CE Engine + company: AlmaLinux OS Foundation + + license: GPL-3.0-or-later + + min_ansible_version: 2.11 + + platforms: + - name: EL + versions: + - 7 + - 8 + - 9 + + galaxy_tags: + - docker + - el7 + - el8 + - el9 + - almalinux + +dependencies: + - minimal diff --git a/ci/ansible/roles/docker-ce/tasks/install_docker_el7.yaml b/ci/ansible/roles/docker-ce/tasks/install_docker_el7.yaml new file mode 100644 index 00000000..320477af --- /dev/null +++ b/ci/ansible/roles/docker-ce/tasks/install_docker_el7.yaml @@ -0,0 +1,11 @@ 
+--- +# Install Docker +- name: Install Docker CE Stable + ansible.builtin.yum: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + update_cache: yes + state: present diff --git a/ci/ansible/roles/docker-ce/tasks/install_docker_el8.yaml b/ci/ansible/roles/docker-ce/tasks/install_docker_el8.yaml new file mode 100644 index 00000000..d44a202a --- /dev/null +++ b/ci/ansible/roles/docker-ce/tasks/install_docker_el8.yaml @@ -0,0 +1,11 @@ +--- +# Install Docker +- name: Install Docker CE Stable + ansible.builtin.dnf: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + update_cache: yes + state: present diff --git a/ci/ansible/roles/docker-ce/tasks/main.yaml b/ci/ansible/roles/docker-ce/tasks/main.yaml new file mode 100644 index 00000000..989af23f --- /dev/null +++ b/ci/ansible/roles/docker-ce/tasks/main.yaml @@ -0,0 +1,38 @@ +--- +# tasks file for docker-ce +- name: Add Docker CE repository + ansible.builtin.get_url: + url: https://download.docker.com/linux/centos/docker-ce.repo + dest: /etc/yum.repos.d/docker-ce.repo + checksum: "{{ docker_ce_repo_checksum }}" + owner: root + group: root + mode: '0644' + seuser: system_u + serole: object_r + setype: system_conf_t + +- name: Remove older versions of Docker on EL7 + ansible.builtin.include_tasks: remove_old_docker_el7.yaml + when: ansible_facts['distribution_major_version'] == '7' + +- name: Remove older versions of Docker on >= EL8 + ansible.builtin.include_tasks: remove_old_docker_el8.yaml + when: ansible_facts['distribution_major_version'] == '8' + +- name: Install Docker CE Stable on EL7 + ansible.builtin.include_tasks: install_docker_el7.yaml + when: ansible_facts['distribution_major_version'] == '7' + +- name: Install Docker CE Stable on >= EL8 + ansible.builtin.include_tasks: install_docker_el8.yaml + when: ansible_facts['distribution_major_version'] == '8' + +- name: Start and Enable Docker services + ansible.builtin.systemd: + name: "{{ item }}" + 
enabled: yes + state: started + loop: + - docker.service + - containerd.service diff --git a/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el7.yaml b/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el7.yaml new file mode 100644 index 00000000..db9e0960 --- /dev/null +++ b/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el7.yaml @@ -0,0 +1,15 @@ +--- +# Remove older versions of Docker +- name: Uninstall older versions of Docker + ansible.builtin.yum: + name: + - docker + - docker-client + - docker-client-latest + - docker-common + - docker-latest + - docker-latest-logrotate + - docker-logrotate + - docker-engine + autoremove: yes + state: absent diff --git a/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el8.yaml b/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el8.yaml new file mode 100644 index 00000000..88f860cf --- /dev/null +++ b/ci/ansible/roles/docker-ce/tasks/remove_old_docker_el8.yaml @@ -0,0 +1,15 @@ +--- +# Remove older versions of Docker +- name: Uninstall older versions of Docker + ansible.builtin.dnf: + name: + - docker + - docker-client + - docker-client-latest + - docker-common + - docker-latest + - docker-latest-logrotate + - docker-logrotate + - docker-engine + autoremove: yes + state: absent diff --git a/ci/ansible/roles/docker-ce/tests/inventory b/ci/ansible/roles/docker-ce/tests/inventory new file mode 100644 index 00000000..878877b0 --- /dev/null +++ b/ci/ansible/roles/docker-ce/tests/inventory @@ -0,0 +1,2 @@ +localhost + diff --git a/ci/ansible/roles/docker-ce/tests/test.yaml b/ci/ansible/roles/docker-ce/tests/test.yaml new file mode 100644 index 00000000..789ba96e --- /dev/null +++ b/ci/ansible/roles/docker-ce/tests/test.yaml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + remote_user: root + roles: + - docker-ce diff --git a/ci/ansible/roles/docker-ce/vars/main.yaml b/ci/ansible/roles/docker-ce/vars/main.yaml new file mode 100644 index 00000000..7ff8a18f --- /dev/null +++ b/ci/ansible/roles/docker-ce/vars/main.yaml @@ -0,0 
+1,2 @@ +--- +# vars file for docker-ce diff --git a/ci/ansible/roles/minimal/README.md b/ci/ansible/roles/minimal/README.md new file mode 100644 index 00000000..225dd44b --- /dev/null +++ b/ci/ansible/roles/minimal/README.md @@ -0,0 +1,38 @@ +Role Name +========= + +A brief description of the role goes here. + +Requirements +------------ + +Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required. + +Role Variables +-------------- + +A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well. + +Dependencies +------------ + +A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles. + +Example Playbook +---------------- + +Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too: + + - hosts: servers + roles: + - { role: username.rolename, x: 42 } + +License +------- + +BSD + +Author Information +------------------ + +An optional section for the role authors to include contact information, or a website (HTML is not allowed). 
diff --git a/ci/ansible/roles/minimal/defaults/main.yaml b/ci/ansible/roles/minimal/defaults/main.yaml new file mode 100644 index 00000000..4a5a46cd --- /dev/null +++ b/ci/ansible/roles/minimal/defaults/main.yaml @@ -0,0 +1,2 @@ +--- +# defaults file for minimal diff --git a/ci/ansible/roles/minimal/handlers/main.yaml b/ci/ansible/roles/minimal/handlers/main.yaml new file mode 100644 index 00000000..89105fec --- /dev/null +++ b/ci/ansible/roles/minimal/handlers/main.yaml @@ -0,0 +1,2 @@ +--- +# handlers file for minimal diff --git a/ci/ansible/roles/minimal/meta/main.yaml b/ci/ansible/roles/minimal/meta/main.yaml new file mode 100644 index 00000000..ecc81ab7 --- /dev/null +++ b/ci/ansible/roles/minimal/meta/main.yaml @@ -0,0 +1,23 @@ +galaxy_info: + author: AlmaLinux OS Community + description: Minimal configuration for ELevate + company: AlmaLinux OS Foundation + + license: GPL-3.0-or-later + + min_ansible_version: 2.11 + + platforms: + - name: EL + versions: + - 7 + - 8 + - 9 + + galaxy_tags: + - elevate + - upgrade + - cleanup + - el7 + - el8 + - el9 diff --git a/ci/ansible/roles/minimal/tasks/cleanup_el7.yaml b/ci/ansible/roles/minimal/tasks/cleanup_el7.yaml new file mode 100644 index 00000000..1b4af7c6 --- /dev/null +++ b/ci/ansible/roles/minimal/tasks/cleanup_el7.yaml @@ -0,0 +1,10 @@ +--- +# Remove old kernels +- name: Install the yum-utils + ansible.builtin.yum: + name: yum-utils + state: present + update_cache: yes + +- name: Remove the old kernels on EL7 + ansible.builtin.command: package-cleanup -y --oldkernels --count=1 diff --git a/ci/ansible/roles/minimal/tasks/cleanup_el8.yaml b/ci/ansible/roles/minimal/tasks/cleanup_el8.yaml new file mode 100644 index 00000000..56aeefd3 --- /dev/null +++ b/ci/ansible/roles/minimal/tasks/cleanup_el8.yaml @@ -0,0 +1,7 @@ +--- +# Remove old kernels +- name: Remove old kernels on EL8 + ansible.builtin.command: dnf -y remove --oldinstallonly + register: removeoldkernels + changed_when: removeoldkernels.rc == 0 + 
failed_when: removeoldkernels.rc > 1 diff --git a/ci/ansible/roles/minimal/tasks/main.yaml b/ci/ansible/roles/minimal/tasks/main.yaml new file mode 100644 index 00000000..8c1b35bd --- /dev/null +++ b/ci/ansible/roles/minimal/tasks/main.yaml @@ -0,0 +1,21 @@ +--- +# tasks file for minimal +- name: Upgrade the packages on EL7 + ansible.builtin.include_tasks: upgrade_el7.yaml + when: ansible_facts['distribution_major_version'] == '7' + +- name: Upgrade the packages on EL8 + ansible.builtin.include_tasks: upgrade_el8.yaml + when: ansible_facts['distribution_major_version'] == '8' + +- name: Reboot the system + ansible.builtin.reboot: + when: upgrade_status is changed + +- name: Cleanup the older kernels on EL7 + ansible.builtin.include_tasks: cleanup_el7.yaml + when: ansible_facts['distribution_major_version'] == '7' + +- name: Cleanup the older kernels on El8 + ansible.builtin.include_tasks: cleanup_el8.yaml + when: ansible_facts['distribution_major_version'] == '8' diff --git a/ci/ansible/roles/minimal/tasks/upgrade_el7.yaml b/ci/ansible/roles/minimal/tasks/upgrade_el7.yaml new file mode 100644 index 00000000..7648a586 --- /dev/null +++ b/ci/ansible/roles/minimal/tasks/upgrade_el7.yaml @@ -0,0 +1,8 @@ +--- +# Upgrade the system +- name: Upgrade the system + ansible.builtin.yum: + name: "*" + state: latest + update_cache: yes + register: upgrade_status diff --git a/ci/ansible/roles/minimal/tasks/upgrade_el8.yaml b/ci/ansible/roles/minimal/tasks/upgrade_el8.yaml new file mode 100644 index 00000000..0d4a5d2a --- /dev/null +++ b/ci/ansible/roles/minimal/tasks/upgrade_el8.yaml @@ -0,0 +1,8 @@ +--- +# Upgrade the system +- name: Upgrade the system + ansible.builtin.dnf: + name: "*" + state: latest + update_cache: yes + register: upgrade_status diff --git a/ci/ansible/roles/minimal/tests/inventory b/ci/ansible/roles/minimal/tests/inventory new file mode 100644 index 00000000..878877b0 --- /dev/null +++ b/ci/ansible/roles/minimal/tests/inventory @@ -0,0 +1,2 @@ +localhost + 
diff --git a/ci/ansible/roles/minimal/tests/test.yaml b/ci/ansible/roles/minimal/tests/test.yaml new file mode 100644 index 00000000..db5c4c17 --- /dev/null +++ b/ci/ansible/roles/minimal/tests/test.yaml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + remote_user: root + roles: + - minimal diff --git a/ci/ansible/roles/minimal/vars/main.yaml b/ci/ansible/roles/minimal/vars/main.yaml new file mode 100644 index 00000000..b24df080 --- /dev/null +++ b/ci/ansible/roles/minimal/vars/main.yaml @@ -0,0 +1,2 @@ +--- +# vars file for minimal diff --git a/ci/jenkins/ELevate_el7toel8_Development.jenkinsfile b/ci/jenkins/ELevate_el7toel8_Development.jenkinsfile new file mode 100644 index 00000000..317209ef --- /dev/null +++ b/ci/jenkins/ELevate_el7toel8_Development.jenkinsfile @@ -0,0 +1,249 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'LEAPP_SRC_GIT_USER', defaultValue: 'AlmaLinux', description: 'Input name of Git user of LEAPP source', trim: true) + string(name: 'LEAPP_SRC_GIT_BRANCH', defaultValue: 'almalinux', description: 'Input name of Git branch of LEAPP source', trim: true) + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh 
script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + sh script: 'git clone https://github.com/AlmaLinux/leapp-data.git --branch devel', + label: 'Fetch devel version of leapp data' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el7toel8_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum-config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"", + label: 'Add testing repo of ELevate' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y leapp-upgrade\"", + label: 'Install testing version of ELevate' + sh script: "vagrant upload ci/scripts/install_elevate_dev.sh install_elevate_dev.sh $targetDistro.vmName", + label: 'Upload installer script to VMs' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo 
bash install_elevate_dev.sh -u ${LEAPP_SRC_GIT_USER} -b ${LEAPP_SRC_GIT_BRANCH}\"", + label: 'Install development version of ELevate', + returnStatus: true + sh script: "vagrant upload leapp-data/ leapp-data/ --compress $targetDistro.vmName", + label: 'Upload devel branch of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mkdir -p /etc/leapp/files/vendors.d\"", + label: 'Create directory structrue of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo install -t /etc/leapp/files leapp-data/files/${targetDistro.leappData}/*\"", + label: 'Install devel version of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo install -t /etc/leapp/files/vendors.d leapp-data/vendors.d/*\"", + label: 'Install devel version of leapp vendor data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mv -f /etc/leapp/files/leapp_upgrade_repositories.repo.el8 /etc/leapp/files/leapp_upgrade_repositories.repo\"", + label: 'Configure leapp upgrade repositories for EL7toEL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mv -f /etc/leapp/files/repomap.json.el8 /etc/leapp/files/repomap.json\"", + label: 'Configure leapp repository mapping for EL7toEL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum -y install tree && sudo tree -ha /etc/leapp\"", + label: 'Check if development version of leapp data installed correctly' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start pre-upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"", + label: 'Permit ssh as root login' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"", + label: 'Answer the leapp question' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload 
$targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-8': + vm = 'almalinux_8' + ldata = 'almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'centos-stream-8': + vm = 'centosstream_8' + ldata = 'centos' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'oraclelinux-8': + vm = 'oraclelinux_8' + ldata = 'oraclelinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-8': + vm = 'rocky_8' + ldata = 'rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el7toel8_Internal.jenkinsfile b/ci/jenkins/ELevate_el7toel8_Internal.jenkinsfile new file mode 100644 index 00000000..97f900fe --- /dev/null +++ b/ci/jenkins/ELevate_el7toel8_Internal.jenkinsfile @@ -0,0 +1,230 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + 
string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el7toel8_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y https://repo.almalinux.org/elevate/elevate-release-latest-el7.noarch.rpm\"", + label: 'Install the elevate-release-latest rpm packages for EL7' + sh script: "vagrant ssh $targetDistro.vmName -c \"wget 
https://build.almalinux.org/pulp/content/copr/eabdullin1-leapp-data-internal-almalinux-8-x86_64-dr/config.repo -O /etc/yum.repos.d/internal-leapp.repo\"", + label: 'Add pulp repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y leapp-upgrade\"", + label: 'Install the leap rpm package' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y $targetDistro.leappData\"", + label: 'Install the LEAP migration data rpm packages' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"", + label: 'Permit ssh as root login' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"", + label: 'Answer the LEAP question' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-8': + vm = 'almalinux_8' + ldata = 'leapp-data-almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'centos-stream-8': + vm = 'centosstream_8' + ldata = 'leapp-data-centos' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'oraclelinux-8': + vm = 'oraclelinux_8' + ldata = 'leapp-data-oraclelinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-8': + vm = 'rocky_8' + ldata = 'leapp-data-rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el7toel8_Internal_Dev.jenkinsfile b/ci/jenkins/ELevate_el7toel8_Internal_Dev.jenkinsfile new file mode 100644 index 00000000..af2fabe2 --- 
/dev/null +++ b/ci/jenkins/ELevate_el7toel8_Internal_Dev.jenkinsfile @@ -0,0 +1,253 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'LEAPP_SRC_GIT_USER', defaultValue: 'AlmaLinux', description: 'Input name of Git user of LEAPP source', trim: true) + string(name: 'LEAPP_SRC_GIT_BRANCH', defaultValue: 'almalinux', description: 'Input name of Git branch of LEAPP source', trim: true) + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. 
.venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + sh script: 'git clone https://github.com/AlmaLinux/leapp-data.git --branch devel', + label: 'Fetch devel version of leapp data' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el7toel8_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum-config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"", + label: 'Add testing repo of ELevate' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo wget https://build.almalinux.org/pulp/content/copr/eabdullin1-leapp-data-internal-centos7-x86_64-dr/config.repo -O /etc/yum.repos.d/internal-leapp.repo\"", + label: 'Add pulp repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo sed -i 's|enabled=1|enabled=1\\npriority=80|' 
/etc/yum.repos.d/internal-leapp.repo\"", + label: 'Set priority for pulp repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y leapp-upgrade\"", + label: 'Install testing version of ELevate' + sh script: "vagrant upload ci/scripts/install_elevate_dev.sh install_elevate_dev.sh $targetDistro.vmName", + label: 'Upload installer script to VMs' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo bash install_elevate_dev.sh -u ${LEAPP_SRC_GIT_USER} -b ${LEAPP_SRC_GIT_BRANCH}\"", + label: 'Install development version of ELevate', + returnStatus: true + sh script: "vagrant upload leapp-data/ leapp-data/ --compress $targetDistro.vmName", + label: 'Upload devel branch of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mkdir -p /etc/leapp/files/vendors.d\"", + label: 'Create directory structrue of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo install -t /etc/leapp/files leapp-data/files/${targetDistro.leappData}/*\"", + label: 'Install devel version of leapp data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo install -t /etc/leapp/files/vendors.d leapp-data/vendors.d/*\"", + label: 'Install devel version of leapp vendor data' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mv -f /etc/leapp/files/leapp_upgrade_repositories.repo.el8 /etc/leapp/files/leapp_upgrade_repositories.repo\"", + label: 'Configure leapp upgrade repositories for EL7toEL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo mv -f /etc/leapp/files/repomap.json.el8 /etc/leapp/files/repomap.json\"", + label: 'Configure leapp repository mapping for EL7toEL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum -y install tree && sudo tree -ha /etc/leapp\"", + label: 'Check if development version of leapp data installed correctly' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start pre-upgrade check', + returnStatus: true + sh script: "vagrant ssh 
$targetDistro.vmName -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"", + label: 'Permit ssh as root login' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"", + label: 'Answer the leapp question' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-8': + vm = 'almalinux_8' + ldata = 'almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'centos-stream-8': + vm = 'centosstream_8' + ldata = 'centos' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'oraclelinux-8': + vm = 'oraclelinux_8' + ldata = 'oraclelinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-8': + vm = 'rocky_8' + ldata = 'rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el7toel8_Stable.jenkinsfile b/ci/jenkins/ELevate_el7toel8_Stable.jenkinsfile new file mode 100644 index 00000000..ae9bdb57 --- /dev/null +++ b/ci/jenkins/ELevate_el7toel8_Stable.jenkinsfile @@ -0,0 +1,228 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + 
string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el7toel8_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y https://repo.almalinux.org/elevate/elevate-release-latest-el7.noarch.rpm\"", + label: 'Install the elevate-release-latest rpm packages for EL7' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo 
yum install -y leapp-upgrade\"", + label: 'Install the leap rpm package' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y $targetDistro.leappData\"", + label: 'Install the LEAP migration data rpm packages' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"", + label: 'Permit ssh as root login' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"", + label: 'Answer the LEAP question' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-8': + vm = 'almalinux_8' + ldata = 'leapp-data-almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'centos-stream-8': + vm = 'centosstream_8' + ldata = 'leapp-data-centos' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'oraclelinux-8': + vm = 'oraclelinux_8' + ldata = 'leapp-data-oraclelinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-8': + vm = 'rocky_8' + ldata = 'leapp-data-rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el7toel8_Testing.jenkinsfile b/ci/jenkins/ELevate_el7toel8_Testing.jenkinsfile new file mode 100644 index 00000000..0f37cf2e --- /dev/null +++ b/ci/jenkins/ELevate_el7toel8_Testing.jenkinsfile @@ -0,0 +1,228 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'RETRY', defaultValue: '3', description: 
'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el7toel8_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum-config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"", + label: 'Install the elevate-release-latest rpm packages for EL7' + sh script: 
"vagrant ssh $targetDistro.vmName -c \"sudo yum install -y leapp-upgrade\"", + label: 'Install the leap rpm package' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo yum install -y $targetDistro.leappData\"", + label: 'Install the LEAP migration data rpm packages' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"", + label: 'Permit ssh as root login' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"", + label: 'Answer the LEAP question' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-8': + vm = 'almalinux_8' + ldata = 'leapp-data-almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'centos-stream-8': + vm = 'centosstream_8' + ldata = 'leapp-data-centos' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'oraclelinux-8': + vm = 'oraclelinux_8' + ldata = 'leapp-data-oraclelinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-8': + vm = 'rocky_8' + ldata = 'leapp-data-rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el8toel9_Development.jenkinsfile b/ci/jenkins/ELevate_el8toel9_Development.jenkinsfile new file mode 100644 index 00000000..7eb5430b --- 
/dev/null +++ b/ci/jenkins/ELevate_el8toel9_Development.jenkinsfile @@ -0,0 +1,200 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label params.AGENT + } + options { + timestamps() + } + parameters { + string(name: 'AGENT', defaultValue: 'almalinux-8-vagrant-libvirt-x86_64', description: 'Input label of the Jenkins Agent', trim: true) + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + string(name: 'REPO_URL', defaultValue: 'https://github.com/LKHN/el-test-auto-dev.git', description: 'URL of the pipeline repository', trim: true) + string(name: 'REPO_BRANCH', defaultValue: 'main', description: 'Branch of the pipeline repository', trim: true) + choice(name: 'SOURCE_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a source distro or all for ELevation') + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9', 'all'], description: 'Select a target distro or all to ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + } + stages { + stage('Source') { + steps { + git url: REPO_URL, + branch: REPO_BRANCH, + credentialsId: 'github-almalinuxautobot' + } + } + stage('Prepare Build and Test enviroment') { + steps { + sh script: 'cp Vagrantfile.el8toel9 Vagrantfile', + label: 'Generate the el8toel9 Vagrantfile' + sh script: 'sudo dnf -y install python39-devel python39-wheel', + label: 'Install Python 3.9, PIP and Wheel' + sh script: 'sudo python3 -m pip install --no-cache-dir --upgrade -r requirements.txt', + label: 'Install TestInfra' + sh script: 'git clone https://github.com/AlmaLinux/leapp-data.git --branch devel', + label: 'Clone the leapp-data git repository' + } + } + stage('ELevation') { + matrix { + when { + allOf 
{ + anyOf { + expression { params.SOURCE_DISTRO_FILTER == 'all' } + expression { params.SOURCE_DISTRO_FILTER == env.SOURCE_DISTRO } + } + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + } + axes { + axis { + name 'SOURCE_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + axis { + name 'TARGET_DISTRO' + values 'almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9' + } + } + stages { + stage('Create and Configure Machines') { + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant destroy -f $SOURCE_DISTRO', + label: 'Make sure no machine present from the last retry' + sh script: 'vagrant up $SOURCE_DISTRO', + label: 'Create the source machines' + } + } + } + } + stage('ELevate to the all target distros') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"', + label: 'Add the ELevate Testing RPM repository' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf install -y leapp-upgrade\"', + label: 'Install the leap rpm package' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo bash /vagrant/scripts/install_elevate_dev.sh\"', + label: 'Install Development version of ELevate', + returnStatus: true + script { + def LEAPP_DATA = getLeappDataDistro(TARGET_DISTRO) + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mkdir -p /etc/leapp/files/vendors.d\"", + label:'Create the LEAPP directory') + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo install -t /etc/leapp/files /vagrant/leapp-data/files/${LEAPP_DATA}/*\"", + label:"Install the LEAPP DATA") + sh(script:'vagrant ssh $SOURCE_DISTRO -c \"sudo install -t /etc/leapp/files/vendors.d /vagrant/leapp-data/vendors.d/*\"', + label:"Install the Vendor 
DATA") + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mv -f /etc/leapp/files/leapp_upgrade_repositories.repo.el9 /etc/leapp/files/leapp_upgrade_repositories.repo\"", + label:'Set LEAPP Repos for EL8') + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mv -f /etc/leapp/files/repomap.json.el9 /etc/leapp/files/repomap.json\"", + label:'Set LEAPP Repo map for EL8') + sh(script:'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf -y install tree && sudo tree -ha /etc/leapp\"', + label:"Debug: Data paths") + } + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp preupgrade\"', + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"', + label: 'Permit ssh as root login' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"', + label: 'Answer the LEAP question' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp upgrade\"', + label: 'Start the Upgrade' + sh script: 'vagrant reload $SOURCE_DISTRO', + label: 'Reboot to the ELevate initramfs' + sh script: 'vagrant ssh-config $SOURCE_DISTRO >> .vagrant/ssh-config', + label: 'Generate the ssh-config file' + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal'} + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml tests/distro/test_osinfo_$SOURCE_DISTRO-junit.xml tests/distro/test_osinfo_$SOURCE_DISTRO.py', + label: 'Run the distro specific tests' + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml 
tests/docker/test_docker_ce_$SOURCE_DISTRO-junit.xml tests/docker/test_docker_ce.py', + label: 'Run the distro specific tests' + } + } + } + } + } + } + } + } + post { + success { + junit testResults: '**/tests/**/**-junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f', + label: 'Destroy All Machines' + cleanWs() + } + } +} + +/* +* Common Functions +*/ +def getLeappDataDistro(TARGET_DISTRO) { + def leapp_data = "" + + switch(TARGET_DISTRO) { + case "almalinux-9": + leapp_data = TARGET_DISTRO.substring(0, 9) + break + + case "centos-stream-9": + leapp_data = TARGET_DISTRO.substring(0, 6) + break + + case "oraclelinux-9": + leapp_data = TARGET_DISTRO.substring(0, 11) + break + + case "rocky-9": + leapp_data = TARGET_DISTRO.substring(0, 5) + break + + default: + leap_data = "Error: Target Distro Not Supported" + break + } + return leapp_data +} diff --git a/ci/jenkins/ELevate_el8toel9_Internal.jenkinsfile b/ci/jenkins/ELevate_el8toel9_Internal.jenkinsfile new file mode 100644 index 00000000..aa6be967 --- /dev/null +++ b/ci/jenkins/ELevate_el8toel9_Internal.jenkinsfile @@ -0,0 +1,214 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + // choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'centos-stream-9', 'rocky-9', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'rocky-9', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') 
{ + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el8toel9_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + // values 'almalinux-9', 'centos-stream-9', 'rocky-9' + values 'almalinux-9', 'rocky-9' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y https://repo.almalinux.org/elevate/elevate-release-latest-el8.noarch.rpm\"", + label: 'Install the elevate-release-latest rpm packages for EL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"wget https://build.almalinux.org/pulp/content/copr/eabdullin1-leapp-data-internal-centos7-x86_64-dr/config.repo -O /etc/yum.repos.d/internal-leapp.repo\"", + label: 
'Add pulp repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y leapp-upgrade\"", + label: 'Install the leap rpm package' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y $targetDistro.leappData\"", + label: 'Install the LEAP migration data rpm packages' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo sed -i \'s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/\' /etc/firewalld/firewalld.conf\"", + label: 'TODO' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section check_vdo.no_vdo_devices=True\"", + label: 'TODO' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-9': + vm = 'almalinux_9' + ldata = 'leapp-data-almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-9': + vm = 'rocky_9' + ldata = 'leapp-data-rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el8toel9_Internal_Dev.jenkinsfile b/ci/jenkins/ELevate_el8toel9_Internal_Dev.jenkinsfile new file mode 100644 index 00000000..82626697 --- /dev/null +++ b/ci/jenkins/ELevate_el8toel9_Internal_Dev.jenkinsfile @@ -0,0 +1,206 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label params.AGENT + } + options { + timestamps() + } + parameters { + string(name: 'AGENT', defaultValue: 
'almalinux-8-vagrant-libvirt-x86_64', description: 'Input label of the Jenkins Agent', trim: true) + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + string(name: 'REPO_URL', defaultValue: 'https://github.com/LKHN/el-test-auto-dev.git', description: 'URL of the pipeline repository', trim: true) + string(name: 'REPO_BRANCH', defaultValue: 'main', description: 'Branch of the pipeline repository', trim: true) + choice(name: 'SOURCE_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a source distro or all for ELevation') + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9', 'all'], description: 'Select a target distro or all to ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + } + stages { + stage('Source') { + steps { + git url: REPO_URL, + branch: REPO_BRANCH, + credentialsId: 'github-almalinuxautobot' + } + } + stage('Prepare Build and Test enviroment') { + steps { + sh script: 'cp Vagrantfile.el8toel9 Vagrantfile', + label: 'Generate the el8toel9 Vagrantfile' + sh script: 'sudo dnf -y install python39-devel python39-wheel', + label: 'Install Python 3.9, PIP and Wheel' + sh script: 'sudo python3 -m pip install --no-cache-dir --upgrade -r requirements.txt', + label: 'Install TestInfra' + sh script: 'git clone https://github.com/AlmaLinux/leapp-data.git --branch devel', + label: 'Clone the leapp-data git repository' + } + } + stage('ELevation') { + matrix { + when { + allOf { + anyOf { + expression { params.SOURCE_DISTRO_FILTER == 'all' } + expression { params.SOURCE_DISTRO_FILTER == env.SOURCE_DISTRO } + } + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } 
+ } + } + } + axes { + axis { + name 'SOURCE_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + axis { + name 'TARGET_DISTRO' + values 'almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9' + } + } + stages { + stage('Create and Configure Machines') { + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant destroy -f $SOURCE_DISTRO', + label: 'Make sure no machine present from the last retry' + sh script: 'vagrant up $SOURCE_DISTRO', + label: 'Create the source machines' + } + } + } + } + stage('ELevate to the all target distros') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"', + label: 'Add the ELevate Testing RPM repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y wget\"", + label: 'Install wget' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo wget https://build.almalinux.org/pulp/content/copr/eabdullin1-leapp-data-internal-almalinux-8-x86_64-dr/config.repo -O /etc/yum.repos.d/internal-leapp.repo\"", + label: 'Add pulp repository' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo sed -i 's|enabled=1|enabled=1\\npriority=80|' /etc/yum.repos.d/internal-leapp.repo\"", + label: 'Set priority for pulp repository' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf install -y leapp-upgrade\"', + label: 'Install the leap rpm package' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo bash /vagrant/scripts/install_elevate_dev.sh\"', + label: 'Install Development version of ELevate', + returnStatus: true + script { + def LEAPP_DATA = getLeappDataDistro(TARGET_DISTRO) + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mkdir -p /etc/leapp/files/vendors.d\"", + label:'Create the LEAPP directory') + sh(script:"vagrant ssh 
$SOURCE_DISTRO -c \"sudo install -t /etc/leapp/files /vagrant/leapp-data/files/${LEAPP_DATA}/*\"", + label:"Install the LEAPP DATA") + sh(script:'vagrant ssh $SOURCE_DISTRO -c \"sudo install -t /etc/leapp/files/vendors.d /vagrant/leapp-data/vendors.d/*\"', + label:"Install the Vendor DATA") + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mv -f /etc/leapp/files/leapp_upgrade_repositories.repo.el9 /etc/leapp/files/leapp_upgrade_repositories.repo\"", + label:'Set LEAPP Repos for EL8') + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo mv -f /etc/leapp/files/repomap.json.el9 /etc/leapp/files/repomap.json\"", + label:'Set LEAPP Repo map for EL8') + sh(script:'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf -y install tree && sudo tree -ha /etc/leapp\"', + label:"Debug: Data paths") + } + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp preupgrade\"', + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"', + label: 'Permit ssh as root login' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"', + label: 'Answer the LEAP question' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp upgrade\"', + label: 'Start the Upgrade' + sh script: 'vagrant reload $SOURCE_DISTRO', + label: 'Reboot to the ELevate initramfs' + sh script: 'vagrant ssh-config $SOURCE_DISTRO >> .vagrant/ssh-config', + label: 'Generate the ssh-config file' + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal'} + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml tests/distro/test_osinfo_$SOURCE_DISTRO-junit.xml tests/distro/test_osinfo_$SOURCE_DISTRO.py', + label: 'Run the distro specific tests' + } + } + 
} + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml tests/docker/test_docker_ce_$SOURCE_DISTRO-junit.xml tests/docker/test_docker_ce.py', + label: 'Run the distro specific tests' + } + } + } + } + } + } + } + } + post { + success { + junit testResults: '**/tests/**/**-junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f', + label: 'Destroy All Machines' + cleanWs() + } + } +} + +/* +* Common Functions +*/ +def getLeappDataDistro(TARGET_DISTRO) { + def leapp_data = "" + + switch(TARGET_DISTRO) { + case "almalinux-9": + leapp_data = TARGET_DISTRO.substring(0, 9) + break + + case "centos-stream-9": + leapp_data = TARGET_DISTRO.substring(0, 6) + break + + case "oraclelinux-9": + leapp_data = TARGET_DISTRO.substring(0, 11) + break + + case "rocky-9": + leapp_data = TARGET_DISTRO.substring(0, 5) + break + + default: + leap_data = "Error: Target Distro Not Supported" + break + } + return leapp_data +} diff --git a/ci/jenkins/ELevate_el8toel9_Stable.jenkinsfile b/ci/jenkins/ELevate_el8toel9_Stable.jenkinsfile new file mode 100644 index 00000000..68f00165 --- /dev/null +++ b/ci/jenkins/ELevate_el8toel9_Stable.jenkinsfile @@ -0,0 +1,212 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label 'x86_64 && bm' + } + options { + timestamps() + parallelsAlwaysFailFast() + } + parameters { + // choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'centos-stream-9', 'rocky-9', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'rocky-9', 'all'], description: 'Select a target distro or all for ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + 
string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + } + environment { + VAGRANT_NO_COLOR = '1' + } + stages { + stage('Prepare') { + steps { + sh script: 'ansible-galaxy install -r ci/ansible/requirements.yaml', + label: 'Install Ansible collections' + sh script: 'python3.11 -m venv .venv', + label: 'Create Python virtual environment' + sh script: '. .venv/bin/activate && pip install --no-color pip pytest-testinfra paramiko', + label: 'Install Testinfra' + } + } + stage('CreateSingleMachine') { + when { + expression { params.TARGET_DISTRO_FILTER != 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO_FILTER) + + sh script: 'cp ci/vagrant/el7toel8toel9_single.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: "vagrant up $targetDistro.vmName", + label: 'Create source VM' + } + } + } + stage('CreateMultiMachine') { + when { + expression { params.TARGET_DISTRO_FILTER == 'all' } + } + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + sh script: 'cp ci/vagrant/el8toel9_multi.rb Vagrantfile', + label: 'Generate Vagrantfile' + sh script: 'vagrant up', + label: 'Create source VM' + } + } + stage('ELevationAndTest') { + matrix { + when { + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + axes { + axis { + name 'TARGET_DISTRO' + // values 'almalinux-9', 'centos-stream-9', 'rocky-9' + values 'almalinux-9', 'rocky-9' + } + } + stages { + stage('ELevate') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y https://repo.almalinux.org/elevate/elevate-release-latest-el8.noarch.rpm\"", + label: 
'Install the elevate-release-latest rpm packages for EL8' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y leapp-upgrade\"", + label: 'Install the leap rpm package' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo dnf install -y $targetDistro.leappData\"", + label: 'Install the LEAP migration data rpm packages' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp preupgrade\"", + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo sed -i \'s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/\' /etc/firewalld/firewalld.conf\"", + label: 'TODO' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp answer --section check_vdo.no_vdo_devices=True\"", + label: 'TODO' + sh script: "vagrant ssh $targetDistro.vmName -c \"sudo leapp upgrade\"", + label: 'Start the Upgrade' + sh script: "vagrant reload $targetDistro.vmName", + label: 'Reboot to the ELevate initramfs' + sh script: "vagrant ssh-config $targetDistro.vmName >> .vagrant/ssh-config", + label: 'Generate the ssh-config file' + } + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal' } + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: 'rm -f conftest.py pytest.ini', + label: 'Delete root conftest.py file' + sh script: """ + . 
.venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/distro/test_osinfo_${targetDistro.vmName}.py + """, + label: 'Run the distro specific tests' + } + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce' } + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + script { + def targetDistro = targetDistroSpec(TARGET_DISTRO) + + sh script: """ + . .venv/bin/activate \ + && py.test -v --hosts=${targetDistro.vmName} \ + --ssh-config=.vagrant/ssh-config \ + --junit-xml ci/tests/tests/docker/test_docker_ce_${targetDistro.vmName}_junit.xml \ + ci/tests/tests/docker/test_docker_ce.py + """, + label: 'Run the docker specific tests' + } + } + } + } + } + } + } + } + } + post { + success { + junit testResults: 'ci/tests/tests/**/**_junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f --no-parallel -g', + label: 'Destroy VMs' + cleanWs() + } + } +} + +def targetDistroSpec(distro) { + def spec = [:] + + switch (distro) { + case 'almalinux-9': + vm = 'almalinux_9' + ldata = 'leapp-data-almalinux' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + case 'rocky-9': + vm = 'rocky_9' + ldata = 'leapp-data-rocky' + + spec = [ + vmName: vm, + leappData: ldata + ] + break + default: + spec = [ + vmName: 'unknown', + leappData: 'unknown' + ] + break + } + return spec +} diff --git a/ci/jenkins/ELevate_el8toel9_Testing.jenkinsfile b/ci/jenkins/ELevate_el8toel9_Testing.jenkinsfile new file mode 100644 index 00000000..79cdd472 --- /dev/null +++ b/ci/jenkins/ELevate_el8toel9_Testing.jenkinsfile @@ -0,0 +1,187 @@ +RETRY = params.RETRY +TIMEOUT = params.TIMEOUT + +pipeline { + agent { + label params.AGENT + } + options { + timestamps() + } + parameters { + string(name: 'AGENT', defaultValue: 
'almalinux-8-vagrant-libvirt-x86_64', description: 'Input label of the Jenkins Agent', trim: true) + string(name: 'RETRY', defaultValue: '3', description: 'Input count of retry', trim: true) + string(name: 'TIMEOUT', defaultValue: '60', description: 'Input timeout value in minutes', trim: true) + string(name: 'REPO_URL', defaultValue: 'https://github.com/LKHN/el-test-auto-dev.git', description: 'URL of the pipeline repository', trim: true) + string(name: 'REPO_BRANCH', defaultValue: 'main', description: 'Branch of the pipeline repository', trim: true) + choice(name: 'SOURCE_DISTRO_FILTER', choices: ['almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8', 'all'], description: 'Select a source distro or all for ELevation') + choice(name: 'TARGET_DISTRO_FILTER', choices: ['almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9', 'all'], description: 'Select a target distro or all to ELevation') + choice(name: 'CONF_FILTER', choices: ['minimal', 'docker-ce'], description: 'Select a configuration') + } + stages { + stage('Source') { + steps { + git url: REPO_URL, + branch: REPO_BRANCH, + credentialsId: 'github-almalinuxautobot' + } + } + stage('Prepare Build and Test enviroment') { + steps { + sh script: 'cp Vagrantfile.el8toel9 Vagrantfile', + label: 'Generate the el8toel9 Vagrantfile' + sh script: 'sudo dnf -y install python39-devel python39-wheel', + label: 'Install Python 3.9, PIP and Wheel' + sh script: 'sudo python3 -m pip install --no-cache-dir --upgrade -r requirements.txt', + label: 'Install TestInfra' + } + } + stage('ELevation') { + matrix { + when { + allOf { + anyOf { + expression { params.SOURCE_DISTRO_FILTER == 'all' } + expression { params.SOURCE_DISTRO_FILTER == env.SOURCE_DISTRO } + } + anyOf { + expression { params.TARGET_DISTRO_FILTER == 'all' } + expression { params.TARGET_DISTRO_FILTER == env.TARGET_DISTRO } + } + } + } + axes { + axis { + name 'SOURCE_DISTRO' + values 'almalinux-8', 'centos-stream-8', 'oraclelinux-8', 'rocky-8' + } + 
axis { + name 'TARGET_DISTRO' + values 'almalinux-9', 'centos-stream-9', 'oraclelinux-9', 'rocky-9' + } + } + stages { + stage('Create and Configure Machines') { + environment { + CONFIG = "${CONF_FILTER}" + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant destroy -f $SOURCE_DISTRO', + label: 'Make sure no machine present from the last retry' + sh script: 'vagrant up $SOURCE_DISTRO', + label: 'Create the source machines' + } + } + } + } + stage('ELevate to the all target distros') { + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf config-manager --add-repo https://repo.almalinux.org/elevate/testing/elevate-testing.repo\"', + label: 'Add the ELevate Testing RPM repository' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf -y install leapp-upgrade\"', + label: 'Install the leap rpm package' + script { + def LEAPP_DATA = getLeappDataDistro(TARGET_DISTRO) + sh(script:"vagrant ssh $SOURCE_DISTRO -c \"sudo dnf -y install leapp-data-$LEAPP_DATA\"", + label:'Install the LEAP migration data rpm packages') + sh(script:'vagrant ssh $SOURCE_DISTRO -c \"sudo dnf -y install tree && sudo tree -ha /etc/leapp\"', + label:'Debug: Data paths') + } + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp preupgrade\"', + label: 'Start the Pre-Upgrade check', + returnStatus: true + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"echo PermitRootLogin yes | sudo tee -a /etc/ssh/sshd_config\"', + label: 'Permit ssh as root login' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp answer --section remove_pam_pkcs11_module_check.confirm=True\"', + label: 'Answer the LEAP question' + sh script: 'vagrant ssh $SOURCE_DISTRO -c \"sudo leapp upgrade\"', + label: 'Start the Upgrade' + sh script: 'vagrant reload $SOURCE_DISTRO', + label: 'Reboot to the ELevate initramfs' + sh script: 'vagrant ssh-config $SOURCE_DISTRO >> .vagrant/ssh-config', + label: 'Generate the 
ssh-config file' + } + } + } + } + stage('Distro Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'minimal'} + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml tests/distro/test_osinfo_$TARGET_DISTRO-junit.xml tests/distro/test_osinfo_$TARGET_DISTRO.py', + label: 'Run the distro specific tests' + } + } + } + } + stage('Docker Tests') { + when { + anyOf { + expression { params.CONF_FILTER == 'docker-ce'} + } + } + steps { + retry(RETRY) { + timeout(time: TIMEOUT, unit: 'MINUTES') { + sh script: 'py.test -v --hosts=$SOURCE_DISTRO --ssh-config=.vagrant/ssh-config --junit-xml tests/docker/test_docker_ce_$SOURCE_DISTRO-junit.xml tests/docker/test_docker_ce.py', + label: 'Run the distro specific tests' + } + } + } + } + } + } + } + } + post { + success { + junit testResults: '**/tests/**/**-junit.xml', + skipPublishingChecks: true + } + cleanup { + sh script: 'vagrant destroy -f', + label: 'Destroy All Machines' + cleanWs() + } + } +} + +/* +* Common Functions +*/ +def getLeappDataDistro(TARGET_DISTRO) { + def leapp_data = "" + + switch(TARGET_DISTRO) { + case "almalinux-9": + leapp_data = TARGET_DISTRO.substring(0, 9) + break + + case "centos-stream-9": + leapp_data = TARGET_DISTRO.substring(0, 6) + break + + case "oraclelinux-9": + leapp_data = TARGET_DISTRO.substring(0, 11) + break + + case "rocky-9": + leapp_data = TARGET_DISTRO.substring(0, 5) + break + + default: + leap_data = "Error: Target Distro Not Supported" + break + } + return leapp_data +} diff --git a/ci/scripts/install_elevate_dev.sh b/ci/scripts/install_elevate_dev.sh new file mode 100644 index 00000000..f9cc2903 --- /dev/null +++ b/ci/scripts/install_elevate_dev.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +USER='AlmaLinux' +BRANCH='almalinux' + +show_usage() { + echo 'Usage: sync_cloudlinux [OPTION]...' 
+ echo '' + echo ' -h, --help show this message and exit' + echo ' -u, --user github user name (default: AlmaLinux)' + echo ' -b, --branch github branch name (default: almalinux)' +} + +while [[ $# -gt 0 ]]; do + opt="$1" + case ${opt} in + -h|--help) + show_usage + exit 0 + ;; + -u|--user) + USER="$2" + shift + shift + ;; + -b|--branch) + BRANCH="$2" + shift + shift + ;; + *) + echo -e "Error: unknown option ${opt}" >&2 + exit 2 + ;; + esac +done + +RHEL_MAJOR_VERSION=$(rpm --eval %rhel) +WORK_DIR="$HOME" +NEW_LEAPP_NAME="leapp-repository-$BRANCH" +NEW_LEAPP_DIR="$WORK_DIR/$NEW_LEAPP_NAME/" +LEAPP_PATH='/usr/share/leapp-repository/repositories/' +LEAPP_GPG_PATH='/etc/leapp/repos.d/system_upgrade/common/files/rpm-gpg' +EXCLUDE_PATH=' +/usr/share/leapp-repository/repositories/system_upgrade/el7toel8/files/bundled-rpms +/usr/share/leapp-repository/repositories/system_upgrade/el7toel8/files +/usr/share/leapp-repository/repositories/system_upgrade/el7toel8 +/usr/share/leapp-repository/repositories/system_upgrade/el8toel9/files/bundled-rpms +/usr/share/leapp-repository/repositories/system_upgrade/el8toel9/files +/usr/share/leapp-repository/repositories/system_upgrade/el8toel9 +/usr/share/leapp-repository/repositories/system_upgrade +/usr/share/leapp-repository/repositories/ +' + + +echo "RHEL_MAJOR_VERSION=$RHEL_MAJOR_VERSION" +echo "WORK_DIR=$WORK_DIR" +echo "EXCLUDED_PATHS=$EXCLUDE_PATH" + +echo "Preserve GPG keys if any" +for major in 8 9; do + test -e ${LEAPP_GPG_PATH}/${major} && mv ${LEAPP_GPG_PATH}/${major} ${WORK_DIR}/ +done + + +echo 'Remove old files' +for dir in $(find $LEAPP_PATH -type d); +do + skip=0 + for exclude in $(echo $EXCLUDE_PATH); + do + if [[ $exclude == $dir ]];then + skip=1 + break + fi + done + if [ $skip -eq 0 ];then + rm -rf $dir + fi +done + +echo "Download new tarball from https://github.com/$USER/leapp-repository/archive/$BRANCH/leapp-repository-$BRANCH.tar.gz" +curl -s -L 
https://github.com/$USER/leapp-repository/archive/$BRANCH/leapp-repository-$BRANCH.tar.gz | tar -xmz -C $WORK_DIR/ || exit 1 + +echo 'Deleting files as in spec file' +rm -rf $NEW_LEAPP_DIR/repos/common/actors/testactor +find $NEW_LEAPP_DIR/repos/common -name "test.py" -delete +rm -rf `find $NEW_LEAPP_DIR -name "tests" -type d` +find $NEW_LEAPP_DIR -name "Makefile" -delete +if [ $RHEL_MAJOR_VERSION -eq '7' ]; then + rm -rf $NEW_LEAPP_DIR/repos/system_upgrade/el8toel9 +else + rm -rf $NEW_LEAPP_DIR/repos/system_upgrade/el7toel8 + rm -rf $NEW_LEAPP_DIR/repos/system_upgrade/cloudlinux +fi + +echo 'Copy new data to system' +cp -r $NEW_LEAPP_DIR/repos/* $LEAPP_PATH || exit 1 + +for DIRECTORY in $(find $LEAPP_PATH -mindepth 1 -maxdepth 1 -type d); +do + REPOSITORY=$(basename $DIRECTORY) + if ! [ -e /etc/leapp/repos.d/$REPOSITORY ];then + echo "Enabling repository $REPOSITORY" + ln -s $LEAPP_PATH/$REPOSITORY /etc/leapp/repos.d/$REPOSITORY || exit 1 + fi +done + +echo "Restore GPG keys if any" +for major in 8 9; do + rm -rf ${LEAPP_GPG_PATH}/${major} + test -e ${WORK_DIR}/${major} && mv ${WORK_DIR}/${major} ${LEAPP_GPG_PATH}/ +done + +rm -rf $NEW_LEAPP_DIR + +exit 0 diff --git a/ci/tests/tests/conftest.py b/ci/tests/tests/conftest.py new file mode 100644 index 00000000..01f9443e --- /dev/null +++ b/ci/tests/tests/conftest.py @@ -0,0 +1,52 @@ +import pytest +import re + + +@pytest.fixture(scope="module") +def get_os_release(host): + """Get content of the /etc/os-release""" + os_release = host.file("/etc/os-release") + return os_release + + +@pytest.fixture(scope="module") +def get_redhat_release(host): + """Get content of the /etc/redhat-release""" + redhat_release = host.file("/etc/redhat-release") + return redhat_release + + +@pytest.fixture(scope="module") +def get_kernel_info(host): + """Get kernel version and vendor information""" + kernel_ver_pattern = re.compile( + f".*(^[0-9][0-9]?[0-9]?.[0-9][0-9]?[0-9]?.[0-9][0-9]?[0-9]?).*" + ) + kernel_ver_output = 
host.check_output("uname -r") + kernel_version = kernel_ver_pattern.match(kernel_ver_output).group(1) + + with host.sudo(): + kernel_vendor = host.check_output( + "grep -Ei '(.*kernel signing key|.*CA Server|.*Build)' /proc/keys | sed -E" + " 's/ +/:/g' | cut -d ':' -f 9 | uniq" + ) + kernel_info = (kernel_version, kernel_vendor) + return kernel_info + + +@pytest.fixture(scope="module", params=["glibc", "systemd", "coreutils", "rpm"]) +def get_pkg_info(host, request): + """Get vendor and version of installed packages""" + pkg_name = request.param + pkg_vendor = host.check_output( + f"rpm -qa --queryformat \"%{{VENDOR}}\n\" {request.param} | sed '$p;d' " + ) + pkg_version = host.check_output( + f'rpm -qa --queryformat "%{{VERSION}}\n" {request.param} | sort -n | sed' + " '$p;d'" + ) + pkg_info = (pkg_name, pkg_vendor, pkg_version) + # print(pkg_name) + # print(pkg_vendor) + # print(pkg_version) + return pkg_info diff --git a/ci/tests/tests/distro/test_osinfo_almalinux_8.py b/ci/tests/tests/distro/test_osinfo_almalinux_8.py new file mode 100644 index 00000000..c5219b35 --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_almalinux_8.py @@ -0,0 +1,43 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="AlmaLinux"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="almalinux"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="8.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("AlmaLinux release 8.*") + + +@pytest.mark.usefixtures("get_pkg_info") +class TestPkgInfo: + """Test vendor and version of packages""" + + def test_pkg_vendor(self, 
get_pkg_info): + assert get_pkg_info[1] == "AlmaLinux" + + def test_pkg_version(self, get_pkg_info): + if get_pkg_info[0] == "kernel": + assert get_pkg_info[2] == "4.18.0" + elif get_pkg_info[0] == "glibc": + assert get_pkg_info[2] == "2.28" + elif get_pkg_info[0] == "systemd": + assert get_pkg_info[2] == "239" + elif get_pkg_info[0] == "coreutils": + assert get_pkg_info[2] == "8.30" + else: + assert get_pkg_info[2] == "4.14.3" diff --git a/ci/tests/tests/distro/test_osinfo_almalinux_9.py b/ci/tests/tests/distro/test_osinfo_almalinux_9.py new file mode 100644 index 00000000..1536e52b --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_almalinux_9.py @@ -0,0 +1,52 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="AlmaLinux"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="almalinux"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="9.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("AlmaLinux release 9.*") + + +@pytest.mark.usefixtures("get_kernel_info") +class TestKernelInfo: + """Test version and vendor of running kernel""" + + def test_kernel_version(self, get_kernel_info): + assert get_kernel_info[0] == "5.14.0" + + def test_kernel_vendor(self, get_kernel_info): + assert get_kernel_info[1] == "AlmaLinux" + + +@pytest.mark.usefixtures("get_pkg_info") +class TestPkgInfo: + """Test vendor and version of packages""" + + def test_pkg_vendor(self, get_pkg_info): + assert get_pkg_info[1] == "AlmaLinux" + + def test_pkg_version(self, get_pkg_info): + if get_pkg_info[0] == "glibc": + assert get_pkg_info[2] == "2.34" + elif get_pkg_info[0] == 
"systemd": + assert get_pkg_info[2] == "252" + elif get_pkg_info[0] == "coreutils": + assert get_pkg_info[2] == "8.32" + else: + assert get_pkg_info[2] == "4.16.1.3" diff --git a/ci/tests/tests/distro/test_osinfo_centosstream_8.py b/ci/tests/tests/distro/test_osinfo_centosstream_8.py new file mode 100644 index 00000000..995ae61e --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_centosstream_8.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="CentOS Stream"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="centos"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="8"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("CentOS Stream release 8") diff --git a/ci/tests/tests/distro/test_osinfo_centosstream_9.py b/ci/tests/tests/distro/test_osinfo_centosstream_9.py new file mode 100644 index 00000000..28e47202 --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_centosstream_9.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="CentOS Stream"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="centos"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="9"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert 
get_redhat_release.contains("CentOS Stream release 9") diff --git a/ci/tests/tests/distro/test_osinfo_oraclelinux_8.py b/ci/tests/tests/distro/test_osinfo_oraclelinux_8.py new file mode 100644 index 00000000..2080fd2f --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_oraclelinux_8.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="Oracle Linux Server"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="ol"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="8.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("Red Hat Enterprise Linux release 8.*") diff --git a/ci/tests/tests/distro/test_osinfo_oraclelinux_9.py b/ci/tests/tests/distro/test_osinfo_oraclelinux_9.py new file mode 100644 index 00000000..bd5044bb --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_oraclelinux_9.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="Oracle Linux Server"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="ol"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="9.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("Red Hat Enterprise Linux release 9.*") diff --git 
a/ci/tests/tests/distro/test_osinfo_rocky_8.py b/ci/tests/tests/distro/test_osinfo_rocky_8.py new file mode 100644 index 00000000..cce5d668 --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_rocky_8.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="Rocky Linux"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="rocky"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="8.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("Rocky Linux release 8.*") diff --git a/ci/tests/tests/distro/test_osinfo_rocky_9.py b/ci/tests/tests/distro/test_osinfo_rocky_9.py new file mode 100644 index 00000000..ce8cccdb --- /dev/null +++ b/ci/tests/tests/distro/test_osinfo_rocky_9.py @@ -0,0 +1,23 @@ +import pytest + + +@pytest.mark.usefixtures("get_os_release") +class TestOSRelease: + """Test values of NAME, ID and VERSION_ID""" + + def test_os_rel_name(self, get_os_release): + assert get_os_release.contains('NAME="Rocky Linux"') + + def test_os_rel_id(self, get_os_release): + assert get_os_release.contains('ID="rocky"') + + def test_os_rel_version_id(self, get_os_release): + assert get_os_release.contains('VERSION_ID="9.*"') + + +@pytest.mark.usefixtures("get_redhat_release") +class TestRHRelease: + """Test contents of the /etc/redhat-release""" + + def test_redhat_release(self, get_redhat_release): + assert get_redhat_release.contains("Rocky Linux release 9.*") diff --git a/ci/tests/tests/docker/test_docker_ce.py b/ci/tests/tests/docker/test_docker_ce.py new file mode 100644 index 00000000..3c2550c7 --- /dev/null +++ 
b/ci/tests/tests/docker/test_docker_ce.py @@ -0,0 +1,26 @@ +import pytest + + +class TestDockerServices: + """Test docker and containerd services running and enabled""" + + def test_docker_is_running(self, host): + assert host.service("docker.service").is_running + + def test_containerd_is_running(self, host): + assert host.service("containerd.service").is_running + + def test_docker_is_enabled(self, host): + assert host.service("docker.service").is_enabled + + def test_containerd_is_enabled(self, host): + assert host.service("containerd.service").is_enabled + + +class TestDockerWorking: + """Test docker working with the hello world container""" + + def test_docker_is_working(self, host): + with host.sudo(): + cmd = host.run("sudo docker run --rm hello-world") + assert cmd.succeeded diff --git a/ci/vagrant/el7toel8_multi.rb b/ci/vagrant/el7toel8_multi.rb new file mode 100644 index 00000000..a18da81d --- /dev/null +++ b/ci/vagrant/el7toel8_multi.rb @@ -0,0 +1,40 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : + +configuration = ENV['CONFIG'] + +Vagrant.configure('2') do |config| + config.vagrant.plugins = 'vagrant-libvirt' + + config.vm.synced_folder '.', '/vagrant', disabled: true + config.vm.box = 'generic/centos7' + config.vm.boot_timeout = 3600 + + config.vm.provider 'libvirt' do |v| + v.uri = 'qemu:///system' + v.memory = 4096 + v.machine_type = 'q35' + v.cpu_mode = 'host-passthrough' + v.cpus = 2 + v.disk_bus = 'scsi' + v.disk_driver cache: 'writeback', discard: 'unmap' + v.random_hostname = true + end + + target_distros = ['almalinux', 'centosstream', 'oraclelinux', 'rocky'] + + target_distros.each do |target_distro| + config.vm.define "#{target_distro}_8" do |machine| + machine.vm.hostname = "#{target_distro}-8.test" + + if target_distro == target_distros[-1] + machine.vm.provision 'ansible' do |ansible| + ansible.compatibility_mode = '2.0' + ansible.limit = 'all' + ansible.playbook = "ci/ansible/#{configuration}.yaml" + ansible.config_file = 
'ci/ansible/ansible.cfg' + end + end + end + end +end diff --git a/ci/vagrant/el7toel8toel9_single.rb b/ci/vagrant/el7toel8toel9_single.rb new file mode 100644 index 00000000..8cd05ac3 --- /dev/null +++ b/ci/vagrant/el7toel8toel9_single.rb @@ -0,0 +1,53 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : + +configuration = ENV['CONFIG'] + +Vagrant.configure('2') do |config| + config.vagrant.plugins = 'vagrant-libvirt' + + config.vm.synced_folder '.', '/vagrant', disabled: true + config.ssh.disable_deprecated_algorithms = true + config.vm.boot_timeout = 3600 + + config.vm.provider 'libvirt' do |v| + v.uri = 'qemu:///system' + v.memory = 4096 + v.machine_type = 'q35' + v.cpu_mode = 'host-passthrough' + v.cpus = 2 + v.disk_bus = 'scsi' + v.disk_driver cache: 'writeback', discard: 'unmap' + v.random_hostname = true + end + + # EL7toEL8 + target_distros = ['almalinux', 'centosstream', 'oraclelinux', 'rocky'] + + target_distros.each do |target_distro| + config.vm.define "#{target_distro}_8" do |machine| + machine.vm.box = 'generic/centos7' + machine.vm.hostname = "#{target_distro}-8.test" + end + end + + # EL8toEL9 + target_distros_el9 = { + almalinux: 'almalinux/8', + # centosstream: 'generic/centos8s', + rocky: 'generic/rocky8' + } + + target_distros_el9.each_pair do |vm, box| + config.vm.define "#{vm}_9" do |machine| + machine.vm.box = "#{box}" + machine.vm.hostname = "#{vm}-9.test" + end + end + + config.vm.provision 'ansible' do |ansible| + ansible.compatibility_mode = '2.0' + ansible.playbook = "ci/ansible/#{configuration}.yaml" + ansible.config_file = 'ci/ansible/ansible.cfg' + end +end diff --git a/ci/vagrant/el8toel9_multi.rb b/ci/vagrant/el8toel9_multi.rb new file mode 100644 index 00000000..370758e6 --- /dev/null +++ b/ci/vagrant/el8toel9_multi.rb @@ -0,0 +1,45 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : + +configuration = ENV['CONFIG'] + +Vagrant.configure('2') do |config| + config.vagrant.plugins = 'vagrant-libvirt' + + config.vm.synced_folder '.', '/vagrant', 
disabled: true + config.ssh.disable_deprecated_algorithms = true + config.vm.boot_timeout = 3600 + + config.vm.provider 'libvirt' do |v| + v.uri = 'qemu:///system' + v.memory = 4096 + v.machine_type = 'q35' + v.cpu_mode = 'host-passthrough' + v.cpus = 2 + v.disk_bus = 'scsi' + v.disk_driver cache: 'writeback', discard: 'unmap' + v.random_hostname = true + end + + target_distros = { + almalinux: 'almalinux/8', + # centosstream: 'generic/centos8s', + rocky: 'generic/rocky8' + } + + target_distros.each_pair do |vm, box| + config.vm.define "#{vm}_9" do |machine| + machine.vm.box = "#{box}" + machine.vm.hostname = "#{vm}-9.test" + + if [vm, box] == target_distros.to_a.last + machine.vm.provision 'ansible' do |ansible| + ansible.compatibility_mode = '2.0' + ansible.limit = 'all' + ansible.playbook = "ci/ansible/#{configuration}.yaml" + ansible.config_file = 'ci/ansible/ansible.cfg' + end + end + end + end +end diff --git a/commands/command_utils.py b/commands/command_utils.py index 647e7b44..735144f8 100644 --- a/commands/command_utils.py +++ b/commands/command_utils.py @@ -62,9 +62,9 @@ def assert_version_format(version_str, desired_format, version_kind): """ if not re.match(desired_format.regex, version_str): error_str = ( - 'Unexpected format of target version: {0}. The required format is \'{1}\'.' - ) - raise CommandError(error_str.format(version_str, desired_format.human_readable)) + "Unexpected format of {} version: {}. The required format is '{}'." + ).format(version_kind.value, version_str, desired_format.human_readable) + raise CommandError(error_str) def get_major_version_from_a_valid_version(version): @@ -136,7 +136,7 @@ def get_os_release_version_id(filepath): return _retrieve_os_release_contents(_os_release_path=filepath).get('VERSION_ID', '') -def get_distro_id(): +def get_source_distro_id(): """ Retrieve the OS release ID from /etc/os-release. 
@@ -165,7 +165,17 @@ def get_target_versions_from_config(src_version_id, distro, flavor): return upgrade_paths_map.get(distro, {}).get(flavor, {}).get(src_version_id, []) -def get_supported_target_versions(flavour=get_upgrade_flavour()): +def get_virtual_version_from_config(src_version_id, distro): + """ + Retrieve the virtual version for the given version from upgrade_paths_map. + + :return: The virtual version or None if no match. + """ + upgrade_paths_map = get_upgrade_paths_config() + return upgrade_paths_map.get(distro, {}).get('_virtual_versions').get(src_version_id) + + +def get_supported_target_versions(target_distro, flavour=get_upgrade_flavour()): """ Return a list of supported target versions for the given `flavour` of upgrade. The default value for `flavour` is `default`. @@ -173,26 +183,30 @@ def get_supported_target_versions(flavour=get_upgrade_flavour()): os_release_contents = _retrieve_os_release_contents() current_version_id = os_release_contents.get('VERSION_ID', '') - distro_id = os_release_contents.get('ID', '') + source_distro = os_release_contents.get('ID', '') # We want to guarantee our actors that if they see 'centos'/'rhel'/... 
# then they will always see expected version format - expected_version_format = _DISTRO_VERSION_FORMATS.get(distro_id, VersionFormats.MAJOR_MINOR).value - assert_version_format(current_version_id, expected_version_format, _VersionKind.SOURCE) + expected_version_format = _DISTRO_VERSION_FORMATS.get(source_distro, VersionFormats.MAJOR_MINOR) + assert_version_format(current_version_id, expected_version_format.value, _VersionKind.SOURCE) + if source_distro == 'centos' and target_distro != 'centos': + # when upconverting from centos, we need to lookup by virtual version + current_version_id = get_virtual_version_from_config(current_version_id, source_distro) - target_versions = get_target_versions_from_config(current_version_id, distro_id, flavour) + target_versions = get_target_versions_from_config(current_version_id, target_distro, flavour) if not target_versions: # If we cannot find a particular major.minor version in the map, # we fallback to pick a target version just based on a major version. - # This can happen for example when testing not yet released versions + # This can happen for example when testing not yet released versions. + # But also removes the need to handle virtual versions on X->centos upgrades. major_version = get_major_version_from_a_valid_version(current_version_id) - target_versions = get_target_versions_from_config(major_version, distro_id, flavour) + target_versions = get_target_versions_from_config(major_version, target_distro, flavour) return target_versions -def get_target_version(flavour): - target_versions = get_supported_target_versions(flavour) +def get_target_version(flavour, target_distro): + target_versions = get_supported_target_versions(target_distro, flavour) return target_versions[-1] if target_versions else None @@ -201,8 +215,8 @@ def get_target_release(args): Return the user selected target release or choose one from config. 
A target release can be specified, ordered by priority, by the - LEAPP_DEVEL_TARGET_RELEASE or args.target (--target cmdline arg) or in the - config file. + LEAPP_DEVEL_TARGET_RELEASE or args.target_version (--target cmdline arg) or + in the config file. NOTE: when specified via the env var or cmdline arg, the version isn't checked against supported versions, this is done later by an actor in the @@ -213,14 +227,16 @@ def get_target_release(args): flavor = get_upgrade_flavour() env_version_override = os.getenv('LEAPP_DEVEL_TARGET_RELEASE') - target_ver = env_version_override or args.target + target_ver = env_version_override or args.target_version + target_distro_id = os.getenv('LEAPP_TARGET_OS') if target_ver: - distro_id = get_distro_id() - expected_version_format = _DISTRO_VERSION_FORMATS.get(distro_id, VersionFormats.MAJOR_MINOR).value - assert_version_format(target_ver, expected_version_format, _VersionKind.TARGET) + expected_version_format = _DISTRO_VERSION_FORMATS.get( + target_distro_id, VersionFormats.MAJOR_MINOR + ) + assert_version_format(target_ver, expected_version_format.value, _VersionKind.TARGET) return (target_ver, flavor) - return (get_target_version(flavor), flavor) + return (get_target_version(flavor, target_distro_id), flavor) def set_resource_limits(): @@ -286,3 +302,7 @@ def load_actor_configs_and_store_it_in_db(context, repositories, framework_cfg): config_data = audit.ActorConfigData(config=config_text, hash_id=config_text_hash) db_config = audit.ActorConfig(config=config_data, context=context) db_config.store() + + +def get_available_target_distro_ids(): + return [member.value for member in DistroIDs] diff --git a/commands/preupgrade/__init__.py b/commands/preupgrade/__init__.py index 6443bd8a..8ddfcd8a 100644 --- a/commands/preupgrade/__init__.py +++ b/commands/preupgrade/__init__.py @@ -26,7 +26,7 @@ from leapp.utils.output import beautify_actor_exception, report_errors, report_i help='Use only custom repositories and skip actions with 
Red Hat Subscription Manager.' ' This only has effect on Red Hat Enterprise Linux systems.' ) -@command_opt('no-insights-register', is_flag=True, help='Do not register into Red Hat Insights') +@command_opt('no-insights-register', is_flag=True, help='Do not register into Red Hat Lightspeed') @command_opt('no-rhsm-facts', is_flag=True, help='Do not store migration information using Red Hat ' 'Subscription Manager. Automatically implied by --no-rhsm.') @command_opt('enablerepo', action='append', metavar='', @@ -36,8 +36,20 @@ from leapp.utils.output import beautify_actor_exception, report_errors, report_i choices=['ga', 'e4s', 'eus', 'aus'], value_type=str.lower) # This allows the choices to be case insensitive @command_opt('iso', help='Use provided target RHEL installation image to perform the in-place upgrade.') -@command_opt('target', help='Specify RHEL version to upgrade to for {} detected upgrade flavour'.format( - command_utils.get_upgrade_flavour())) +@command_opt( + 'target', + help='Specify RHEL version to upgrade to for {} detected upgrade flavour'.format( + command_utils.get_upgrade_flavour() + ), + dest='target_version', +) +@command_opt( + 'target-os', + help='Specify the OS to upgrade to. If this differs from the OS on the' + ' source system, a conversion is performed during the upgrade.', + choices=command_utils.get_available_target_distro_ids(), + default=command_utils.get_source_distro_id(), +) @command_opt('report-schema', help='Specify report schema version for leapp-report.json', choices=['1.0.0', '1.1.0', '1.2.0'], default=get_config().get('report', 'schema')) @command_opt('nogpgcheck', is_flag=True, help='Disable RPM GPG checks. 
Same as yum/dnf --nogpgcheck option.') diff --git a/commands/tests/test_upgrade_paths.py b/commands/tests/test_upgrade_paths.py index 89b5eb71..773cdf1c 100644 --- a/commands/tests/test_upgrade_paths.py +++ b/commands/tests/test_upgrade_paths.py @@ -8,26 +8,54 @@ from leapp.cli.commands import command_utils from leapp.exceptions import CommandError -@mock.patch("leapp.cli.commands.command_utils.get_upgrade_paths_config", - return_value={'rhel': {"default": {"7.9": ["8.4"], "8.6": ["9.0"], "7": ["8.4"], "8": ["9.0"]}}}) +@mock.patch( + "leapp.cli.commands.command_utils.get_upgrade_paths_config", + return_value={ + "rhel": { + "default": {"7.9": ["8.4"], "8.6": ["9.0"], "8.7": ["9.1"], "7": ["8.4"], "8": ["9.0"]} + }, + "centos": { + "default": {"8": ["9"], "9": ["10"]}, + "_virtual_versions": {"8": "8.7", "9": "9.8", "10": "10.2"}, + }, + "alma": { + "default": {"7.9": ["8.4"], "8.6": ["9.0"], "8.7": ["9.1"]} + }, + }, +) def test_get_target_version(mock_open, monkeypatch): - etc_os_release_contents = {'ID': 'rhel', 'VERSION_ID': '8.6'} - monkeypatch.setattr(command_utils, '_retrieve_os_release_contents', - lambda *args, **kwargs: etc_os_release_contents) - assert command_utils.get_target_version('default') == '9.0' + def set_etc_osrelease(distro_id, version_id): + etc_os_release_contents = {"ID": distro_id, "VERSION_ID": version_id} + monkeypatch.setattr( + command_utils, + "_retrieve_os_release_contents", + lambda *args, **kwargs: etc_os_release_contents, + ) + + set_etc_osrelease('rhel', '8.6') + assert command_utils.get_target_version('default', 'rhel') == '9.0' + + # the envar should not affect this function monkeypatch.setenv('LEAPP_DEVEL_TARGET_RELEASE', '') - etc_os_release_contents = {'ID': 'rhel', 'VERSION_ID': '8.6'} - monkeypatch.setattr(command_utils, '_retrieve_os_release_contents', - lambda *args, **kwargs: etc_os_release_contents) - assert command_utils.get_target_version('default') == '9.0' + assert command_utils.get_target_version('default', 
'rhel') == '9.0' + # unsupported path, matches because of the major version fallback monkeypatch.delenv('LEAPP_DEVEL_TARGET_RELEASE', raising=True) - # unsupported path - etc_os_release_contents = {'ID': 'rhel', 'VERSION_ID': '8.5'} - monkeypatch.setattr(command_utils, '_retrieve_os_release_contents', - lambda *args, **kwargs: etc_os_release_contents) - assert command_utils.get_target_version('default') == '9.0' + set_etc_osrelease('rhel', '8.5') + assert command_utils.get_target_version('default', 'rhel') == '9.0' + + # centos->centos + set_etc_osrelease('centos', '9') + assert command_utils.get_target_version('default', 'centos') == '10' + + # centos->rhel, lookup based on virtual versions + set_etc_osrelease('centos', '8') + assert command_utils.get_target_version('default', 'rhel') == '9.1' + + # rhel->centos, reverse virtual versions lookup + set_etc_osrelease('rhel', '8.6') + assert command_utils.get_target_version('default', 'centos') == '9' @mock.patch( @@ -42,10 +70,20 @@ def test_get_target_version(mock_open, monkeypatch): }, ) def test_get_target_release(mock_open, monkeypatch): # do not remove mock_open + # NOTE Not testing with other distros, the tested function is mainly about + # handling of the CLI option, envar and format checking, the real target + # release retrieval is handled in get_target_version which is tested with + # different source/target distro combinanations elsewhere. + + # Make it look like it's RHEL even on centos, because that's what the test + # assumes. + # Otherwise the test, when ran on Centos, fails because it works + # with MAJOR.MINOR version format while Centos uses MAJOR format. 
+ monkeypatch.setattr(command_utils, 'get_source_distro_id', lambda: 'rhel') monkeypatch.setattr(command_utils, 'get_os_release_version_id', lambda x: '8.6') # make sure env var LEAPP_DEVEL_TARGET_RELEASE takes precedence - args = mock.Mock(target='9.0') + args = mock.Mock(target_version='9.0') monkeypatch.setenv('LEAPP_DEVEL_TARGET_RELEASE', '9.2') print(os.getenv('LEAPP_DEVEL_TARGET_RELEASE')) assert command_utils.get_target_release(args) == ('9.2', 'default') @@ -62,12 +100,12 @@ def test_get_target_release(mock_open, monkeypatch): # do not remove mock_open assert command_utils.get_target_release(args) == ('1.2', 'default') # no env var set, --target is set to proper version - use it - args = mock.Mock(target='9.0') + args = mock.Mock(target_version='9.0') monkeypatch.delenv('LEAPP_DEVEL_TARGET_RELEASE', raising=False) assert command_utils.get_target_release(args) == ('9.0', 'default') # --target set with incorrectly formatted version, env var not set, fail - args = mock.Mock(target='9.0a') + args = mock.Mock(target_version='9.0a') with pytest.raises(CommandError) as err: command_utils.get_target_release(args) assert 'Unexpected format of target version' in err @@ -75,7 +113,7 @@ def test_get_target_release(mock_open, monkeypatch): # do not remove mock_open # env var is set to proper version, --target set to a bad one: # env var has priority, use it and go on with the upgrade monkeypatch.setenv('LEAPP_DEVEL_TARGET_RELEASE', '9.0') - args = mock.Mock(target='9.0.0') + args = mock.Mock(target_version='9.0.0') assert command_utils.get_target_release(args) == ('9.0', 'default') diff --git a/commands/upgrade/__init__.py b/commands/upgrade/__init__.py index 36be0719..7d7ec7ed 100644 --- a/commands/upgrade/__init__.py +++ b/commands/upgrade/__init__.py @@ -32,7 +32,7 @@ from leapp.utils.output import beautify_actor_exception, report_errors, report_i help='Use only custom repositories and skip actions with Red Hat Subscription Manager.' 
' This only has effect on Red Hat Enterprise Linux systems.' ) -@command_opt('no-insights-register', is_flag=True, help='Do not register into Red Hat Insights') +@command_opt('no-insights-register', is_flag=True, help='Do not register into Red Hat Lightspeed') @command_opt('no-rhsm-facts', is_flag=True, help='Do not store migration information using Red Hat ' 'Subscription Manager. Automatically implied by --no-rhsm.') @command_opt('enablerepo', action='append', metavar='', @@ -42,8 +42,20 @@ from leapp.utils.output import beautify_actor_exception, report_errors, report_i choices=['ga', 'e4s', 'eus', 'aus'], value_type=str.lower) # This allows the choices to be case insensitive @command_opt('iso', help='Use provided target RHEL installation image to perform the in-place upgrade.') -@command_opt('target', help='Specify RHEL version to upgrade to for {} detected upgrade flavour'.format( - command_utils.get_upgrade_flavour())) +@command_opt( + 'target', + help='Specify RHEL version to upgrade to for {} detected upgrade flavour'.format( + command_utils.get_upgrade_flavour() + ), + dest='target_version', +) +@command_opt( + 'target-os', + help='Specify the OS to upgrade to. If this differs from the OS on the' + ' source system, a conversion is performed during the upgrade.', + choices=command_utils.get_available_target_distro_ids(), + default=command_utils.get_source_distro_id(), +) @command_opt('report-schema', help='Specify report schema version for leapp-report.json', choices=['1.0.0', '1.1.0', '1.2.0'], default=get_config().get('report', 'schema')) @command_opt('nogpgcheck', is_flag=True, help='Disable RPM GPG checks. 
Same as yum/dnf --nogpgcheck option.') diff --git a/commands/upgrade/breadcrumbs.py b/commands/upgrade/breadcrumbs.py index 3a3dcde3..95a551c3 100644 --- a/commands/upgrade/breadcrumbs.py +++ b/commands/upgrade/breadcrumbs.py @@ -36,7 +36,7 @@ def _flattened(d): return dict(items) -class _BreadCrumbs(object): +class _BreadCrumbs: def __init__(self, activity): self._crumbs = { 'activity': activity, @@ -80,7 +80,8 @@ class _BreadCrumbs(object): # even though it shouldn't though, just ignore it pass - def _commit_rhsm_facts(self): + @staticmethod + def _commit_rhsm_facts(): if runs_in_container(): return cmd = ['/usr/sbin/subscription-manager', 'facts', '--update'] @@ -122,7 +123,8 @@ class _BreadCrumbs(object): except OSError: sys.stderr.write('WARNING: Could not write to /etc/migration-results\n') - def _get_packages(self): + @staticmethod + def _get_packages(): cmd = ['/bin/bash', '-c', 'rpm -qa --queryformat="%{nevra} %{SIGPGP:pgpsig}\n" | grep -Ee "leapp|snactor"'] res = _call(cmd, lambda x, y: None, lambda x, y: None) if res.get('exit_code', None) == 0: @@ -131,7 +133,8 @@ class _BreadCrumbs(object): for t in [line.strip().split(' ', 1) for line in res['stdout'].split('\n') if line.strip()]] return [] - def _verify_leapp_pkgs(self): + @staticmethod + def _verify_leapp_pkgs(): if not os.environ.get('LEAPP_IPU_IN_PROGRESS'): return [] upg_path = os.environ.get('LEAPP_IPU_IN_PROGRESS').split('to') diff --git a/commands/upgrade/util.py b/commands/upgrade/util.py index dadfe7de..1dbc0abd 100644 --- a/commands/upgrade/util.py +++ b/commands/upgrade/util.py @@ -221,9 +221,16 @@ def prepare_configuration(args): if args.enable_experimental_feature: os.environ['LEAPP_EXPERIMENTAL'] = '1' + if os.getenv('LEAPP_DEVEL_TARGET_OS'): + os.environ['LEAPP_TARGET_OS'] = os.environ['LEAPP_DEVEL_TARGET_OS'] + elif args.target_os: + os.environ['LEAPP_TARGET_OS'] = args.target_os + else: + os.environ["LEAPP_TARGET_OS"] = command_utils.get_source_distro_id() + 
os.environ['LEAPP_UNSUPPORTED'] = '0' if os.getenv('LEAPP_UNSUPPORTED', '0') == '0' else '1' # force no rhsm on non-rhel systems, regardless of whether the binary is there - if args.no_rhsm or command_utils.get_distro_id() != 'rhel': + if args.no_rhsm or os.environ['LEAPP_TARGET_OS'] != 'rhel': os.environ['LEAPP_NO_RHSM'] = '1' elif not os.path.exists('/usr/sbin/subscription-manager'): os.environ['LEAPP_NO_RHSM'] = '1' @@ -256,14 +263,18 @@ def prepare_configuration(args): # Check upgrade path and fail early if it's invalid target_version, flavor = command_utils.get_target_release(args) - os.environ['LEAPP_UPGRADE_PATH_TARGET_RELEASE'] = target_version - os.environ['LEAPP_UPGRADE_PATH_FLAVOUR'] = flavor - current_version = command_utils.get_os_release_version_id('/etc/os-release') - os.environ['LEAPP_IPU_IN_PROGRESS'] = '{source}to{target}'.format( - source=command_utils.get_major_version_from_a_valid_version(current_version), - target=command_utils.get_major_version_from_a_valid_version(target_version) - ) + if current_version and target_version: + os.environ['LEAPP_UPGRADE_PATH_TARGET_RELEASE'] = target_version + os.environ['LEAPP_IPU_IN_PROGRESS'] = '{source}to{target}'.format( + source=command_utils.get_major_version_from_a_valid_version(current_version), + target=command_utils.get_major_version_from_a_valid_version(target_version) + ) + else: + # Setting these variables to prevent them being set outside of the leapp environment + os.environ['LEAPP_UPGRADE_PATH_TARGET_RELEASE'] = '' + os.environ['LEAPP_IPU_IN_PROGRESS'] = '' + os.environ['LEAPP_UPGRADE_PATH_FLAVOUR'] = flavor configuration = { 'debug': os.getenv('LEAPP_DEBUG', '0'), diff --git a/docs/source/configuring-ipu/envars.md b/docs/source/configuring-ipu/envars.md index a042ba4a..72d00634 100644 --- a/docs/source/configuring-ipu/envars.md +++ b/docs/source/configuring-ipu/envars.md @@ -21,7 +21,7 @@ Overrides the automatically detected storage device with GRUB core (e.g. 
/dev/sd Set to 1 to disable RPM GPG checks (same as yum/dnf –nogpgckeck option). It‘s equivalent to the --nogpgcheck leapp option. #### LEAPP_NO_INSIGHTS_REGISTER -If set to `1`, Leapp does not register the system into Red Hat Insights automatically. It‘s equivalent to the --no-insights-register leapp option. +If set to `1`, Leapp does not register the system into Red Hat Lightspeed automatically. It‘s equivalent to the --no-insights-register leapp option. #### LEAPP_NO_NETWORK_RENAMING If set to `1`, the actor responsible to handle NICs names ends without doing anything. The actor usually creates UDEV rules to preserve original NICs in case they are changed. However, in some cases it‘s not wanted and it leads in malfunction network configuration (e.g. in case the bonding is configured on the system). It‘s expected that NICs have to be handled manually if needed. @@ -88,3 +88,6 @@ Change the default target RHEL version. Format: `MAJOR.MINOR`. #### LEAPP_DEVEL_USE_PERSISTENT_PACKAGE_CACHE Caches downloaded packages when set to `1`. This will reduce the time needed by leapp when executed multiple times, because it will not have to download already downloaded packages. However, this can lead to a random issues in case the data is not up-to-date or when setting or repositories change. The environment variable is meant to be used only for the part of the upgrade before the reboot and has no effect or use otherwise. + +#### LEAPP_DEVEL_TARGET_OS +Change the target OS. This is similar to the --target-os CLI option except there is no restriction on what values can be passed in. This can be used when developing conversions to a yet unsupported target OS. 
diff --git a/docs/source/libraries-and-api/deprecations-list.md b/docs/source/libraries-and-api/deprecations-list.md index 7d6bef18..e620d70d 100644 --- a/docs/source/libraries-and-api/deprecations-list.md +++ b/docs/source/libraries-and-api/deprecations-list.md @@ -13,8 +13,8 @@ framework, see {ref}`deprecation:list of the deprecated functionality in leapp`. Only the versions in which a deprecation has been made are listed. ## Next release (till TODO date) - -- Note: nothing new deprecated yet +- Shared libraries + - **`leapp.libraries.common.config.get_distro_id()`** - The function has been replaced by variants for source and target distros - `leapp.libraries.common.config.get_source_distro_id()` and `leapp.libraries.common.config.get_target_distro_id()`. ## v0.23.0 (till March 2026) diff --git a/etc/leapp/files/device_driver_deprecation_data.json b/etc/leapp/files/device_driver_deprecation_data.json index 6d5d6ef9..a9c06956 100644 --- a/etc/leapp/files/device_driver_deprecation_data.json +++ b/etc/leapp/files/device_driver_deprecation_data.json @@ -1,6 +1,6 @@ { "provided_data_streams": [ - "4.0" + "4.1" ], "data": [ { diff --git a/etc/leapp/files/pes-events.json b/etc/leapp/files/pes-events.json index da62837f..fec9a900 100644 --- a/etc/leapp/files/pes-events.json +++ b/etc/leapp/files/pes-events.json @@ -1,7 +1,7 @@ { -"timestamp": "202507291505Z", +"timestamp": "202511121106Z", "provided_data_streams": [ -"4.0" +"4.1" ], "packageinfo": [ { @@ -176011,7 +176011,6 @@ null { "action": 4, "architectures": [ -"aarch64", "ppc64le", "s390x", "x86_64" @@ -176038,13 +176037,23 @@ null { "in_modulestream": null, "out_modulestream": null +}, +{ +"in_modulestream": null, +"out_modulestream": { +"name": "gimp", +"stream": "2.8" +} } ], "out_packageset": { "package": [ { "modulestreams": [ -null +{ +"name": "gimp", +"stream": "2.8" +} ], "name": "python2-cairo", "repository": "rhel8-AppStream" @@ -190743,7 +190752,7 @@ null null ], "name": "python2-jmespath", 
-"repository": "rhel7-ansible-2" +"repository": "rhel7-ansible-2.5" } ], "set_id": 8199 @@ -190793,7 +190802,7 @@ null null ], "name": "python-httplib2", -"repository": "rhel7-optional" +"repository": "rhel7-extras" } ], "set_id": 8201 @@ -190816,14 +190825,14 @@ null null ], "name": "python3-httplib2", -"repository": "rhel8-CRB" +"repository": "rhel7-base" } ], "set_id": 8202 }, "release": { -"major_version": 8, -"minor_version": 0, +"major_version": 7, +"minor_version": 8, "os_name": "RHEL" } }, @@ -202084,7 +202093,6 @@ null { "action": 1, "architectures": [ -"aarch64", "ppc64le", "s390x", "x86_64" @@ -583403,7 +583411,7 @@ null } }, { -"action": 0, +"action": 6, "architectures": [ "aarch64", "ppc64le", @@ -583415,10 +583423,13 @@ null "package": [ { "modulestreams": [ -null +{ +"name": "gimp", +"stream": "2.8" +} ], "name": "gimp-libs", -"repository": "rhel9-AppStream" +"repository": "rhel8-AppStream" } ], "set_id": 22339 @@ -583428,8 +583439,27 @@ null "minor_version": 10, "os_name": "RHEL" }, -"modulestream_maps": [], -"out_packageset": null, +"modulestream_maps": [ +{ +"in_modulestream": { +"name": "gimp", +"stream": "2.8" +}, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gimp-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26437 +}, "release": { "major_version": 9, "minor_version": 0, @@ -699260,25 +699290,25 @@ null } }, { -"action": 0, +"action": 2, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19638, +"id": 19640, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "insights-core-selinux", +"name": "lftp", "repository": "rhel10-AppStream" } ], -"set_id": 26268 +"set_id": 26270 }, "initial_release": { "major_version": 10, @@ -699294,36 +699324,36 @@ null } }, { -"action": 0, +"action": 2, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19639, +"id": 19641, "in_packageset": { "package": [ { "modulestreams": [ null ], 
-"name": "insights-core-selinux", -"repository": "rhel9-AppStream" +"name": "ftp", +"repository": "rhel10-AppStream" } ], -"set_id": 26269 +"set_id": 26271 }, "initial_release": { -"major_version": 9, -"minor_version": 6, +"major_version": 10, +"minor_version": 0, "os_name": "RHEL" }, "modulestream_maps": [], "out_packageset": null, "release": { -"major_version": 9, -"minor_version": 7, +"major_version": 10, +"minor_version": 1, "os_name": "RHEL" } }, @@ -699335,18 +699365,18 @@ null "s390x", "x86_64" ], -"id": 19640, +"id": 19642, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "lftp", +"name": "vsftpd", "repository": "rhel10-AppStream" } ], -"set_id": 26270 +"set_id": 26272 }, "initial_release": { "major_version": 10, @@ -699362,25 +699392,25 @@ null } }, { -"action": 2, +"action": 0, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19641, +"id": 19643, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "ftp", +"name": "hunspell-ka", "repository": "rhel10-AppStream" } ], -"set_id": 26271 +"set_id": 26273 }, "initial_release": { "major_version": 10, @@ -699396,6 +699426,73 @@ null } }, { +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19644, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "php", +"stream": "7.4" +}, +{ +"name": "php", +"stream": "8.0" +} +], +"name": "php-ffi", +"repository": "rhel8-AppStream" +} +], +"set_id": 26274 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": { +"name": "php", +"stream": "7.4" +}, +"out_modulestream": null +}, +{ +"in_modulestream": { +"name": "php", +"stream": "8.0" +}, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "php-ffi", +"repository": "rhel9-AppStream" +} +], +"set_id": 26275 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": 
"RHEL" +} +}, +{ "action": 2, "architectures": [ "aarch64", @@ -699403,18 +699500,149 @@ null "s390x", "x86_64" ], -"id": 19642, +"id": 19645, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "php", +"stream": "7.2" +}, +{ +"name": "php", +"stream": "7.3" +}, +{ +"name": "php", +"stream": "7.4" +} +], +"name": "php-json", +"repository": "rhel8-AppStream" +} +], +"set_id": 26276 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19646, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "php", +"stream": "7.2" +}, +{ +"name": "php", +"stream": "7.3" +} +], +"name": "php-recode", +"repository": "rhel8-AppStream" +} +], +"set_id": 26277 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19647, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "php", +"stream": "7.2" +}, +{ +"name": "php", +"stream": "7.3" +}, +{ +"name": "php", +"stream": "7.4" +} +], +"name": "php-xmlrpc", +"repository": "rhel8-AppStream" +} +], +"set_id": 26278 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19648, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "vsftpd", -"repository": "rhel10-AppStream" +"name": 
"perl-XS-Parse-Keyword-Builder", +"repository": "rhel10-CRB" } ], -"set_id": 26272 +"set_id": 26279 }, "initial_release": { "major_version": 10, @@ -699437,18 +699665,18 @@ null "s390x", "x86_64" ], -"id": 19643, +"id": 19649, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "hunspell-ka", -"repository": "rhel10-AppStream" +"name": "bindgen-cli", +"repository": "rhel10-CRB" } ], -"set_id": 26273 +"set_id": 26280 }, "initial_release": { "major_version": 10, @@ -699464,200 +699692,784 @@ null } }, { -"action": 6, +"action": 0, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19644, +"id": 19650, "in_packageset": { "package": [ { "modulestreams": [ +null +], +"name": "bindgen-cli", +"repository": "rhel9-CRB" +} +], +"set_id": 26281 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, { -"name": "php", -"stream": "7.4" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19651, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "cbindgen", +"repository": "rhel10-CRB" +} +], +"set_id": 26282 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "8.0" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19652, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "cbindgen", +"repository": "rhel9-CRB" } ], -"name": "php-ffi", -"repository": "rhel8-AppStream" +"set_id": 26283 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, 
+"minor_version": 7, +"os_name": "RHEL" } +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" ], -"set_id": 26274 +"id": 19653, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "image-builder", +"repository": "rhel10-AppStream" +} +], +"set_id": 26284 }, "initial_release": { -"major_version": 8, -"minor_version": 10, +"major_version": 10, +"minor_version": 0, "os_name": "RHEL" }, -"modulestream_maps": [ +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, { -"in_modulestream": { -"name": "php", -"stream": "7.4" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19654, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "image-builder", +"repository": "rhel9-AppStream" +} +], +"set_id": 26285 }, -"out_modulestream": null +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} }, { -"in_modulestream": { -"name": "php", -"stream": "8.0" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19655, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "fips-provider-next", +"repository": "rhel10-AppStream" +} +], +"set_id": 26286 }, -"out_modulestream": null +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" } +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" ], -"out_packageset": { +"id": 19656, +"in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "php-ffi", +"name": "redhat-cloud-client-configuration-cdn", "repository": "rhel9-AppStream" } ], 
-"set_id": 26275 +"set_id": 26287 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" }, +"modulestream_maps": [], +"out_packageset": null, "release": { "major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19657, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gdal", +"repository": "rhel10-AppStream" +} +], +"set_id": 26288 +}, +"initial_release": { +"major_version": 10, "minor_version": 0, "os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" } }, { -"action": 2, +"action": 0, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19645, +"id": 19658, "in_packageset": { "package": [ { "modulestreams": [ +null +], +"name": "gdal-libs", +"repository": "rhel10-AppStream" +} +], +"set_id": 26289 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, { -"name": "php", -"stream": "7.2" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19659, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gdal-devel", +"repository": "rhel10-CRB" +} +], +"set_id": 26290 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "7.3" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19660, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "geos", +"repository": "rhel10-AppStream" +} +], +"set_id": 26291 +}, 
+"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "7.4" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19661, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "geos-devel", +"repository": "rhel10-CRB" } ], -"name": "php-json", -"repository": "rhel8-AppStream" +"set_id": 26292 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" } +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" ], -"set_id": 26276 +"id": 19662, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "proj", +"repository": "rhel10-AppStream" +} +], +"set_id": 26293 }, "initial_release": { -"major_version": 8, -"minor_version": 9, +"major_version": 10, +"minor_version": 0, "os_name": "RHEL" }, "modulestream_maps": [], "out_packageset": null, "release": { -"major_version": 8, -"minor_version": 10, +"major_version": 10, +"minor_version": 1, "os_name": "RHEL" } }, { -"action": 2, +"action": 0, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19646, +"id": 19663, "in_packageset": { "package": [ { "modulestreams": [ +null +], +"name": "proj-data", +"repository": "rhel10-AppStream" +} +], +"set_id": 26294 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, { -"name": "php", -"stream": "7.2" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19664, +"in_packageset": { +"package": [ +{ 
+"modulestreams": [ +null +], +"name": "proj-devel", +"repository": "rhel10-CRB" +} +], +"set_id": 26295 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "7.3" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19665, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "pkcs11-provider", +"repository": "rhel9-BaseOS" } ], -"name": "php-recode", -"repository": "rhel8-AppStream" +"set_id": 26296 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" } +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" ], -"set_id": 26277 +"id": 19666, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "geos", +"repository": "rhel9-AppStream" +} +], +"set_id": 26297 }, "initial_release": { -"major_version": 8, -"minor_version": 9, +"major_version": 9, +"minor_version": 6, "os_name": "RHEL" }, "modulestream_maps": [], "out_packageset": null, "release": { -"major_version": 8, -"minor_version": 10, +"major_version": 9, +"minor_version": 7, "os_name": "RHEL" } }, { -"action": 2, +"action": 0, "architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19647, +"id": 19667, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "geos-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26298 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", 
+"x86_64" +], +"id": 19668, "in_packageset": { "package": [ { "modulestreams": [ +null +], +"name": "proj", +"repository": "rhel9-AppStream" +} +], +"set_id": 26299 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, { -"name": "php", -"stream": "7.2" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19669, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "proj-data", +"repository": "rhel9-AppStream" +} +], +"set_id": 26300 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "7.3" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19670, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "proj-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26301 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} }, { -"name": "php", -"stream": "7.4" +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19671, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "tesseract-osd", +"repository": "rhel9-AppStream" } ], -"name": "php-xmlrpc", -"repository": "rhel8-AppStream" +"set_id": 26302 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", 
+"ppc64le", +"s390x", +"x86_64" +], +"id": 19672, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "tesseract-equ", +"repository": "rhel9-AppStream" } ], -"set_id": 26278 +"set_id": 26303 }, "initial_release": { -"major_version": 8, -"minor_version": 9, +"major_version": 9, +"minor_version": 5, "os_name": "RHEL" }, "modulestream_maps": [], "out_packageset": null, "release": { -"major_version": 8, -"minor_version": 10, +"major_version": 9, +"minor_version": 6, "os_name": "RHEL" } }, @@ -699669,18 +700481,18 @@ null "s390x", "x86_64" ], -"id": 19648, +"id": 19673, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "perl-XS-Parse-Keyword-Builder", -"repository": "rhel10-CRB" +"name": "tesseract-equ", +"repository": "rhel10-AppStream" } ], -"set_id": 26279 +"set_id": 26304 }, "initial_release": { "major_version": 10, @@ -699703,18 +700515,18 @@ null "s390x", "x86_64" ], -"id": 19649, +"id": 19674, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "bindgen-cli", -"repository": "rhel10-CRB" +"name": "tesseract-osd", +"repository": "rhel10-AppStream" } ], -"set_id": 26280 +"set_id": 26305 }, "initial_release": { "major_version": 10, @@ -699732,23 +700544,88 @@ null { "action": 0, "architectures": [ +"ppc64le" +], +"id": 19675, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "secvarctl", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26306 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19676, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "podman-sequoia", +"repository": "rhel10-AppStream" +} +], +"set_id": 26307 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, 
+"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ "aarch64", "ppc64le", "s390x", "x86_64" ], -"id": 19650, +"id": 19677, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "bindgen-cli", -"repository": "rhel9-CRB" +"name": "pqrpm", +"repository": "rhel9-BaseOS" } ], -"set_id": 26281 +"set_id": 26308 }, "initial_release": { "major_version": 9, @@ -699771,18 +700648,83 @@ null "s390x", "x86_64" ], -"id": 19651, +"id": 19678, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "cbindgen", -"repository": "rhel10-CRB" +"name": "python3-dnf-plugin-multisig", +"repository": "rhel9-BaseOS" } ], -"set_id": 26282 +"set_id": 26309 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19679, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-dnf-plugin-multisig", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26310 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"ppc64le" +], +"id": 19688, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "secvarctl", +"repository": "rhel10-BaseOS" +} +], +"set_id": 26324 }, "initial_release": { "major_version": 10, @@ -699805,18 +700747,256 @@ null "s390x", "x86_64" ], -"id": 19652, +"id": 19689, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "cbindgen", +"name": "postgis", +"repository": "rhel10-AppStream" +} +], 
+"set_id": 26325 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19690, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "postgis-client", +"repository": "rhel10-AppStream" +} +], +"set_id": 26326 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19691, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "postgis-docs", +"repository": "rhel10-AppStream" +} +], +"set_id": 26327 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19692, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "postgis-upgrade", +"repository": "rhel10-AppStream" +} +], +"set_id": 26328 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19693, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "postgis-utils", +"repository": "rhel10-AppStream" +} +], +"set_id": 26329 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, 
+"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19694, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gdal", +"repository": "rhel9-AppStream" +} +], +"set_id": 26330 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19695, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gdal-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26331 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19696, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gdal-devel", "repository": "rhel9-CRB" } ], -"set_id": 26283 +"set_id": 26332 }, "initial_release": { "major_version": 9, @@ -699839,18 +701019,203 @@ null "s390x", "x86_64" ], -"id": 19653, +"id": 19697, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "postgresql", +"stream": "16" +} +], +"name": "postgis", +"repository": "rhel9-AppStream" +} +], +"set_id": 26333 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19698, +"in_packageset": { 
+"package": [ +{ +"modulestreams": [ +{ +"name": "postgresql", +"stream": "16" +} +], +"name": "postgis-client", +"repository": "rhel9-AppStream" +} +], +"set_id": 26334 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19699, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "postgresql", +"stream": "16" +} +], +"name": "postgis-docs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26335 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19700, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "postgresql", +"stream": "16" +} +], +"name": "postgis-upgrade", +"repository": "rhel9-AppStream" +} +], +"set_id": 26336 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19701, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "postgresql", +"stream": "16" +} +], +"name": "postgis-utils", +"repository": "rhel9-AppStream" +} +], +"set_id": 26337 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", 
+"s390x", +"x86_64" +], +"id": 19702, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "image-builder", +"name": "nodejs24", "repository": "rhel10-AppStream" } ], -"set_id": 26284 +"set_id": 26338 }, "initial_release": { "major_version": 10, @@ -699873,18 +701238,305 @@ null "s390x", "x86_64" ], -"id": 19654, +"id": 19703, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "image-builder", +"name": "nodejs24-devel", +"repository": "rhel10-AppStream" +} +], +"set_id": 26339 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19704, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "nodejs24-docs", +"repository": "rhel10-AppStream" +} +], +"set_id": 26340 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19705, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "nodejs24-full-i18n", +"repository": "rhel10-AppStream" +} +], +"set_id": 26341 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19706, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "nodejs24-libs", +"repository": "rhel10-AppStream" +} +], +"set_id": 26342 +}, +"initial_release": { +"major_version": 10, 
+"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19707, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "nodejs24-npm", +"repository": "rhel10-AppStream" +} +], +"set_id": 26343 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 4, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19708, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "p11-kit-server", +"repository": "rhel10-BaseOS" +} +], +"set_id": 26344 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "p11-kit-client", +"repository": "rhel10-BaseOS" +}, +{ +"modulestreams": [ +null +], +"name": "p11-kit-server", +"repository": "rhel10-BaseOS" +} +], +"set_id": 26345 +}, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 4, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19709, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "p11-kit-server", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26346 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "p11-kit-client", +"repository": "rhel9-BaseOS" +}, +{ +"modulestreams": [ +null +], 
+"name": "p11-kit-server", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26347 +}, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19710, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs", "repository": "rhel9-AppStream" } ], -"set_id": 26285 +"set_id": 26348 }, "initial_release": { "major_version": 9, @@ -699907,18 +701559,351 @@ null "s390x", "x86_64" ], -"id": 19655, +"id": 19711, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26349 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19712, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-docs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26350 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19713, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-full-i18n", +"repository": "rhel9-AppStream" +} +], +"set_id": 26351 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": 
"RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19714, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26352 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19715, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-nodemon", +"repository": "rhel9-AppStream" +} +], +"set_id": 26353 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19716, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-packaging", +"repository": "rhel9-AppStream" +} +], +"set_id": 26354 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19717, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "nodejs-packaging-bundler", +"repository": "rhel9-AppStream" +} +], +"set_id": 26355 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { 
+"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19718, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "npm", +"repository": "rhel9-AppStream" +} +], +"set_id": 26356 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19719, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "nodejs", +"stream": "24" +} +], +"name": "v8-13.6-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26357 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19720, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "fips-provider-next", +"name": "aspnetcore-runtime-10.0", "repository": "rhel10-AppStream" } ], -"set_id": 26286 +"set_id": 26358 }, "initial_release": { "major_version": 10, @@ -699941,29 +701926,6848 @@ null "s390x", "x86_64" ], -"id": 19656, +"id": 19721, "in_packageset": { "package": [ { "modulestreams": [ null ], -"name": "redhat-cloud-client-configuration-cdn", -"repository": "rhel9-AppStream" +"name": "aspnetcore-runtime-dbg-10.0", +"repository": "rhel10-AppStream" } ], -"set_id": 26287 +"set_id": 26359 }, "initial_release": { -"major_version": 9, -"minor_version": 5, +"major_version": 10, +"minor_version": 0, "os_name": "RHEL" }, "modulestream_maps": [], "out_packageset": null, "release": { -"major_version": 9, -"minor_version": 6, 
+"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19722, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "aspnetcore-targeting-pack-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26360 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19723, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-apphost-pack-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26361 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19724, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-hostfxr-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26362 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19725, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-runtime-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26363 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": 
"RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19726, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-runtime-dbg-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26364 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19727, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26365 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"x86_64" +], +"id": 19728, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-aot-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26366 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19729, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-dbg-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26367 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", 
+"x86_64" +], +"id": 19730, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-targeting-pack-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26368 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19731, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-templates-10.0", +"repository": "rhel10-AppStream" +} +], +"set_id": 26369 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19732, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-10.0-source-built-artifacts", +"repository": "rhel10-CRB" +} +], +"set_id": 26370 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19733, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "aspnetcore-runtime-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26371 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19734, +"in_packageset": { 
+"package": [ +{ +"modulestreams": [ +null +], +"name": "aspnetcore-runtime-dbg-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26372 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19735, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "aspnetcore-targeting-pack-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26373 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19736, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-apphost-pack-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26374 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19737, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-hostfxr-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26375 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19738, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": 
"dotnet-runtime-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26376 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19739, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-runtime-dbg-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26377 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19740, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26378 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"x86_64" +], +"id": 19741, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-aot-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26379 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19742, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-dbg-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26380 +}, +"initial_release": { 
+"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19743, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-targeting-pack-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26381 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19744, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-templates-10.0", +"repository": "rhel9-AppStream" +} +], +"set_id": 26382 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19745, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "dotnet-sdk-10.0-source-built-artifacts", +"repository": "rhel9-CRB" +} +], +"set_id": 26383 +}, +"initial_release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19746, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "rhel-drivers", +"repository": "rhel10-AppStream" +} +], +"set_id": 26384 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, 
+"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19747, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql", +"repository": "rhel8-AppStream" +} +], +"set_id": 26385 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19748, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-common", +"repository": "rhel8-AppStream" +} +], +"set_id": 26386 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19749, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-devel", +"repository": "rhel8-AppStream" +} +], +"set_id": 26387 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19750, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-errmsg", +"repository": "rhel8-AppStream" +} +], +"set_id": 26388 +}, +"initial_release": { +"major_version": 8, +"minor_version": 
9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19751, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-libs", +"repository": "rhel8-AppStream" +} +], +"set_id": 26389 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19752, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-server", +"repository": "rhel8-AppStream" +} +], +"set_id": 26390 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19753, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-test", +"repository": "rhel8-AppStream" +} +], +"set_id": 26391 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19754, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "mysql", +"stream": "8.4" +} +], +"name": "mysql-test-data", +"repository": "rhel8-AppStream" +} +], +"set_id": 26392 +}, +"initial_release": { 
+"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19755, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "opencv", +"repository": "rhel8-CRB" +} +], +"set_id": 26393 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "opencv", +"repository": "rhel8-AppStream" +} +], +"set_id": 26394 +}, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19757, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk", +"repository": "rhel10-AppStream" +} +], +"set_id": 26396 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19758, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-demo", +"repository": "rhel10-AppStream" +} +], +"set_id": 26397 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19759, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": 
"java-25-openjdk-devel", +"repository": "rhel10-AppStream" +} +], +"set_id": 26398 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19760, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-headless", +"repository": "rhel10-AppStream" +} +], +"set_id": 26399 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19761, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-javadoc", +"repository": "rhel10-AppStream" +} +], +"set_id": 26400 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19762, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-javadoc-zip", +"repository": "rhel10-AppStream" +} +], +"set_id": 26401 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19763, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-jmods", +"repository": 
"rhel10-AppStream" +} +], +"set_id": 26402 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19764, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-src", +"repository": "rhel10-AppStream" +} +], +"set_id": 26403 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19765, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-static-libs", +"repository": "rhel10-AppStream" +} +], +"set_id": 26404 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19766, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-demo-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26405 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19767, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-demo-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26406 +}, 
+"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19768, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-devel-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26407 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19769, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-devel-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26408 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19770, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26409 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19771, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-headless-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26410 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": 
"RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19772, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-headless-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26411 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19773, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-jmods-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26412 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19774, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-jmods-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26413 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19775, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26414 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": 
null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19776, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-src-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26415 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19777, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-src-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26416 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19778, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-static-libs-fastdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26417 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19779, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "java-25-openjdk-static-libs-slowdebug", +"repository": "rhel10-CRB" +} +], +"set_id": 26418 +}, +"initial_release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 
1, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"x86_64" +], +"id": 19780, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "beignet", +"repository": "rhel8-AppStream" +} +], +"set_id": 26419 +}, +"initial_release": { +"major_version": 8, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"x86_64" +], +"id": 19782, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libxcam", +"repository": "rhel8-AppStream" +} +], +"set_id": 26421 +}, +"initial_release": { +"major_version": 8, +"minor_version": 0, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 1, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19784, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "xmlstarlet", +"repository": "rhel8-AppStream" +} +], +"set_id": 26423 +}, +"initial_release": { +"major_version": 8, +"minor_version": 5, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19785, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "xmlstarlet", +"repository": "rhel8-AppStream" +} +], +"set_id": 26424 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19786, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null 
+], +"name": "libunicap", +"repository": "rhel8-AppStream" +} +], +"set_id": 26425 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19787, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libunicap", +"repository": "rhel8-AppStream" +} +], +"set_id": 26426 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19788, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libunicap-devel", +"repository": "rhel8-CRB" +} +], +"set_id": 26427 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19789, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libucil", +"repository": "rhel8-AppStream" +} +], +"set_id": 26428 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19790, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libucil", +"repository": "rhel8-AppStream" +} +], +"set_id": 26429 +}, +"initial_release": { 
+"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19791, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libucil-devel", +"repository": "rhel8-CRB" +} +], +"set_id": 26430 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19792, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libdc1394", +"repository": "rhel8-AppStream" +} +], +"set_id": 26431 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"x86_64" +], +"id": 19793, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libdc1394", +"repository": "rhel8-AppStream" +} +], +"set_id": 26432 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19794, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "liboggz", +"repository": "rhel8-CRB" +} +], +"set_id": 26433 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 
9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19795, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "opencv", +"repository": "rhel8-AppStream" +} +], +"set_id": 26434 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19796, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "opencv", +"repository": "rhel8-AppStream" +} +], +"set_id": 26435 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19797, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "mypaint-brushes", +"repository": "rhel9-AppStream" +} +], +"set_id": 26436 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19798, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "gimp-devel", +"repository": "rhel8-AppStream" +} +], +"set_id": 26438 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": 
[ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19799, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "gimp-devel", +"repository": "rhel8-AppStream" +} +], +"set_id": 26439 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19800, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "gimp-devel-tools", +"repository": "rhel8-AppStream" +} +], +"set_id": 26440 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19801, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "gimp-devel-tools", +"repository": "rhel8-AppStream" +} +], +"set_id": 26441 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19802, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "expect", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26442 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "expect", 
+"repository": "rhel9-AppStream" +} +], +"set_id": 26443 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19803, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26444 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi", +"repository": "rhel9-AppStream" +} +], +"set_id": 26445 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19804, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi-bmc-watchdog", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26446 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi-bmc-watchdog", +"repository": "rhel9-AppStream" +} +], +"set_id": 26447 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19805, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi-ipmidetectd", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26448 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": 
"freeipmi-ipmidetectd", +"repository": "rhel9-AppStream" +} +], +"set_id": 26449 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19806, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi-ipmiseld", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26450 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "freeipmi-ipmiseld", +"repository": "rhel9-AppStream" +} +], +"set_id": 26451 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19807, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpgmepp", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26452 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpgmepp", +"repository": "rhel9-AppStream" +} +], +"set_id": 26453 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19808, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "kbd-legacy", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26454 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "kbd-legacy", 
+"repository": "rhel9-AppStream" +} +], +"set_id": 26455 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19809, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libmng-devel", +"repository": "rhel8-AppStream" +} +], +"set_id": 26456 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libmng-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26457 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19810, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "net-snmp-libs", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26458 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "net-snmp-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26459 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19811, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26460 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI", +"repository": 
"rhel9-AppStream" +} +], +"set_id": 26461 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19812, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI-lanserv", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26462 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI-lanserv", +"repository": "rhel9-AppStream" +} +], +"set_id": 26463 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19813, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI-libs", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26464 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "OpenIPMI-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26465 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19814, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-client", +"repository": "rhel8-AppStream" +} +], +"set_id": 26466 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-client", +"repository": 
"rhel9-CRB" +} +], +"set_id": 26467 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19815, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "patch", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26468 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "patch", +"repository": "rhel9-AppStream" +} +], +"set_id": 26469 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19816, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "pciutils-devel", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26470 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "pciutils-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26471 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19817, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme", +"repository": "rhel8-AppStream" +} +], +"set_id": 26472 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme", +"repository": "rhel9-CRB" +} +], +"set_id": 26473 +}, 
+"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19818, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "tix", +"repository": "rhel8-AppStream" +} +], +"set_id": 26474 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "tix", +"repository": "rhel9-CRB" +} +], +"set_id": 26475 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19819, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "volume_key-devel", +"repository": "rhel8-AppStream" +} +], +"set_id": 26476 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "volume_key-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26477 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19820, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "watchdog", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26478 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "watchdog", +"repository": "rhel9-AppStream" +} +], +"set_id": 26479 +}, +"release": { +"major_version": 9, 
+"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19821, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm", +"repository": "rhel9-AppStream" +} +], +"set_id": 26480 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19822, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm", +"repository": "rhel9-AppStream" +} +], +"set_id": 26481 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19823, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26482 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19824, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26483 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], 
+"id": 19825, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26484 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19826, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gpm-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26485 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19827, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gsl", +"repository": "rhel9-AppStream" +} +], +"set_id": 26486 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19828, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gsl", +"repository": "rhel9-AppStream" +} +], +"set_id": 26487 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19829, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gsl-devel", +"repository": "rhel9-AppStream" 
+} +], +"set_id": 26488 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19830, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gsl-devel", +"repository": "rhel9-AppStream" +} +], +"set_id": 26489 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19831, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "udftools", +"repository": "rhel9-AppStream" +} +], +"set_id": 26490 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19832, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "udftools", +"repository": "rhel9-AppStream" +} +], +"set_id": 26491 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19833, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-selinux", +"repository": "rhel10-AppStream" +} +], +"set_id": 26492 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, 
+"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19834, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "smartmontools-selinux", +"repository": "rhel10-BaseOS" +} +], +"set_id": 26493 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 4, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19835, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "net-snmp-perl", +"repository": "rhel9-AppStream" +} +], +"set_id": 26494 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "net-snmp-perl", +"repository": "rhel10-AppStream" +}, +{ +"modulestreams": [ +null +], +"name": "net-snmp-perl-module", +"repository": "rhel10-AppStream" +} +], +"set_id": 26495 +}, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19836, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "oniguruma", +"repository": "rhel9-AppStream" +} +], +"set_id": 26496 +}, +"initial_release": { +"major_version": 9, +"minor_version": 3, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "oniguruma", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26497 +}, +"release": { +"major_version": 9, 
+"minor_version": 4, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19837, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "kbd-legacy", +"repository": "rhel9-AppStream" +} +], +"set_id": 26498 +}, +"initial_release": { +"major_version": 9, +"minor_version": 2, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "kbd-legacy", +"repository": "rhel9-BaseOS" +} +], +"set_id": 26499 +}, +"release": { +"major_version": 9, +"minor_version": 3, +"os_name": "RHEL" +} +}, +{ +"action": 3, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19838, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme", +"repository": "rhel9-CRB" +} +], +"set_id": 26500 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme-qt6", +"repository": "rhel10-CRB" +} +], +"set_id": 26501 +}, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 4, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19839, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26502 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "qgpgme-common-devel", +"repository": "rhel10-CRB" +}, +{ +"modulestreams": [ +null +], +"name": "qgpgme-qt6-devel", +"repository": "rhel10-CRB" +} +], 
+"set_id": 26503 +}, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19840, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openjpeg2-devel-docs", +"repository": "rhel8-AppStream" +} +], +"set_id": 26504 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19841, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openjpeg2-devel-docs", +"repository": "rhel8-AppStream" +} +], +"set_id": 26505 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19842, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "recode-devel", +"repository": "rhel8-CRB" +} +], +"set_id": 26506 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 5, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19843, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel9-HighAvailability" +}, +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26508 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" +}, 
+"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel9-AppStream" +} +], +"set_id": 26509 +}, +"release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 6, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19844, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel8-AppStream" +} +], +"set_id": 26510 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel9-HighAvailability" +} +], +"set_id": 26511 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19845, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-python3", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26514 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19846, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04", +"repository": "rhel9-AppStream" +} +], +"set_id": 26515 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 
19847, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04", +"repository": "rhel9-AppStream" +} +], +"set_id": 26516 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19848, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04-devel-docs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26517 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19849, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04-devel-docs", +"repository": "rhel9-AppStream" +} +], +"set_id": 26518 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19850, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04-tools", +"repository": "rhel9-AppStream" +} +], +"set_id": 26519 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19851, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04-tools", +"repository": "rhel9-AppStream" +} +], +"set_id": 26520 +}, +"initial_release": { 
+"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"x86_64" +], +"id": 19852, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gegl04-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26521 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19853, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libmypaint", +"repository": "rhel9-AppStream" +} +], +"set_id": 26522 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19854, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libmypaint", +"repository": "rhel9-AppStream" +} +], +"set_id": 26523 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19855, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "insights-core", +"repository": "rhel10-AppStream" +} +], +"set_id": 26524 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { 
+"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19856, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "insights-core-selinux", +"repository": "rhel10-AppStream" +} +], +"set_id": 26525 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19857, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "insights-core", +"repository": "rhel9-AppStream" +} +], +"set_id": 26527 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19858, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "insights-core-selinux", +"repository": "rhel9-AppStream" +} +], +"set_id": 26528 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19859, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libyang-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26529 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, 
+"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19860, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libyang-devel-doc", +"repository": "rhel9-CRB" +} +], +"set_id": 26531 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19861, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libyang-devel", +"repository": "rhel10-CRB" +} +], +"set_id": 26532 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19862, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libyang-devel-doc", +"repository": "rhel10-CRB" +} +], +"set_id": 26533 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19863, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "perl-Crypt-DES", +"repository": "rhel10-AppStream" +} +], +"set_id": 26534 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19864, +"in_packageset": { 
+"package": [ +{ +"modulestreams": [ +null +], +"name": "mrtg-selinux", +"repository": "rhel10-AppStream" +} +], +"set_id": 26535 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 5, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19865, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "python2-cairo", +"repository": "rhel8-AppStream" +}, +{ +"modulestreams": [ +null +], +"name": "python3-cairo", +"repository": "rhel8-AppStream" +} +], +"set_id": 26536 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +}, +{ +"in_modulestream": { +"name": "gimp", +"stream": "2.8" +}, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-cairo", +"repository": "rhel9-AppStream" +} +], +"set_id": 26537 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 5, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19866, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +{ +"name": "gimp", +"stream": "2.8" +} +], +"name": "python2-cairo-devel", +"repository": "rhel8-AppStream" +}, +{ +"modulestreams": [ +null +], +"name": "python3-cairo-devel", +"repository": "rhel8-CRB" +} +], +"set_id": 26538 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +}, +{ +"in_modulestream": { +"name": "gimp", +"stream": "2.8" +}, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-cairo-devel", 
+"repository": "rhel9-CRB" +} +], +"set_id": 26539 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19867, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-libuser", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26540 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19868, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-libuser", +"repository": "rhel8-BaseOS" +} +], +"set_id": 26541 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19869, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "sblim-gather", +"repository": "rhel8-AppStream" +} +], +"set_id": 26542 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19870, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "sblim-gather", +"repository": "rhel8-AppStream" +} +], +"set_id": 26543 +}, +"initial_release": { +"major_version": 8, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 
10, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19871, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "sblim-gather-provider", +"repository": "rhel8-CRB" +} +], +"set_id": 26544 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19872, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libsmi-devel", +"repository": "rhel8-CRB" +} +], +"set_id": 26545 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19873, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-httplib2", +"repository": "rhel8-CRB" +} +], +"set_id": 26546 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19874, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-perl", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26547 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19875, 
+"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-winrs", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26548 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19876, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "rubygem-openwsman", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26549 +}, +"initial_release": { +"major_version": 9, +"minor_version": 5, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 6, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19877, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "console-setup", +"repository": "rhel9-AppStream" +} +], +"set_id": 26550 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19878, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "console-setup", +"repository": "rhel9-AppStream" +} +], +"set_id": 26551 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19879, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": 
"ansible-collection-redhat-rhel_mgmt", +"repository": "rhel9-AppStream" +} +], +"set_id": 26552 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19880, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "ansible-collection-redhat-rhel_mgmt", +"repository": "rhel9-AppStream" +} +], +"set_id": 26553 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19881, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-pyghmi", +"repository": "rhel9-AppStream" +} +], +"set_id": 26554 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19882, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "python3-pyghmi", +"repository": "rhel9-AppStream" +} +], +"set_id": 26555 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19883, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "mypaint-brushes", +"repository": "rhel9-AppStream" +} +], 
+"set_id": 26556 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19884, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "mypaint-brushes", +"repository": "rhel9-AppStream" +} +], +"set_id": 26557 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19885, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libwmf", +"repository": "rhel9-AppStream" +} +], +"set_id": 26558 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19886, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libwmf", +"repository": "rhel9-AppStream" +} +], +"set_id": 26559 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19887, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libwmf-lite", +"repository": "rhel9-AppStream" +} +], +"set_id": 26560 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, 
+"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19888, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libwmf-lite", +"repository": "rhel9-AppStream" +} +], +"set_id": 26561 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19889, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "libwmf-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26562 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19890, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "librx", +"repository": "rhel9-CRB" +} +], +"set_id": 26563 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19891, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "librx-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26564 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, 
+{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19892, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "expect-devel", +"repository": "rhel9-CRB" +} +], +"set_id": 26565 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19893, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-perl", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26566 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19894, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-perl", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26567 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19895, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "openwsman-winrs", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26568 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19896, +"in_packageset": { +"package": [ 
+{ +"modulestreams": [ +null +], +"name": "openwsman-winrs", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26569 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19897, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "rubygem-openwsman", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26570 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 2, +"architectures": [ +"ppc64le", +"s390x", +"x86_64" +], +"id": 19898, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "rubygem-openwsman", +"repository": "rhel9-ResilientStorage" +} +], +"set_id": 26571 +}, +"initial_release": { +"major_version": 9, +"minor_version": 7, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19899, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gimp", +"repository": "rhel9-AppStream" +} +], +"set_id": 26572 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19900, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "gimp-libs", +"repository": "rhel9-AppStream" +} +], +"set_id": 
26573 +}, +"initial_release": { +"major_version": 9, +"minor_version": 8, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 1, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19901, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "opencv", +"repository": "rhel8-CRB" +} +], +"set_id": 26574 +}, +"initial_release": { +"major_version": 8, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 8, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 7, +"architectures": [ +"x86_64" +], +"id": 19902, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap8-wildfly-openssl-el8-x86_64", +"repository": "rhel8-jbeap-8.1" +} +], +"set_id": 26575 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap8-wildfly-openssl-el9-x86_64", +"repository": "rhel9-jbeap-8.1" +} +], +"set_id": 26576 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 7, +"architectures": [ +"x86_64" +], +"id": 19903, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap7-wildfly-openssl-el7-x86_64", +"repository": "rhel7-jbeap-7.4" +} +], +"set_id": 26577 +}, +"initial_release": { +"major_version": 7, +"minor_version": 9, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap7-wildfly-openssl-el8-x86_64", +"repository": "rhel8-jbeap-7.4" +} +], +"set_id": 26578 +}, +"release": { +"major_version": 8, +"minor_version": 0, 
+"os_name": "RHEL" +} +}, +{ +"action": 7, +"architectures": [ +"x86_64" +], +"id": 19904, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap7-wildfly-openssl-el8-x86_64", +"repository": "rhel8-jbeap-7.4" +} +], +"set_id": 26579 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap7-wildfly-openssl-el9-x86_64", +"repository": "rhel9-jbeap-7.4" +} +], +"set_id": 26580 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 7, +"architectures": [ +"x86_64" +], +"id": 19905, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap8-wildfly-openssl-el8-x86_64", +"repository": "rhel8-jbeap-8.0" +} +], +"set_id": 26581 +}, +"initial_release": { +"major_version": 8, +"minor_version": 10, +"os_name": "RHEL" +}, +"modulestream_maps": [ +{ +"in_modulestream": null, +"out_modulestream": null +} +], +"out_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "eap8-wildfly-openssl-el9-x86_64", +"repository": "rhel9-jbeap-8.0" +} +], +"set_id": 26582 +}, +"release": { +"major_version": 9, +"minor_version": 0, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19906, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "capnproto", +"repository": "rhel10-CRB" +} +], +"set_id": 26583 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19907, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": 
"capnproto-devel", +"repository": "rhel10-CRB" +} +], +"set_id": 26584 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, +"os_name": "RHEL" +} +}, +{ +"action": 0, +"architectures": [ +"aarch64", +"ppc64le", +"s390x", +"x86_64" +], +"id": 19908, +"in_packageset": { +"package": [ +{ +"modulestreams": [ +null +], +"name": "capnproto-libs", +"repository": "rhel10-CRB" +} +], +"set_id": 26585 +}, +"initial_release": { +"major_version": 10, +"minor_version": 1, +"os_name": "RHEL" +}, +"modulestream_maps": [], +"out_packageset": null, +"release": { +"major_version": 10, +"minor_version": 2, "os_name": "RHEL" } } diff --git a/etc/leapp/files/repomap.json b/etc/leapp/files/repomap.json index 87646393..c4ae9038 100644 --- a/etc/leapp/files/repomap.json +++ b/etc/leapp/files/repomap.json @@ -1,8 +1,8 @@ { - "datetime": "202508131404Z", + "datetime": "202511131423Z", "version_format": "1.3.0", "provided_data_streams": [ - "4.0" + "4.1" ], "mapping": [ { @@ -234,6 +234,24 @@ "target": [ "rhel9-rhui-custom-client-at-alibaba" ] + }, + { + "source": "rhel8-jbeap-7.4", + "target": [ + "rhel9-jbeap-7.4" + ] + }, + { + "source": "rhel8-jbeap-8.0", + "target": [ + "rhel9-jbeap-8.0" + ] + }, + { + "source": "rhel8-jbeap-8.1", + "target": [ + "rhel9-jbeap-8.1" + ] } ] }, @@ -3063,6 +3081,38 @@ { "pesid": "rhel8-BaseOS", "entries": [ + { + "major_version": "8", + "repoid": "baseos", + "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "baseos", + "arch": "ppc64le", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "baseos", + "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "baseos", + "arch": "x86_64", + "channel": "ga", + "repo_type": 
"rpm", + "distro": "centos" + }, { "major_version": "8", "repoid": "rhel-8-baseos-beta-rhui-rpms", @@ -3330,6 +3380,38 @@ { "pesid": "rhel8-AppStream", "entries": [ + { + "major_version": "8", + "repoid": "appstream", + "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "appstream", + "arch": "ppc64le", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "appstream", + "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "appstream", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, { "major_version": "8", "repoid": "rhel-8-appstream-beta-rhui-rpms", @@ -3729,6 +3811,38 @@ "repo_type": "rpm", "distro": "rhel" }, + { + "major_version": "8", + "repoid": "powertools", + "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "powertools", + "arch": "ppc64le", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "powertools", + "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "powertools", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, { "major_version": "8", "repoid": "rhui-codeready-builder-for-rhel-8-aarch64-rhui-rpms", @@ -3940,12 +4054,28 @@ "channel": "ga", "repo_type": "rpm", "distro": "rhel" + }, + { + "major_version": "8", + "repoid": "rt", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" } ] }, { "pesid": "rhel8-NFV", "entries": [ + { + "major_version": "8", + "repoid": "nfv", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, { "major_version": "8", "repoid": "rhel-8-for-x86_64-nfv-beta-rpms", @@ -4218,6 +4348,38 @@ { 
"pesid": "rhel8-HighAvailability", "entries": [ + { + "major_version": "8", + "repoid": "ha", + "arch": "aarch64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "ha", + "arch": "ppc64le", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "ha", + "arch": "s390x", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, + { + "major_version": "8", + "repoid": "ha", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "centos" + }, { "major_version": "8", "repoid": "rhel-8-for-aarch64-highavailability-beta-rpms", @@ -4417,6 +4579,45 @@ } ] }, + { + "pesid": "rhel8-jbeap-7.4", + "entries": [ + { + "major_version": "8", + "repoid": "jb-eap-7.4-for-rhel-8-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] + }, + { + "pesid": "rhel8-jbeap-8.0", + "entries": [ + { + "major_version": "8", + "repoid": "jb-eap-8.0-for-rhel-8-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] + }, + { + "pesid": "rhel8-jbeap-8.1", + "entries": [ + { + "major_version": "8", + "repoid": "jb-eap-8.1-for-rhel-8-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] + }, { "pesid": "rhel8-rhui-client-config-server-8", "entries": [ @@ -6233,6 +6434,45 @@ "rhui": "alibaba" } ] + }, + { + "pesid": "rhel9-jbeap-7.4", + "entries": [ + { + "major_version": "9", + "repoid": "jb-eap-7.4-for-rhel-9-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] + }, + { + "pesid": "rhel9-jbeap-8.0", + "entries": [ + { + "major_version": "9", + "repoid": "jb-eap-8.0-for-rhel-9-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] + }, + { + "pesid": "rhel9-jbeap-8.1", + "entries": [ + { + "major_version": "9", + "repoid": 
"jb-eap-8.1-for-rhel-9-x86_64-rpms", + "arch": "x86_64", + "channel": "ga", + "repo_type": "rpm", + "distro": "rhel" + } + ] } ] } diff --git a/etc/leapp/transaction/to_reinstall b/etc/leapp/transaction/to_reinstall new file mode 100644 index 00000000..c6694a8e --- /dev/null +++ b/etc/leapp/transaction/to_reinstall @@ -0,0 +1,3 @@ +### List of packages (each on new line) to be reinstalled to the upgrade transaction +### Useful for packages that have identical version strings but contain binary changes between major OS versions +### Packages that aren't installed will be skipped diff --git a/packaging/leapp-repository.spec b/packaging/leapp-repository.spec index a2d245c2..ea7f7043 100644 --- a/packaging/leapp-repository.spec +++ b/packaging/leapp-repository.spec @@ -120,7 +120,7 @@ Requires: leapp-repository-dependencies = %{leapp_repo_deps} # IMPORTANT: this is capability provided by the leapp framework rpm. # Check that 'version' instead of the real framework rpm version. -Requires: leapp-framework >= 6.1, leapp-framework < 7 +Requires: leapp-framework >= 6.2, leapp-framework < 7 # Since we provide sub-commands for the leapp utility, we expect the leapp # tool to be installed as well. 
diff --git a/repos/system_upgrade/common/actors/addupgradebootentry/libraries/addupgradebootentry.py b/repos/system_upgrade/common/actors/addupgradebootentry/libraries/addupgradebootentry.py index b28ec57c..6882488a 100644 --- a/repos/system_upgrade/common/actors/addupgradebootentry/libraries/addupgradebootentry.py +++ b/repos/system_upgrade/common/actors/addupgradebootentry/libraries/addupgradebootentry.py @@ -91,7 +91,7 @@ def figure_out_commands_needed_to_add_entry(kernel_path, initramfs_path, args_to '/usr/sbin/grubby', '--add-kernel', '{0}'.format(kernel_path), '--initrd', '{0}'.format(initramfs_path), - '--title', 'RHEL-Upgrade-Initramfs', + '--title', 'ELevate-Upgrade-Initramfs', '--copy-default', '--make-default', '--args', args_to_add_str diff --git a/repos/system_upgrade/common/actors/addupgradebootentry/tests/unit_test_addupgradebootentry.py b/repos/system_upgrade/common/actors/addupgradebootentry/tests/unit_test_addupgradebootentry.py index e5f632bc..b2ced8ae 100644 --- a/repos/system_upgrade/common/actors/addupgradebootentry/tests/unit_test_addupgradebootentry.py +++ b/repos/system_upgrade/common/actors/addupgradebootentry/tests/unit_test_addupgradebootentry.py @@ -24,7 +24,7 @@ from leapp.models import ( CUR_DIR = os.path.dirname(os.path.abspath(__file__)) -class run_mocked(object): +class run_mocked: def __init__(self): self.args = [] @@ -32,7 +32,7 @@ class run_mocked(object): self.args.append(args) -class write_to_file_mocked(object): +class write_to_file_mocked: def __init__(self): self.content = None @@ -53,7 +53,7 @@ run_args_add = [ '/usr/sbin/grubby', '--add-kernel', '/abc', '--initrd', '/def', - '--title', 'RHEL-Upgrade-Initramfs', + '--title', 'ELevate-Upgrade-Initramfs', '--copy-default', '--make-default', '--args', diff --git a/repos/system_upgrade/common/actors/adjustlocalrepos/tests/test_adjustlocalrepos.py b/repos/system_upgrade/common/actors/adjustlocalrepos/tests/test_adjustlocalrepos.py index 41cff200..6abf4189 100644 --- 
a/repos/system_upgrade/common/actors/adjustlocalrepos/tests/test_adjustlocalrepos.py +++ b/repos/system_upgrade/common/actors/adjustlocalrepos/tests/test_adjustlocalrepos.py @@ -83,7 +83,7 @@ def test_adjust_local_file_url(repo_file_line, expected_adjusted_repo_file_line) assert adjusted_repo_file_line == expected_adjusted_repo_file_line -class MockedFileDescriptor(object): +class MockedFileDescriptor: def __init__(self, repo_file, expected_new_repo_file): self.repo_file = repo_file @@ -113,7 +113,7 @@ class MockedFileDescriptor(object): assert expected_repo_file_contents == new_contents -class MockedContext(object): +class MockedContext: def __init__(self, repo_contents, expected_repo_contents): self.repo_contents = repo_contents diff --git a/repos/system_upgrade/common/actors/applytransactionworkarounds/tests/unit_test_applytransactionworkarounds.py b/repos/system_upgrade/common/actors/applytransactionworkarounds/tests/unit_test_applytransactionworkarounds.py index 8fe33645..96b8094f 100644 --- a/repos/system_upgrade/common/actors/applytransactionworkarounds/tests/unit_test_applytransactionworkarounds.py +++ b/repos/system_upgrade/common/actors/applytransactionworkarounds/tests/unit_test_applytransactionworkarounds.py @@ -7,7 +7,7 @@ from leapp.models import DNFWorkaround class ShowMessageCurrentActorMocked(CurrentActorMocked): def __init__(self, *args, **kwargs): - super(ShowMessageCurrentActorMocked, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._show_messages = [] @property @@ -18,7 +18,7 @@ class ShowMessageCurrentActorMocked(CurrentActorMocked): self._show_messages.append(message) -class MockedNotIsolatedActions(object): +class MockedNotIsolatedActions: def __init__(self): self.called = 0 self.args = None diff --git a/repos/system_upgrade/common/actors/biosdevname/tests/test_biosdevname.py b/repos/system_upgrade/common/actors/biosdevname/tests/test_biosdevname.py index c60aa7a4..427eea54 100644 --- 
a/repos/system_upgrade/common/actors/biosdevname/tests/test_biosdevname.py +++ b/repos/system_upgrade/common/actors/biosdevname/tests/test_biosdevname.py @@ -9,7 +9,7 @@ from leapp.libraries.stdlib import api from leapp.models import Interface, PCIAddress -class LoggerMocked(object): +class LoggerMocked: def __init__(self): self.infomsg = None @@ -32,12 +32,12 @@ def test_biosdevname_enabled(monkeypatch): assert not biosdevname.is_biosdevname_disabled() -class pyudev_enum_mock(object): +class pyudev_enum_mock: def __init__(self, vendor): self.vendor = vendor def match_sys_name(self, _): - class dev(object): + class dev: attributes = {'sys_vendor': self.vendor} return [dev()] diff --git a/repos/system_upgrade/common/actors/cephvolumescan/libraries/cephvolumescan.py b/repos/system_upgrade/common/actors/cephvolumescan/libraries/cephvolumescan.py index b2364104..a9bff005 100644 --- a/repos/system_upgrade/common/actors/cephvolumescan/libraries/cephvolumescan.py +++ b/repos/system_upgrade/common/actors/cephvolumescan/libraries/cephvolumescan.py @@ -8,7 +8,7 @@ from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import InstalledRPM CEPH_CONF = "/etc/ceph/ceph.conf" -CONTAINER = "ceph-osd" +CONTAINER = "ceph-.*osd" def select_osd_container(engine): @@ -63,7 +63,8 @@ def encrypted_osds_list(): output = get_ceph_lvm_list() if output is not None: try: - result = [output[key][0]['lv_uuid'] for key in output if output[key][0]['tags']['ceph.encrypted']] + for key in output: + result.extend([element['lv_uuid'] for element in output[key] if element['tags']['ceph.encrypted']]) except KeyError: # TODO: possibly raise a report item with a medium risk factor # TODO: possibly create list of problematic osds, extend the cephinfo diff --git a/repos/system_upgrade/common/actors/cephvolumescan/tests/test_cephvolumescan.py b/repos/system_upgrade/common/actors/cephvolumescan/tests/test_cephvolumescan.py index f3811c45..168b8fc2 100644 --- 
a/repos/system_upgrade/common/actors/cephvolumescan/tests/test_cephvolumescan.py +++ b/repos/system_upgrade/common/actors/cephvolumescan/tests/test_cephvolumescan.py @@ -8,6 +8,8 @@ from leapp.reporting import Report CONT_PS_COMMAND_OUTPUT = { "stdout": """CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + b5a3d8ef25b9 undercloud-0.ctlplane.redhat.local:8787/rh-osbs/rhceph:5 "-n osd.8 -f --set..." \ + 2 hours ago Up 2 hours ago ceph-bea1a933-0846-4aaa-8223-62cb8cb2873c-osd-8 50d96fe72019 registry.redhat.io/rhceph/rhceph-4-rhel8:latest "/opt/ceph-contain..." \ 2 weeks ago Up 2 weeks ceph-osd-0 f93c17b49c40 registry.redhat.io/rhceph/rhceph-4-rhel8:latest "/opt/ceph-contain..." \ @@ -41,6 +43,32 @@ CEPH_VOLUME_OUTPUT = { "type":"block", "vg_name":"ceph-a696c40d-6b1d-448d-a40e-fadca22b64bc" } + ], + "8":[ + { + "devices": [ + "/dev/nvme0n1" + ], + "lv_name": "osd-db-b04857a0-a2a2-40c3-a490-cbe1f892a76c", + "lv_uuid": "zcvGix-drzz-JwzP-6ktU-Od6W-N5jL-kxRFa3", + "tags":{ + "ceph.encrypted":"1" + }, + "type": "db", + "vg_name": "ceph-b78309b3-bd80-4399-87a3-ac647b216b63" + }, + { + "devices": [ + "/dev/sdb" + ], + "lv_name": "osd-block-477c303f-5eaf-4be8-b5cc-f6073eb345bf", + "lv_uuid": "Mz1dep-D715-Wxh1-zUuS-0cOA-mKXE-UxaEM3", + "tags":{ + "ceph.encrypted":"1" + }, + "type": "block", + "vg_name": "ceph-e3e0345b-8be1-40a7-955a-378ba967f954" + } ] }""" } @@ -51,7 +79,19 @@ CEPH_LVM_LIST = { 'lv_uuid': 'Tyc0TH-RDxr-ebAF-9mWF-Kh5R-YnvJ-cEcGVn', 'tags': {'ceph.encrypted': '1'}, 'type': 'block', - 'vg_name': 'ceph-a696c40d-6b1d-448d-a40e-fadca22b64bc'}] + 'vg_name': 'ceph-a696c40d-6b1d-448d-a40e-fadca22b64bc'}], + '8': [{'devices': ['/dev/nvme0n1'], + 'lv_name': 'osd-db-b04857a0-a2a2-40c3-a490-cbe1f892a76c', + 'lv_uuid': 'zcvGix-drzz-JwzP-6ktU-Od6W-N5jL-kxRFa3', + 'tags': {'ceph.encrypted': '1'}, + 'type': 'db', + 'vg_name': 'ceph-b78309b3-bd80-4399-87a3-ac647b216b63'}, + {'devices': ['/dev/sdb'], + 'lv_name': 'osd-block-477c303f-5eaf-4be8-b5cc-f6073eb345bf', + 'lv_uuid': 
'Mz1dep-D715-Wxh1-zUuS-0cOA-mKXE-UxaEM3', + 'tags': {'ceph.encrypted': '1'}, + 'type': 'block', + 'vg_name': 'ceph-e3e0345b-8be1-40a7-955a-378ba967f954'}] } @@ -60,7 +100,7 @@ def test_select_osd_container(m_run): m_run.return_value = CONT_PS_COMMAND_OUTPUT - assert cephvolumescan.select_osd_container('docker') == "ceph-osd-0" + assert cephvolumescan.select_osd_container('docker') == "ceph-bea1a933-0846-4aaa-8223-62cb8cb2873c-osd-8" @patch('leapp.libraries.actor.cephvolumescan.has_package') @@ -82,4 +122,8 @@ def test_encrypted_osds_list(m_get_ceph_lvm_list, m_isfile): m_get_ceph_lvm_list.return_value = CEPH_LVM_LIST m_isfile.return_value = True - assert cephvolumescan.encrypted_osds_list() == ['Tyc0TH-RDxr-ebAF-9mWF-Kh5R-YnvJ-cEcGVn'] + assert cephvolumescan.encrypted_osds_list() == [ + 'Tyc0TH-RDxr-ebAF-9mWF-Kh5R-YnvJ-cEcGVn', + 'zcvGix-drzz-JwzP-6ktU-Od6W-N5jL-kxRFa3', + 'Mz1dep-D715-Wxh1-zUuS-0cOA-mKXE-UxaEM3' + ] diff --git a/repos/system_upgrade/common/actors/checkbootavailspace/tests/unit_test_checkbootavailspace.py b/repos/system_upgrade/common/actors/checkbootavailspace/tests/unit_test_checkbootavailspace.py index 094164c7..fbcd0820 100644 --- a/repos/system_upgrade/common/actors/checkbootavailspace/tests/unit_test_checkbootavailspace.py +++ b/repos/system_upgrade/common/actors/checkbootavailspace/tests/unit_test_checkbootavailspace.py @@ -10,7 +10,7 @@ from leapp.libraries.common.testutils import create_report_mocked from leapp.utils.report import is_inhibitor -class fake_get_avail_bytes_on_boot(object): +class fake_get_avail_bytes_on_boot: def __init__(self, size): self.size = size diff --git a/repos/system_upgrade/common/actors/checkconsumedassets/tests/test_asset_version_checking.py b/repos/system_upgrade/common/actors/checkconsumedassets/tests/test_asset_version_checking.py index 9c324b44..f37dcea4 100644 --- a/repos/system_upgrade/common/actors/checkconsumedassets/tests/test_asset_version_checking.py +++ 
b/repos/system_upgrade/common/actors/checkconsumedassets/tests/test_asset_version_checking.py @@ -44,4 +44,4 @@ def test_make_report_entries_with_unique_urls(): docs_url_to_title_map = {'/path/to/asset1': ['asset1_title1', 'asset1_title2'], '/path/to/asset2': ['asset2_title']} report_urls = check_consumed_assets_lib.make_report_entries_with_unique_urls(docs_url_to_title_map) - assert set([ru.value['url'] for ru in report_urls]) == {'/path/to/asset1', '/path/to/asset2'} + assert {ru.value['url'] for ru in report_urls} == {'/path/to/asset1', '/path/to/asset2'} diff --git a/repos/system_upgrade/common/actors/checkdnfpluginpath/actor.py b/repos/system_upgrade/common/actors/checkdnfpluginpath/actor.py new file mode 100644 index 00000000..34055886 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkdnfpluginpath/actor.py @@ -0,0 +1,22 @@ +from leapp.actors import Actor +from leapp.libraries.actor.checkdnfpluginpath import perform_check +from leapp.models import DnfPluginPathDetected +from leapp.reporting import Report +from leapp.tags import ChecksPhaseTag, IPUWorkflowTag + + +class CheckDnfPluginPath(Actor): + """ + Inhibits the upgrade if a custom DNF plugin path is configured. + + This actor checks whether the pluginpath option is configured in /etc/dnf/dnf.conf and produces a report if it is. + If the option is detected with any value, the upgrade is inhibited. 
+ """ + + name = 'check_dnf_pluginpath' + consumes = (DnfPluginPathDetected,) + produces = (Report,) + tags = (ChecksPhaseTag, IPUWorkflowTag) + + def process(self): + perform_check() diff --git a/repos/system_upgrade/common/actors/checkdnfpluginpath/libraries/checkdnfpluginpath.py b/repos/system_upgrade/common/actors/checkdnfpluginpath/libraries/checkdnfpluginpath.py new file mode 100644 index 00000000..ce705361 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkdnfpluginpath/libraries/checkdnfpluginpath.py @@ -0,0 +1,35 @@ +from leapp import reporting +from leapp.libraries.stdlib import api +from leapp.models import DnfPluginPathDetected + +DNF_CONFIG_PATH = '/etc/dnf/dnf.conf' + + +def check_dnf_pluginpath(dnf_pluginpath_detected): + """Create an inhibitor when pluginpath is detected in DNF configuration.""" + if not dnf_pluginpath_detected.is_pluginpath_detected: + return + reporting.create_report([ + reporting.Title('Detected specified pluginpath in DNF configuration.'), + reporting.Summary( + 'The "pluginpath" option is set in the {} file. The path to DNF plugins differs between ' + 'system major releases due to different versions of Python. ' + 'This breaks the in-place upgrades if defined explicitly as DNF plugins ' + 'are stored on a different path on the new system.' 
+ .format(DNF_CONFIG_PATH) + ), + reporting.Remediation( + hint='Remove or comment out the pluginpath option in the DNF ' + 'configuration file to be able to upgrade the system', + commands=[['sed', '-i', '\'s/^pluginpath[[:space:]]*=/#pluginpath=/\'', DNF_CONFIG_PATH]], + ), + reporting.Severity(reporting.Severity.HIGH), + reporting.Groups([reporting.Groups.INHIBITOR]), + reporting.RelatedResource('file', DNF_CONFIG_PATH), + ]) + + +def perform_check(): + dnf_pluginpath_detected = next(api.consume(DnfPluginPathDetected), None) + if dnf_pluginpath_detected: + check_dnf_pluginpath(dnf_pluginpath_detected) diff --git a/repos/system_upgrade/common/actors/checkdnfpluginpath/tests/test_checkdnfpluginpath.py b/repos/system_upgrade/common/actors/checkdnfpluginpath/tests/test_checkdnfpluginpath.py new file mode 100644 index 00000000..7dd8bbf2 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkdnfpluginpath/tests/test_checkdnfpluginpath.py @@ -0,0 +1,34 @@ +import pytest + +from leapp import reporting +from leapp.libraries.actor.checkdnfpluginpath import check_dnf_pluginpath, perform_check +from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked +from leapp.libraries.stdlib import api +from leapp.models import DnfPluginPathDetected +from leapp.utils.report import is_inhibitor + + +@pytest.mark.parametrize('is_detected', [False, True]) +def test_check_dnf_pluginpath(monkeypatch, is_detected): + actor_reports = create_report_mocked() + msg = DnfPluginPathDetected(is_pluginpath_detected=is_detected) + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[msg])) + monkeypatch.setattr(reporting, 'create_report', actor_reports) + + perform_check() + + assert bool(actor_reports.called) == is_detected + + if is_detected: + assert is_inhibitor(actor_reports.report_fields) + + +def test_perform_check_no_message_available(monkeypatch): + """Test perform_check when no DnfPluginPathDetected message is available.""" + actor_reports = 
create_report_mocked() + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) + monkeypatch.setattr(reporting, 'create_report', actor_reports) + + perform_check() + + assert not actor_reports.called diff --git a/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py new file mode 100644 index 00000000..52f5af9d --- /dev/null +++ b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py @@ -0,0 +1,53 @@ +from leapp.actors import Actor +from leapp.libraries.stdlib import api +from leapp.models import ( + RepositoriesFacts, + VendorSourceRepos, + ActiveVendorList, +) +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class CheckEnabledVendorRepos(Actor): + """ + Create a list of vendors whose repositories are present on the system and enabled. + Only those vendors' configurations (new repositories, PES actions, etc.) + will be included in the upgrade process. + """ + + name = "check_enabled_vendor_repos" + consumes = (RepositoriesFacts, VendorSourceRepos) + produces = (ActiveVendorList) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + vendor_mapping_data = {} + active_vendors = set() + + # Make a dict for easy mapping of repoid -> corresponding vendor name. + for vendor_src_repodata in api.consume(VendorSourceRepos): + for vendor_src_repo in vendor_src_repodata.source_repoids: + vendor_mapping_data[vendor_src_repo] = vendor_src_repodata.vendor + + # Is the repo listed in the vendor map as from_repoid present on the system? + for repos_facts in api.consume(RepositoriesFacts): + for repo_file in repos_facts.repositories: + for repo_data in repo_file.data: + self.log.debug( + "Looking for repository {} in vendor maps".format(repo_data.repoid) + ) + if repo_data.enabled and repo_data.repoid in vendor_mapping_data: + # If the vendor's repository is present in the system and enabled, count the vendor as active. 
+ new_vendor = vendor_mapping_data[repo_data.repoid] + self.log.debug( + "Repository {} found and enabled, enabling vendor {}".format( + repo_data.repoid, new_vendor + ) + ) + active_vendors.add(new_vendor) + + if active_vendors: + self.log.debug("Active vendor list: {}".format(active_vendors)) + api.produce(ActiveVendorList(data=list(active_vendors))) + else: + self.log.info("No active vendors found, vendor list not generated") diff --git a/repos/system_upgrade/common/actors/checkinsightsautoregister/actor.py b/repos/system_upgrade/common/actors/checkinsightsautoregister/actor.py index 70b3b670..52108566 100644 --- a/repos/system_upgrade/common/actors/checkinsightsautoregister/actor.py +++ b/repos/system_upgrade/common/actors/checkinsightsautoregister/actor.py @@ -7,7 +7,7 @@ from leapp.tags import ChecksPhaseTag, IPUWorkflowTag class CheckInsightsAutoregister(Actor): """ - Checks if system can be automatically registered into Red Hat Insights + Checks if system can be automatically registered into Red Hat Lightspeed The registration is skipped if NO_INSIGHTS_REGISTER=1 environment variable is set, the --no-insights-register command line argument present. if the diff --git a/repos/system_upgrade/common/actors/checkinsightsautoregister/libraries/checkinsightsautoregister.py b/repos/system_upgrade/common/actors/checkinsightsautoregister/libraries/checkinsightsautoregister.py index 762f3c08..8e26485b 100644 --- a/repos/system_upgrade/common/actors/checkinsightsautoregister/libraries/checkinsightsautoregister.py +++ b/repos/system_upgrade/common/actors/checkinsightsautoregister/libraries/checkinsightsautoregister.py @@ -24,9 +24,9 @@ def _ensure_package(package): def _report_registration_info(installing_client): pkg_msg = " The '{}' package required for the registration will be installed during the upgrade." 
- title = "Automatic registration into Red Hat Insights" + title = "Automatic registration into Red Hat Lightspeed" summary = ( - "After the upgrade, this system will be automatically registered into Red Hat Insights." + "After the upgrade, this system will be automatically registered into Red Hat Lightspeed." "{}" " To skip the automatic registration, use the '--no-insights-register' command line option or" " set the LEAPP_NO_INSIGHTS_REGISTER environment variable." @@ -38,6 +38,7 @@ def _report_registration_info(installing_client): reporting.Summary(summary), reporting.Severity(reporting.Severity.INFO), reporting.Groups([reporting.Groups.SERVICES]), + reporting.Key('693963253195f418526f045b6d630a1f4c7a193d'), ] ) diff --git a/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py b/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py index 57a94e9d..84e8e61f 100644 --- a/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py +++ b/repos/system_upgrade/common/actors/checkluks/libraries/checkluks.py @@ -3,6 +3,7 @@ from leapp.libraries.common.config.version import get_source_major_version from leapp.libraries.stdlib import api from leapp.models import ( CephInfo, + CopyFile, DracutModule, LuksDumps, StorageInfo, @@ -26,7 +27,7 @@ def _formatted_list_output(input_list, sep=FMT_LIST_SEPARATOR): def _at_least_one_tpm_token(luks_dump): - return any([token.token_type == "clevis-tpm2" for token in luks_dump.tokens]) + return any(token.token_type == "clevis-tpm2" for token in luks_dump.tokens) def _get_ceph_volumes(): @@ -156,8 +157,13 @@ def check_invalid_luks_devices(): 'tpm2-tools', 'tpm2-abrmd' ] - api.produce(TargetUserSpaceUpgradeTasks(install_rpms=required_crypt_rpms)) - api.produce(UpgradeInitramfsTasks(include_dracut_modules=[ + api.produce(TargetUserSpaceUpgradeTasks( + copy_files=[CopyFile(src="/etc/crypttab")], + install_rpms=required_crypt_rpms) + ) + api.produce(UpgradeInitramfsTasks( + 
include_files=['/etc/crypttab'], + include_dracut_modules=[ DracutModule(name='clevis'), DracutModule(name='clevis-pin-tpm2') ]) diff --git a/repos/system_upgrade/common/actors/checkmemory/libraries/checkmemory.py b/repos/system_upgrade/common/actors/checkmemory/libraries/checkmemory.py index 808c9662..040b404b 100644 --- a/repos/system_upgrade/common/actors/checkmemory/libraries/checkmemory.py +++ b/repos/system_upgrade/common/actors/checkmemory/libraries/checkmemory.py @@ -34,8 +34,8 @@ def process(): if minimum_req_error: title = 'Minimum memory requirements for RHEL {} are not met'.format(version.get_target_major_version()) summary = 'Memory detected: {} MiB, required: {} MiB'.format( - int(minimum_req_error['detected'] / 1024), # noqa: W1619; pylint: disable=old-division - int(minimum_req_error['minimal_req'] / 1024), # noqa: W1619; pylint: disable=old-division + int(minimum_req_error['detected'] / 1024), + int(minimum_req_error['minimal_req'] / 1024), ) reporting.create_report([ reporting.Title(title), diff --git a/repos/system_upgrade/common/actors/checkmemory/tests/test_checkmemory.py b/repos/system_upgrade/common/actors/checkmemory/tests/test_checkmemory.py index a0bac0a9..79158dc6 100644 --- a/repos/system_upgrade/common/actors/checkmemory/tests/test_checkmemory.py +++ b/repos/system_upgrade/common/actors/checkmemory/tests/test_checkmemory.py @@ -21,7 +21,7 @@ def test_check_memory_high(monkeypatch): def test_report(monkeypatch): - title_msg = 'Minimum memory requirements for RHEL 8 are not met' + title_msg = 'Minimum memory requirements for RHEL 9 are not met' monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) monkeypatch.setattr(api, 'consume', lambda x: iter([MemoryInfo(mem_total=129)])) monkeypatch.setattr(reporting, "create_report", create_report_mocked()) diff --git a/repos/system_upgrade/common/actors/checksaphana/tests/test_checksaphana.py b/repos/system_upgrade/common/actors/checksaphana/tests/test_checksaphana.py index 
1e43f403..8ec8d17f 100644 --- a/repos/system_upgrade/common/actors/checksaphana/tests/test_checksaphana.py +++ b/repos/system_upgrade/common/actors/checksaphana/tests/test_checksaphana.py @@ -97,7 +97,7 @@ def _parse_manifest_data(manifest): return result -class MockSapHanaInstanceInfo(object): +class MockSapHanaInstanceInfo: def __init__(self, name, number, path, admin, manifest_data, running=True): self.manifest = _parse_manifest_data(manifest_data) self.name = name @@ -118,7 +118,7 @@ def _gen_instance_info(name, manifest_data, index, running=True): ) -class MockSapHanaInfo(object): +class MockSapHanaInfo: def __init__(self, v1names, v2names, v2lownames, running=None): self.installed = bool(v1names or v2names or v2lownames) self.running = running if running is not None else self.installed @@ -151,7 +151,7 @@ def _consume_mock_sap_hana_info(v1names=(), v2names=(), v2lownames=(), running=T return _consume -class MockSAPHanaVersionInstance(object): +class MockSAPHanaVersionInstance: def __init__(self, major, rev, patchlevel): self.name = "TestName" @@ -284,7 +284,7 @@ def test_checksaphana_perform_check(monkeypatch): # Expected 3 reports due to v1names + v2lownames + running assert len(reports) == 3 # Verifies that all expected title patterns are within the reports and not just coincidentally 3 - assert all([any([pattern(report) for report in reports]) for pattern in EXPECTED_TITLE_PATTERNS.values()]) + assert all(any(pattern(report) for report in reports) for pattern in EXPECTED_TITLE_PATTERNS.values()) list_clear(reports) monkeypatch.setattr(checksaphana.api, 'consume', _consume_mock_sap_hana_info( @@ -294,4 +294,7 @@ def test_checksaphana_perform_check(monkeypatch): # Expected 2 reports due to v1names + v2lownames assert len(reports) == 2 # Verifies that all expected title patterns are within the reports and not just coincidentally 2 - assert all([any([EXPECTED_TITLE_PATTERNS[pattern](report) for report in reports]) for pattern in ['v1', 'low']]) + assert all( + 
any(EXPECTED_TITLE_PATTERNS[pattern](report) for report in reports) + for pattern in ['v1', 'low'] + ) diff --git a/repos/system_upgrade/common/actors/checktargetrepos/actor.py b/repos/system_upgrade/common/actors/checktargetrepos/actor.py index d61fb685..a5bdde10 100644 --- a/repos/system_upgrade/common/actors/checktargetrepos/actor.py +++ b/repos/system_upgrade/common/actors/checktargetrepos/actor.py @@ -6,7 +6,9 @@ from leapp.tags import ChecksPhaseTag, IPUWorkflowTag class Checktargetrepos(Actor): """ - Check whether target yum repositories are specified. + Check whether target dnf repositories are specified on RHEL. + + NOTE: this actor does nothing on distros other than RHEL. RHSM | RHUI | ER | CTR | CTRF || result -----+------+----+-----+------++------- diff --git a/repos/system_upgrade/common/actors/checktargetrepos/libraries/checktargetrepos.py b/repos/system_upgrade/common/actors/checktargetrepos/libraries/checktargetrepos.py index c286ed4f..556b41a2 100644 --- a/repos/system_upgrade/common/actors/checktargetrepos/libraries/checktargetrepos.py +++ b/repos/system_upgrade/common/actors/checktargetrepos/libraries/checktargetrepos.py @@ -2,12 +2,14 @@ from leapp import reporting from leapp.libraries.common import config, rhsm from leapp.libraries.common.config.version import get_target_major_version from leapp.libraries.stdlib import api -from leapp.models import CustomTargetRepositoryFile, RHUIInfo, TargetRepositories +from leapp.models import CustomTargetRepositoryFile, RHELTargetRepository, RHUIInfo, TargetRepositories +from leapp.utils.deprecation import suppress_deprecation # TODO: we need to provide this path in a shared library CUSTOM_REPO_PATH = '/etc/leapp/files/leapp_upgrade_repositories.repo' +@suppress_deprecation(RHELTargetRepository) # member of TargetRepositories def _any_custom_repo_defined(): for tr in api.consume(TargetRepositories): if tr.custom_repos: @@ -38,9 +40,10 @@ def process(): rhui_info = next(api.consume(RHUIInfo), None) - if not 
rhsm.skip_rhsm() or rhui_info: - # getting RH repositories through RHSM or RHUI; resolved by seatbelts - # implemented in other actors + if config.get_target_distro_id() != 'rhel' or (not rhsm.skip_rhsm() or rhui_info): + # RHEL: getting RH repositories through RHSM or RHUI; + # resolved by seatbelts in other actors + # other: distro repos provided by the distro directly, seatbelts elsewhere return # rhsm skipped; take your seatbelts please diff --git a/repos/system_upgrade/common/actors/checktargetrepos/tests/test_checktargetrepos.py b/repos/system_upgrade/common/actors/checktargetrepos/tests/test_checktargetrepos.py index c1ca8cd1..dfe5d06a 100644 --- a/repos/system_upgrade/common/actors/checktargetrepos/tests/test_checktargetrepos.py +++ b/repos/system_upgrade/common/actors/checktargetrepos/tests/test_checktargetrepos.py @@ -8,16 +8,15 @@ from leapp.libraries.stdlib import api from leapp.models import ( CustomTargetRepository, CustomTargetRepositoryFile, - EnvVar, - Report, - RepositoryData, + DistroTargetRepository, RHELTargetRepository, TargetRepositories ) +from leapp.utils.deprecation import suppress_deprecation from leapp.utils.report import is_inhibitor -class MockedConsume(object): +class MockedConsume: def __init__(self, *args): self._msgs = [] for arg in args: @@ -32,11 +31,21 @@ class MockedConsume(object): return iter([msg for msg in self._msgs if isinstance(msg, model)]) -_RHEL_REPOS = [ - RHELTargetRepository(repoid='repo1'), - RHELTargetRepository(repoid='repo2'), - RHELTargetRepository(repoid='repo3'), - RHELTargetRepository(repoid='repo4'), +@suppress_deprecation(RHELTargetRepository) +def _test_rhel_repos(): + return [ + RHELTargetRepository(repoid='repo1'), + RHELTargetRepository(repoid='repo2'), + RHELTargetRepository(repoid='repo3'), + RHELTargetRepository(repoid='repo4'), + ] + + +_DISTRO_REPOS = [ + DistroTargetRepository(repoid='repo1'), + DistroTargetRepository(repoid='repo2'), + DistroTargetRepository(repoid='repo3'), + 
DistroTargetRepository(repoid='repo4'), ] _CUSTOM_REPOS = [ @@ -46,16 +55,17 @@ _CUSTOM_REPOS = [ CustomTargetRepository(repoid='repo4', name='repo4name', baseurl=None, enabled=True), ] -_TARGET_REPOS_CUSTOM = TargetRepositories(rhel_repos=_RHEL_REPOS, custom_repos=_CUSTOM_REPOS) -_TARGET_REPOS_NO_CUSTOM = TargetRepositories(rhel_repos=_RHEL_REPOS) +_TARGET_REPOS_CUSTOM = TargetRepositories( + rhel_repos=_test_rhel_repos(), distro_repos=_DISTRO_REPOS, custom_repos=_CUSTOM_REPOS +) +_TARGET_REPOS_NO_CUSTOM = TargetRepositories(rhel_repos=_test_rhel_repos(), distro_repos=_DISTRO_REPOS) _CUSTOM_TARGET_REPOFILE = CustomTargetRepositoryFile(file='/etc/leapp/files/leapp_upgrade_repositories.repo') def test_checktargetrepos_rhsm(monkeypatch): monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: False) - monkeypatch.setattr(api, 'consume', MockedConsume()) - monkeypatch.setattr(checktargetrepos, 'get_target_major_version', lambda: '8') + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) checktargetrepos.process() assert reporting.create_report.called == 0 diff --git a/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/actor.py b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/actor.py new file mode 100644 index 00000000..e1868819 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/actor.py @@ -0,0 +1,21 @@ +from leapp.actors import Actor +from leapp.libraries.actor.checkthirdpartytargetpythonmodules import perform_check +from leapp.models import ThirdPartyTargetPythonModules +from leapp.reporting import Report +from leapp.tags import ChecksPhaseTag, IPUWorkflowTag + + +class CheckThirdPartyTargetPythonModules(Actor): + """ + Produces a report if any third-party target Python modules are detected on the source system. + + If such modules are detected, a high risk report is produced. 
+ """ + + name = 'check_third_party_target_python_modules' + consumes = (ThirdPartyTargetPythonModules,) + produces = (Report,) + tags = (ChecksPhaseTag, IPUWorkflowTag) + + def process(self): + perform_check() diff --git a/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/libraries/checkthirdpartytargetpythonmodules.py b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/libraries/checkthirdpartytargetpythonmodules.py new file mode 100644 index 00000000..7ed34738 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/libraries/checkthirdpartytargetpythonmodules.py @@ -0,0 +1,74 @@ +from leapp import reporting +from leapp.libraries.stdlib import api +from leapp.models import ThirdPartyTargetPythonModules + +FMT_LIST_SEPARATOR = '\n - ' +MAX_REPORTED_ITEMS = 30 + + +def _formatted_list_output_with_max_items(input_list, sep=FMT_LIST_SEPARATOR, max_items=MAX_REPORTED_ITEMS): + if not input_list: + return '' + + total_count = len(input_list) + items_to_show = input_list[:max_items] + formatted = ['{}{}'.format(sep, item) for item in items_to_show] + + if total_count > max_items: + formatted.append('{}... and {} more'.format(sep, total_count - max_items)) + + return ''.join(formatted) + + +def check_third_party_target_python_modules(third_party_target_python_modules): + """Create an inhibitor when third-party Python modules are detected.""" + target_python_version = third_party_target_python_modules.target_python.split('python')[1] + third_party_rpms = third_party_target_python_modules.third_party_rpm_names + third_party_modules = third_party_target_python_modules.third_party_modules + + summary = ( + 'Third-party target Python modules may interfere with ' + 'the upgrade process or cause unexpected behavior after the upgrade.' 
+ ) + + if third_party_rpms: + summary = ( + '{pre}\n\nNon-distribution RPM packages detected:{rpmlist}' + .format( + pre=summary, + rpmlist=_formatted_list_output_with_max_items(third_party_rpms)) + ) + + if third_party_modules: + summary = ( + '{pre}\n\nNon-distribution modules detected (list can be incomplete):{modulelist}' + .format( + pre=summary, + modulelist=_formatted_list_output_with_max_items(third_party_modules)) + ) + + reporting.create_report([ + reporting.Title('Detected third-party Python modules for the target Python version'), + reporting.Summary(summary), + reporting.Remediation( + hint='Remove third-party target Python {} packages before attempting the upgrade or ensure ' + 'that those modules are not interfering with distribution-provided modules.' + .format(target_python_version), + ), + reporting.Severity(reporting.Severity.HIGH) + ]) + + +def perform_check(): + """Perform the check for third-party Python modules.""" + third_party_target_python_modules_msg = next(api.consume( + ThirdPartyTargetPythonModules), + None, + ) + + if not third_party_target_python_modules_msg: + return + + if (third_party_target_python_modules_msg.third_party_rpm_names or + third_party_target_python_modules_msg.third_party_modules): + check_third_party_target_python_modules(third_party_target_python_modules_msg) diff --git a/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/tests/test_check_third_party_target_python_modules.py b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/tests/test_check_third_party_target_python_modules.py new file mode 100644 index 00000000..2a87d195 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkthirdpartytargetpythonmodules/tests/test_check_third_party_target_python_modules.py @@ -0,0 +1,46 @@ +import pytest + +from leapp import reporting +from leapp.libraries.actor import checkthirdpartytargetpythonmodules +from leapp.libraries.common.testutils import create_report_mocked, 
CurrentActorMocked +from leapp.libraries.stdlib import api +from leapp.models import ThirdPartyTargetPythonModules + + +def test_perform_check_no_message_available(monkeypatch): + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[])) + monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) + + checkthirdpartytargetpythonmodules.perform_check() + + assert not reporting.create_report.called + + +def test_perform_check_empty_lists(monkeypatch): + msg = ThirdPartyTargetPythonModules( + target_python='python3.9', + third_party_modules=[], + third_party_rpm_names=[] + ) + + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[msg])) + monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) + + checkthirdpartytargetpythonmodules.perform_check() + + assert not reporting.create_report.called + + +def test_perform_check_with_third_party_modules(monkeypatch): + msg = ThirdPartyTargetPythonModules( + target_python='python3.9', + third_party_modules=['third_party_module'], + third_party_rpm_names=['third_party_rpm'] + ) + + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[msg])) + monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) + + checkthirdpartytargetpythonmodules.perform_check() + + assert reporting.create_report.called diff --git a/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py b/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py index ea154173..5dcdd967 100644 --- a/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py +++ b/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py @@ -22,6 +22,7 @@ from leapp.models import ( CustomTargetRepository, DNFPluginTask, InstalledRPM, + RHELTargetRepository, RHUIInfo, RpmTransactionTasks, TargetRepositories, @@ -30,6 +31,7 @@ from leapp.models import ( TargetRHUISetupInfo, TargetUserSpacePreupgradeTasks ) +from leapp.utils.deprecation import 
suppress_deprecation MatchingSetup = namedtuple('MatchingSetup', ['family', 'description']) @@ -370,11 +372,12 @@ def emit_rhui_setup_tasks_based_on_config(rhui_config_dict): api.produce(rhui_info) +@suppress_deprecation(RHELTargetRepository) # member of TargetRepositories def request_configured_repos_to_be_enabled(rhui_config): config_repos_to_enable = rhui_config[RhuiTargetRepositoriesToUse.name] custom_repos = [CustomTargetRepository(repoid=repoid) for repoid in config_repos_to_enable] if custom_repos: - target_repos = TargetRepositories(custom_repos=custom_repos, rhel_repos=[]) + target_repos = TargetRepositories(custom_repos=custom_repos, rhel_repos=[], distro_repos=[]) api.produce(target_repos) diff --git a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py index 3ac9c1b8..2e6f279e 100644 --- a/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py +++ b/repos/system_upgrade/common/actors/cloud/checkrhui/tests/component_test_checkrhui.py @@ -279,10 +279,14 @@ class ExpectedAction(Enum): ) def test_process(monkeypatch, extra_installed_pkgs, skip_rhsm, expected_action): known_setups = { - RHUIFamily('rhui-variant'): [ - mk_rhui_setup(clients={'src_pkg'}, os_version='7'), - mk_rhui_setup(clients={'target_pkg'}, os_version='8', leapp_pkg='leapp_pkg', - mandatory_files=[('file1', '/etc'), ('file2', '/var')]), + RHUIFamily("rhui-variant"): [ + mk_rhui_setup(clients={"src_pkg"}, os_version="8"), + mk_rhui_setup( + clients={"target_pkg"}, + os_version="9", + leapp_pkg="leapp_pkg", + mandatory_files=[("file1", "/etc"), ("file2", "/var")], + ), ] } @@ -291,7 +295,7 @@ def test_process(monkeypatch, extra_installed_pkgs, skip_rhsm, expected_action): installed_rpms = InstalledRPM(items=installed_pkgs) monkeypatch.setattr(api, 'produce', produce_mocked()) - actor = CurrentActorMocked(src_ver='7.9', msgs=[installed_rpms], 
config=_make_default_config(all_rhui_cfg)) + actor = CurrentActorMocked(msgs=[installed_rpms], config=_make_default_config(all_rhui_cfg)) monkeypatch.setattr(api, 'current_actor', actor) monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: skip_rhsm) @@ -318,12 +322,12 @@ def test_unknown_target_rhui_setup(monkeypatch, is_target_setup_known): rhui_family = RHUIFamily('rhui-variant') known_setups = { rhui_family: [ - mk_rhui_setup(clients={'src_pkg'}, os_version='7'), + mk_rhui_setup(clients={'src_pkg'}, os_version='8'), ] } if is_target_setup_known: - target_setup = mk_rhui_setup(clients={'target_pkg'}, os_version='8', leapp_pkg='leapp_pkg') + target_setup = mk_rhui_setup(clients={'target_pkg'}, os_version='9', leapp_pkg='leapp_pkg') known_setups[rhui_family].append(target_setup) installed_pkgs = {'zip', 'kernel-core', 'python', 'src_pkg', 'leapp_pkg'} @@ -331,7 +335,7 @@ def test_unknown_target_rhui_setup(monkeypatch, is_target_setup_known): installed_rpms = InstalledRPM(items=installed_pkgs) monkeypatch.setattr(api, 'produce', produce_mocked()) - actor = CurrentActorMocked(src_ver='7.9', msgs=[installed_rpms], config=_make_default_config(all_rhui_cfg)) + actor = CurrentActorMocked(msgs=[installed_rpms], config=_make_default_config(all_rhui_cfg)) monkeypatch.setattr(api, 'current_actor', actor) monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: True) @@ -468,6 +472,7 @@ def test_request_configured_repos_to_be_enabled(monkeypatch): target_repos = api.produce.model_instances[0] assert isinstance(target_repos, TargetRepositories) + assert not target_repos.distro_repos assert not target_repos.rhel_repos custom_repoids = sorted(custom_repo_model.repoid for custom_repo_model in target_repos.custom_repos) diff --git a/repos/system_upgrade/common/actors/cloud/convertgrubenvtofile/tests/test_convertgrubenvtofile.py 
b/repos/system_upgrade/common/actors/cloud/convertgrubenvtofile/tests/test_convertgrubenvtofile.py index c4534bd6..257060cc 100644 --- a/repos/system_upgrade/common/actors/cloud/convertgrubenvtofile/tests/test_convertgrubenvtofile.py +++ b/repos/system_upgrade/common/actors/cloud/convertgrubenvtofile/tests/test_convertgrubenvtofile.py @@ -14,7 +14,7 @@ def raise_call_error(args=None): ) -class run_mocked(object): +class run_mocked: def __init__(self, raise_err=False): self.called = 0 self.args = [] diff --git a/repos/system_upgrade/common/actors/cloud/scanhybridimage/tests/test_scanhybridimage.py b/repos/system_upgrade/common/actors/cloud/scanhybridimage/tests/test_scanhybridimage.py index a0f6fd4c..660a1382 100644 --- a/repos/system_upgrade/common/actors/cloud/scanhybridimage/tests/test_scanhybridimage.py +++ b/repos/system_upgrade/common/actors/cloud/scanhybridimage/tests/test_scanhybridimage.py @@ -36,7 +36,7 @@ def raise_call_error(args=None): ) -class run_mocked(object): +class run_mocked: def __init__(self, hypervisor='', raise_err=False): self.hypervisor = hypervisor self.called = 0 diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh index 56a94b5d..46c5d9b6 100755 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/do-upgrade.sh @@ -390,4 +390,3 @@ getarg 'rd.break=leapp-logs' 'rd.upgrade.break=leapp-finish' && { sync mount -o "remount,$old_opts" "$NEWROOT" exit $result - diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh index 
d73060cb..45f98148 100755 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/module-setup.sh @@ -102,7 +102,6 @@ install() { inst_binary grep # script to actually run the upgrader binary - inst_hook upgrade 49 "$_moddir/mount_usr.sh" inst_hook upgrade 50 "$_moddir/do-upgrade.sh" #NOTE: some clean up?.. ideally, everything should be inside the leapp* diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/mount_usr.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/mount_usr.sh deleted file mode 100755 index 9366ac13..00000000 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/85sys-upgrade-redhat/mount_usr.sh +++ /dev/null @@ -1,148 +0,0 @@ -#!/bin/sh -# -*- mode: shell-script; indent-tabs-mode: nil; sh-basic-offset: 4; -*- -# ex: ts=8 sw=4 sts=4 et filetype=sh - -type info >/dev/null 2>&1 || . /lib/dracut-lib.sh - -export NEWROOT=${NEWROOT:-"/sysroot"} - -filtersubvol() { - _oldifs="$IFS" - IFS="," - set "$@" - IFS="$_oldifs" - while [ $# -gt 0 ]; do - case $1 in - subvol=*) :;; - *) printf '%s' "${1}," ;; - esac - shift - done -} - -mount_usr() -{ - # - # mount_usr [true | false] - # Expected a "true" value for the last attempt to mount /usr. On the last - # attempt, in case of failure drop to shell. - # - # Return 0 when everything is all right - # In case of failure and /usr has been detected: - # return 2 when $1 is "true" (drop to shell invoked) - # (note: possibly it's nonsense, but to be sure..) 
- # return 1 otherwise - # - _last_attempt="$1" - # check, if we have to mount the /usr filesystem - while read -r _dev _mp _fs _opts _freq _passno; do - [ "${_dev%%#*}" != "$_dev" ] && continue - if [ "$_mp" = "/usr" ]; then - case "$_dev" in - LABEL=*) - _dev="$(echo "$_dev" | sed 's,/,\\x2f,g')" - _dev="/dev/disk/by-label/${_dev#LABEL=}" - ;; - UUID=*) - _dev="${_dev#block:}" - _dev="/dev/disk/by-uuid/${_dev#UUID=}" - ;; - esac - - # shellcheck disable=SC2154 # Variable root is assigned by dracut - _root_dev=${root#block:} - - if strstr "$_opts" "subvol=" && \ - [ "$(stat -c '%D:%i' "$_root_dev")" = "$(stat -c '%D:%i' "$_dev")" ] && \ - [ -n "$rflags" ]; then - # for btrfs subvolumes we have to mount /usr with the same rflags - rflags=$(filtersubvol "$rflags") - rflags=${rflags%%,} - _opts="${_opts:+${_opts},}${rflags}" - elif getargbool 0 ro; then - # if "ro" is specified, we want /usr to be mounted read-only - _opts="${_opts:+${_opts},}ro" - elif getargbool 0 rw; then - # if "rw" is specified, we want /usr to be mounted read-write - _opts="${_opts:+${_opts},}rw" - fi - echo "$_dev ${NEWROOT}${_mp} $_fs ${_opts} $_freq $_passno" - _usr_found="1" - break - fi - done < "${NEWROOT}/etc/fstab" >> /etc/fstab - - if [ "$_usr_found" = "" ]; then - # nothing to do - return 0 - fi - - info "Mounting /usr with -o $_opts" - mount "${NEWROOT}/usr" 2>&1 | vinfo - mount -o remount,rw "${NEWROOT}/usr" - - if ismounted "${NEWROOT}/usr"; then - # success!! - return 0 - fi - - if [ "$_last_attempt" = "true" ]; then - warn "Mounting /usr to ${NEWROOT}/usr failed" - warn "*** Dropping you to a shell; the system will continue" - warn "*** when you leave the shell." - action_on_fail - return 2 - fi - - return 1 -} - - -try_to_mount_usr() { - _last_attempt="$1" - if [ ! -f "${NEWROOT}/etc/fstab" ]; then - warn "File ${NEWROOT}/etc/fstab doesn't exist." 
- return 1 - fi - - # In case we have the LVM command available try make it activate all partitions - if command -v lvm 2>/dev/null 1>/dev/null; then - lvm vgchange --sysinit -a y || { - warn "Detected problem when tried to activate LVM VG." - if [ "$_last_attempt" != "true" ]; then - # this is not last execution, retry - return 1 - fi - # NOTE(pstodulk): - # last execution, so call mount_usr anyway - # I am not 100% about lvm vgchange exit codes and I am aware of - # possible warnings, in this last run, let's keep it on mount_usr - # anyway.. - } - fi - - mount_usr "$1" -} - -_sleep_timeout=15 -_last_attempt="false" -for i in 0 1 2 3 4 5 6 7 8 9 10 11; do - info "Storage initialisation: Attempt $i of 11. Wait $_sleep_timeout seconds." - sleep $_sleep_timeout - if [ $i -eq 11 ]; then - _last_attempt="true" - fi - try_to_mount_usr "$_last_attempt" && break - - # something is wrong. In some cases, storage needs more time for the - # initialisation - especially in case of SAN. - - if [ "$_last_attempt" = "true" ]; then - warn "The last attempt to initialize storage has not been successful." - warn "Unknown state of the storage. It is possible that upgrade will be stopped." - break - fi - - warn "Failed attempt to initialize the storage. Retry..." 
-done - diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/initrd-cleanup-override.conf b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/initrd-cleanup-override.conf new file mode 100644 index 00000000..d24e0ef0 --- /dev/null +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/initrd-cleanup-override.conf @@ -0,0 +1,3 @@ +[Service] +ExecStart= +ExecStart=-/usr/bin/true diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/module-setup.sh b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/module-setup.sh index 06479fb5..30ae57b3 100755 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/module-setup.sh +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/module-setup.sh @@ -54,6 +54,17 @@ install() { ln -sf "../${s}.service" "$upgrade_wantsdir" done + # Setup modified initrd-cleanup.service in the upgrade initramfs to enable + # storage initialisation using systemd-fstab-generator. We want to run the + # initrd-parse-etc.service but this one triggers also the initrd-cleanup.service + # which triggers the switch-root and isolated actions that basically kills + # the original upgrade service when used. + # The initrd-parse-etc.service has different content across RHEL systems, + # so we override rather initrd-cleanup.service instead as we do not need + # that one for the upgrade process. 
+ mkdir -p "${unitdir}/initrd-cleanup.service.d" + inst_simple "${_moddir}/initrd-cleanup-override.conf" "${unitdir}/initrd-cleanup.service.d/initrd-cleanup-override.conf" + # just try : set another services into the wantsdir # sysroot.mount \ # dracut-mount \ diff --git a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/upgrade.target b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/upgrade.target index 366b5cab..d2bf7313 100644 --- a/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/upgrade.target +++ b/repos/system_upgrade/common/actors/commonleappdracutmodules/files/dracut/90sys-upgrade/upgrade.target @@ -2,7 +2,7 @@ Description=System Upgrade Documentation=man:upgrade.target(7) # ##sysinit.target sockets.target initrd-root-fs.target initrd-root-device.target initrd-fs.target -Wants=initrd-root-fs.target initrd-root-device.target initrd-fs.target initrd-usr-fs.target +Wants=initrd-root-fs.target initrd-root-device.target initrd-fs.target initrd-usr-fs.target initrd-parse-etc.service Requires=basic.target sysroot.mount -After=basic.target sysroot.mount +After=basic.target sysroot.mount initrd-fs.target AllowIsolate=yes diff --git a/repos/system_upgrade/common/actors/createresumeservice/tests/test_createresumeservice.py b/repos/system_upgrade/common/actors/createresumeservice/tests/test_createresumeservice.py index 5302cdd2..c1cefc37 100644 --- a/repos/system_upgrade/common/actors/createresumeservice/tests/test_createresumeservice.py +++ b/repos/system_upgrade/common/actors/createresumeservice/tests/test_createresumeservice.py @@ -6,7 +6,7 @@ import pytest @pytest.mark.skipif(os.getuid() != 0, reason='User is not a root') @pytest.mark.skipif( - distro.linux_distribution()[0] == 'Fedora', + distro.id() == 'fedora', reason='default.target.wants does not exists on Fedora distro', ) def test_create_resume_service(current_actor_context): diff --git 
a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py index 003f3fc5..9e7bbf4a 100644 --- a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py +++ b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/actor.py @@ -1,6 +1,6 @@ from leapp.actors import Actor from leapp.libraries.actor import distributionsignedrpmscanner -from leapp.models import DistributionSignedRPM, InstalledRPM, InstalledUnsignedRPM, ThirdPartyRPM +from leapp.models import DistributionSignedRPM, InstalledRPM, InstalledUnsignedRPM, ThirdPartyRPM, VendorSignatures from leapp.tags import FactsPhaseTag, IPUWorkflowTag from leapp.utils.deprecation import suppress_deprecation @@ -8,7 +8,7 @@ from leapp.utils.deprecation import suppress_deprecation @suppress_deprecation(InstalledUnsignedRPM) class DistributionSignedRpmScanner(Actor): """ - Provide data about distribution signed & third-party RPM packages. + Provide data about distribution signed & third-party plus vendors RPM packages. For various checks and actions done during the upgrade it's important to know what packages are signed by GPG keys of the installed linux system @@ -22,11 +22,18 @@ class DistributionSignedRpmScanner(Actor): common/files/distro//gpg_signatures.json where is distribution ID of the installed system (e.g. centos, rhel). - If the file for the installed distribution is not found, end with error. + Fingerprints of vendors GPG keys are stored under + /etc/leapp/files/vendors.d/.sigs + where is name of the vendor (e.g. mariadb, postgresql). + + The "Distribution" in the name of the actor is a historical artifact - the actor + is used for both distribution and all vendors present in config files. + + If the file for the installed distribution is not find, end with error. 
""" name = 'distribution_signed_rpm_scanner' - consumes = (InstalledRPM,) + consumes = (InstalledRPM, VendorSignatures) produces = (DistributionSignedRPM, InstalledUnsignedRPM, ThirdPartyRPM) tags = (IPUWorkflowTag, FactsPhaseTag) diff --git a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/libraries/distributionsignedrpmscanner.py b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/libraries/distributionsignedrpmscanner.py index 18c859e2..a6ce16ac 100644 --- a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/libraries/distributionsignedrpmscanner.py +++ b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/libraries/distributionsignedrpmscanner.py @@ -1,5 +1,5 @@ from leapp.libraries.common import rhui -from leapp.libraries.common.config import get_env +from leapp.libraries.common.config import get_env, get_source_distro_id from leapp.libraries.common.distro import get_distribution_data from leapp.libraries.stdlib import api from leapp.models import DistributionSignedRPM, InstalledRPM, InstalledUnsignedRPM, ThirdPartyRPM @@ -32,8 +32,8 @@ def is_exceptional(pkg, allowlist): @suppress_deprecation(InstalledUnsignedRPM) def process(): - distribution = api.current_actor().configuration.os_release.release_id - distro_keys = get_distribution_data(distribution).get('keys', []) + distro = get_source_distro_id() + distro_keys = get_distribution_data(distro).get('keys', []) all_signed = get_env('LEAPP_DEVEL_RPMS_ALL_SIGNED', '0') == '1' rhui_pkgs = rhui.get_all_known_rhui_pkgs_for_current_upg() diff --git a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/tests/test_distributionsignedrpmscanner.py b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/tests/test_distributionsignedrpmscanner.py index f55a2295..b0c616cb 100644 --- a/repos/system_upgrade/common/actors/distributionsignedrpmscanner/tests/test_distributionsignedrpmscanner.py +++ 
b/repos/system_upgrade/common/actors/distributionsignedrpmscanner/tests/test_distributionsignedrpmscanner.py @@ -4,6 +4,7 @@ from leapp.libraries.common import rpms from leapp.libraries.common.config import mock_configs from leapp.models import ( DistributionSignedRPM, + Distro, fields, InstalledRPM, InstalledUnsignedRPM, @@ -79,6 +80,7 @@ def test_actor_execution_with_signed_and_third_party_pkgs_centos(current_actor_c version='7 (Core)', version_id='7' ) + config.distro = Distro(source='centos', target='centos') installed_rpm = [ RPM(name='sample01', version='0.1', release='1.sm01', epoch='1', packager=CENTOS_PACKAGER, arch='noarch', @@ -121,6 +123,7 @@ def test_actor_execution_with_signed_unsigned_data_almalinux(current_actor_conte version='8.10 (Cerulean Leopard)', version_id='8.10' ) + config.distro = Distro(source='almalinux', target='almalinux') installed_rpm = [ RPM(name='sample01', version='0.1', release='1.sm01', epoch='1', packager=ALMALINUX_PACKAGER, arch='noarch', @@ -151,6 +154,7 @@ def test_actor_execution_with_unknown_distro(current_actor_context): version='7 (Core)', version_id='7' ) + config.distro = Distro(source='myos', target='myos') current_actor_context.feed(InstalledRPM(items=[])) current_actor_context.run(config_model=config) diff --git a/repos/system_upgrade/common/actors/efibootorderfix/finalization/actor.py b/repos/system_upgrade/common/actors/efibootorderfix/finalization/actor.py index f42909f0..6383a56f 100644 --- a/repos/system_upgrade/common/actors/efibootorderfix/finalization/actor.py +++ b/repos/system_upgrade/common/actors/efibootorderfix/finalization/actor.py @@ -1,17 +1,117 @@ +import os +import re + +from leapp.libraries.stdlib import run, api from leapp.actors import Actor -from leapp.libraries.common import efi_reboot_fix +from leapp.models import InstalledTargetKernelVersion, KernelCmdlineArg, FirmwareFacts, MountEntry from leapp.tags import FinalizationPhaseTag, IPUWorkflowTag +from leapp.exceptions import 
StopActorExecutionError class EfiFinalizationFix(Actor): """ - Adjust EFI boot entry for final reboot + Ensure that EFI boot order is updated, which is particularly necessary + when upgrading to a different OS distro. Also rebuilds grub config + if necessary. """ name = 'efi_finalization_fix' - consumes = () + consumes = (KernelCmdlineArg, InstalledTargetKernelVersion, FirmwareFacts, MountEntry) produces = () - tags = (FinalizationPhaseTag, IPUWorkflowTag) + tags = (FinalizationPhaseTag.Before, IPUWorkflowTag) def process(self): - efi_reboot_fix.maybe_emit_updated_boot_entry() + is_system_efi = False + ff = next(self.consume(FirmwareFacts), None) + + dirname = { + 'AlmaLinux': 'almalinux', + 'CentOS Linux': 'centos', + 'CentOS Stream': 'centos', + 'Oracle Linux Server': 'redhat', + 'Red Hat Enterprise Linux': 'redhat', + 'Rocky Linux': 'rocky', + 'Scientific Linux': 'redhat', + } + + efi_shimname_dict = { + 'x86_64': 'shimx64.efi', + 'aarch64': 'shimaa64.efi' + } + + def devparts(dev): + """ + NVMe block devices aren't named like SCSI/ATA/etc block devices and must be parsed differently. + SCSI/ATA/etc devices have a syntax resembling /dev/sdb4 for the 4th partition on the 2nd disk. + NVMe devices have a syntax resembling /dev/nvme0n2p4 for the 4th partition on the 2nd disk. 
+ """ + if '/dev/nvme' in dev: + """ + NVMe + """ + part = next(re.finditer(r'p\d+$', dev)).group(0) + dev = dev[:-len(part)] + part = part[1:] + else: + """ + Non-NVMe (SCSI, ATA, etc) + """ + part = next(re.finditer(r'\d+$', dev)).group(0) + dev = dev[:-len(part)] + return [dev, part]; + + with open('/etc/system-release', 'r') as sr: + release_line = next(line for line in sr if 'release' in line) + distro = release_line.split(' release ', 1)[0] + + efi_bootentry_label = distro + distro_dir = dirname.get(distro, 'default') + shim_filename = efi_shimname_dict.get(api.current_actor().configuration.architecture, 'shimx64.efi') + + shim_path = '/boot/efi/EFI/' + distro_dir + '/' + shim_filename + grub_cfg_path = '/boot/efi/EFI/' + distro_dir + '/grub.cfg' + bootmgr_path = '\\EFI\\' + distro_dir + '\\' + shim_filename + + has_efibootmgr = os.path.exists('/sbin/efibootmgr') + has_shim = os.path.exists(shim_path) + has_grub_cfg = os.path.exists(grub_cfg_path) + + if not ff: + raise StopActorExecutionError( + 'Could not identify system firmware', + details={'details': 'Actor did not receive FirmwareFacts message.'} + ) + + if not has_efibootmgr: + return + + for fact in self.consume(FirmwareFacts): + if fact.firmware == 'efi': + is_system_efi = True + break + + if is_system_efi and has_shim: + efidevlist = [] + with open('/proc/mounts', 'r') as fp: + for line in fp: + if '/boot/efi' in line: + efidevpath = line.split(' ', 1)[0] + efidevpart = efidevpath.split('/')[-1] + if os.path.exists('/proc/mdstat'): + with open('/proc/mdstat', 'r') as mds: + for line in mds: + if line.startswith(efidevpart): + mddev = line.split(' ') + for md in mddev: + if '[' in md: + efimd = md.split('[', 1)[0] + efidp = efidevpath.replace(efidevpart, efimd) + efidevlist.append(efidp) + if len(efidevlist) == 0: + efidevlist.append(efidevpath) + for devpath in efidevlist: + efidev, efipart = devparts(devpath) + run(['/sbin/efibootmgr', '-c', '-d', efidev, '-p', efipart, '-l', bootmgr_path, '-L', 
efi_bootentry_label]) + + if not has_grub_cfg: + run(['/sbin/grub2-mkconfig', '-o', grub_cfg_path]) diff --git a/repos/system_upgrade/common/actors/enablerhsmtargetrepos/tests/test_enablerhsmtargetrepos.py b/repos/system_upgrade/common/actors/enablerhsmtargetrepos/tests/test_enablerhsmtargetrepos.py index 12d53d26..dba38fff 100644 --- a/repos/system_upgrade/common/actors/enablerhsmtargetrepos/tests/test_enablerhsmtargetrepos.py +++ b/repos/system_upgrade/common/actors/enablerhsmtargetrepos/tests/test_enablerhsmtargetrepos.py @@ -13,11 +13,12 @@ from leapp.models import EnvVar, UsedTargetRepositories, UsedTargetRepository def not_isolated_actions(raise_err=False): commands_called = [] - class MockNotIsolatedActions(object): + class MockNotIsolatedActions: def __init__(self, base_dir=None): pass - def call(self, cmd, **kwargs): + @staticmethod + def call(cmd, **kwargs): commands_called.append((cmd, kwargs)) if raise_err: raise_call_error() @@ -33,7 +34,7 @@ def raise_call_error(args=None): ) -class run_mocked(object): +class run_mocked: def __init__(self, raise_err=False): self.called = 0 self.args = [] diff --git a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py b/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py index 582a5821..18f2c33f 100644 --- a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py +++ b/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py @@ -32,6 +32,7 @@ class FilterRpmTransactionTasks(Actor): to_remove = set() to_keep = set() to_upgrade = set() + to_reinstall = set() modules_to_enable = {} modules_to_reset = {} for event in self.consume(RpmTransactionTasks, PESRpmTransactionTasks): @@ -39,13 +40,14 @@ class FilterRpmTransactionTasks(Actor): to_install.update(event.to_install) to_remove.update(installed_pkgs.intersection(event.to_remove)) to_keep.update(installed_pkgs.intersection(event.to_keep)) + 
to_reinstall.update(installed_pkgs.intersection(event.to_reinstall)) modules_to_enable.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_enable}) modules_to_reset.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_reset}) to_remove.difference_update(to_keep) # run upgrade for the rest of RH signed pkgs which we do not have rule for - to_upgrade = installed_pkgs - (to_install | to_remove) + to_upgrade = installed_pkgs - (to_install | to_remove | to_reinstall) self.produce(FilteredRpmTransactionTasks( local_rpms=list(local_rpms), @@ -53,5 +55,6 @@ class FilterRpmTransactionTasks(Actor): to_remove=list(to_remove), to_keep=list(to_keep), to_upgrade=list(to_upgrade), + to_reinstall=list(to_reinstall), modules_to_reset=list(modules_to_reset.values()), modules_to_enable=list(modules_to_enable.values()))) diff --git a/repos/system_upgrade/common/actors/forcedefaultboottotargetkernelversion/tests/test_forcedefaultboot_forcedefaultboottotargetkernelversion.py b/repos/system_upgrade/common/actors/forcedefaultboottotargetkernelversion/tests/test_forcedefaultboot_forcedefaultboottotargetkernelversion.py index b903df45..a39e29a7 100644 --- a/repos/system_upgrade/common/actors/forcedefaultboottotargetkernelversion/tests/test_forcedefaultboot_forcedefaultboottotargetkernelversion.py +++ b/repos/system_upgrade/common/actors/forcedefaultboottotargetkernelversion/tests/test_forcedefaultboot_forcedefaultboottotargetkernelversion.py @@ -69,7 +69,7 @@ initrd=/boot/initramfs-{kernel_version}.img ''' -class MockedRun(object): +class MockedRun: def __init__(self, case): self.case = case self.called_setdefault = False diff --git a/repos/system_upgrade/common/actors/ifcfgscanner/libraries/ifcfgscanner.py b/repos/system_upgrade/common/actors/ifcfgscanner/libraries/ifcfgscanner.py index 683327b3..f0c8b847 100644 --- a/repos/system_upgrade/common/actors/ifcfgscanner/libraries/ifcfgscanner.py +++ 
b/repos/system_upgrade/common/actors/ifcfgscanner/libraries/ifcfgscanner.py @@ -18,23 +18,22 @@ def process_ifcfg(filename, secrets=False): return None properties = [] - for line in open(filename).readlines(): - try: - (name, value) = line.split("#")[0].strip().split("=") + with open(filename) as f: + for line in f: + try: + (name, value) = line.split("#")[0].strip().split("=") + except ValueError: + # We're not interested in lines that are not + # simple assignments. Play it safe. + continue + if secrets: value = None - except ValueError: - # We're not interested in lines that are not - # simple assignments. Play it safe. - continue - - # Deal with simple quoting. We don't expand anything, nor do - # multiline strings or anything of that sort. - if value is not None and len(value) > 1 and value[0] == value[-1]: - if value.startswith('"') or value.startswith("'"): + elif len(value) > 1 and value[0] in ('"', "'") and value[0] == value[-1]: + # Deal with simple quoting. We don't expand anything, nor do + # multiline strings or anything of that sort. 
value = value[1:-1] - - properties.append(IfCfgProperty(name=name, value=value)) + properties.append(IfCfgProperty(name=name, value=value)) return properties diff --git a/repos/system_upgrade/common/actors/ifcfgscanner/tests/unit_test_ifcfgscanner.py b/repos/system_upgrade/common/actors/ifcfgscanner/tests/unit_test_ifcfgscanner.py index d3b4846f..d996de84 100644 --- a/repos/system_upgrade/common/actors/ifcfgscanner/tests/unit_test_ifcfgscanner.py +++ b/repos/system_upgrade/common/actors/ifcfgscanner/tests/unit_test_ifcfgscanner.py @@ -1,5 +1,6 @@ import errno import textwrap +from io import StringIO from os.path import basename import mock @@ -63,8 +64,7 @@ def test_ifcfg1(monkeypatch): KEY_TYPE=key """) - mock_config = mock.mock_open(read_data=ifcfg_file) - with mock.patch(_builtins_open, mock_config): + with mock.patch(_builtins_open, return_value=StringIO(ifcfg_file)): monkeypatch.setattr(ifcfgscanner, "listdir", _listdir_ifcfg) monkeypatch.setattr(ifcfgscanner.path, "exists", _exists_ifcfg) monkeypatch.setattr(api, "produce", produce_mocked()) @@ -110,8 +110,8 @@ def test_ifcfg_key(monkeypatch): Report ifcfg secrets from keys- file. 
""" - mock_config = mock.mock_open(read_data="KEY_PASSPHRASE1=Hell0") - with mock.patch(_builtins_open, mock_config): + file_data = "KEY_PASSPHRASE1=Hell0" + with mock.patch(_builtins_open, side_effect=lambda *a, **k: StringIO(file_data)): monkeypatch.setattr(ifcfgscanner, "listdir", _listdir_ifcfg) monkeypatch.setattr(ifcfgscanner.path, "exists", _exists_keys) monkeypatch.setattr(api, "produce", produce_mocked()) diff --git a/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/actor.py b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/actor.py new file mode 100644 index 00000000..aba60645 --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/actor.py @@ -0,0 +1,21 @@ +from leapp.actors import Actor +from leapp.libraries.actor import enable_lvm_autoactivation as enable_lvm_autoactivation_lib +from leapp.models import DistributionSignedRPM, UpgradeInitramfsTasks +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class EnableLVMAutoactivation(Actor): + """ + Enable LVM autoactivation in upgrade initramfs. + + Produce instructions for upgrade initramfs generation that will result in LVM + autoactivation in the initramfs. 
+ """ + + name = 'enable_lvm_autoactivation' + consumes = (DistributionSignedRPM,) + produces = (UpgradeInitramfsTasks, ) + tags = (FactsPhaseTag, IPUWorkflowTag) + + def process(self): + enable_lvm_autoactivation_lib.emit_lvm_autoactivation_instructions() diff --git a/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/libraries/enable_lvm_autoactivation.py b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/libraries/enable_lvm_autoactivation.py new file mode 100644 index 00000000..e312277b --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/libraries/enable_lvm_autoactivation.py @@ -0,0 +1,21 @@ +from leapp.libraries.common.rpms import has_package +from leapp.libraries.stdlib import api +from leapp.models import DistributionSignedRPM, UpgradeInitramfsTasks + + +def emit_lvm_autoactivation_instructions(): + if not has_package(DistributionSignedRPM, 'lvm2'): + api.current_logger().debug( + 'Upgrade initramfs will not autoenable LVM devices - `lvm2` RPM is not installed.' 
+ ) + return + + # the 69-dm-lvm.rules trigger pvscan and vgchange when LVM device is detected + files_to_include = [ + '/usr/sbin/pvscan', + '/usr/sbin/vgchange', + '/usr/lib/udev/rules.d/69-dm-lvm.rules' + ] + lvm_autoactivation_instructions = UpgradeInitramfsTasks(include_files=files_to_include) + + api.produce(lvm_autoactivation_instructions) diff --git a/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/tests/test_lvm_autoactivation_enablement.py b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/tests/test_lvm_autoactivation_enablement.py new file mode 100644 index 00000000..c5150aea --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/enable_lvm_autoactivation/tests/test_lvm_autoactivation_enablement.py @@ -0,0 +1,50 @@ +from leapp.libraries.actor import enable_lvm_autoactivation +from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked +from leapp.libraries.stdlib import api +from leapp.models import DistributionSignedRPM, RPM, UpgradeInitramfsTasks + + +def test_emit_lvm_autoactivation_instructions_produces_correct_message(monkeypatch): + """Test that emit_lvm_autoactivation_instructions produces UpgradeInitramfsTasks with correct files.""" + lvm_package = RPM( + name='lvm2', + version='2', + release='1', + epoch='1', + packager='', + arch='x86_64', + pgpsig='RSA/SHA256, Mon 01 Jan 1970 00:00:00 AM -03, Key ID 199e2f91fd431d51' + ) + + msgs = [ + DistributionSignedRPM(items=[lvm_package]) + ] + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) + monkeypatch.setattr(api, 'produce', produce_mocked()) + + enable_lvm_autoactivation.emit_lvm_autoactivation_instructions() + + assert api.produce.called == 1 + + produced_msg = api.produce.model_instances[0] + + assert isinstance(produced_msg, UpgradeInitramfsTasks) + + expected_files = [ + '/usr/sbin/pvscan', + '/usr/sbin/vgchange', + '/usr/lib/udev/rules.d/69-dm-lvm.rules' + ] + assert produced_msg.include_files 
== expected_files + + +def test_no_action_if_lvm_rpm_missing(monkeypatch): + msgs = [ + DistributionSignedRPM(items=[]) + ] + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs)) + monkeypatch.setattr(api, 'produce', produce_mocked()) + + enable_lvm_autoactivation.emit_lvm_autoactivation_instructions() + + assert api.produce.called == 0 diff --git a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/actor.py b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/actor.py new file mode 100644 index 00000000..23c618b6 --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/actor.py @@ -0,0 +1,26 @@ +from leapp.actors import Actor +from leapp.libraries.actor import mount_unit_generator as mount_unit_generator_lib +from leapp.models import LiveModeConfig, StorageInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks +from leapp.tags import InterimPreparationPhaseTag, IPUWorkflowTag + + +class MountUnitGenerator(Actor): + """ + Sets up storage initialization using systemd's mount units in the upgrade container. + + Note that this storage initialization is skipped when the LiveMode is enabled. 
+ """ + + name = 'mount_unit_generator' + consumes = ( + LiveModeConfig, + TargetUserSpaceInfo, + StorageInfo, + ) + produces = ( + UpgradeInitramfsTasks, + ) + tags = (IPUWorkflowTag, InterimPreparationPhaseTag) + + def process(self): + mount_unit_generator_lib.setup_storage_initialization() diff --git a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount new file mode 100644 index 00000000..869c5e4c --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/files/bundled_units/boot.mount @@ -0,0 +1,11 @@ +[Unit] +DefaultDependencies=no +Before=local-fs.target +After=sysroot-boot.target +Requires=sysroot-boot.target + +[Mount] +What=/sysroot/boot +Where=/boot +Type=none +Options=bind diff --git a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/libraries/mount_unit_generator.py b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/libraries/mount_unit_generator.py new file mode 100644 index 00000000..e3070986 --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/libraries/mount_unit_generator.py @@ -0,0 +1,347 @@ +import os +import shutil +import tempfile + +from leapp.exceptions import StopActorExecutionError +from leapp.libraries.common import mounting +from leapp.libraries.stdlib import api, CalledProcessError, run +from leapp.models import LiveModeConfig, StorageInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks + +BIND_MOUNT_SYSROOT_BOOT_UNIT = 'boot.mount' + + +def run_systemd_fstab_generator(output_directory): + api.current_logger().debug( + 'Generating mount units for the source system into {}'.format(output_directory) + ) + + try: + generator_cmd = [ + '/usr/lib/systemd/system-generators/systemd-fstab-generator', + output_directory, + output_directory, + output_directory + ] + run(generator_cmd) + except 
CalledProcessError as error: + api.current_logger().error( + 'Failed to generate mount units using systemd-fstab-generator. Error: {}'.format(error) + ) + details = {'details': str(error)} + raise StopActorExecutionError( + 'Failed to generate mount units using systemd-fstab-generator', + details + ) + + api.current_logger().debug( + 'Mount units successfully generated into {}'.format(output_directory) + ) + + +def _read_unit_file_lines(unit_file_path): # Encapsulate IO for tests + with open(unit_file_path) as unit_file: + return unit_file.readlines() + + +def _write_unit_file_lines(unit_file_path, lines): # Encapsulate IO for tests + with open(unit_file_path, 'w') as unit_file: + unit_file.write('\n'.join(lines) + '\n') + + +def _delete_file(file_path): + os.unlink(file_path) + + +def _prefix_mount_unit_with_sysroot(mount_unit_path, new_unit_destination): + """ + Prefix the mount target with /sysroot as expected in the upgrade initramfs. + + A new mount unit file is written to new_unit_destination. + """ + # NOTE(pstodulk): Note that right now we update just the 'Where' key, however + # what about RequiresMountsFor, .. there could be some hidden dragons. + # In case of issues, investigate these values in generated unit files. + api.current_logger().debug( + 'Prefixing {}\'s mount target with /sysroot. Output will be written to {}'.format( + mount_unit_path, + new_unit_destination + ) + ) + unit_lines = _read_unit_file_lines(mount_unit_path) + + output_lines = [] + for line in unit_lines: + line = line.strip() + if not line.startswith('Where='): + output_lines.append(line) + continue + + _, destination = line.split('=', 1) + new_destination = os.path.join('/sysroot', destination.lstrip('/')) + + output_lines.append('Where={}'.format(new_destination)) + + _write_unit_file_lines(new_unit_destination, output_lines) + + api.current_logger().debug( + 'Done. 
Modified mount unit successfully written to {}'.format(new_unit_destination) + ) + + +def prefix_all_mount_units_with_sysroot(dir_containing_units): + for unit_file_path in os.listdir(dir_containing_units): + # systemd requires mount path to be in the unit name + modified_unit_destination = 'sysroot-{}'.format(unit_file_path) + modified_unit_destination = os.path.join(dir_containing_units, modified_unit_destination) + + unit_file_path = os.path.join(dir_containing_units, unit_file_path) + + if not unit_file_path.endswith('.mount'): + api.current_logger().debug( + 'Skipping {} when prefixing mount units with /sysroot - not a mount unit.'.format( + unit_file_path + ) + ) + continue + + _prefix_mount_unit_with_sysroot(unit_file_path, modified_unit_destination) + + _delete_file(unit_file_path) + api.current_logger().debug('Original mount unit {} removed.'.format(unit_file_path)) + + +def _fix_symlinks_in_dir(dir_containing_mount_units, target_dir): + """ + Fix broken symlinks in given target_dir due to us modifying (renaming) the mount units. + + The target_dir contains symlinks to the (mount) units that are required + in order for the local-fs.target to be reached. However, we renamed these units to reflect + that we have changed their mount destinations by prefixing the mount destination with /sysroot. + Hence, we regenerate the symlinks. + """ + + target_dir_path = os.path.join(dir_containing_mount_units, target_dir) + if not os.path.exists(target_dir_path): + api.current_logger().debug( + 'The {} directory does not exist. Skipping' + .format(target_dir) + ) + return + + api.current_logger().debug( + 'Removing the old {} directory from {}.' 
+ .format(target_dir, dir_containing_mount_units) + ) + + shutil.rmtree(target_dir_path) + os.mkdir(target_dir_path) + + api.current_logger().debug('Populating {} with new symlinks.'.format(target_dir)) + + for unit_file in os.listdir(dir_containing_mount_units): + if not unit_file.endswith('.mount'): + continue + + place_fastlink_at = os.path.join(target_dir_path, unit_file) + fastlink_points_to = os.path.join('../', unit_file) + try: + run(['ln', '-s', fastlink_points_to, place_fastlink_at]) + + api.current_logger().debug( + 'Dependency on {} created.'.format(unit_file) + ) + except CalledProcessError as err: + err_descr = ( + 'Failed to create required unit dependencies under {} for the upgrade initramfs.' + .format(target_dir) + ) + details = {'details': str(err)} + raise StopActorExecutionError(err_descr, details=details) + + +def fix_symlinks_in_targets(dir_containing_mount_units): + """ + Fix broken symlinks in *.target.* directories caused by earlier modified mount units. + + Generated mount unit files are part of one of systemd targets (list below), + which means that a symlink from a systemd target exists for each of + them. Based on this, systemd knows when (local or remote file systems?) + they must (".requires" suffix) or could (".wants" suffix) be mounted. + See man 5 systemd.mount for more details on how mount units are split into + these targets. + + The list of possible target directories where these mount units could end: + * local-fs.target.requires + * local-fs.target.wants + * local-fs-pre.target.requires + * local-fs-pre.target.wants + * remote-fs.target.requires + * remote-fs.target.wants + * remote-fs-pre.target.requires + * remote-fs-pre.target.wants + Most likely, unit files are not generated for "*pre*" targets, but they are + handled here to be safe. The longer list does not cause any issues in this code. + + In most cases, "local-fs.target.requires" is the only important directory + for us during the upgrade.
But in some (sometimes common) cases we will + need some of the others as well. + + These directories do not necessarily have to exist if there are no mount + unit files that could be put there. But most likely "local-fs.target.requires" + will always exist. + """ + dir_list = [ + 'local-fs.target.requires', + 'local-fs.target.wants', + 'local-fs-pre.target.requires', + 'local-fs-pre.target.wants', + 'remote-fs.target.requires', + 'remote-fs.target.wants', + 'remote-fs-pre.target.requires', + 'remote-fs-pre.target.wants', + ] + for tdir in dir_list: + _fix_symlinks_in_dir(dir_containing_mount_units, tdir) + + +def copy_units_into_system_location(upgrade_container_ctx, dir_with_our_mount_units): + """ + Copy units and their .wants/.requires directories into the target userspace container. + + :return: A list of files in the target userspace that were created by copying. + :rtype: list[str] + """ + dest_inside_container = '/usr/lib/systemd/system' + + api.current_logger().debug( + 'Copying generated mount units for upgrade from {} to {}'.format( + dir_with_our_mount_units, + upgrade_container_ctx.full_path(dest_inside_container) + ) + ) + + copied_files = [] + prefix_len_to_drop = len(upgrade_container_ctx.base_dir) + + # We cannot rely on the mounting library when copying into the container + # as we want to control what happens to symlinks and + # shutil.copytree in Python3.6 fails if dst directory exists already + # - which happens in some cases when copying these files.
+ for root, dummy_dirs, files in os.walk(dir_with_our_mount_units): + rel_path = os.path.relpath(root, dir_with_our_mount_units) + if rel_path == '.': + rel_path = '' + dst_dir = os.path.join(upgrade_container_ctx.full_path(dest_inside_container), rel_path) + os.makedirs(dst_dir, mode=0o755, exist_ok=True) + + for file in files: + src_file = os.path.join(root, file) + dst_file = os.path.join(dst_dir, file) + api.current_logger().debug( + 'Copying mount unit file {} to {}'.format(src_file, dst_file) + ) + if os.path.islink(dst_file): + # If the target file already exists and it is a symlink, it will + # fail and we want to overwrite this. + # NOTE(pstodulk): You could think that it cannot happen, but + # in future possibly it could happen, so let's rather be careful + # and handle it. If the dst file exists, we want to overwrite it + # for sure + _delete_file(dst_file) + shutil.copy2(src_file, dst_file, follow_symlinks=False) + copied_files.append(dst_file[prefix_len_to_drop:]) + + return copied_files + + +def remove_units_for_targets_that_are_already_mounted_by_dracut(dir_with_our_mount_units): + """ + Remove mount units for mount targets that are already mounted by dracut. + + Namely, remove mount units: + '-.mount' (mounts /) + 'usr.mount' (mounts /usr) + """ + + # NOTE: remount-fs.service creates dependency cycles that are nondeterministically broken + # by systemd, causing unpredictable failures. The service is supposed to remount root + # and /usr, reapplying mount options from /etc/fstab. However, the fstab file present in + # the initramfs is not the fstab from the source system, and, therefore, it is pointless + # to require the service. It would make sense after we switched root during normal boot + # process. 
+ already_mounted_units = [ + '-.mount', + 'usr.mount', + 'local-fs.target.wants/systemd-remount-fs.service' + ] + + for unit in already_mounted_units: + unit_location = os.path.join(dir_with_our_mount_units, unit) + + if not os.path.exists(unit_location): + api.current_logger().debug('The {} unit does not exists, no need to remove it.'.format(unit)) + continue + + _delete_file(unit_location) + + +def request_units_inclusion_in_initramfs(files_to_include): + api.current_logger().debug('Including the following files into initramfs: {}'.format(files_to_include)) + + additional_files = [ + '/usr/sbin/swapon' # If the system has swap, we have also generated a swap unit to activate it + ] + + tasks = UpgradeInitramfsTasks(include_files=files_to_include + additional_files) + api.produce(tasks) + + +def does_system_have_separate_boot_partition(): + storage_info = next(api.consume(StorageInfo), None) + if not storage_info: + err_msg = 'Actor did not receive required information about system storage (StorageInfo)' + raise StopActorExecutionError(err_msg) + + for fstab_entry in storage_info.fstab: + if fstab_entry.fs_file == '/boot': + return True + + return False + + +def inject_bundled_units(workspace): + """ + Copy static units that are bundled within this actor into the workspace. + """ + bundled_units_dir = api.get_actor_folder_path('bundled_units') + for unit in os.listdir(bundled_units_dir): + if unit == BIND_MOUNT_SYSROOT_BOOT_UNIT: + has_separate_boot = does_system_have_separate_boot_partition() + if not has_separate_boot: + # We perform bind-mounting because of dracut's fips module. + # When /boot is not a separate partition, we don't need to bind mount it -- + # the fips module itself will create a symlink. 
+ continue + + unit_path = os.path.join(bundled_units_dir, unit) + unit_dst = os.path.join(workspace, unit) + api.current_logger().debug('Copying static unit bundled within leapp {} to {}'.format(unit, unit_dst)) + shutil.copyfile(unit_path, unit_dst) + + +def setup_storage_initialization(): + livemode_config = next(api.consume(LiveModeConfig), None) + if livemode_config and livemode_config.is_enabled: + api.current_logger().debug('Pre-generation of systemd fstab mount units skipped: The LiveMode is enabled.') + return + + userspace_info = next(api.consume(TargetUserSpaceInfo), None) + with mounting.NspawnActions(base_dir=userspace_info.path) as upgrade_container_ctx: + with tempfile.TemporaryDirectory(dir='/var/lib/leapp/', prefix='tmp_systemd_fstab_') as workspace_path: + run_systemd_fstab_generator(workspace_path) + remove_units_for_targets_that_are_already_mounted_by_dracut(workspace_path) + prefix_all_mount_units_with_sysroot(workspace_path) + inject_bundled_units(workspace_path) + fix_symlinks_in_targets(workspace_path) + mount_unit_files = copy_units_into_system_location(upgrade_container_ctx, workspace_path) + request_units_inclusion_in_initramfs(mount_unit_files) diff --git a/repos/system_upgrade/common/actors/initramfs/mount_units_generator/tests/test_mount_unit_generation.py b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/tests/test_mount_unit_generation.py new file mode 100644 index 00000000..eb90a75d --- /dev/null +++ b/repos/system_upgrade/common/actors/initramfs/mount_units_generator/tests/test_mount_unit_generation.py @@ -0,0 +1,328 @@ +import os +import shutil + +import pytest + +from leapp.exceptions import StopActorExecutionError +from leapp.libraries.actor import mount_unit_generator +from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked +from leapp.libraries.stdlib import api, CalledProcessError +from leapp.models import FstabEntry, StorageInfo, TargetUserSpaceInfo, UpgradeInitramfsTasks + + +def 
test_run_systemd_fstab_generator_successful_generation(monkeypatch): + """Test successful mount unit generation.""" + + output_dir = '/tmp/test_output' + expected_cmd = [ + '/usr/lib/systemd/system-generators/systemd-fstab-generator', + output_dir, + output_dir, + output_dir + ] + + def mock_run(command): + assert command == expected_cmd + + return { + "stdout": "", + "stderr": "", + "exit_code": 0, + } + + monkeypatch.setattr(mount_unit_generator, 'run', mock_run) + mount_unit_generator.run_systemd_fstab_generator(output_dir) + + +def test_run_systemd_fstab_generator_failure(monkeypatch): + """Test handling of systemd-fstab-generator failure.""" + output_dir = '/tmp/test_output' + expected_cmd = [ + '/usr/lib/systemd/system-generators/systemd-fstab-generator', + output_dir, + output_dir, + output_dir + ] + + def mock_run(command): + assert command == expected_cmd + raise CalledProcessError(message='Generator failed', command=['test'], result={'exit_code': 1}) + + monkeypatch.setattr(mount_unit_generator, 'run', mock_run) + monkeypatch.setattr(api, 'current_logger', logger_mocked()) + + with pytest.raises(StopActorExecutionError): + mount_unit_generator.run_systemd_fstab_generator(output_dir) + + +def test_prefix_mount_unit_with_sysroot(monkeypatch): + """Test prefixing a single mount unit with /sysroot.""" + monkeypatch.setattr(api, 'current_logger', logger_mocked()) + + input_content = [ + "[Unit]\n", + "Description=Test Mount\n", + "[Mount]\n", + "Where=/home\n", + "What=/dev/sda1\n" + ] + + expected_output_lines = [ + "[Unit]", + "Description=Test Mount", + "[Mount]", + "Where=/sysroot/home", + "What=/dev/sda1" + ] + + def mock_read_unit_file_lines(unit_file_path): + return input_content + + def mock_write_unit_file_lines(unit_file_path, lines): + assert unit_file_path == '/test/output.mount' + assert lines == expected_output_lines + + monkeypatch.setattr(mount_unit_generator, '_read_unit_file_lines', mock_read_unit_file_lines) + 
monkeypatch.setattr(mount_unit_generator, '_write_unit_file_lines', mock_write_unit_file_lines) + + mount_unit_generator._prefix_mount_unit_with_sysroot( + '/test/input.mount', + '/test/output.mount' + ) + + +def test_prefix_all_mount_units_with_sysroot(monkeypatch): + """Test prefixing all mount units in a directory.""" + + expected_changes = { + '/test/dir/home.mount': { + 'new_unit_destination': '/test/dir/sysroot-home.mount', + 'should_be_deleted': True, + 'deleted': False, + }, + '/test/dir/var.mount': { + 'new_unit_destination': '/test/dir/sysroot-var.mount', + 'should_be_deleted': True, + 'deleted': False, + }, + '/test/dir/not-a-mount.service': { + 'new_unit_destination': None, + 'should_be_deleted': False, + 'deleted': False, + } + } + + def mock_listdir(dir_path): + return ['home.mount', 'var.mount', 'not-a-mount.service'] + + def mock_delete_file(file_path): + assert file_path in expected_changes + expected_changes[file_path]['deleted'] = True + + def mock_prefix(unit_file_path, new_unit_destination): + assert expected_changes[unit_file_path]['new_unit_destination'] == new_unit_destination + + monkeypatch.setattr('os.listdir', mock_listdir) + monkeypatch.setattr(mount_unit_generator, '_delete_file', mock_delete_file) + monkeypatch.setattr(mount_unit_generator, '_prefix_mount_unit_with_sysroot', mock_prefix) + + mount_unit_generator.prefix_all_mount_units_with_sysroot('/test/dir') + + for original_mount_unit_location in expected_changes: + should_be_deleted = expected_changes[original_mount_unit_location]['should_be_deleted'] + was_deleted = expected_changes[original_mount_unit_location]['deleted'] + assert should_be_deleted == was_deleted + + +@pytest.mark.parametrize('dirname', ( + 'local-fs.target.requires', + 'local-fs.target.wants', + 'local-fs-pre.target.requires', + 'local-fs-pre.target.wants', + 'remote-fs.target.requires', + 'remote-fs.target.wants', + 'remote-fs-pre.target.requires', + 'remote-fs-pre.target.wants', +)) +def 
test_fix_symlinks_in_dir(monkeypatch, dirname): + """Test fixing local-fs.target.requires symlinks.""" + + DIR_PATH = os.path.join('/test/dir/', dirname) + + def mock_rmtree(dir_path): + assert dir_path == DIR_PATH + + def mock_mkdir(dir_path): + assert dir_path == DIR_PATH + + def mock_listdir(dir_path): + return ['sysroot-home.mount', 'sysroot-var.mount', 'not-a-mount.service'] + + def mock_os_path_exist(dir_path): + assert dir_path == DIR_PATH + return dir_path == DIR_PATH + + expected_calls = [ + ['ln', '-s', '../sysroot-home.mount', os.path.join(DIR_PATH, 'sysroot-home.mount')], + ['ln', '-s', '../sysroot-var.mount', os.path.join(DIR_PATH, 'sysroot-var.mount')] + ] + call_count = 0 + + def mock_run(command): + nonlocal call_count + assert command in expected_calls + call_count += 1 + return { + "stdout": "", + "stderr": "", + "exit_code": 0, + } + + monkeypatch.setattr('shutil.rmtree', mock_rmtree) + monkeypatch.setattr('os.mkdir', mock_mkdir) + monkeypatch.setattr('os.listdir', mock_listdir) + monkeypatch.setattr('os.path.exists', mock_os_path_exist) + monkeypatch.setattr(mount_unit_generator, 'run', mock_run) + + mount_unit_generator._fix_symlinks_in_dir('/test/dir', dirname) + + +# Test the copy_units_into_system_location function +def test_copy_units_mixed_content(monkeypatch): + """Test copying units with mixed files and directories.""" + + def mock_walk(dir_path): + tuples_to_yield = [ + ('/source/dir', ['local-fs.target.requires'], ['unit1.mount', 'unit2.mount']), + ('/source/dir/local-fs.target.requires', [], ['unit1.mount', 'unit2.mount']), + ] + yield from tuples_to_yield + + def mock_isdir(path): + return 'local-fs.target.requires' in path + + def _make_couple(sub_path): + return ( + os.path.join('/source/dir/', sub_path), + os.path.join('/container/usr/lib/systemd/system/', sub_path) + ) + + def mock_copy2(src, dst, follow_symlinks=True): + valid_combinations = [ + _make_couple('unit1.mount'), + _make_couple('unit2.mount'), + 
_make_couple('local-fs.target.requires/unit1.mount'), + _make_couple('local-fs.target.requires/unit2.mount'), + ] + assert not follow_symlinks + assert (src, dst) in valid_combinations + + def mock_islink(file_path): + return file_path == '/container/usr/lib/systemd/system/local-fs.target.requires/unit2.mount' + + class MockedDeleteFile: + def __init__(self): + self.removal_called = False + + def __call__(self, file_path): + assert file_path == '/container/usr/lib/systemd/system/local-fs.target.requires/unit2.mount' + self.removal_called = True + + def mock_makedirs(dst_dir, mode=0o777, exist_ok=False): + assert exist_ok + assert mode == 0o755 + + allowed_paths = [ + '/container/usr/lib/systemd/system', + '/container/usr/lib/systemd/system/local-fs.target.requires' + ] + assert dst_dir.rstrip('/') in allowed_paths + + monkeypatch.setattr(os, 'walk', mock_walk) + monkeypatch.setattr(os, 'makedirs', mock_makedirs) + monkeypatch.setattr(os.path, 'isdir', mock_isdir) + monkeypatch.setattr(os.path, 'islink', mock_islink) + monkeypatch.setattr(mount_unit_generator, '_delete_file', MockedDeleteFile()) + monkeypatch.setattr(shutil, 'copy2', mock_copy2) + + class MockedContainerContext: + def __init__(self): + self.base_dir = '/container' + + @staticmethod + def full_path(path): + return os.path.join('/container', path.lstrip('/')) + + mock_container = MockedContainerContext() + + files = mount_unit_generator.copy_units_into_system_location( + mock_container, '/source/dir' + ) + + expected_files = [ + '/usr/lib/systemd/system/unit1.mount', + '/usr/lib/systemd/system/unit2.mount', + '/usr/lib/systemd/system/local-fs.target.requires/unit1.mount', + '/usr/lib/systemd/system/local-fs.target.requires/unit2.mount', + ] + assert sorted(files) == sorted(expected_files) + assert mount_unit_generator._delete_file.removal_called + + +class CurrentActorMockedWithActorFolder(CurrentActorMocked): + def __init__(self, actor_folder_path, *args, **kwargs): + self.actor_folder_path = 
actor_folder_path + super().__init__(*args, **kwargs) + + def get_actor_folder_path(self, subfolder): + return os.path.join(self.actor_folder_path, subfolder) + + +@pytest.mark.parametrize('has_separate_boot', (True, False)) +def test_injection_of_sysroot_boot_bindmount_unit(monkeypatch, has_separate_boot): + fstab_entries = [ + FstabEntry(fs_spec='UUID=123', fs_file='/root', fs_vfstype='xfs', + fs_mntops='defaults', fs_freq='0', fs_passno='0') + ] + + if has_separate_boot: + boot_fstab_entry = FstabEntry(fs_spec='UUID=123', fs_file='/root', fs_vfstype='xfs', + fs_mntops='defaults', fs_freq='0', fs_passno='0') + fstab_entries.append(boot_fstab_entry) + + storage_info = StorageInfo(fstab=fstab_entries) + + actor_mock = CurrentActorMockedWithActorFolder(actor_folder_path='/actor', msgs=[storage_info]) + monkeypatch.setattr(api, 'current_actor', actor_mock) + + workspace_path = '/workspace' + was_copyfile_for_sysroot_boot_called = False + + def copyfile_mocked(source, dest, *args, **kwargs): + if not os.path.basename(source) == mount_unit_generator.BIND_MOUNT_SYSROOT_BOOT_UNIT: + return + + assert has_separate_boot + assert dest == os.path.join(workspace_path, mount_unit_generator.BIND_MOUNT_SYSROOT_BOOT_UNIT) + + nonlocal was_copyfile_for_sysroot_boot_called + was_copyfile_for_sysroot_boot_called = True + + monkeypatch.setattr(shutil, 'copyfile', copyfile_mocked) + + def listdir_mocked(path): + assert path == actor_mock.get_actor_folder_path('bundled_units') + return [ + mount_unit_generator.BIND_MOUNT_SYSROOT_BOOT_UNIT, + 'other.mount' + ] + + monkeypatch.setattr(os, 'listdir', listdir_mocked) + monkeypatch.setattr(mount_unit_generator, + 'does_system_have_separate_boot_partition', + lambda: has_separate_boot) + + mount_unit_generator.inject_bundled_units(workspace_path) + + if has_separate_boot: + assert was_copyfile_for_sysroot_boot_called diff --git a/repos/system_upgrade/common/actors/initramfs/targetinitramfsgenerator/tests/test_targetinitramfsgenerator.py 
b/repos/system_upgrade/common/actors/initramfs/targetinitramfsgenerator/tests/test_targetinitramfsgenerator.py index b4c9dd89..4df9a485 100644 --- a/repos/system_upgrade/common/actors/initramfs/targetinitramfsgenerator/tests/test_targetinitramfsgenerator.py +++ b/repos/system_upgrade/common/actors/initramfs/targetinitramfsgenerator/tests/test_targetinitramfsgenerator.py @@ -33,7 +33,7 @@ def raise_call_error(args=None): }) -class RunMocked(object): +class RunMocked: def __init__(self, raise_err=False): self.called = 0 diff --git a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py index 02c3fd9d..f7e4a8af 100644 --- a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py +++ b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/libraries/upgradeinitramfsgenerator.py @@ -271,7 +271,7 @@ def _get_fspace(path, convert_to_mibs=False, coefficient=1): coefficient = min(coefficient, 1) fspace_bytes = int(stat.f_frsize * stat.f_bavail * coefficient) if convert_to_mibs: - return int(fspace_bytes / 1024 / 1024) # noqa: W1619; pylint: disable=old-division + return int(fspace_bytes / 1024 / 1024) return fspace_bytes @@ -436,6 +436,9 @@ def _generate_livemode_initramfs(context, userspace_initramfs_dest, target_kerne '--lvmconf', '--mdadmconf', '--kver', target_kernel_ver, '-f', userspace_initramfs_dest] + # Add included files + cmd.extend(itertools.chain(*(('--install', file) for file in initramfs_includes.files))) + # Add dracut modules cmd.extend(itertools.chain(*(('--add', module) for module in dracut_modules))) diff --git a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/tests/unit_test_upgradeinitramfsgenerator.py 
b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/tests/unit_test_upgradeinitramfsgenerator.py index 8408233e..b96bf79f 100644 --- a/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/tests/unit_test_upgradeinitramfsgenerator.py +++ b/repos/system_upgrade/common/actors/initramfs/upgradeinitramfsgenerator/tests/unit_test_upgradeinitramfsgenerator.py @@ -82,7 +82,7 @@ def gen_UDM_list(data): return [UpgradeDracutModule(name=i[0], module_path=i[1]) for i in data] -class MockedContext(object): +class MockedContext: def __init__(self): self.called_copy_from = [] self.called_copytree_from = [] @@ -174,7 +174,7 @@ def test_copy_boot_files(monkeypatch, arch): assert actual_boot_content == bootc -class MockedCopyArgs(object): +class MockedCopyArgs: def __init__(self): self.args = None @@ -250,14 +250,14 @@ def test_prepare_userspace_for_initram(monkeypatch, adjust_cwd, input_msgs, pkgs assert _sort_files(upgradeinitramfsgenerator._copy_files.args[1]) == _files -class MockedGetFspace(object): +class MockedGetFspace: def __init__(self, space): self.space = space def __call__(self, dummy_path, convert_to_mibs=False): if not convert_to_mibs: return self.space - return int(self.space / 1024 / 1024) # noqa: W1619; pylint: disable=old-division + return int(self.space / 1024 / 1024) @pytest.mark.parametrize('input_msgs,dracut_modules,kernel_modules', [ diff --git a/repos/system_upgrade/common/actors/insightsautoregister/actor.py b/repos/system_upgrade/common/actors/insightsautoregister/actor.py index a81b434c..56615390 100644 --- a/repos/system_upgrade/common/actors/insightsautoregister/actor.py +++ b/repos/system_upgrade/common/actors/insightsautoregister/actor.py @@ -7,7 +7,7 @@ from leapp.tags import FirstBootPhaseTag, IPUWorkflowTag class InsightsAutoregister(Actor): """ - Automatically registers system into Red Hat Insights + Automatically registers system into Red Hat Lightspeed The registration is skipped if NO_INSIGHTS_REGISTER=1 
environment variable is set, the --no-insights-register command line argument present or the diff --git a/repos/system_upgrade/common/actors/insightsautoregister/libraries/insightsautoregister.py b/repos/system_upgrade/common/actors/insightsautoregister/libraries/insightsautoregister.py index 2134a8bb..bd113a1f 100644 --- a/repos/system_upgrade/common/actors/insightsautoregister/libraries/insightsautoregister.py +++ b/repos/system_upgrade/common/actors/insightsautoregister/libraries/insightsautoregister.py @@ -6,18 +6,18 @@ from leapp.libraries.stdlib import api, CalledProcessError, run def _insights_register(): try: run(['insights-client', '--register']) - api.current_logger().info('Automatically registered into Red Hat Insights') + api.current_logger().info('Automatically registered into Red Hat Lightspeed') except (CalledProcessError) as err: # TODO(mmatuska) produce post-upgrade report? api.current_logger().error( - 'Automatic registration into Red Hat Insights failed: {}'.format(err) + 'Automatic registration into Red Hat Lightspeed failed: {}'.format(err) ) def process(): if rhsm.skip_rhsm() or get_env('LEAPP_NO_INSIGHTS_REGISTER', '0') == '1': api.current_logger().debug( - 'Skipping registration into Insights due to --no-insights-register' + 'Skipping registration into Red Hat Lightspeed due to --no-insights-register' ' or LEAPP_NO_INSIGHTS_REGISTER=1 set' ) return diff --git a/repos/system_upgrade/common/actors/insightsautoregister/tests/test_insightsautoregister.py b/repos/system_upgrade/common/actors/insightsautoregister/tests/test_insightsautoregister.py index 0a039455..d5e6ba20 100644 --- a/repos/system_upgrade/common/actors/insightsautoregister/tests/test_insightsautoregister.py +++ b/repos/system_upgrade/common/actors/insightsautoregister/tests/test_insightsautoregister.py @@ -41,7 +41,7 @@ def test_insights_register_success_logged(monkeypatch): def run_mocked(cmd, **kwargs): return { - 'stdout': 'Successfully registered into Insights', + 'stdout': 
'Successfully registered into Red Hat Lightspeed', 'stderr': '', 'exit_code': 0 } diff --git a/repos/system_upgrade/common/actors/ipuworkflowconfig/libraries/ipuworkflowconfig.py b/repos/system_upgrade/common/actors/ipuworkflowconfig/libraries/ipuworkflowconfig.py index f76677fd..999a001e 100644 --- a/repos/system_upgrade/common/actors/ipuworkflowconfig/libraries/ipuworkflowconfig.py +++ b/repos/system_upgrade/common/actors/ipuworkflowconfig/libraries/ipuworkflowconfig.py @@ -4,7 +4,7 @@ import platform from leapp.exceptions import StopActorExecutionError from leapp.libraries.stdlib import api, CalledProcessError, run -from leapp.models import EnvVar, IPUConfig, IPUSourceToPossibleTargets, OSRelease, Version +from leapp.models import Distro, EnvVar, IPUConfig, IPUSourceToPossibleTargets, OSRelease, Version ENV_IGNORE = ('LEAPP_CURRENT_PHASE', 'LEAPP_CURRENT_ACTOR', 'LEAPP_VERBOSE', 'LEAPP_DEBUG') @@ -93,21 +93,93 @@ def load_upgrade_paths_definitions(paths_definition_file): return definitions -def extract_upgrade_paths_for_distro_and_flavour(all_definitions, distro_id, flavour): - raw_upgrade_paths_for_distro = all_definitions.get(distro_id, {}) +def get_virtual_version(all_upgrade_path_defs, distro, version): + if distro.lower() != 'centos': + return version - if not raw_upgrade_paths_for_distro: - api.current_logger().warning('No upgrade paths defined for distro \'{}\''.format(distro_id)) + centos_upgrade_paths = all_upgrade_path_defs.get('centos', {}) + if not centos_upgrade_paths: + raise StopActorExecutionError('There are no upgrade paths defined for CentOS.') - raw_upgrade_paths_for_flavour = raw_upgrade_paths_for_distro.get(flavour, {}) + virtual_versions = centos_upgrade_paths.get(CENTOS_VIRTUAL_VERSIONS_KEY, {}) + if not virtual_versions: # Unlikely, only if using old upgrade_paths.json, but the user should not touch the file + details = { + "details": "The file does not contain any information about virtual versions of CentOS" + } + raise 
StopActorExecutionError( + "The internal upgrade_paths.json file is invalid.", details=details + ) - raw_upgrade_paths_for_flavour = raw_upgrade_paths_for_distro.get(flavour, {}) + virtual_version = virtual_versions.get(version) + if not virtual_version: + details = ( + 'The {} field in upgrade path definitions for \'centos\' does not' + ' provide any virtual version for version {}' + ).format(CENTOS_VIRTUAL_VERSIONS_KEY, version) + raise StopActorExecutionError( + "Failed to identify virtual minor version number for the system.", + details={"details": details}, + ) + return virtual_version - return raw_upgrade_paths_for_flavour +def extract_upgrade_paths_for_distro_and_flavour(all_definitions, distro, flavour): + distro_paths = all_definitions.get(distro, {}) + if not distro_paths: + api.current_logger().warning( + "No upgrade paths defined for distro '{}'".format(distro) + ) -def construct_models_for_paths_matching_source_major(raw_paths, src_major_version): + distro_paths = distro_paths.get(flavour, {}) + if not distro_paths: + api.current_logger().warning( + "Cannot discover any upgrade paths for flavour: {}/{}".format( + distro, flavour + ) + ) + return distro_paths + + +def make_cross_distro_paths(all_paths, source_distro, target_distro, flavour): + """ + Make paths for upgrade + conversion. + + :param all_paths: The raw upgrade paths retrieved from upgrade_paths.json + :type all_paths: dict + :param source_distro: The source distro. + :type source_distro: str + :param target_distro: The target distro. + :type target_distro: str + :param flavour: The flavour to find paths for. + :type flavour: str + :return: A dictionary with conversion paths for upgrade + conversion between + source and target distro.
+ :rtype: dict + """ + # using source and target for both distro and version gets confusing, using + # a and b for distro instead + paths_a = extract_upgrade_paths_for_distro_and_flavour( + all_paths, source_distro, flavour + ) + paths_b = extract_upgrade_paths_for_distro_and_flavour( + all_paths, target_distro, flavour + ) + + conversion_paths = {} + for source_ver_a, _ in paths_a.items(): + virt_source_ver_a = get_virtual_version(all_paths, source_distro, source_ver_a) + + for source_ver_b, target_ver_b in paths_b.items(): + virt_source_ver_b = get_virtual_version(all_paths, target_distro, source_ver_b) + if virt_source_ver_a == virt_source_ver_b: + conversion_paths[source_ver_a] = target_ver_b + + return conversion_paths + + +def construct_models_for_paths_matching_source_major( + raw_paths, src_major_version +): multipaths_matching_source = [] for src_version, target_versions in raw_paths.items(): if src_version.split('.')[0] == src_major_version: @@ -117,58 +189,97 @@ def construct_models_for_paths_matching_source_major(raw_paths, src_major_versio return multipaths_matching_source -def construct_virtual_versions(all_upgrade_path_defs, distro_id, source_version, target_version): - if distro_id.lower() != 'centos': - return (source_version, target_version) +def _centos_to_rhel_supported_version_workaround(exposed_supported_paths): + """ + Add target version one minor version lower than the latest version + + On CS to RHEL upgrades, particularly on 9->10, there is only one upgrade + path defined, CS 9 -> latest RHEL 10 (10.X). However a situation may occur, + in which the latest RHEL version has not yet been publicly released, e.g. + in pre-release builds. + + This is problematic because the upgrade fails if the content is not yet + available. If this happens the user is informed (by code elsewhere) to + specify the latest available RHEL version (the previous minor version) + manually using the --target-version CLI option. 
+ However the previous minor version is not a supported target version. This + function adds it as one by appending it to exposed_supported_paths[0].target_versions. + The version is not appended if already present or if the defined latest is X.0. + + :param exposed_supported_paths: The supported upgrade paths. Length is expected to be 1. + :type exposed_supported_paths: list[IPUSourceToPossibleTargets] + """ - centos_upgrade_paths = all_upgrade_path_defs.get('centos', {}) - if not centos_upgrade_paths: - raise StopActorExecutionError('There are no upgrade paths defined for CentOS.') + if len(exposed_supported_paths) != 1: + raise StopActorExecutionError( + "Expected only 1 IPUSourceToPossibleTargets model on CS->RHEL upgrade" + ) + path = exposed_supported_paths[0] - virtual_versions = centos_upgrade_paths.get(CENTOS_VIRTUAL_VERSIONS_KEY, {}) - if not virtual_versions: # Unlikely, only if using old upgrade_paths.json, but the user should not touch the file - details = {'details': 'The file does not contain any information about virtual versions of CentOS'} - raise StopActorExecutionError('The internal upgrade_paths.json file is malformed.') - - source_virtual_version = virtual_versions.get(source_version) - target_virtual_version = virtual_versions.get(target_version) - - if not source_virtual_version or not target_virtual_version: - if not source_virtual_version and not target_virtual_version: - what_is_missing = 'CentOS {} (source) and CentOS {} (target)'.format(source_virtual_version, - target_virtual_version) - elif not source_virtual_version: - what_is_missing = 'CentOS {} (source)'.format(source_virtual_version) - else: - what_is_missing = 'CentOS {} (target)'.format(target_virtual_version) - - details_msg = 'The {} field in upgrade path definitions does not provide any information for {}' - details = {'details': details_msg.format(CENTOS_VIRTUAL_VERSIONS_KEY, what_is_missing)} - raise StopActorExecutionError('Failed to identify virtual minor version number 
for the system.', - details=details) + major, minor = max(path.target_versions).split('.') + if not minor or minor == '0': + api.current_logger().debug( + "Skipping centos->rhel supported versions workaround, the latest target minor version is 0." + ) + return - return (source_virtual_version, target_virtual_version) + new_minor = int(minor) - 1 + to_add = "{}.{}".format(major, new_minor) + + if to_add not in path.target_versions: + msg = "Adding {} as a supported target version for centos->rhel upgrade.".format(to_add) + path.target_versions.append(to_add) + else: + msg = "Skipping adding {} as a target version for centos->rhel upgrade, already present.".format( + to_add + ) + api.current_logger().debug(msg) def produce_ipu_config(actor): flavour = os.environ.get('LEAPP_UPGRADE_PATH_FLAVOUR') target_version = os.environ.get('LEAPP_UPGRADE_PATH_TARGET_RELEASE') + target_distro = os.environ.get('LEAPP_TARGET_OS') os_release = get_os_release('/etc/os-release') source_version = os_release.version_id + source_distro = os_release.release_id + all_upgrade_path_defs = load_upgrade_paths_definitions('upgrade_paths.json') + raw_upgrade_paths = extract_upgrade_paths_for_distro_and_flavour(all_upgrade_path_defs, source_distro, flavour) + + if not target_version: + details = {} + if source_distro not in all_upgrade_path_defs: + details['details'] = 'This is due to an unsupported system distribution.' + elif source_version not in raw_upgrade_paths: + details['details'] = 'This is due to an unsupported source version of the system.' 
+ details['hint'] = ( + 'The in-place upgrade is possible only for the supported upgrade paths ' + 'listed here: https://access.redhat.com/articles/4263361' + ) + raise StopActorExecutionError(message='Could not determine the target version for the in-place upgrade.', + details=details) check_target_major_version(source_version, target_version) - all_upgrade_path_defs = load_upgrade_paths_definitions('upgrade_paths.json') - raw_upgrade_paths = extract_upgrade_paths_for_distro_and_flavour(all_upgrade_path_defs, - os_release.release_id, - flavour) + if source_distro == target_distro: + raw_upgrade_paths = extract_upgrade_paths_for_distro_and_flavour( + all_upgrade_path_defs, source_distro, flavour + ) + else: + raw_upgrade_paths = make_cross_distro_paths( + all_upgrade_path_defs, source_distro, target_distro, flavour + ) + + virtual_source_version = get_virtual_version(all_upgrade_path_defs, source_distro, source_version) + virtual_target_version = get_virtual_version(all_upgrade_path_defs, target_distro, target_version) + source_major_version = source_version.split('.')[0] - exposed_supported_paths = construct_models_for_paths_matching_source_major(raw_upgrade_paths, source_major_version) + exposed_supported_paths = construct_models_for_paths_matching_source_major( + raw_upgrade_paths, source_major_version + ) - virtual_source_version, virtual_target_version = construct_virtual_versions(all_upgrade_path_defs, - os_release.release_id, - source_version, - target_version) + if exposed_supported_paths and source_distro == 'centos' and target_distro == 'rhel': + _centos_to_rhel_supported_version_workaround(exposed_supported_paths) actor.produce(IPUConfig( leapp_env_vars=get_env_vars(), @@ -182,5 +293,9 @@ def produce_ipu_config(actor): ), kernel=get_booted_kernel(), flavour=flavour, - supported_upgrade_paths=exposed_supported_paths + supported_upgrade_paths=exposed_supported_paths, + distro=Distro( + source=source_distro, + target=target_distro, + ), )) diff --git 
a/repos/system_upgrade/common/actors/ipuworkflowconfig/tests/test_ipuworkflowconfig.py b/repos/system_upgrade/common/actors/ipuworkflowconfig/tests/test_ipuworkflowconfig.py index 8b7faffb..583cdfc5 100644 --- a/repos/system_upgrade/common/actors/ipuworkflowconfig/tests/test_ipuworkflowconfig.py +++ b/repos/system_upgrade/common/actors/ipuworkflowconfig/tests/test_ipuworkflowconfig.py @@ -1,12 +1,12 @@ -import json import os -import tempfile +from copy import deepcopy import pytest from leapp.exceptions import StopActorExecutionError from leapp.libraries.actor import ipuworkflowconfig -from leapp.libraries.stdlib import CalledProcessError +from leapp.libraries.common.testutils import logger_mocked +from leapp.libraries.stdlib import api, CalledProcessError from leapp.models import IPUSourceToPossibleTargets, OSRelease CUR_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -42,6 +42,43 @@ def _get_os_release(version='7.9', codename='Maipo'): return release +TEST_UPGRADE_PATHS = { + 'rhel': { + 'default': { + '8.10': ['9.4', '9.6', '9.7'], + '8.4': ['9.2'], + '9.6': ['10.0'], + '9.7': ['10.1'], + '8': ['9.4', '9.6'], + '9': ['10.1'], + }, + 'saphana': { + '8.10': ['9.6', '9.4'], + '8': ['9.6', '9.4'], + '9.6': ['10.0'], + '9': ['10.0'], + }, + }, + 'centos': { + 'default': { + '8': ['9'], + '9': ['10'], + }, + '_virtual_versions': { + '8': '8.10', + '9': '9.7', + '10': '10.1', + }, + }, + 'almalinux': { + 'default': { + '8.10': ['9.0', '9.1', '9.2', '9.3', '9.4', '9.5', '9.6', '9.7'], + '9.7': ['10.0', '10.1'], + }, + }, +} + + def test_leapp_env_vars(monkeypatch): _clean_leapp_envs(monkeypatch) monkeypatch.setenv('LEAPP_WHATEVER', '0') @@ -82,6 +119,7 @@ def test_get_booted_kernel(monkeypatch): IPUSourceToPossibleTargets(source_version='8.10', target_versions=['9.4', '9.5', '9.6']), IPUSourceToPossibleTargets(source_version='8.4', target_versions=['9.2']), IPUSourceToPossibleTargets(source_version='8', target_versions=['9.4', '9.5', '9.6']), + 
IPUSourceToPossibleTargets(source_version='8.6', target_versions=['9']), ] ), ( @@ -90,6 +128,13 @@ def test_get_booted_kernel(monkeypatch): IPUSourceToPossibleTargets(source_version='80.0', target_versions=['81.0']), ] ), + ( + '9', + [ + IPUSourceToPossibleTargets(source_version='9', target_versions=['10']), + IPUSourceToPossibleTargets(source_version='9.6', target_versions=['10.0']), + ] + ), ) ) def test_construct_models_for_paths_matching_source_major(source_major_version, expected_result): @@ -98,7 +143,9 @@ def test_construct_models_for_paths_matching_source_major(source_major_version, '8.4': ['9.2'], '9.6': ['10.0'], '8': ['9.4', '9.5', '9.6'], - '80.0': ['81.0'] + '80.0': ['81.0'], + '8.6': ['9'], + '9': ['10'], } result = ipuworkflowconfig.construct_models_for_paths_matching_source_major(RAW_PATHS, source_major_version) @@ -106,6 +153,38 @@ def test_construct_models_for_paths_matching_source_major(source_major_version, assert result == sorted(expected_result, key=lambda x: x.source_version) +@pytest.mark.parametrize( + "src_distro,dst_distro,expected", + [ + ("centos", "rhel", {"8": ["9.4", "9.6", "9.7"], "9": ["10.1"]}), + ("almalinux", "rhel", {"8.10": ["9.4", "9.6", "9.7"], "9.7": ["10.1"]}), + ("rhel", "centos", {"8.10": ["9"], "9.7": ["10"]}), + ("almalinux", "centos", {"8.10": ["9"], "9.7": ["10"]}), + ( + "rhel", + "almalinux", + { + "8.10": ["9.0", "9.1", "9.2", "9.3", "9.4", "9.5", "9.6", "9.7"], + "9.7": ["10.0", "10.1"], + }, + ), + ( + "centos", + "almalinux", + { + "8": ["9.0", "9.1", "9.2", "9.3", "9.4", "9.5", "9.6", "9.7"], + "9": ["10.0", "10.1"], + }, + ), + ], +) +def test_make_cross_distro_paths(src_distro, dst_distro, expected): + res = ipuworkflowconfig.make_cross_distro_paths( + TEST_UPGRADE_PATHS, src_distro, dst_distro, 'default' + ) + assert res == expected + + @pytest.mark.parametrize( ('distro', 'flavour', 'expected_result'), ( @@ -163,50 +242,66 @@ def test_load_raw_upgrade_paths_for_distro_and_flavour(monkeypatch, distro, 
flav } } - result = ipuworkflowconfig.extract_upgrade_paths_for_distro_and_flavour(defined_upgrade_paths, - distro, flavour) + result = ipuworkflowconfig.extract_upgrade_paths_for_distro_and_flavour( + defined_upgrade_paths, distro, flavour + ) assert result == expected_result @pytest.mark.parametrize( ('construction_params', 'expected_versions'), [ - (('centos', '8', '9'), ('8.10', '9.5')), - (('rhel', '8.10', '9.4'), ('8.10', '9.4')), - (('almalinux', '8.10', '9.6'), ('8.10', '9.6')), + (('centos', '8'), '8.10'), + (('centos', '9'), '9.7'), + (('rhel', '8.10'), '8.10'), + (('rhel', '9.4'), '9.4'), + (('almalinux', '8.10'), '8.10'), + (('almalinux', '9.6'), '9.6'), ] ) def test_virtual_version_construction(construction_params, expected_versions): - defined_upgrade_paths = { - 'rhel': { - 'default': { - '8.10': ['9.4', '9.5', '9.6'], - '8.4': ['9.2'], - '9.6': ['10.0'], - '8': ['9.4', '9.5', '9.6'], - '9': ['10.0'] - }, - 'saphana': { - '8.10': ['9.6', '9.4'], - '8': ['9.6', '9.4'], - '9.6': ['10.0'], - '9': ['10.0'] - } - }, - 'centos': { - '8': ['9'], - '_virtual_versions': { - '8': '8.10', - '9': '9.5', - } - }, - 'almalinux': { - 'default': { - '8.10': ['9.0', '9.1', '9.2', '9.3', '9.4', '9.5', '9.6'], - '9.6': ['10.0'] - } - }, - } - - result = ipuworkflowconfig.construct_virtual_versions(defined_upgrade_paths, *construction_params) + result = ipuworkflowconfig.get_virtual_version(TEST_UPGRADE_PATHS, *construction_params) assert result == expected_versions + + +def _make_path(source_ver, target_vers): + return IPUSourceToPossibleTargets(source_version=source_ver, target_versions=target_vers) + + +@pytest.mark.parametrize( + "paths,to_add,logmsg", + [ + ( + [_make_path("9.8", ["10.2"])], + ["10.1"], + "Adding 10.1 as a supported target version for centos->rhel upgrade." + ), + ( + [_make_path("9.10", ["10.10"])], + ["10.9"], + "Adding 10.9 as a supported target version for centos->rhel upgrade." 
+ ), + # already present + ( + [_make_path("8.10", ["9.6", "9.7"])], + [], + "Skipping adding 9.6 as a target version for centos->rhel upgrade, already present." + ), + # lowest minor + ( + [_make_path("9.6", ["10.0"])], + [], + "Skipping centos->rhel supported versions workaround, the latest target minor version is 0." + ), + ], +) +def test_centos_to_rhel_supported_version_workaround(monkeypatch, paths, to_add, logmsg): + logger = logger_mocked() + monkeypatch.setattr(api, 'current_logger', logger) + + original = deepcopy(paths[0]) + ipuworkflowconfig._centos_to_rhel_supported_version_workaround(paths) + + assert paths[0].source_version == original.source_version + assert paths[0].target_versions == original.target_versions + to_add + assert logmsg in logger.dbgmsg[0] diff --git a/repos/system_upgrade/el8toel9/actors/kernel/checkkpatch/actor.py b/repos/system_upgrade/common/actors/kernel/checkkpatch/actor.py similarity index 100% rename from repos/system_upgrade/el8toel9/actors/kernel/checkkpatch/actor.py rename to repos/system_upgrade/common/actors/kernel/checkkpatch/actor.py diff --git a/repos/system_upgrade/common/actors/kernelcmdlineconfig/tests/test_kernelcmdlineconfig.py b/repos/system_upgrade/common/actors/kernelcmdlineconfig/tests/test_kernelcmdlineconfig.py index e5759a7b..5b35bcd3 100644 --- a/repos/system_upgrade/common/actors/kernelcmdlineconfig/tests/test_kernelcmdlineconfig.py +++ b/repos/system_upgrade/common/actors/kernelcmdlineconfig/tests/test_kernelcmdlineconfig.py @@ -15,7 +15,7 @@ from leapp.models import InstalledTargetKernelInfo, KernelCmdlineArg, TargetKern TARGET_KERNEL_NEVRA = 'kernel-core-1.2.3-4.x86_64.el8.x64_64' -# pylint: disable=E501 +# pylint: disable=line-too-long SAMPLE_KERNEL_ARGS = ('ro rootflags=subvol=root' ' resume=/dev/mapper/luks-2c0df999-81ec-4a35-a1f9-b93afee8c6ad' ' rd.luks.uuid=luks-90a6412f-c588-46ca-9118-5aca35943d25' @@ -31,10 +31,10 @@ title="Fedora Linux (6.5.13-100.fc37.x86_64) 37 (Thirty Seven)" 
id="a3018267cdd8451db7c77bb3e5b1403d-6.5.13-100.fc37.x86_64" """ # noqa: E501 SAMPLE_GRUBBY_INFO_OUTPUT = TEMPLATE_GRUBBY_INFO_OUTPUT.format(SAMPLE_KERNEL_ARGS, SAMPLE_KERNEL_ROOT) -# pylint: enable=E501 +# pylint: enable=line-too-long -class MockedRun(object): +class MockedRun: def __init__(self, outputs=None): """ Mock stdlib.run(). diff --git a/repos/system_upgrade/common/actors/livemode/liveimagegenerator/tests/test_image_generation.py b/repos/system_upgrade/common/actors/livemode/liveimagegenerator/tests/test_image_generation.py index 16ae0a09..e3ddce48 100644 --- a/repos/system_upgrade/common/actors/livemode/liveimagegenerator/tests/test_image_generation.py +++ b/repos/system_upgrade/common/actors/livemode/liveimagegenerator/tests/test_image_generation.py @@ -68,7 +68,7 @@ def test_generate_live_image_if_enabled(monkeypatch, livemode_config, should_pro actor_mock = CurrentActorMocked(msgs=messages) monkeypatch.setattr(api, 'current_actor', actor_mock) - class NspawnMock(object): + class NspawnMock: def __init__(self, *args, **kwargs): pass diff --git a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py index 26fd9d09..7d72204c 100644 --- a/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py +++ b/repos/system_upgrade/common/actors/livemode/livemode_config_scanner/libraries/scan_livemode_config.py @@ -5,7 +5,6 @@ from leapp.libraries.common.rpms import has_package from leapp.libraries.stdlib import api from leapp.models import InstalledRPM, LiveModeConfig -LIVEMODE_CONFIG_LOCATION = '/etc/leapp/files/devel-livemode.ini' DEFAULT_SQUASHFS_PATH = '/var/lib/leapp/live-upgrade.img' @@ -39,8 +38,7 @@ def scan_config_and_emit_message(): if not should_scan_config(): return - api.current_logger().info('Loading livemode config from %s', LIVEMODE_CONFIG_LOCATION) - + 
api.current_logger().info('Loading the livemode configuration.') config = api.current_actor().config[livemode_config_lib.LIVEMODE_CONFIG_SECTION] # Mapping from model field names to configuration fields - because we might have diff --git a/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/libraries/prepareliveimage.py b/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/libraries/prepareliveimage.py index 686c4cd6..2587bf89 100644 --- a/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/libraries/prepareliveimage.py +++ b/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/libraries/prepareliveimage.py @@ -6,7 +6,7 @@ import os.path from leapp.exceptions import StopActorExecutionError from leapp.libraries.common import mounting from leapp.libraries.common.config.version import get_target_major_version -from leapp.libraries.stdlib import api, CalledProcessError +from leapp.libraries.stdlib import api from leapp.models import LiveImagePreparationInfo LEAPP_UPGRADE_SERVICE_FILE = 'upgrade.service' @@ -381,47 +381,6 @@ def setup_sshd(context, authorized_keys): error ) - # @Todo(mhecko): This is hazardous. I guess we are setting this so that we can use weaker SSH keys from RHEL7, - # # but this way we change crypto settings system-wise (could be a problem for FIPS). Instead, we - # # should check whether the keys will be OK on RHEL8, and inform the user otherwise. 
- if get_target_major_version() == '8': # set to LEGACY for 7>8 only - try: - with context.open('/etc/crypto-policies/config', 'w+') as f: - f.write('LEGACY\n') - except OSError as error: - api.current_logger().warning('Cannot set crypto policy to LEGACY') - details = {'details': 'Failed to set crypto-policies to LEGACY due to the error: {0}'.format(error)} - raise StopActorExecutionError('Failed to set up livemode SSHD', details=details) - - -# stolen from upgradeinitramfsgenerator.py -def _get_target_kernel_version(context): - """ - Get the version of the most recent kernel version within the container. - """ - try: - results = context.call(['rpm', '-qa', 'kernel-core'], split=True)['stdout'] - - except CalledProcessError as error: - problem = 'Could not query the target userspace kernel version through rpm. Full error: {0}'.format(error) - raise StopActorExecutionError( - 'Cannot get the version of the installed kernel.', - details={'Problem': problem}) - - if len(results) > 1: - raise StopActorExecutionError( - 'Cannot detect the version of the target userspace kernel.', - details={'Problem': 'Detected unexpectedly multiple kernels inside target userspace container.'}) - if not results: - raise StopActorExecutionError( - 'Cannot detect the version of the target userspace kernel.', - details={'Problem': 'An rpm query for the available kernels did not produce any results.'}) - - kernel_version = '-'.join(results[0].rsplit("-", 2)[-2:]) - api.current_logger().debug('Detected kernel version inside container: {}.'.format(kernel_version)) - - return kernel_version - def fakerootfs(): """ diff --git a/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/tests/test_livemode_userspace_modifications.py b/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/tests/test_livemode_userspace_modifications.py index 58046b61..b046d8c7 100644 --- 
a/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/tests/test_livemode_userspace_modifications.py +++ b/repos/system_upgrade/common/actors/livemode/modify_userspace_for_livemode/tests/test_livemode_userspace_modifications.py @@ -26,7 +26,7 @@ _LiveModeConfig = functools.partial(LiveModeConfig, squashfs_fullpath=' {}.'.format(rhel7_name, rhel8_name)) + if source_name != target_name and get_env('LEAPP_NO_NETWORK_RENAMING', '0') != '1': + api.current_logger().warning('Detected interface rename {} -> {}.'.format(source_name, target_name)) if re.search('eth[0-9]+', iface.name) is not None: api.current_logger().warning('Interface named using eth prefix, refusing to generate link file') - renamed_interfaces.append(RenamedInterface(**{'rhel7_name': rhel7_name, - 'rhel8_name': rhel8_name})) + renamed_interfaces.append(RenamedInterface(**{'rhel7_name': source_name, + 'rhel8_name': target_name})) continue initrd_files.append(generate_link_file(iface)) diff --git a/repos/system_upgrade/common/actors/persistentnetnamesconfig/tests/test_persistentnetnamesconfig.py b/repos/system_upgrade/common/actors/persistentnetnamesconfig/tests/test_persistentnetnamesconfig.py index 5ad52c43..c584c7ea 100644 --- a/repos/system_upgrade/common/actors/persistentnetnamesconfig/tests/test_persistentnetnamesconfig.py +++ b/repos/system_upgrade/common/actors/persistentnetnamesconfig/tests/test_persistentnetnamesconfig.py @@ -12,7 +12,6 @@ from leapp.models import ( PCIAddress, PersistentNetNamesFacts, PersistentNetNamesFactsInitramfs, - RenamedInterface, RenamedInterfaces, TargetInitramfsTasks ) @@ -170,14 +169,19 @@ def test_bz_1899455_crash_iface(monkeypatch, adjust_cwd): PersistentNetNamesFactsInitramfs.create(json_msgs["PersistentNetNamesFactsInitramfs"]), ] monkeypatch.setattr(persistentnetnamesconfig, 'generate_link_file', generate_link_file_mocked) - monkeypatch.setattr(persistentnetnamesconfig.api, 'current_actor', CurrentActorMocked(msgs=msgs)) + 
monkeypatch.setattr( + persistentnetnamesconfig.api, + "current_actor", + # without this the actor exits early + CurrentActorMocked(msgs=msgs, envars={"LEAPP_DISABLE_NET_NAMING_SCHEMES": "1"}), + ) monkeypatch.setattr(persistentnetnamesconfig.api, 'current_logger', logger_mocked()) monkeypatch.setattr(persistentnetnamesconfig.api, 'produce', produce_mocked()) persistentnetnamesconfig.process() for prod_models in [RenamedInterfaces, InitrdIncludes, TargetInitramfsTasks]: any(isinstance(i, prod_models) for i in persistentnetnamesconfig.api.produce.model_instances) - assert any(['Some network devices' in x for x in persistentnetnamesconfig.api.current_logger.warnmsg]) + assert any('Some network devices' in x for x in persistentnetnamesconfig.api.current_logger.warnmsg) def test_no_network_renaming(monkeypatch): @@ -194,7 +198,13 @@ def test_no_network_renaming(monkeypatch): msgs = [PersistentNetNamesFacts(interfaces=interfaces)] interfaces[0].name = 'changedinterfacename0' msgs.append(PersistentNetNamesFactsInitramfs(interfaces=interfaces)) - mocked_actor = CurrentActorMocked(msgs=msgs, envars={'LEAPP_NO_NETWORK_RENAMING': '1'}) + mocked_actor = CurrentActorMocked( + msgs=msgs, + envars={ + "LEAPP_DISABLE_NET_NAMING_SCHEMES": "1", + "LEAPP_NO_NETWORK_RENAMING": "1", + }, + ) monkeypatch.setattr(persistentnetnamesconfig.api, 'current_actor', mocked_actor) monkeypatch.setattr(persistentnetnamesconfig.api, 'current_logger', logger_mocked()) monkeypatch.setattr(persistentnetnamesconfig.api, 'produce', produce_mocked()) diff --git a/repos/system_upgrade/common/actors/persistentnetnamesdisable/actor.py b/repos/system_upgrade/common/actors/persistentnetnamesdisable/actor.py index 1add3588..b0182982 100644 --- a/repos/system_upgrade/common/actors/persistentnetnamesdisable/actor.py +++ b/repos/system_upgrade/common/actors/persistentnetnamesdisable/actor.py @@ -18,7 +18,8 @@ class PersistentNetNamesDisable(Actor): produces = (KernelCmdlineArg, Report) tags = (FactsPhaseTag, 
IPUWorkflowTag) - def ethX_count(self, interfaces): + @staticmethod + def ethX_count(interfaces): ethX = re.compile('eth[0-9]+') count = 0 @@ -27,7 +28,8 @@ class PersistentNetNamesDisable(Actor): count = count + 1 return count - def single_eth0(self, interfaces): + @staticmethod + def single_eth0(interfaces): return len(interfaces) == 1 and interfaces[0].name == 'eth0' def disable_persistent_naming(self): diff --git a/repos/system_upgrade/common/actors/persistentnetnamesinitramfs/tests/test_persistentnetnamesinitramfs.py b/repos/system_upgrade/common/actors/persistentnetnamesinitramfs/tests/test_persistentnetnamesinitramfs.py index 8da0dce4..f149502b 100644 --- a/repos/system_upgrade/common/actors/persistentnetnamesinitramfs/tests/test_persistentnetnamesinitramfs.py +++ b/repos/system_upgrade/common/actors/persistentnetnamesinitramfs/tests/test_persistentnetnamesinitramfs.py @@ -21,7 +21,7 @@ def interface_mocked(i=0): ) -class interfaces_mocked(object): +class interfaces_mocked: def __init__(self, count): self.count = count diff --git a/repos/system_upgrade/common/actors/peseventsscanner/actor.py b/repos/system_upgrade/common/actors/peseventsscanner/actor.py index f801f1a1..cb911471 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/actor.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/actor.py @@ -10,7 +10,8 @@ from leapp.models import ( RepositoriesMapping, RepositoriesSetupTasks, RHUIInfo, - RpmTransactionTasks + RpmTransactionTasks, + ActiveVendorList, ) from leapp.reporting import Report from leapp.tags import FactsPhaseTag, IPUWorkflowTag @@ -33,6 +34,7 @@ class PesEventsScanner(Actor): RepositoriesMapping, RHUIInfo, RpmTransactionTasks, + ActiveVendorList, ) produces = (ConsumedDataAsset, PESRpmTransactionTasks, RepositoriesSetupTasks, Report) tags = (IPUWorkflowTag, FactsPhaseTag) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py 
b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py index f24dda68..7ee5d016 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py @@ -58,6 +58,7 @@ class Action(IntEnum): MERGED = 5 MOVED = 6 RENAMED = 7 + REINSTALLED = 8 def get_pes_events(pes_json_directory, pes_json_filename): @@ -72,13 +73,14 @@ def get_pes_events(pes_json_directory, pes_json_filename): # a case as we have no work to do in such a case here. events_data = fetch.load_data_asset(api.current_actor(), pes_json_filename, + asset_directory=pes_json_directory, asset_fulltext_name='PES events file', docs_url='', docs_title='') if not events_data: return None - if not events_data.get('packageinfo'): + if events_data.get('packageinfo') is None: raise ValueError('Found PES data with invalid structure') all_events = list(chain(*[parse_entry(entry) for entry in events_data['packageinfo']])) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index e6741293..ec7d001a 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -1,12 +1,14 @@ from collections import defaultdict, namedtuple from functools import partial +import os from leapp import reporting from leapp.exceptions import StopActorExecutionError from leapp.libraries.actor import peseventsscanner_repomap from leapp.libraries.actor.pes_event_parsing import Action, get_pes_events, Package from leapp.libraries.common import rpms -from leapp.libraries.common.config import version +from leapp.libraries.common.config import get_target_distro_id, version +from leapp.libraries.common.repomaputils import combine_repomap_messages from 
leapp.libraries.stdlib import api from leapp.libraries.stdlib.config import is_verbose from leapp.models import ( @@ -20,7 +22,8 @@ from leapp.models import ( RepositoriesMapping, RepositoriesSetupTasks, RHUIInfo, - RpmTransactionTasks + RpmTransactionTasks, + ActiveVendorList, ) SKIPPED_PKGS_MSG = ( @@ -31,8 +34,9 @@ SKIPPED_PKGS_MSG = ( 'for details.\nThe list of these packages:' ) +VENDORS_DIR = "/etc/leapp/files/vendors.d" -TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep')) +TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep', 'to_reinstall')) def get_cloud_provider_name(cloud_provider_variant): @@ -86,7 +90,7 @@ def get_transaction_configuration(): :return: TransactionConfiguration """ - transaction_configuration = TransactionConfiguration(to_install=set(), to_remove=set(), to_keep=set()) + transaction_configuration = TransactionConfiguration(to_install=set(), to_remove=set(), to_keep=set(), to_reinstall=set()) _Pkg = partial(Package, repository=None, modulestream=None) @@ -94,6 +98,7 @@ def get_transaction_configuration(): transaction_configuration.to_install.update(_Pkg(name=pkg_name) for pkg_name in tasks.to_install) transaction_configuration.to_remove.update(_Pkg(name=pkg_name) for pkg_name in tasks.to_remove) transaction_configuration.to_keep.update(_Pkg(name=pkg_name) for pkg_name in tasks.to_keep) + transaction_configuration.to_reinstall.update(_Pkg(name=pkg_name) for pkg_name in tasks.to_reinstall) return transaction_configuration @@ -133,6 +138,7 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, logger = api.current_logger() # Start with the installed packages and modify the set according to release events target_pkgs = set(source_installed_pkgs) + pkgs_to_reinstall = set() release_events = [e for e in events if e.to_release == release] @@ -176,9 +182,12 @@ def 
compute_pkg_changes_between_consequent_releases(source_installed_pkgs, target_pkgs = target_pkgs.difference(event.out_pkgs) target_pkgs = target_pkgs.union(event.out_pkgs) + if (event.action == Action.REINSTALLED and is_any_in_pkg_present): + pkgs_to_reinstall = pkgs_to_reinstall.union(event.in_pkgs) + pkgs_to_demodularize = pkgs_to_demodularize.difference(event.in_pkgs) - return (target_pkgs, pkgs_to_demodularize) + return (target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall) def remove_undesired_events(events, relevant_to_releases): @@ -244,15 +253,17 @@ def compute_packages_on_target_system(source_pkgs, events, releases): did_processing_cross_major_version = True pkgs_to_demodularize = {pkg for pkg in target_pkgs if pkg.modulestream} - target_pkgs, pkgs_to_demodularize = compute_pkg_changes_between_consequent_releases(target_pkgs, events, - release, seen_pkgs, - pkgs_to_demodularize) + target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_pkg_changes_between_consequent_releases( + target_pkgs, events, + release, seen_pkgs, + pkgs_to_demodularize + ) seen_pkgs = seen_pkgs.union(target_pkgs) demodularized_pkgs = {Package(pkg.name, pkg.repository, None) for pkg in pkgs_to_demodularize} demodularized_target_pkgs = target_pkgs.difference(pkgs_to_demodularize).union(demodularized_pkgs) - return (demodularized_target_pkgs, pkgs_to_demodularize) + return (demodularized_target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall) def compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize): @@ -356,15 +367,13 @@ def get_pesid_to_repoid_map(target_pesids): :return: Dictionary mapping the target_pesids to their corresponding repoid """ - repositories_map_msgs = api.consume(RepositoriesMapping) - repositories_map_msg = next(repositories_map_msgs, None) - if list(repositories_map_msgs): - api.current_logger().warning('Unexpectedly received more than one RepositoriesMapping message.') - if not repositories_map_msg: + repositories_map_msgs = 
list(api.consume(RepositoriesMapping)) + if not repositories_map_msgs: raise StopActorExecutionError( 'Cannot parse RepositoriesMapping data properly', details={'Problem': 'Did not receive a message with mapped repositories'} ) + repositories_map_msg = combine_repomap_messages(repositories_map_msgs) rhui_info = next(api.consume(RHUIInfo), None) cloud_provider = rhui_info.provider if rhui_info else '' @@ -400,7 +409,7 @@ def get_pesid_to_repoid_map(target_pesids): repo_type='rpm', channel='ga', rhui='', - distro=api.current_actor().configuration.os_release.release_id, + distro=get_target_distro_id(), ) for pesid in target_pesids: @@ -554,6 +563,19 @@ def process(): if not events: return + active_vendors = [] + for vendor_list in api.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + pes_json_suffix = "_pes.json" + if os.path.isdir(VENDORS_DIR): + vendor_pesfiles = list(filter(lambda vfile: pes_json_suffix in vfile, os.listdir(VENDORS_DIR))) + + for pesfile in vendor_pesfiles: + if pesfile[:-len(pes_json_suffix)] in active_vendors: + vendor_events = get_pes_events(VENDORS_DIR, pesfile) + events.extend(vendor_events) + releases = get_relevant_releases(events) installed_pkgs = get_installed_pkgs() transaction_configuration = get_transaction_configuration() @@ -567,7 +589,7 @@ def process(): events = remove_undesired_events(events, releases) # Apply events - compute what packages should the target system have - target_pkgs, pkgs_to_demodularize = compute_packages_on_target_system(pkgs_to_begin_computation_with, + target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_packages_on_target_system(pkgs_to_begin_computation_with, events, releases) # Packages coming out of the events have PESID as their repository, however, we need real repoid @@ -587,4 +609,5 @@ def process(): rpm_tasks = include_instructions_from_transaction_configuration(rpm_tasks, transaction_configuration, installed_pkgs) if rpm_tasks: + rpm_tasks.to_reinstall = 
sorted(pkgs_to_reinstall) api.produce(rpm_tasks) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py index 37be03f1..abd35e0b 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/peseventsscanner_repomap.py @@ -1,4 +1,4 @@ -from leapp.libraries.common.config import get_target_product_channel +from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id, get_target_product_channel from leapp.libraries.common.config.version import get_source_major_version, get_target_major_version from leapp.libraries.stdlib import api @@ -18,12 +18,19 @@ def _get_channel_prio(pesid_repo): return priorities.get(pesid_repo.channel, 10) -class RepoMapDataHandler(object): +class RepoMapDataHandler: """ Provide the basic functionality to work with the repository data easily. """ - def __init__(self, repo_map, distro='', cloud_provider='', default_channels=None): + def __init__( + self, + repo_map, + source_distro='', + target_distro='', + cloud_provider='', + default_channels=None, + ): """ Initialize the object based on the given RepositoriesMapping msg. @@ -32,8 +39,10 @@ class RepoMapDataHandler(object): :param repo_map: A valid RepositoryMapping message. :type repo_map: RepositoryMapping - :param distro: Which distribution's mappings to use, default to current - :type distro: str + :param source_distro: The distribution to map repos from, default to current + :type source_distro: str + :param target_distro: The distribution to map repos to, default to current + :type target_distro: str :param default_channels: A list of default channels to use when a target repository equivalent exactly matching a source repository was not found. 
:type default_channels: List[str] @@ -44,7 +53,9 @@ class RepoMapDataHandler(object): # ideal for work, but there is not any significant impact.. self.repositories = repo_map.repositories self.mapping = repo_map.mapping - self.distro = distro or api.current_actor().configuration.os_release.release_id + + self.source_distro = source_distro or get_source_distro_id() + self.target_distro = target_distro or get_target_distro_id() # FIXME(pstodulk): what about default_channel -> fallback_channel # hardcoded always as ga? instead of list of channels.. # it'd be possibly confusing naming now... @@ -89,19 +100,19 @@ class RepoMapDataHandler(object): """ self.default_channels = default_channels - def get_pesid_repo_entry(self, repoid, major_version): + def get_pesid_repo_entry(self, repoid, major_version, distro): """ - Retrieve the PESIDRepositoryEntry that matches the given repoid and OS major version. + Retrieve the PESIDRepositoryEntry that matches the given repoid, distro and OS major version If multiple pesid repo entries with the same repoid were found, the entry with rhui matching the source system's rhui info will be returned. If no entry with matching rhui exists, the CDN one is returned if any. - Note that repositories are automatically filtered based on the specified OS release ID (self.distro). - - :param repoid: RepoID that should the PESIDRepositoryEntry match. + :param repoid: RepoID that the PESIDRepositoryEntry should match. :type repoid: str - :param major_version: RepoID that should the PESIDRepositoryEntry match. + :param major_version: Major version that the PESIDRepositoryEntry should match. :type major_version: str + :param distro: Distro that the PESIDRepositoryEntry should match. + :type distro: str :return: The PESIDRepositoryEntry matching the given repoid and major_version or None if no such entry could be found. 
:rtype: Optional[PESIDRepositoryEntry] @@ -109,8 +120,8 @@ class RepoMapDataHandler(object): matching_pesid_repos = [] for pesid_repo in self.repositories: # FIXME(pstodulk): Why we do not check actually architecture here? - # It seems obvious we should check it but the fixme comment below - # suggests that it's expected - for not obvious reason. + # It seems obvious we should check it, but it's not clear why we + # don't and investigation might be required. # For the investigation: # # check repoids matching various architectures # # check repoids without $arch in substring on how many architectures they are present @@ -119,12 +130,13 @@ class RepoMapDataHandler(object): if ( pesid_repo.repoid == repoid and pesid_repo.major_version == major_version - and pesid_repo.distro == self.distro + and pesid_repo.distro == distro ): matching_pesid_repos.append(pesid_repo) # FIXME: when a PESID is present for multiple architectures, there - # multiple matching repos even though there should really be just one + # are multiple matching repos even though there should really be just + # one, the condition below fails even though it shouldn't if len(matching_pesid_repos) == 1: # Perform no heuristics if only a single pesid repository with matching repoid found return matching_pesid_repos[0] @@ -190,7 +202,7 @@ class RepoMapDataHandler(object): the OS Major version same as the source OS. :rtype: List[PESIDRepositoryEntry] """ - return self.get_pesid_repos(pesid, get_source_major_version(), self.distro) + return self.get_pesid_repos(pesid, get_source_major_version(), self.source_distro) def get_target_pesid_repos(self, pesid): """ @@ -203,7 +215,7 @@ class RepoMapDataHandler(object): the OS Major version same as the target OS. 
:rtype: List[PESIDRepositoryEntry] """ - return self.get_pesid_repos(pesid, get_target_major_version(), self.distro) + return self.get_pesid_repos(pesid, get_target_major_version(), self.target_distro) def _find_repository_target_equivalent(self, src_pesidrepo, target_pesid): """ @@ -223,7 +235,7 @@ class RepoMapDataHandler(object): matches_rhui = candidate.rhui == src_pesidrepo.rhui matches_repo_type = candidate.repo_type == 'rpm' matches_arch = candidate.arch == api.current_actor().configuration.architecture - matches_distro = candidate.distro == self.distro + matches_distro = candidate.distro == self.target_distro if matches_rhui and matches_arch and matches_distro and matches_repo_type: # user can specify in future the specific channel should be @@ -295,7 +307,7 @@ class RepoMapDataHandler(object): # {pesid: target_repo} target_repos_best_candidates = {} for src_repoid in src_repoids: - src_pesidrepo = self.get_pesid_repo_entry(src_repoid, get_source_major_version()) + src_pesidrepo = self.get_pesid_repo_entry(src_repoid, get_source_major_version(), self.source_distro) if not src_pesidrepo: # unmapped or custom repo -> skip this one continue @@ -340,7 +352,9 @@ def get_default_repository_channels(repomap, src_repoids): default_pesid = DEFAULT_PESID[get_source_major_version()] top_prio_pesid_repo = None for repoid in src_repoids: - pesid_repo = repomap.get_pesid_repo_entry(repoid, get_source_major_version()) + pesid_repo = repomap.get_pesid_repo_entry( + repoid, get_source_major_version(), get_source_distro_id() + ) if not pesid_repo or pesid_repo.pesid != default_pesid: continue if not top_prio_pesid_repo or _get_channel_prio(pesid_repo) > _get_channel_prio(top_prio_pesid_repo): diff --git a/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py index 09a1e82d..f67f3840 100644 --- 
a/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/tests/test_pes_event_scanner.py @@ -325,18 +325,18 @@ def test_blacklisted_repoid_is_not_produced(monkeypatch): Test that upgrade with a package that would be from a blacklisted repository on the target system does not remove the package as it was already installed, however, the blacklisted repoid should not be produced. """ - installed_pkgs = {Package('pkg-a', 'blacklisted-rhel7', None), Package('pkg-b', 'repoid-rhel7', None)} + installed_pkgs = {Package('pkg-a', 'blacklisted-rhel8', None), Package('pkg-b', 'repoid-rhel8', None)} events = [ - Event(1, Action.MOVED, {Package('pkg-b', 'repoid-rhel7', None)}, {Package('pkg-b', 'repoid-rhel8', None)}, - (8, 0), (8, 1), []), - Event(2, Action.MOVED, {Package('pkg-a', 'repoid-rhel7', None)}, {Package('pkg-a', 'blacklisted-rhel8', None)}, - (8, 0), (8, 1), []), + Event(1, Action.MOVED, {Package('pkg-b', 'repoid-rhel8', None)}, {Package('pkg-b', 'repoid-rhel9', None)}, + (9, 0), (9, 1), []), + Event(2, Action.MOVED, {Package('pkg-a', 'repoid-rhel8', None)}, {Package('pkg-a', 'blacklisted-rhel9', None)}, + (9, 0), (9, 1), []), ] monkeypatch.setattr(pes_events_scanner, 'get_installed_pkgs', lambda: installed_pkgs) monkeypatch.setattr(pes_events_scanner, 'get_pes_events', lambda folder, filename: events) monkeypatch.setattr(pes_events_scanner, 'apply_transaction_configuration', lambda pkgs, transaction_cfg: pkgs) - monkeypatch.setattr(pes_events_scanner, 'get_blacklisted_repoids', lambda: {'blacklisted-rhel8'}) + monkeypatch.setattr(pes_events_scanner, 'get_blacklisted_repoids', lambda: {'blacklisted-rhel9'}) monkeypatch.setattr(pes_events_scanner, 'replace_pesids_with_repoids_in_packages', lambda pkgs, src_pkgs_repoids: pkgs) @@ -357,7 +357,7 @@ def test_blacklisted_repoid_is_not_produced(monkeypatch): repo_setup_tasks = [msg for msg in api.produce.model_instances if isinstance(msg, 
RepositoriesSetupTasks)] assert len(repo_setup_tasks) == 1 - assert repo_setup_tasks[0].to_enable == ['repoid-rhel8'] + assert repo_setup_tasks[0].to_enable == ['repoid-rhel9'] @pytest.mark.parametrize( diff --git a/repos/system_upgrade/common/actors/removebootfiles/tests/unit_test_removebootfiles.py b/repos/system_upgrade/common/actors/removebootfiles/tests/unit_test_removebootfiles.py index 7e5fbbf0..719ffe21 100644 --- a/repos/system_upgrade/common/actors/removebootfiles/tests/unit_test_removebootfiles.py +++ b/repos/system_upgrade/common/actors/removebootfiles/tests/unit_test_removebootfiles.py @@ -7,7 +7,7 @@ from leapp.libraries.stdlib import api from leapp.models import BootContent -class remove_file_mocked(object): +class remove_file_mocked: def __init__(self): self.called = 0 self.files_to_remove = [] diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py index 198c4368..df08e6fa 100644 --- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py +++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py @@ -1,3 +1,4 @@ +from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id from leapp.libraries.common.config.version import get_target_major_version from leapp.libraries.common.distro import get_distribution_data from leapp.libraries.common.rpms import has_package @@ -9,7 +10,7 @@ def _get_obsolete_keys(): """ Return keys obsoleted in target and previous versions """ - distribution = api.current_actor().configuration.os_release.release_id + distribution = get_target_distro_id() obsoleted_keys_map = get_distribution_data(distribution).get('obsoleted-keys', {}) keys = [] for version in range(7, int(get_target_major_version()) + 1): @@ -35,6 +36,11 @@ def register_dnfworkaround(keys): def process(): + if 
get_source_distro_id() != get_target_distro_id(): + # TODO adjust for conversions, in the current state it would not have + # any effect, just skip it + return + keys = _get_obsolete_keys() if not keys: return diff --git a/repos/system_upgrade/common/actors/removeresumeservice/tests/test_removeresumeservice.py b/repos/system_upgrade/common/actors/removeresumeservice/tests/test_removeresumeservice.py index ea803856..d59ef346 100644 --- a/repos/system_upgrade/common/actors/removeresumeservice/tests/test_removeresumeservice.py +++ b/repos/system_upgrade/common/actors/removeresumeservice/tests/test_removeresumeservice.py @@ -11,7 +11,7 @@ import pytest 'under the root user.', ) # TODO make the test not destructive -@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], +@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", "0").lower() in ["false", "0"], reason='Test disabled by default because it would modify the system') def test_remove_resume_service(current_actor_context): service_name = 'leapp_resume.service' diff --git a/repos/system_upgrade/common/actors/removeupgradebootentry/tests/unit_test_removeupgradebootentry.py b/repos/system_upgrade/common/actors/removeupgradebootentry/tests/unit_test_removeupgradebootentry.py index c84d3085..9de4e4d3 100644 --- a/repos/system_upgrade/common/actors/removeupgradebootentry/tests/unit_test_removeupgradebootentry.py +++ b/repos/system_upgrade/common/actors/removeupgradebootentry/tests/unit_test_removeupgradebootentry.py @@ -8,7 +8,7 @@ from leapp.libraries.stdlib import api from leapp.models import ArmWorkaroundEFIBootloaderInfo, BootContent, EFIBootEntry, FirmwareFacts -class run_mocked(object): +class run_mocked: def __init__(self): self.args = [] diff --git a/repos/system_upgrade/common/actors/reportsettargetrelease/libraries/reportsettargetrelease.py b/repos/system_upgrade/common/actors/reportsettargetrelease/libraries/reportsettargetrelease.py index 37f60179..56dc15f0 100644 --- 
a/repos/system_upgrade/common/actors/reportsettargetrelease/libraries/reportsettargetrelease.py +++ b/repos/system_upgrade/common/actors/reportsettargetrelease/libraries/reportsettargetrelease.py @@ -1,6 +1,5 @@ from leapp import reporting -from leapp.libraries.common import rhsm -from leapp.libraries.common.config import get_distro_id +from leapp.libraries.common import config, rhsm from leapp.libraries.stdlib import api @@ -49,8 +48,9 @@ def _report_unhandled_release(): def process(): + # TODO this might need a better handling during conversions if rhsm.skip_rhsm(): - if get_distro_id() == 'rhel': + if config.get_source_distro_id() == config.get_target_distro_id() == 'rhel': _report_unhandled_release() else: _report_set_release() diff --git a/repos/system_upgrade/common/actors/repositoriesblacklist/libraries/repositoriesblacklist.py b/repos/system_upgrade/common/actors/repositoriesblacklist/libraries/repositoriesblacklist.py index e22fbee0..5059f619 100644 --- a/repos/system_upgrade/common/actors/repositoriesblacklist/libraries/repositoriesblacklist.py +++ b/repos/system_upgrade/common/actors/repositoriesblacklist/libraries/repositoriesblacklist.py @@ -6,7 +6,6 @@ from leapp.models import CustomTargetRepository, RepositoriesBlacklisted, Reposi # {OS_MAJOR_VERSION: PESID} UNSUPPORTED_PESIDS = { - "7": "rhel7-optional", "8": "rhel8-CRB", "9": "rhel9-CRB", "10": "rhel10-CRB" @@ -28,9 +27,8 @@ def _report_using_unsupported_repos(repos): def _report_excluded_repos(repos): - optional_repository_name = 'optional' if get_source_major_version() == '7' else 'CRB' api.current_logger().info( - "The {0} repository is not enabled. Excluding {1} from the upgrade".format(optional_repository_name, repos) + "The CRB repository is not enabled. 
Excluding {} from the upgrade".format(repos) ) report = [ diff --git a/repos/system_upgrade/common/actors/repositoriesblacklist/tests/test_repositoriesblacklist.py b/repos/system_upgrade/common/actors/repositoriesblacklist/tests/test_repositoriesblacklist.py index c4f9a36e..945007c6 100644 --- a/repos/system_upgrade/common/actors/repositoriesblacklist/tests/test_repositoriesblacklist.py +++ b/repos/system_upgrade/common/actors/repositoriesblacklist/tests/test_repositoriesblacklist.py @@ -20,8 +20,8 @@ from leapp.models import ( def repofacts_opts_disabled(): repos_data = [ RepositoryData( - repoid="rhel-7-server-optional-rpms", - name="RHEL 7 Server", + repoid="codeready-builder-for-rhel-8-x86_64-rpms", + name="RHEL 8 CRB", enabled=False, ) ] @@ -32,11 +32,11 @@ def repofacts_opts_disabled(): @pytest.fixture -def rhel7_optional_pesidrepo(): +def rhel8_crb_pesidrepo(): return PESIDRepositoryEntry( - pesid='rhel7-optional', - major_version='7', - repoid='rhel-7-server-optional-rpms', + pesid='rhel8-CRB', + major_version='8', + repoid='codeready-builder-for-rhel-8-x86_64-rpms', rhui='', arch='x86_64', channel='ga', @@ -46,11 +46,11 @@ def rhel7_optional_pesidrepo(): @pytest.fixture -def rhel8_crb_pesidrepo(): +def rhel9_crb_pesidrepo(): return PESIDRepositoryEntry( - pesid='rhel8-CRB', - major_version='8', - repoid='codeready-builder-for-rhel-8-x86_64-rpms', + pesid='rhel9-CRB', + major_version='9', + repoid='codeready-builder-for-rhel-9-x86_64-rpms', rhui='', arch='x86_64', channel='ga', @@ -60,10 +60,10 @@ def rhel8_crb_pesidrepo(): @pytest.fixture -def repomap_opts_only(rhel7_optional_pesidrepo, rhel8_crb_pesidrepo): +def repomap_opts_only(rhel8_crb_pesidrepo, rhel9_crb_pesidrepo): return RepositoriesMapping( - mapping=[RepoMapEntry(source='rhel7-optional', target=['rhel8-CRB'])], - repositories=[rhel7_optional_pesidrepo, rhel8_crb_pesidrepo] + mapping=[RepoMapEntry(source='rhel8-CRB', target=['rhel9-CRB'])], + repositories=[rhel8_crb_pesidrepo, 
rhel9_crb_pesidrepo] ) @@ -75,8 +75,8 @@ def test_all_target_optionals_blacklisted_when_no_optional_on_source(monkeypatch repos_data = [ RepositoryData( - repoid="rhel-7-server-rpms", - name="RHEL 7 Server", + repoid="rhel-8-server-rpms", + name="RHEL 8 Server", enabled=True, ) ] @@ -92,7 +92,7 @@ def test_all_target_optionals_blacklisted_when_no_optional_on_source(monkeypatch repositoriesblacklist.process() assert api.produce.called - assert 'codeready-builder-for-rhel-8-x86_64-rpms' in api.produce.model_instances[0].repoids + assert 'codeready-builder-for-rhel-9-x86_64-rpms' in api.produce.model_instances[0].repoids def test_with_no_mapping_for_optional_repos(monkeypatch, repomap_opts_only, repofacts_opts_disabled): @@ -115,7 +115,11 @@ def test_blacklist_produced_when_optional_repo_disabled(monkeypatch, repofacts_o Tests whether a correct blacklist is generated when there is disabled optional repo on the system. """ - monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[repofacts_opts_disabled, repomap_opts_only])) + monkeypatch.setattr( + api, + "current_actor", + CurrentActorMocked(msgs=[repofacts_opts_disabled, repomap_opts_only]), + ) monkeypatch.setattr(api, "produce", produce_mocked()) monkeypatch.setattr(reporting, "create_report", produce_mocked()) @@ -123,7 +127,7 @@ def test_blacklist_produced_when_optional_repo_disabled(monkeypatch, repofacts_o assert api.produce.model_instances, 'A blacklist should get generated.' - expected_blacklisted_repoid = 'codeready-builder-for-rhel-8-x86_64-rpms' + expected_blacklisted_repoid = 'codeready-builder-for-rhel-9-x86_64-rpms' err_msg = 'Blacklist does not contain expected repoid.' 
assert expected_blacklisted_repoid in api.produce.model_instances[0].repoids, err_msg @@ -166,8 +170,8 @@ def test_repositoriesblacklist_not_empty(monkeypatch, repofacts_opts_disabled, r def test_repositoriesblacklist_empty(monkeypatch, repofacts_opts_disabled, repomap_opts_only): """ - Tests whether nothing is produced if there are some disabled optional repos, but an empty blacklist is determined - from the repo mapping data. + Tests whether nothing is produced if there are some disabled optional + repos, but an empty blacklist is determined from the repo mapping data. """ msgs_to_feed = [repofacts_opts_disabled, repomap_opts_only] @@ -177,7 +181,7 @@ def test_repositoriesblacklist_empty(monkeypatch, repofacts_opts_disabled, repom repositoriesblacklist, "_get_repoids_to_exclude", lambda dummy_mapping: set() - ) # pylint:disable=W0108 + ) monkeypatch.setattr(api, "produce", produce_mocked()) repositoriesblacklist.process() @@ -187,7 +191,7 @@ def test_repositoriesblacklist_empty(monkeypatch, repofacts_opts_disabled, repom @pytest.mark.parametrize( ("enabled_repo", "exp_report_title", "message_produced"), [ - ("codeready-builder-for-rhel-8-x86_64-rpms", "Using repository not supported by Red Hat", False), + ("codeready-builder-for-rhel-9-x86_64-rpms", "Using repository not supported by Red Hat", False), ("some_other_enabled_repo", "Excluded target system repositories", True), (None, "Excluded target system repositories", True), ], diff --git a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py index d4a64793..4ec1d6e0 100644 --- a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py +++ b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py @@ -3,6 +3,7 @@ from collections import defaultdict from leapp.exceptions import StopActorExecutionError from 
leapp.libraries.common.config.version import get_source_major_version, get_target_major_version +from leapp.libraries.common.repomaputils import RepoMapData from leapp.libraries.common.fetch import load_data_asset from leapp.libraries.common.rpms import get_leapp_packages, LeappComponents from leapp.libraries.stdlib import api @@ -16,121 +17,6 @@ REPOMAP_FILE = 'repomap.json' """The name of the new repository mapping file.""" -class RepoMapData(object): - VERSION_FORMAT = '1.3.0' - - def __init__(self): - self.repositories = [] - self.mapping = {} - - def add_repository(self, data, pesid): - """ - Add new PESIDRepositoryEntry with given pesid from the provided dictionary. - - :param data: A dict containing the data of the added repository. The dictionary structure corresponds - to the repositories entries in the repository mapping JSON schema. - :type data: Dict[str, str] - :param pesid: PES id of the repository family that the newly added repository belongs to. - :type pesid: str - """ - self.repositories.append(PESIDRepositoryEntry( - repoid=data['repoid'], - channel=data['channel'], - rhui=data.get('rhui', ''), - repo_type=data['repo_type'], - arch=data['arch'], - major_version=data['major_version'], - pesid=pesid, - distro=data['distro'], - )) - - def get_repositories(self, valid_major_versions): - """ - Return the list of PESIDRepositoryEntry object matching the specified major versions. - """ - return [repo for repo in self.repositories if repo.major_version in valid_major_versions] - - def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid): - """ - Add a new mapping entry that is mapping the source pesid to the destination pesid(s), - relevant in an IPU from the supplied source major version to the supplied target - major version. - - :param str source_major_version: Specifies the major version of the source system - for which the added mapping applies. 
- :param str target_major_version: Specifies the major version of the target system - for which the added mapping applies. - :param str source_pesid: PESID of the source repository. - :param Union[str|List[str]] target_pesid: A single target PESID or a list of target - PESIDs of the added mapping. - """ - # NOTE: it could be more simple, but I prefer to be sure the input data - # contains just one map per source PESID. - key = '{}:{}'.format(source_major_version, target_major_version) - rmap = self.mapping.get(key, defaultdict(set)) - self.mapping[key] = rmap - if isinstance(target_pesid, list): - rmap[source_pesid].update(target_pesid) - else: - rmap[source_pesid].add(target_pesid) - - def get_mappings(self, src_major_version, dst_major_version): - """ - Return the list of RepoMapEntry objects for the specified upgrade path. - - IOW, the whole mapping for specified IPU. - """ - key = '{}:{}'.format(src_major_version, dst_major_version) - rmap = self.mapping.get(key, None) - if not rmap: - return None - map_list = [] - for src_pesid in sorted(rmap.keys()): - map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid]))) - return map_list - - @staticmethod - def load_from_dict(data): - if data['version_format'] != RepoMapData.VERSION_FORMAT: - raise ValueError( - 'The obtained repomap data has unsupported version of format.' 
- ' Get {} required {}' - .format(data['version_format'], RepoMapData.VERSION_FORMAT) - ) - - repomap = RepoMapData() - - # Load reposiories - existing_pesids = set() - for repo_family in data['repositories']: - existing_pesids.add(repo_family['pesid']) - for repo in repo_family['entries']: - repomap.add_repository(repo, repo_family['pesid']) - - # Load mappings - for mapping in data['mapping']: - for entry in mapping['entries']: - if not isinstance(entry['target'], list): - raise ValueError( - 'The target field of a mapping entry is not a list: {}' - .format(entry) - ) - - for pesid in [entry['source']] + entry['target']: - if pesid not in existing_pesids: - raise ValueError( - 'The {} pesid is not related to any repository.' - .format(pesid) - ) - repomap.add_mapping( - source_major_version=mapping['source_major_version'], - target_major_version=mapping['target_major_version'], - source_pesid=entry['source'], - target_pesid=entry['target'], - ) - return repomap - - def _inhibit_upgrade(msg): local_path = os.path.join('/etc/leapp/file', REPOMAP_FILE) hint = ( diff --git a/repos/system_upgrade/common/actors/rootscanner/tests/test_rootscanner.py b/repos/system_upgrade/common/actors/rootscanner/tests/test_rootscanner.py index 659a3017..07ce5da8 100644 --- a/repos/system_upgrade/common/actors/rootscanner/tests/test_rootscanner.py +++ b/repos/system_upgrade/common/actors/rootscanner/tests/test_rootscanner.py @@ -9,9 +9,9 @@ from leapp.libraries.actor.rootscanner import scan_dir @pytest.mark.parametrize("filename,symlink,count_invalid", - [(u'a_utf_file'.encode('utf-8'), u"utf8_symlink".encode('utf-8'), 0), - (u'простофайл'.encode('koi8-r'), u"этонеутф8".encode('koi8-r'), 2), - (u'a_utf_file'.encode('utf-8'), u"этонеутф8".encode('koi8-r'), 1)]) + [('a_utf_file'.encode('utf-8'), "utf8_symlink".encode('utf-8'), 0), + ('простофайл'.encode('koi8-r'), "этонеутф8".encode('koi8-r'), 2), + ('a_utf_file'.encode('utf-8'), "этонеутф8".encode('koi8-r'), 1)]) def 
test_invalid_symlinks(filename, symlink, count_invalid): # Let's create a directory with both valid utf-8 and non-utf symlinks # NOTE(ivasilev) As this has to run for python2 as well can't use the nice tempfile.TemporaryDirectory way diff --git a/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py b/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py index 3a59535b..e9455feb 100644 --- a/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py +++ b/repos/system_upgrade/common/actors/rpmscanner/tests/test_rpmscanner.py @@ -77,7 +77,7 @@ ARTIFACTS_SUBVERSION_113 = [ ] -class ModuleMocked(object): +class ModuleMocked: def __init__(self, name, stream, artifacts): self.name = name self.stream = stream diff --git a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py b/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py index 43ac1fc4..62aefaf4 100644 --- a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py +++ b/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py @@ -18,21 +18,37 @@ def load_tasks_file(path, logger): return [] +def filter_out(installed_rpm_names, to_filter, debug_msg): + # These are the packages that aren't installed on the system. + filtered_ok = [pkg for pkg in to_filter if pkg not in installed_rpm_names] + + # And these ones are the ones that are. + filtered_out = list(set(to_filter) - set(filtered_ok)) + if filtered_out: + api.current_logger().debug( + debug_msg + + '\n- ' + '\n- '.join(filtered_out) + ) + # We may want to use either of the two sets. 
+ return filtered_ok, filtered_out + + def load_tasks(base_dir, logger): # Loads configuration files to_install, to_keep, and to_remove from the given base directory rpms = next(api.consume(DistributionSignedRPM)) rpm_names = [rpm.name for rpm in rpms.items] + to_install = load_tasks_file(os.path.join(base_dir, 'to_install'), logger) + install_debug_msg = 'The following packages from "to_install" file will be ignored as they are already installed:' # we do not want to put into rpm transaction what is already installed (it will go to "to_upgrade" bucket) - to_install_filtered = [pkg for pkg in to_install if pkg not in rpm_names] + to_install_filtered, _ = filter_out(rpm_names, to_install, install_debug_msg) - filtered = set(to_install) - set(to_install_filtered) - if filtered: - api.current_logger().debug( - 'The following packages from "to_install" file will be ignored as they are already installed:' - '\n- ' + '\n- '.join(filtered)) + to_reinstall = load_tasks_file(os.path.join(base_dir, 'to_reinstall'), logger) + reinstall_debug_msg = 'The following packages from "to_reinstall" file will be ignored as they are not installed:' + _, to_reinstall_filtered = filter_out(rpm_names, to_reinstall, reinstall_debug_msg) return RpmTransactionTasks( to_install=to_install_filtered, + to_reinstall=to_reinstall_filtered, to_keep=load_tasks_file(os.path.join(base_dir, 'to_keep'), logger), to_remove=load_tasks_file(os.path.join(base_dir, 'to_remove'), logger)) diff --git a/repos/system_upgrade/common/actors/satellite_upgrader/tests/unit_test_satellite_upgrader.py b/repos/system_upgrade/common/actors/satellite_upgrader/tests/unit_test_satellite_upgrader.py index 55896c75..f3bed932 100644 --- a/repos/system_upgrade/common/actors/satellite_upgrader/tests/unit_test_satellite_upgrader.py +++ b/repos/system_upgrade/common/actors/satellite_upgrader/tests/unit_test_satellite_upgrader.py @@ -4,7 +4,7 @@ from leapp.models import SatelliteFacts, SatellitePostgresqlFacts from 
leapp.snactor.fixture import current_actor_context -class MockedRun(object): +class MockedRun: def __init__(self): self._manager = Manager() self.commands = self._manager.list() diff --git a/repos/system_upgrade/common/actors/scanclienablerepo/tests/test_unit_scanclienablerepo.py b/repos/system_upgrade/common/actors/scanclienablerepo/tests/test_unit_scanclienablerepo.py index 1f437c47..6cf7e681 100644 --- a/repos/system_upgrade/common/actors/scanclienablerepo/tests/test_unit_scanclienablerepo.py +++ b/repos/system_upgrade/common/actors/scanclienablerepo/tests/test_unit_scanclienablerepo.py @@ -6,7 +6,7 @@ from leapp.libraries.stdlib import api from leapp.models import CustomTargetRepository -class LoggerMocked(object): +class LoggerMocked: def __init__(self): self.infomsg = None self.debugmsg = None diff --git a/repos/system_upgrade/common/actors/scancpu/libraries/scancpu.py b/repos/system_upgrade/common/actors/scancpu/libraries/scancpu.py index db3f92d4..ecc23349 100644 --- a/repos/system_upgrade/common/actors/scancpu/libraries/scancpu.py +++ b/repos/system_upgrade/common/actors/scancpu/libraries/scancpu.py @@ -2,17 +2,15 @@ import json import re from leapp.libraries.common.config import architecture -from leapp.libraries.common.config.version import get_source_major_version from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import CPUInfo, DetectedDeviceOrDriver, DeviceDriverDeprecationData -LSCPU_NAME_VALUE = re.compile(r'^(?P[^:]+):[^\S\n]+(?P.+)\n?', flags=re.MULTILINE) PPC64LE_MODEL = re.compile(r'\d+\.\d+ \(pvr (?P[0-9a-fA-F]+) 0*[0-9a-fA-F]+\)') -def _get_lscpu_output(output_json=False): +def _get_lscpu_output(): try: - result = run(['lscpu'] + (['-J'] if output_json else [])) + result = run(['lscpu', '-J']) return result.get('stdout', '') except (OSError, CalledProcessError): api.current_logger().debug('Executing `lscpu` failed', exc_info=True) @@ -20,10 +18,7 @@ def _get_lscpu_output(output_json=False): def 
_parse_lscpu_output(): - if get_source_major_version() == '7': - return dict(LSCPU_NAME_VALUE.findall(_get_lscpu_output())) - - lscpu = _get_lscpu_output(output_json=True) + lscpu = _get_lscpu_output() try: parsed_json = json.loads(lscpu) # The json contains one entry "lscpu" which is a list of dictionaries diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/json/invalid b/repos/system_upgrade/common/actors/scancpu/tests/files/invalid similarity index 100% rename from repos/system_upgrade/common/actors/scancpu/tests/files/json/invalid rename to repos/system_upgrade/common/actors/scancpu/tests/files/invalid diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_aarch64 b/repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_aarch64 similarity index 100% rename from repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_aarch64 rename to repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_aarch64 diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_ppc64le b/repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_ppc64le similarity index 100% rename from repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_ppc64le rename to repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_ppc64le diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_s390x b/repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_s390x similarity index 100% rename from repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_s390x rename to repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_s390x diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_x86_64 b/repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_x86_64 similarity index 100% rename from repos/system_upgrade/common/actors/scancpu/tests/files/json/lscpu_x86_64 rename to repos/system_upgrade/common/actors/scancpu/tests/files/lscpu_x86_64 
diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_aarch64 b/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_aarch64 deleted file mode 100644 index 3b9619ef..00000000 --- a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_aarch64 +++ /dev/null @@ -1,25 +0,0 @@ -Architecture: aarch64 -Byte Order: Little Endian -CPU(s): 160 -On-line CPU(s) list: 0-159 -Thread(s) per core: 1 -Core(s) per socket: 80 -Socket(s): 2 -NUMA node(s): 4 -Vendor ID: ARM -BIOS Vendor ID: Ampere(R) -Model: 1 -Model name: Neoverse-N1 -BIOS Model name: Ampere(R) Altra(R) Processor -Stepping: r3p1 -CPU max MHz: 3000.0000 -CPU min MHz: 1000.0000 -BogoMIPS: 50.00 -L1d cache: 64K -L1i cache: 64K -L2 cache: 1024K -NUMA node0 CPU(s): 0-79 -NUMA node1 CPU(s): 80-159 -NUMA node2 CPU(s): -NUMA node3 CPU(s): -Flags: fp asimd evtstrm aes pmull sha1 sha2 crc32 atomics fphp asimdhp cpuid asimdrdm lrcpc dcpop asimddp ssbs diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_empty_field b/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_empty_field deleted file mode 100644 index f830b7fe..00000000 --- a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_empty_field +++ /dev/null @@ -1,4 +0,0 @@ -Empyt 1: -Empyt 2: -Empyt 3: -Flags: flag diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_ppc64le b/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_ppc64le deleted file mode 100644 index 07d2ed65..00000000 --- a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_ppc64le +++ /dev/null @@ -1,15 +0,0 @@ -Architecture: ppc64le -Byte Order: Little Endian -CPU(s): 8 -On-line CPU(s) list: 0-7 -Thread(s) per core: 1 -Core(s) per socket: 1 -Socket(s): 8 -NUMA node(s): 1 -Model: 2.1 (pvr 004b 0201) -Model name: POWER8E (raw), altivec supported -Hypervisor vendor: KVM -Virtualization type: para -L1d cache: 64K -L1i cache: 32K -NUMA node0 CPU(s): 0-7 diff 
--git a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_s390x b/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_s390x deleted file mode 100644 index 2c0de9f9..00000000 --- a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_s390x +++ /dev/null @@ -1,26 +0,0 @@ -Architecture: s390x -CPU op-mode(s): 32-bit, 64-bit -Byte Order: Big Endian -CPU(s): 4 -On-line CPU(s) list: 0-3 -Thread(s) per core: 1 -Core(s) per socket: 1 -Socket(s) per book: 1 -Book(s) per drawer: 1 -Drawer(s): 4 -NUMA node(s): 1 -Vendor ID: IBM/S390 -Machine type: 3931 -CPU dynamic MHz: 5200 -CPU static MHz: 5200 -BogoMIPS: 3331.00 -Hypervisor: KVM/Linux -Hypervisor vendor: KVM -Virtualization type: full -Dispatching mode: horizontal -L1d cache: 128K -L1i cache: 128K -L2 cache: 32768K -L3 cache: 262144K -NUMA node0 CPU(s): 0-3 -Flags: esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te vx vxd vxe gs vxe2 vxp sort dflt vxp2 nnpa sie diff --git a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_x86_64 b/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_x86_64 deleted file mode 100644 index a1dc1035..00000000 --- a/repos/system_upgrade/common/actors/scancpu/tests/files/txt/lscpu_x86_64 +++ /dev/null @@ -1,36 +0,0 @@ -Architecture: x86_64 -CPU op-mode(s): 32-bit, 64-bit -Address sizes: 46 bits physical, 48 bits virtual -Byte Order: Little Endian -CPU(s): 48 -On-line CPU(s) list: 0-47 -Vendor ID: GenuineIntel -Model name: Intel(R) Xeon(R) CPU E5-2670 v3 @ 2.30GHz -CPU family: 6 -Model: 63 -Thread(s) per core: 2 -Core(s) per socket: 12 -Socket(s): 2 -Stepping: 2 -CPU(s) scaling MHz: 44% -CPU max MHz: 3100.0000 -CPU min MHz: 1200.0000 -BogoMIPS: 4599.83 -Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc cpuid aperfmperf pni pclmulqdq dtes64 
monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm cpuid_fault epb invpcid_single pti ssbd ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid ept_ad fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid cqm xsaveopt cqm_llc cqm_occup_llc dtherm ida arat pln pts md_clear flush_l1d -Virtualization: VT-x -L1d cache: 768 KiB (24 instances) -L1i cache: 768 KiB (24 instances) -L2 cache: 6 MiB (24 instances) -L3 cache: 60 MiB (2 instances) -NUMA node(s): 2 -NUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46 -NUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47 -Vulnerability Itlb multihit: KVM: Mitigation: VMX disabled -Vulnerability L1tf: Mitigation; PTE Inversion; VMX conditional cache flushes, SMT vulnerable -Vulnerability Mds: Mitigation; Clear CPU buffers; SMT vulnerable -Vulnerability Meltdown: Mitigation; PTI -Vulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl and seccomp -Vulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization -Vulnerability Spectre v2: Mitigation; Full generic retpoline, IBPB conditional, IBRS_FW, STIBP conditional, RSB filling -Vulnerability Srbds: Not affected -Vulnerability Tsx async abort: Not affected diff --git a/repos/system_upgrade/common/actors/scancpu/tests/test_scancpu.py b/repos/system_upgrade/common/actors/scancpu/tests/test_scancpu.py index dc9d1ffc..3605ebe7 100644 --- a/repos/system_upgrade/common/actors/scancpu/tests/test_scancpu.py +++ b/repos/system_upgrade/common/actors/scancpu/tests/test_scancpu.py @@ -56,34 +56,25 @@ LSCPU = { } -class mocked_get_cpuinfo(object): +class mocked_get_cpuinfo: def __init__(self, filename): self.filename = filename - def __call__(self, output_json=False): + def __call__(self): """ Return lines of the self.filename test file located in the files 
directory. Those files contain /proc/cpuinfo content from several machines. """ - - filename = self.filename - if output_json: - filename = os.path.join('json', filename) - else: - filename = os.path.join('txt', filename) - filename = os.path.join(CUR_DIR, 'files', filename) + filename = os.path.join(CUR_DIR, 'files', self.filename) with open(filename, 'r') as fp: return '\n'.join(fp.read().splitlines()) @pytest.mark.parametrize("arch", ARCH_SUPPORTED) -@pytest.mark.parametrize("version", ['7', '8']) -def test_scancpu(monkeypatch, arch, version): - - monkeypatch.setattr('leapp.libraries.actor.scancpu.get_source_major_version', lambda: version) +def test_scancpu(monkeypatch, arch): mocked_cpuinfo = mocked_get_cpuinfo('lscpu_' + arch) monkeypatch.setattr(scancpu, '_get_lscpu_output', mocked_cpuinfo) @@ -106,34 +97,9 @@ def test_scancpu(monkeypatch, arch, version): assert expected == produced -def test_lscpu_with_empty_field(monkeypatch): - - def mocked_cpuinfo(*args, **kwargs): - return mocked_get_cpuinfo('lscpu_empty_field')(output_json=False) - - monkeypatch.setattr(scancpu, '_get_lscpu_output', mocked_cpuinfo) - monkeypatch.setattr(api, 'produce', produce_mocked()) - current_actor = CurrentActorMocked() - monkeypatch.setattr(api, 'current_actor', current_actor) - - scancpu.process() - - expected = CPUInfo(machine_type=None, flags=['flag']) - produced = api.produce.model_instances[0] - - assert api.produce.called == 1 - - assert expected.machine_type == produced.machine_type - assert sorted(expected.flags) == sorted(produced.flags) - - def test_parse_invalid_json(monkeypatch): - monkeypatch.setattr('leapp.libraries.actor.scancpu.get_source_major_version', lambda: '8') - - def mocked_cpuinfo(*args, **kwargs): - return mocked_get_cpuinfo('invalid')(output_json=True) - + mocked_cpuinfo = mocked_get_cpuinfo('invalid') monkeypatch.setattr(scancpu, '_get_lscpu_output', mocked_cpuinfo) monkeypatch.setattr(api, 'produce', produce_mocked()) monkeypatch.setattr(api, 
'current_logger', logger_mocked()) diff --git a/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py b/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py index 27dec8cc..772b33e6 100644 --- a/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py +++ b/repos/system_upgrade/common/actors/scancustomrepofile/tests/test_scancustomrepofile.py @@ -23,7 +23,7 @@ _CUSTOM_REPOS = [ _CUSTOM_REPO_FILE_MSG = CustomTargetRepositoryFile(file=scancustomrepofile.CUSTOM_REPO_PATH) -class LoggerMocked(object): +class LoggerMocked: def __init__(self): self.infomsg = None self.debugmsg = None diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/actor.py b/repos/system_upgrade/common/actors/scandnfpluginpath/actor.py new file mode 100644 index 00000000..e43a691e --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/actor.py @@ -0,0 +1,21 @@ +from leapp.actors import Actor +from leapp.libraries.actor.scandnfpluginpath import scan_dnf_pluginpath +from leapp.models import DnfPluginPathDetected +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class ScanDnfPluginPath(Actor): + """ + Scans DNF configuration for custom pluginpath option. + + This actor collects information about whether the pluginpath option is configured in DNF configuration + and produces a DnfPluginPathDetected message, containing the information. 
+ """ + + name = 'scan_dnf_pluginpath' + consumes = () + produces = (DnfPluginPathDetected,) + tags = (FactsPhaseTag, IPUWorkflowTag) + + def process(self): + scan_dnf_pluginpath() diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/libraries/scandnfpluginpath.py b/repos/system_upgrade/common/actors/scandnfpluginpath/libraries/scandnfpluginpath.py new file mode 100644 index 00000000..818f7700 --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/libraries/scandnfpluginpath.py @@ -0,0 +1,30 @@ +import os + +from six.moves import configparser + +from leapp.libraries.stdlib import api +from leapp.models import DnfPluginPathDetected + +DNF_CONFIG_PATH = '/etc/dnf/dnf.conf' + + +def _is_pluginpath_set(config_path): + """Check if pluginpath option is set in DNF configuration file.""" + if not os.path.isfile(config_path): + api.current_logger().warning('The %s file is missing.', config_path) + return False + + parser = configparser.ConfigParser() + + try: + parser.read(config_path) + return parser.has_option('main', 'pluginpath') + except (configparser.Error, IOError) as e: + api.current_logger().warning('The DNF config file %s couldn\'t be parsed: %s', config_path, e) + return False + + +def scan_dnf_pluginpath(): + """Scan DNF configuration and produce DnfPluginPathDetected message.""" + is_detected = _is_pluginpath_set(DNF_CONFIG_PATH) + api.produce(DnfPluginPathDetected(is_pluginpath_detected=is_detected)) diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_incorrect_pluginpath b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_incorrect_pluginpath new file mode 100644 index 00000000..aa29db09 --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_incorrect_pluginpath @@ -0,0 +1,7 @@ +[main] +gpgcheck=1 +installonly_limit=3 +clean_requirements_on_remove=True +best=True +skip_if_unavailable=False 
+pluginpathincorrect=/usr/lib/python3.6/site-packages/dnf-plugins diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_no_pluginpath b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_no_pluginpath new file mode 100644 index 00000000..3d08d075 --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_no_pluginpath @@ -0,0 +1,6 @@ +[main] +gpgcheck=1 +installonly_limit=3 +clean_requirements_on_remove=True +best=True +skip_if_unavailable=False diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_with_pluginpath b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_with_pluginpath new file mode 100644 index 00000000..09a81e64 --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/files/dnf_config_with_pluginpath @@ -0,0 +1,7 @@ +[main] +gpgcheck=1 +installonly_limit=3 +clean_requirements_on_remove=True +best=True +skip_if_unavailable=False +pluginpath=/usr/lib/python3.6/site-packages/dnf-plugins diff --git a/repos/system_upgrade/common/actors/scandnfpluginpath/tests/test_scandnfpluginpath.py b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/test_scandnfpluginpath.py new file mode 100644 index 00000000..fefb9d3f --- /dev/null +++ b/repos/system_upgrade/common/actors/scandnfpluginpath/tests/test_scandnfpluginpath.py @@ -0,0 +1,53 @@ +import os + +import pytest + +from leapp.libraries.actor import scandnfpluginpath +from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked, produce_mocked +from leapp.libraries.stdlib import api +from leapp.models import DnfPluginPathDetected + + +@pytest.mark.parametrize('is_detected', [False, True]) +def test_scan_detects_pluginpath(monkeypatch, is_detected): + mocked_producer = produce_mocked() + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) + monkeypatch.setattr(api, 'produce', mocked_producer) + + 
monkeypatch.setattr(scandnfpluginpath, '_is_pluginpath_set', + lambda path: is_detected) + + scandnfpluginpath.scan_dnf_pluginpath() + + assert mocked_producer.called == 1 + assert mocked_producer.model_instances[0].is_pluginpath_detected is is_detected + + +@pytest.mark.parametrize(('config_file', 'result'), [ + ('files/dnf_config_no_pluginpath', False), + ('files/dnf_config_with_pluginpath', True), + ('files/dnf_config_incorrect_pluginpath', False), + ('files/not_existing_file.conf', False) +]) +def test_is_pluginpath_set(config_file, result): + CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + + assert scandnfpluginpath._is_pluginpath_set(os.path.join(CUR_DIR, config_file)) == result + + +def test_scan_no_config_file(monkeypatch): + mocked_producer = produce_mocked() + logger = logger_mocked() + monkeypatch.setattr(api, 'current_actor', CurrentActorMocked()) + monkeypatch.setattr(api, 'produce', mocked_producer) + monkeypatch.setattr(api, 'current_logger', lambda: logger) + + filename = 'files/not_existing_file.conf' + monkeypatch.setattr(scandnfpluginpath, 'DNF_CONFIG_PATH', filename) + scandnfpluginpath.scan_dnf_pluginpath() + + assert mocked_producer.called == 1 + assert mocked_producer.model_instances[0].is_pluginpath_detected is False + + assert 'The %s file is missing.' 
in logger.warnmsg + assert filename in logger.warnmsg diff --git a/repos/system_upgrade/common/actors/scandynamiclinkerconfiguration/libraries/scandynamiclinkerconfiguration.py b/repos/system_upgrade/common/actors/scandynamiclinkerconfiguration/libraries/scandynamiclinkerconfiguration.py index 8d3b473e..73b0c84e 100644 --- a/repos/system_upgrade/common/actors/scandynamiclinkerconfiguration/libraries/scandynamiclinkerconfiguration.py +++ b/repos/system_upgrade/common/actors/scandynamiclinkerconfiguration/libraries/scandynamiclinkerconfiguration.py @@ -113,5 +113,5 @@ def scan_dynamic_linker_configuration(): included_configs=included_config_files, used_variables=used_variables) - if other_lines or any([config.modified for config in included_config_files]) or used_variables: + if other_lines or any(config.modified for config in included_config_files) or used_variables: api.produce(configuration) diff --git a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py index c1cc69ee..35683cca 100644 --- a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py +++ b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/libraries/scankernel.py @@ -70,7 +70,7 @@ def get_boot_files_provided_by_kernel_pkg(kernel_nevra): @suppress_deprecation(InstalledTargetKernelVersion) def process(): - # pylint: disable=no-else-return - false positive + # pylint: disable=no-else-return # false positive # TODO: should we take care about stuff of kernel-rt and kernel in the same # time when both are present? or just one? currently, handle only one # of these during the upgrade. 
kernel-rt has higher prio when original sys diff --git a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/tests/test_scaninstalledkernel_scaninstalledtargetkernelversion.py b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/tests/test_scaninstalledkernel_scaninstalledtargetkernelversion.py index 570b6782..8f9f8b7a 100644 --- a/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/tests/test_scaninstalledkernel_scaninstalledtargetkernelversion.py +++ b/repos/system_upgrade/common/actors/scaninstalledtargetkernelversion/tests/test_scaninstalledkernel_scaninstalledtargetkernelversion.py @@ -15,7 +15,7 @@ OLD_KERNEL_NEVRA = 'kernel-core-0.1.2-3.el8.x86_64' OLD_RT_KERNEL_NEVRA = 'kernel-rt-core-0.1.2-3.rt4.5.el8.x86_64' -class MockedRun(object): +class MockedRun: def __init__(self, stdouts): # stdouts should be dict of list of strings: { str: [str1,str2,...]} diff --git a/repos/system_upgrade/common/actors/scansaphana/tests/test_scansaphana.py b/repos/system_upgrade/common/actors/scansaphana/tests/test_scansaphana.py index 0b55c9fb..791cdc68 100644 --- a/repos/system_upgrade/common/actors/scansaphana/tests/test_scansaphana.py +++ b/repos/system_upgrade/common/actors/scansaphana/tests/test_scansaphana.py @@ -59,7 +59,7 @@ compiler-version: GCC 9 lcmserver-artifact-version: 2.5.46''')) -class CallMock(object): +class CallMock: def __init__(self, ret): self.args = None self.ret = ret @@ -69,7 +69,7 @@ class CallMock(object): return self.ret -class SubprocessCall(object): +class SubprocessCall: def __init__(self, admusername): self.admusername = admusername @@ -77,9 +77,9 @@ class SubprocessCall(object): assert args[0][0:3] == ['sudo', '-u', self.admusername] cmd = args[0][3:] kwargs.pop('checked', None) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE) - p.wait() - return {'exit_code': p.returncode, 'stdout': p.stdout.read()} + with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + stdout, 
stderr = p.communicate() + return {'exit_code': p.returncode, 'stdout': stdout.decode('utf-8'), 'stderr': stderr.decode('utf-8')} def test_scansaphana_get_instance_status(monkeypatch): @@ -108,7 +108,7 @@ def test_scansaphana_get_instance_status(monkeypatch): def test_scansaphana_parse_manifest(monkeypatch): - class _mock_open(object): + class _mock_open: def __init__(self, path, mode): self._fp = BytesIO(SAPHANA2_MANIFEST.encode('utf-8')) diff --git a/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/actor.py b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/actor.py new file mode 100644 index 00000000..2c0d1973 --- /dev/null +++ b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/actor.py @@ -0,0 +1,19 @@ +from leapp.actors import Actor +from leapp.libraries.actor import scanthirdpartytargetpythonmodules +from leapp.models import DistributionSignedRPM, ThirdPartyTargetPythonModules +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class ScanThirdPartyTargetPythonModules(Actor): + """ + Detect third-party target Python modules and RPMs on the source system. 
+ + """ + + name = 'scan_third_party_target_python_modules' + consumes = (DistributionSignedRPM,) + produces = (ThirdPartyTargetPythonModules,) + tags = (FactsPhaseTag, IPUWorkflowTag) + + def process(self): + scanthirdpartytargetpythonmodules.process() diff --git a/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/libraries/scanthirdpartytargetpythonmodules.py b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/libraries/scanthirdpartytargetpythonmodules.py new file mode 100644 index 00000000..1329c50f --- /dev/null +++ b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/libraries/scanthirdpartytargetpythonmodules.py @@ -0,0 +1,193 @@ +import json +import os +from collections import defaultdict +from pathlib import Path + +import rpm + +from leapp.libraries.common.config.version import get_target_major_version +from leapp.libraries.common.rpms import has_package +from leapp.libraries.stdlib import api, run +from leapp.models import DistributionSignedRPM, ThirdPartyTargetPythonModules + +PYTHON_EXTENSIONS = (".py", ".so", ".pyc") +FMT_LIST_SEPARATOR = '\n - ' + + +def _formatted_list_output(input_list, sep=FMT_LIST_SEPARATOR): + return ['{}{}'.format(sep, item) for item in input_list] + + +def get_python_sys_paths(python_interpreter): + """Get sys.path from the specified Python interpreter.""" + + result = run([python_interpreter, '-c', 'import sys, json; print(json.dumps(sys.path))'])['stdout'] + raw_paths = json.loads(result) + paths = [Path(raw_path).resolve() for raw_path in raw_paths] + return paths + + +def get_python_binary_for_rhel(rhel_version): + """ + Maps RHEL major version to the appropriate Python binary. + """ + + version_map = { + '9': 'python3.9', + '10': 'python3.12', + } + return version_map.get(rhel_version) + + +def is_target_python_present(target_python): + """ + Checks if the target Python interpreter is available on the system. 
+ """ + + result = run(['command', '-v', target_python], checked=False) + return not result['exit_code'] + + +def identify_files_of_pypackages(syspaths): + ts = rpm.TransactionSet() + # add a trailing slash by calling os.path.join(..., '') + roots = tuple(os.path.join(str(path), "") for path in syspaths) + file_to_pkg = {} + + # Iterate over all installed packages + for header in ts.dbMatch(): + pkg = header['name'] + files = header['filenames'] + for filename in files: + if filename and filename.endswith(PYTHON_EXTENSIONS) and filename.startswith(roots): + file_to_pkg[filename] = pkg + return file_to_pkg + + +def find_python_related(root): + # recursively search for all files matching the given extension + for pattern in PYTHON_EXTENSIONS: + yield from root.rglob("*" + pattern) + + +def _should_skip_file(file): + # pyc files are importable, but not if they are in __pycache__ + return file.name.endswith(".pyc") and file.parent.name == "__pycache__" + + +def scan_python_files(system_paths, rpm_files): + """ + Scan system paths for Python files and categorize them by ownership. 
+ + :param system_paths: List of paths to scan for Python files + :param rpm_files: Dictionary mapping file paths to RPM package names + :return: Tuple of (rpms_to_check, third_party_unowned_files) where: + - rpms_to_check is a dict mapping RPM names to list of their files + - third_party_unowned_files is a list of files not owned by any RPM + """ + rpms_to_check = defaultdict(list) + third_party_unowned_files = [] + + for path in system_paths: + if not path.is_dir(): + continue + for file in find_python_related(path): + if _should_skip_file(file): + continue + + file_path = str(file) + owner = rpm_files.get(file_path) + if owner: + rpms_to_check[owner].append(file_path) + else: + third_party_unowned_files.append(file_path) + + return rpms_to_check, third_party_unowned_files + + +def identify_unsigned_rpms(rpms_to_check): + """ + Identify which RPMs are third-party (not signed by the distribution). + + :param rpms_to_check: Dictionary mapping RPM names to list of their files + :return: Tuple of (third_party_rpms, third_party_files) where: + - third_party_rpms is a list of third-party RPM package names + - third_party_files is a list of files from third-party RPMs + """ + third_party_rpms = [] + third_party_files = [] + + for rpm_name, files in rpms_to_check.items(): + if not has_package(DistributionSignedRPM, rpm_name): + third_party_rpms.append(rpm_name) + api.current_logger().warning( + 'Found Python files from non-distribution RPM package: {}'.format(rpm_name) + ) + third_party_files.extend(files) + + return third_party_rpms, third_party_files + + +def process(): + """ + Main function to scan for third-party Python modules/RPMs on the target system. + + This function: + 1. Validates the target RHEL version and Python interpreter + 2. Scans system paths for Python files + 3. Identifies third-party RPMs and modules + 4. 
Produces a message if any third-party modules/RPMs are detected + """ + target_version = get_target_major_version() + target_python = get_python_binary_for_rhel(target_version) + + if not target_python: + api.current_logger().info( + "RHEL version {} is not supported for third-party Python modules scanning, " + "skipping check.".format(target_version) + ) + return + + if not is_target_python_present(target_python): + api.current_logger().info( + "Target Python interpreter {} is not installed on the source system, " + "skipping check of 3rd party python modules.".format(target_python) + ) + return + system_paths = get_python_sys_paths(target_python) + rpm_files = identify_files_of_pypackages(system_paths[1:]) + + rpms_to_check, third_party_unowned_files = scan_python_files(system_paths[1:], rpm_files) + + third_party_rpms, third_party_rpm_files = identify_unsigned_rpms(rpms_to_check) + + # Combine all third-party files (unowned + from third-party RPMs) + all_third_party_files = third_party_unowned_files + third_party_rpm_files + + if third_party_rpms or all_third_party_files: + api.current_logger().warning( + 'Found {} third-party RPM package(s) and {} third-party Python file(s) ' + 'for target Python {}'.format( + len(third_party_rpms), len(all_third_party_files), target_python + ) + ) + + if third_party_rpms: + api.current_logger().info( + 'Complete list of third-party RPM packages:{}'.format( + ''.join(_formatted_list_output(third_party_rpms)) + ) + ) + + if all_third_party_files: + api.current_logger().info( + 'Complete list of third-party Python modules:{}'.format( + ''.join(_formatted_list_output(all_third_party_files)) + ) + ) + + api.produce(ThirdPartyTargetPythonModules( + target_python=target_python, + third_party_modules=all_third_party_files, + third_party_rpm_names=third_party_rpms + )) diff --git a/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/tests/test_scan_third_party_target_python_modules.py 
b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/tests/test_scan_third_party_target_python_modules.py new file mode 100644 index 00000000..796185ae --- /dev/null +++ b/repos/system_upgrade/common/actors/scanthirdpartytargetpythonmodules/tests/test_scan_third_party_target_python_modules.py @@ -0,0 +1,136 @@ +from collections import defaultdict, namedtuple +from pathlib import Path + +import pytest + +from leapp.libraries.actor import scanthirdpartytargetpythonmodules +from leapp.libraries.common.testutils import logger_mocked +from leapp.libraries.stdlib import api +from leapp.models import DistributionSignedRPM + +Parent = namedtuple('Parent', ['name']) +MockFile = namedtuple('MockFile', ['name', 'parent', 'path']) + + +def _mock_file_str(self): + return self.path + + +MockFile.__str__ = _mock_file_str + + +@pytest.mark.parametrize('rhel_version,expected_python', [ + ('9', 'python3.9'), + ('10', 'python3.12'), + ('8', None), + ('7', None), + ('', None), + ('invalid', None), + (None, None), +]) +def test_get_python_binary_for_rhel(rhel_version, expected_python): + assert scanthirdpartytargetpythonmodules.get_python_binary_for_rhel(rhel_version) == expected_python + + +@pytest.mark.parametrize('file_name,parent_name,should_skip', [ + ('module.pyc', '__pycache__', True), + ('module.pyc', 'site-packages', False), + ('module.py', '__pycache__', False), + ('module.so', '__pycache__', False), + ('module.py', 'site-packages', False), + ('module.so', 'site-packages', False), +]) +def test_should_skip_file(file_name, parent_name, should_skip): + mock_file = MockFile(name=file_name, parent=Parent(name=parent_name), path='/dummy/path') + assert scanthirdpartytargetpythonmodules._should_skip_file(mock_file) is should_skip + + +def test_scan_python_files(monkeypatch): + system_paths = [Path('/usr/lib/python3.9/site-packages')] + rpm_files = { + '/usr/lib/python3.9/site-packages/rpm_module.py': 'rpm-package', + '/usr/lib/python3.9/site-packages/another.py': 
'another-rpm', + } + + def mock_is_dir(self): + return True + + def mock_find_python_related(root): + files = [ + MockFile('rpm_module.py', Parent('site-packages'), '/usr/lib/python3.9/site-packages/rpm_module.py'), + MockFile('unowned.py', Parent('site-packages'), '/usr/lib/python3.9/site-packages/unowned.py'), + MockFile('another.py', Parent('site-packages'), '/usr/lib/python3.9/site-packages/another.py'), + ] + return iter(files) + + monkeypatch.setattr(Path, 'is_dir', mock_is_dir) + monkeypatch.setattr(scanthirdpartytargetpythonmodules, 'find_python_related', mock_find_python_related) + + rpms_to_check, unowned = scanthirdpartytargetpythonmodules.scan_python_files(system_paths, rpm_files) + + assert 'rpm-package' in rpms_to_check + assert 'another-rpm' in rpms_to_check + assert '/usr/lib/python3.9/site-packages/unowned.py' in unowned + assert len(unowned) == 1 + + +@pytest.mark.parametrize('path_exists,mock_files', [ + (False, None), + (True, [MockFile('module.pyc', Parent('__pycache__'), '/usr/lib/python3.9/site-packages/__pycache__/module.pyc')]), +]) +def test_scan_python_files_filtering(monkeypatch, path_exists, mock_files): + system_paths = [Path('/usr/lib/python3.9/site-packages')] + rpm_files = {} + + def mock_is_dir(self): + return path_exists + + monkeypatch.setattr(Path, 'is_dir', mock_is_dir) + + if mock_files is not None: + def mock_find_python_related(root): + return iter(mock_files) + monkeypatch.setattr(scanthirdpartytargetpythonmodules, 'find_python_related', mock_find_python_related) + + rpms_to_check, unowned = scanthirdpartytargetpythonmodules.scan_python_files(system_paths, rpm_files) + + assert len(rpms_to_check) == 0 + assert len(unowned) == 0 + + +@pytest.mark.parametrize('is_signed,expected_rpm_count,expected_file_count', [ + (False, 1, 2), + (True, 0, 0), +]) +def test_identify_unsigned_rpms(monkeypatch, is_signed, expected_rpm_count, expected_file_count): + rpms_to_check = defaultdict(list) + package_name = 'test-package' + 
rpms_to_check[package_name] = [ + '/path/to/file1.py', + '/path/to/file2.py', + ] + + def mock_has_package(model, pkg_name): + return is_signed + + monkeypatch.setattr(scanthirdpartytargetpythonmodules, 'has_package', mock_has_package) + monkeypatch.setattr(api, 'current_logger', logger_mocked()) + + third_party_rpms, third_party_files = scanthirdpartytargetpythonmodules.identify_unsigned_rpms(rpms_to_check) + + assert len(third_party_rpms) == expected_rpm_count + assert len(third_party_files) == expected_file_count + + if not is_signed: + assert package_name in third_party_rpms + assert '/path/to/file1.py' in third_party_files + assert '/path/to/file2.py' in third_party_files + + +def test_identify_unsigned_rpms_empty_input(): + rpms_to_check = defaultdict(list) + + third_party_rpms, third_party_files = scanthirdpartytargetpythonmodules.identify_unsigned_rpms(rpms_to_check) + + assert len(third_party_rpms) == 0 + assert len(third_party_files) == 0 diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py new file mode 100644 index 00000000..a5e481cb --- /dev/null +++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py @@ -0,0 +1,26 @@ +from leapp.actors import Actor +from leapp.libraries.actor import scanvendorrepofiles +from leapp.models import ( + CustomTargetRepositoryFile, + ActiveVendorList, + VendorCustomTargetRepositoryList, +) +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class ScanVendorRepofiles(Actor): + """ + Load and produce custom repository data from vendor-provided files. + Only those vendors whose source system repoids were found on the system will be included. 
+    """
+
+    name = "scan_vendor_repofiles"
+    consumes = ActiveVendorList
+    produces = (
+        CustomTargetRepositoryFile,
+        VendorCustomTargetRepositoryList,
+    )
+    tags = (FactsPhaseTag, IPUWorkflowTag)
+
+    def process(self):
+        scanvendorrepofiles.process()
diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py
new file mode 100644
index 00000000..84392101
--- /dev/null
+++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py
@@ -0,0 +1,72 @@
+import os
+
+from leapp.libraries.common import repofileutils
+from leapp.libraries.stdlib import api
+from leapp.models import (
+    CustomTargetRepository,
+    CustomTargetRepositoryFile,
+    ActiveVendorList,
+    VendorCustomTargetRepositoryList,
+)
+
+
+VENDORS_DIR = "/etc/leapp/files/vendors.d/"
+REPOFILE_SUFFIX = ".repo"
+
+
+def process():
+    """
+    Produce CustomTargetRepository msgs for the vendor repo files inside the
+    vendors.d directory.
+
+    The CustomTargetRepository messages are produced only if a "from" vendor repository
+    listed inside its map matched one of the repositories active on the system.
+    """
+    if not os.path.isdir(VENDORS_DIR):
+        api.current_logger().debug(
+            "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR)
+        )
+        return
+
+    for repofile_name in os.listdir(VENDORS_DIR):
+        if not repofile_name.endswith(REPOFILE_SUFFIX):
+            continue
+        # Cut the .repo part to get only the name.
+ vendor_name = repofile_name[:-5] + + active_vendors = [] + for vendor_list in api.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + api.current_logger().debug("Active vendor list: {}".format(active_vendors)) + + if vendor_name not in active_vendors: + api.current_logger().debug( + "Vendor {} not in active list, skipping".format(vendor_name) + ) + continue + + full_repo_path = os.path.join(VENDORS_DIR, repofile_name) + parsed_repofile = repofileutils.parse_repofile(full_repo_path) + api.current_logger().debug( + "Vendor {} found in active list, processing file {}".format(vendor_name, repofile_name) + ) + + api.produce(CustomTargetRepositoryFile(file=full_repo_path)) + + custom_vendor_repos = [ + CustomTargetRepository( + repoid=repo.repoid, + name=repo.name, + baseurl=repo.baseurl, + enabled=repo.enabled, + ) for repo in parsed_repofile.data + ] + + api.produce( + VendorCustomTargetRepositoryList(vendor=vendor_name, repos=custom_vendor_repos) + ) + + api.current_logger().info( + "The {} directory exists, vendor repositories loaded.".format(VENDORS_DIR) + ) diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py new file mode 100644 index 00000000..cb5c7ab7 --- /dev/null +++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py @@ -0,0 +1,131 @@ +import os + +from leapp.libraries.actor import scancustomrepofile +from leapp.libraries.common import repofileutils +from leapp.libraries.common.testutils import produce_mocked +from leapp.libraries.stdlib import api + +from leapp.models import (CustomTargetRepository, CustomTargetRepositoryFile, + RepositoryData, RepositoryFile) + + +_REPODATA = [ + RepositoryData(repoid="repo1", name="repo1name", baseurl="repo1url", enabled=True), + RepositoryData(repoid="repo2", name="repo2name", baseurl="repo2url", enabled=False), + 
RepositoryData(repoid="repo3", name="repo3name", enabled=True), + RepositoryData(repoid="repo4", name="repo4name", mirrorlist="mirror4list", enabled=True), +] + +_CUSTOM_REPOS = [ + CustomTargetRepository(repoid="repo1", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2", name="repo2name", baseurl="repo2url", enabled=False), + CustomTargetRepository(repoid="repo3", name="repo3name", baseurl=None, enabled=True), + CustomTargetRepository(repoid="repo4", name="repo4name", baseurl=None, enabled=True), +] + +_CUSTOM_REPO_FILE_MSG = CustomTargetRepositoryFile(file=scancustomrepofile.CUSTOM_REPO_PATH) + + +_TESTING_REPODATA = [ + RepositoryData(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + RepositoryData(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=False), + RepositoryData(repoid="repo3-stable", name="repo3name", enabled=False), + RepositoryData(repoid="repo4-testing", name="repo4name", mirrorlist="mirror4list", enabled=True), +] + +_TESTING_CUSTOM_REPOS_STABLE_TARGET = [ + CustomTargetRepository(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=False), + CustomTargetRepository(repoid="repo3-stable", name="repo3name", baseurl=None, enabled=False), + CustomTargetRepository(repoid="repo4-testing", name="repo4name", baseurl=None, enabled=True), +] + +_TESTING_CUSTOM_REPOS_BETA_TARGET = [ + CustomTargetRepository(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=True), + CustomTargetRepository(repoid="repo3-stable", name="repo3name", baseurl=None, enabled=False), + CustomTargetRepository(repoid="repo4-testing", name="repo4name", baseurl=None, enabled=True), +] + +_PROCESS_STABLE_TARGET = "stable" +_PROCESS_BETA_TARGET = "beta" + + +class 
LoggerMocked(object): + def __init__(self): + self.infomsg = None + self.debugmsg = None + + def info(self, msg): + self.infomsg = msg + + def debug(self, msg): + self.debugmsg = msg + + def __call__(self): + return self + + +def test_no_repofile(monkeypatch): + monkeypatch.setattr(os.path, 'isfile', lambda dummy: False) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file doesn't exist. Nothing to do.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.debugmsg == msg + assert not api.produce.called + + +def test_valid_repofile_exists(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file exists, custom repositories loaded.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.infomsg == msg + assert api.produce.called == len(_CUSTOM_REPOS) + 1 + assert _CUSTOM_REPO_FILE_MSG in api.produce.model_instances + for crepo in _CUSTOM_REPOS: + assert crepo in api.produce.model_instances + + +def test_target_stable_repos(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_TESTING_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + + scancustomrepofile.process(_PROCESS_STABLE_TARGET) + assert api.produce.called == len(_TESTING_CUSTOM_REPOS_STABLE_TARGET) + 1 + for crepo in _TESTING_CUSTOM_REPOS_STABLE_TARGET: + assert crepo in api.produce.model_instances + + +def 
test_target_beta_repos(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_TESTING_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + + scancustomrepofile.process(_PROCESS_BETA_TARGET) + assert api.produce.called == len(_TESTING_CUSTOM_REPOS_BETA_TARGET) + 1 + for crepo in _TESTING_CUSTOM_REPOS_BETA_TARGET: + assert crepo in api.produce.model_instances + + +def test_empty_repofile_exists(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=[]) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file exists, but is empty. 
Nothing to do.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.infomsg == msg + assert not api.produce.called diff --git a/repos/system_upgrade/common/actors/scheduleselinuxrelabeling/tests/test_scheduleselinuxrelabeling.py b/repos/system_upgrade/common/actors/scheduleselinuxrelabeling/tests/test_scheduleselinuxrelabeling.py index 595b9985..8603bd97 100644 --- a/repos/system_upgrade/common/actors/scheduleselinuxrelabeling/tests/test_scheduleselinuxrelabeling.py +++ b/repos/system_upgrade/common/actors/scheduleselinuxrelabeling/tests/test_scheduleselinuxrelabeling.py @@ -9,7 +9,7 @@ from leapp.snactor.fixture import current_actor_context @pytest.mark.skipif( - os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], + os.getenv("DESTRUCTIVE_TESTING", "0").lower() in ["false", "0"], reason='Test disabled by default because it would modify the system', ) def test_schedule_no_relabel(current_actor_context): @@ -19,7 +19,7 @@ def test_schedule_no_relabel(current_actor_context): @pytest.mark.skipif( - os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], + os.getenv("DESTRUCTIVE_TESTING", "0").lower() in ["false", "0"], reason='Test disabled by default because it would modify the system', ) def test_schedule_relabel(current_actor_context): diff --git a/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/actor.py b/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/actor.py index 4856f36a..db8fe8ac 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/actor.py +++ b/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/actor.py @@ -40,9 +40,7 @@ class SELinuxApplyCustom(Actor): return # get list of policy modules after the upgrade - installed_modules = set( - [module[0] for module in selinuxapplycustom.list_selinux_modules()] - ) + installed_modules = {module[0] for module in selinuxapplycustom.list_selinux_modules()} # import custom SElinux modules for semodules in self.consume(SELinuxModules): 
diff --git a/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/tests/component_test_selinuxapplycustom.py b/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/tests/component_test_selinuxapplycustom.py index 8a4665c1..aab18e58 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/tests/component_test_selinuxapplycustom.py +++ b/repos/system_upgrade/common/actors/selinux/selinuxapplycustom/tests/component_test_selinuxapplycustom.py @@ -72,7 +72,7 @@ def destructive_selinux_env(): "Failed to remove SELinux customizations after testing") -@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], +@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", "0").lower() in ["false", "0"], reason='Test disabled by default because it would modify the system') def test_SELinuxApplyCustom(current_actor_context, destructive_selinux_teardown): diff --git a/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/component_test_selinuxcontentscanner.py b/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/component_test_selinuxcontentscanner.py index faa2e1b0..802e038a 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/component_test_selinuxcontentscanner.py +++ b/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/component_test_selinuxcontentscanner.py @@ -76,7 +76,7 @@ def find_semanage_rule(rules, rule): return next((r for r in rules if all(word in r for word in rule)), None) -@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], +@pytest.mark.skipif(os.getenv("DESTRUCTIVE_TESTING", "false") in ["False", "false", "0"], reason='Test disabled by default because it would modify the system') def test_SELinuxContentScanner(current_actor_context, destructive_selinux_env): diff --git a/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/unit_test_selinuxcontentscanner.py 
b/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/unit_test_selinuxcontentscanner.py index 830eeac5..ce3ab7b5 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/unit_test_selinuxcontentscanner.py +++ b/repos/system_upgrade/common/actors/selinux/selinuxcontentscanner/tests/unit_test_selinuxcontentscanner.py @@ -3,7 +3,7 @@ from leapp.libraries.common.config import version from leapp.libraries.stdlib import CalledProcessError -class run_mocked(object): +class run_mocked: def __init__(self): self.args = [] self.called = 0 @@ -39,7 +39,7 @@ class run_mocked(object): return {'stdout': stdout} -class run_mocked_fail(object): +class run_mocked_fail: def __init__(self): self.called = 0 diff --git a/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/component_test_selinuxprepare.py b/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/component_test_selinuxprepare.py index bad1baa2..d124675a 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/component_test_selinuxprepare.py +++ b/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/component_test_selinuxprepare.py @@ -76,7 +76,7 @@ def destructive_selinux_env(): _run_cmd(semodule_command) -@pytest.mark.skipif(os.getenv('DESTRUCTIVE_TESTING', False) in [False, '0'], +@pytest.mark.skipif(os.getenv('DESTRUCTIVE_TESTING', '0').lower() in ['false', '0'], reason='Test disabled by default because it would modify the system') def test_SELinuxPrepare(current_actor_context, semodule_lfull_initial, semanage_export_initial, destructive_selinux_env): diff --git a/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/unit_test_selinuxprepare.py b/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/unit_test_selinuxprepare.py index c1ad06c5..9b234d65 100644 --- a/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/unit_test_selinuxprepare.py +++ 
b/repos/system_upgrade/common/actors/selinux/selinuxprepare/tests/unit_test_selinuxprepare.py @@ -3,7 +3,7 @@ from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import SELinuxModule, SELinuxModules -class run_mocked(object): +class run_mocked: def __init__(self): self.args = [] self.called = 0 diff --git a/repos/system_upgrade/common/actors/setetcreleasever/tests/test_setetcreleasever.py b/repos/system_upgrade/common/actors/setetcreleasever/tests/test_setetcreleasever.py index a14dd2b8..546ce195 100644 --- a/repos/system_upgrade/common/actors/setetcreleasever/tests/test_setetcreleasever.py +++ b/repos/system_upgrade/common/actors/setetcreleasever/tests/test_setetcreleasever.py @@ -16,7 +16,7 @@ from leapp.models import ( CUR_DIR = os.path.dirname(os.path.abspath(__file__)) -class mocked_set_releasever(object): +class mocked_set_releasever: def __init__(self): self.content = None diff --git a/repos/system_upgrade/common/actors/setpermissiveselinux/tests/test_setpermissiveselinux.py b/repos/system_upgrade/common/actors/setpermissiveselinux/tests/test_setpermissiveselinux.py index efa4e550..9acdf39a 100644 --- a/repos/system_upgrade/common/actors/setpermissiveselinux/tests/test_setpermissiveselinux.py +++ b/repos/system_upgrade/common/actors/setpermissiveselinux/tests/test_setpermissiveselinux.py @@ -6,7 +6,7 @@ from leapp.models import SelinuxPermissiveDecision @pytest.mark.skipif( - os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], + os.getenv("DESTRUCTIVE_TESTING", "0").lower() in ["0", "false"], reason='Test disabled by default because it would modify the system') def check_permissive_in_conf(): """ Check if we have set permissive in SElinux conf file """ @@ -19,7 +19,7 @@ def check_permissive_in_conf(): @pytest.mark.skipif( - os.getenv("DESTRUCTIVE_TESTING", False) in [False, "0"], + os.getenv("DESTRUCTIVE_TESTING", "false").lower() in ["0", "false"], reason='Test disabled by default because it would modify the system') def 
test_set_selinux_permissive(current_actor_context): current_actor_context.feed(SelinuxPermissiveDecision(set_permissive=True)) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/actor.py b/repos/system_upgrade/common/actors/setuptargetrepos/actor.py index 91855818..3a7e955b 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/actor.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/actor.py @@ -10,7 +10,8 @@ from leapp.models import ( RHUIInfo, SkippedRepositories, TargetRepositories, - UsedRepositories + UsedRepositories, + VendorCustomTargetRepositoryList ) from leapp.tags import FactsPhaseTag, IPUWorkflowTag @@ -37,7 +38,8 @@ class SetupTargetRepos(Actor): RepositoriesFacts, RepositoriesBlacklisted, RHUIInfo, - UsedRepositories) + UsedRepositories, + VendorCustomTargetRepositoryList) produces = (TargetRepositories, SkippedRepositories) tags = (IPUWorkflowTag, FactsPhaseTag) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py index a6073aa3..41e10247 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py @@ -1,9 +1,11 @@ - from leapp.libraries.actor import setuptargetrepos_repomap -from leapp.libraries.common.config.version import get_source_major_version, get_source_version, get_target_version +from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id +from leapp.libraries.common.config.version import get_source_major_version, get_source_version +from leapp.libraries.common.repomaputils import combine_repomap_messages from leapp.libraries.stdlib import api from leapp.models import ( CustomTargetRepository, + DistroTargetRepository, InstalledRPM, RepositoriesBlacklisted, RepositoriesFacts, @@ -13,8 +15,10 @@ from leapp.models import ( RHUIInfo, 
SkippedRepositories, TargetRepositories, - UsedRepositories + UsedRepositories, + VendorCustomTargetRepositoryList ) +from leapp.utils.deprecation import suppress_deprecation RHUI_CLIENT_REPOIDS_RHEL88_TO_RHEL810 = { 'rhui-microsoft-azure-rhel8-sapapps': 'rhui-microsoft-azure-rhel8-base-sap-apps', @@ -74,19 +78,70 @@ def _get_used_repo_dict(): def _get_mapped_repoids(repomap, src_repoids): mapped_repoids = set() src_maj_ver = get_source_major_version() + src_distro = get_source_distro_id() for repoid in src_repoids: - if repomap.get_pesid_repo_entry(repoid, src_maj_ver): + if repomap.get_pesid_repo_entry(repoid, src_maj_ver, src_distro): mapped_repoids.add(repoid) return mapped_repoids +def _get_vendor_custom_repos(enabled_repos, mapping_list): + # Look at what source repos from the vendor mapping were enabled. + # If any of them are in beta, include vendor's custom repos in the list. + # Otherwise skip them. + + result = [] + + # Build a dict of vendor mappings for easy lookup. + map_dict = {mapping.vendor: mapping for mapping in mapping_list if mapping.vendor} + + for vendor_repolist in api.consume(VendorCustomTargetRepositoryList): + vendor_repomap = map_dict[vendor_repolist.vendor] + + # Find the beta channel repositories for the vendor. + beta_repos = [ + x.repoid for x in vendor_repomap.repositories if x.channel == "beta" + ] + api.current_logger().debug( + "Vendor {} beta repos: {}".format(vendor_repolist.vendor, beta_repos) + ) + + # Are any of the beta repos present and enabled on the system? + if any(rep in beta_repos for rep in enabled_repos): + # If so, use all repos including beta in the upgrade. + vendor_repos = vendor_repolist.repos + else: + # Otherwise filter beta repos out. 
+ vendor_repos = [repo for repo in vendor_repolist.repos if repo.repoid not in beta_repos] + + result.extend([CustomTargetRepository( + repoid=repo.repoid, + name=repo.name, + baseurl=repo.baseurl, + enabled=repo.enabled, + ) for repo in vendor_repos]) + + return result + + +@suppress_deprecation(RHELTargetRepository) def process(): # Load relevant data from messages used_repoids_dict = _get_used_repo_dict() enabled_repoids = _get_enabled_repoids() excluded_repoids = _get_blacklisted_repoids() + + # Remember that we can't just grab one message, each vendor can have its own mapping. + repo_mapping_list = list(api.consume(RepositoriesMapping)) + custom_repos = _get_custom_target_repos() repoids_from_installed_packages = _get_repoids_from_installed_packages() + vendor_repos = _get_vendor_custom_repos(enabled_repoids, repo_mapping_list) + custom_repos.extend(vendor_repos) + + api.current_logger().debug( + "Vendor repolist: {}".format([repo.repoid for repo in vendor_repos]) + ) # Setup repomap handler repo_mappig_msg = next(api.consume(RepositoriesMapping), RepositoriesMapping()) @@ -106,7 +161,7 @@ def process(): # RHEL8.10 use a different repoid for client repository, but the repomapping mechanism cannot distinguish these # as it does not use minor versions. Therefore, we have to hardcode these changes. 
- if get_source_version() == '8.10': + if get_source_distro_id() == 'rhel' and get_source_version() == '8.10': for rhel88_rhui_client_repoid, rhel810_rhui_client_repoid in RHUI_CLIENT_REPOIDS_RHEL88_TO_RHEL810.items(): if rhel810_rhui_client_repoid in repoids_to_map: # Replace RHEL8.10 rhui client repoids with RHEL8.8 repoids, @@ -119,9 +174,9 @@ def process(): default_channels = setuptargetrepos_repomap.get_default_repository_channels(repomap, repoids_to_map) repomap.set_default_channels(default_channels) - # Get target RHEL repoids based on the repomap + # Get target distro repoids based on the repomap expected_repos = repomap.get_expected_target_pesid_repos(repoids_to_map) - target_rhel_repoids = set() + target_distro_repoids = set() for target_pesid, target_pesidrepo in expected_repos.items(): if not target_pesidrepo: # NOTE this could happen only for enabled repositories part of the set, @@ -139,7 +194,7 @@ def process(): if target_pesidrepo.repoid in excluded_repoids: api.current_logger().debug('Skipping the {} repo (excluded).'.format(target_pesidrepo.repoid)) continue - target_rhel_repoids.add(target_pesidrepo.repoid) + target_distro_repoids.add(target_pesidrepo.repoid) # FIXME: this could possibly result into a try to enable multiple repositories # from the same family (pesid). But unless we have a bug in previous actors, @@ -151,23 +206,21 @@ def process(): if repo in excluded_repoids: api.current_logger().debug('Skipping the {} repo from setup task (excluded).'.format(repo)) continue - target_rhel_repoids.add(repo) - - # On 8.10, some RHUI setups have different names than the one computed by repomapping. - # Although such situation could be avoided (having another client repo when a single - # repo can hold more than one RPM), we have to deal with it here. This is not a proper - # solution. 
- if get_target_version() == '8.10': - for pre_810_repoid, post_810_repoid in RHUI_CLIENT_REPOIDS_RHEL88_TO_RHEL810.items(): - if pre_810_repoid in target_rhel_repoids: - target_rhel_repoids.remove(pre_810_repoid) - target_rhel_repoids.add(post_810_repoid) + target_distro_repoids.add(repo) # create the final lists and sort them (for easier testing) - rhel_repos = [RHELTargetRepository(repoid=repoid) for repoid in sorted(target_rhel_repoids)] + if get_target_distro_id() == 'rhel': + rhel_repos = [RHELTargetRepository(repoid=repoid) for repoid in sorted(target_distro_repoids)] + else: + rhel_repos = [] + distro_repos = [DistroTargetRepository(repoid=repoid) for repoid in sorted(target_distro_repoids)] custom_repos = [repo for repo in custom_repos if repo.repoid not in excluded_repoids] custom_repos = sorted(custom_repos, key=lambda x: x.repoid) + api.current_logger().debug( + "Final repolist: {}".format([repo.repoid for repo in custom_repos]) + ) + # produce message about skipped repositories enabled_repoids_with_mapping = _get_mapped_repoids(repomap, enabled_repoids) skipped_repoids = enabled_repoids & set(used_repoids_dict.keys()) - enabled_repoids_with_mapping @@ -179,5 +232,6 @@ def process(): api.produce(TargetRepositories( rhel_repos=rhel_repos, + distro_repos=distro_repos, custom_repos=custom_repos, )) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py index 37be03f1..3286609d 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos_repomap.py @@ -1,4 +1,4 @@ -from leapp.libraries.common.config import get_target_product_channel +from leapp.libraries.common.config import get_source_distro_id, get_target_distro_id, get_target_product_channel from leapp.libraries.common.config.version import 
get_source_major_version, get_target_major_version from leapp.libraries.stdlib import api @@ -18,12 +18,19 @@ def _get_channel_prio(pesid_repo): return priorities.get(pesid_repo.channel, 10) -class RepoMapDataHandler(object): +class RepoMapDataHandler: """ Provide the basic functionality to work with the repository data easily. """ - def __init__(self, repo_map, distro='', cloud_provider='', default_channels=None): + def __init__( + self, + repo_map, + source_distro="", + target_distro="", + cloud_provider="", + default_channels=None, + ): """ Initialize the object based on the given RepositoriesMapping msg. @@ -32,8 +39,10 @@ class RepoMapDataHandler(object): :param repo_map: A valid RepositoryMapping message. :type repo_map: RepositoryMapping - :param distro: Which distribution's mappings to use, default to current - :type distro: str + :param source_distro: The distribution to map repos from, default to current + :type source_distro: str + :param target_distro: The distribution to map repos to, default to current target distro + :type target_distro: str :param default_channels: A list of default channels to use when a target repository equivalent exactly matching a source repository was not found. :type default_channels: List[str] @@ -44,7 +53,9 @@ class RepoMapDataHandler(object): # ideal for work, but there is not any significant impact.. self.repositories = repo_map.repositories self.mapping = repo_map.mapping - self.distro = distro or api.current_actor().configuration.os_release.release_id + + self.source_distro = source_distro or get_source_distro_id() + self.target_distro = target_distro or get_target_distro_id() # FIXME(pstodulk): what about default_channel -> fallback_channel # hardcoded always as ga? instead of list of channels.. # it'd be possibly confusing naming now... 
@@ -89,19 +100,19 @@ class RepoMapDataHandler(object): """ self.default_channels = default_channels - def get_pesid_repo_entry(self, repoid, major_version): + def get_pesid_repo_entry(self, repoid, major_version, distro): """ - Retrieve the PESIDRepositoryEntry that matches the given repoid and OS major version. + Retrieve the PESIDRepositoryEntry that matches the given repoid, distro and OS major version If multiple pesid repo entries with the same repoid were found, the entry with rhui matching the source system's rhui info will be returned. If no entry with matching rhui exists, the CDN one is returned if any. - Note that repositories are automatically filtered based on the specified OS release ID (self.distro). - - :param repoid: RepoID that should the PESIDRepositoryEntry match. + :param repoid: RepoID that the PESIDRepositoryEntry should match. :type repoid: str - :param major_version: RepoID that should the PESIDRepositoryEntry match. + :param major_version: Major version that the PESIDRepositoryEntry should match. :type major_version: str + :param distro: Distro that the PESIDRepositoryEntry should match. + :type distro: str :return: The PESIDRepositoryEntry matching the given repoid and major_version or None if no such entry could be found. :rtype: Optional[PESIDRepositoryEntry] @@ -109,8 +120,8 @@ class RepoMapDataHandler(object): matching_pesid_repos = [] for pesid_repo in self.repositories: # FIXME(pstodulk): Why we do not check actually architecture here? - # It seems obvious we should check it but the fixme comment below - # suggests that it's expected - for not obvious reason. + # It seems obvious we should check it, but it's not clear why we + # don't and investigation might be required. 
# For the investigation: # # check repoids matching various architectures # # check repoids without $arch in substring on how many architectures they are present @@ -119,12 +130,13 @@ class RepoMapDataHandler(object): if ( pesid_repo.repoid == repoid and pesid_repo.major_version == major_version - and pesid_repo.distro == self.distro + and pesid_repo.distro == distro ): matching_pesid_repos.append(pesid_repo) # FIXME: when a PESID is present for multiple architectures, there - # multiple matching repos even though there should really be just one + # are multiple matching repos even though there should really be just + # one, the condition below fails even though it shouldn't if len(matching_pesid_repos) == 1: # Perform no heuristics if only a single pesid repository with matching repoid found return matching_pesid_repos[0] @@ -190,7 +202,7 @@ class RepoMapDataHandler(object): the OS Major version same as the source OS. :rtype: List[PESIDRepositoryEntry] """ - return self.get_pesid_repos(pesid, get_source_major_version(), self.distro) + return self.get_pesid_repos(pesid, get_source_major_version(), self.source_distro) def get_target_pesid_repos(self, pesid): """ @@ -203,7 +215,7 @@ class RepoMapDataHandler(object): the OS Major version same as the target OS. 
:rtype: List[PESIDRepositoryEntry] """ - return self.get_pesid_repos(pesid, get_target_major_version(), self.distro) + return self.get_pesid_repos(pesid, get_target_major_version(), self.target_distro) def _find_repository_target_equivalent(self, src_pesidrepo, target_pesid): """ @@ -223,7 +235,7 @@ class RepoMapDataHandler(object): matches_rhui = candidate.rhui == src_pesidrepo.rhui matches_repo_type = candidate.repo_type == 'rpm' matches_arch = candidate.arch == api.current_actor().configuration.architecture - matches_distro = candidate.distro == self.distro + matches_distro = candidate.distro == self.target_distro if matches_rhui and matches_arch and matches_distro and matches_repo_type: # user can specify in future the specific channel should be @@ -295,7 +307,7 @@ class RepoMapDataHandler(object): # {pesid: target_repo} target_repos_best_candidates = {} for src_repoid in src_repoids: - src_pesidrepo = self.get_pesid_repo_entry(src_repoid, get_source_major_version()) + src_pesidrepo = self.get_pesid_repo_entry(src_repoid, get_source_major_version(), self.source_distro) if not src_pesidrepo: # unmapped or custom repo -> skip this one continue @@ -340,7 +352,9 @@ def get_default_repository_channels(repomap, src_repoids): default_pesid = DEFAULT_PESID[get_source_major_version()] top_prio_pesid_repo = None for repoid in src_repoids: - pesid_repo = repomap.get_pesid_repo_entry(repoid, get_source_major_version()) + pesid_repo = repomap.get_pesid_repo_entry( + repoid, get_source_major_version(), get_source_distro_id() + ) if not pesid_repo or pesid_repo.pesid != default_pesid: continue if not top_prio_pesid_repo or _get_channel_prio(pesid_repo) > _get_channel_prio(top_prio_pesid_repo): diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py index 1b0a3122..30c415c0 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py 
+++ b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_repomapping.py @@ -98,15 +98,15 @@ def test_get_pesid_repo_entry(monkeypatch, repomap_data_for_pesid_repo_retrieval fail_description = ( 'get_pesid_repo_entry method failed to find correct pesid repository that matches given parameters.') for exp_repo in repositories: - result_repo = handler.get_pesid_repo_entry(exp_repo.repoid, exp_repo.major_version) + result_repo = handler.get_pesid_repo_entry(exp_repo.repoid, exp_repo.major_version, exp_repo.distro) assert result_repo == exp_repo, fail_description fail_description = ( 'get_pesid_repo_entry method found a pesid repository, but no repository should match given parameters.') - assert handler.get_pesid_repo_entry('pesid1-repoid', '6') is None, fail_description - assert handler.get_pesid_repo_entry('pesid1-repoid', '8') is None, fail_description - assert handler.get_pesid_repo_entry('pesid1-repoid', '9') is None, fail_description - assert handler.get_pesid_repo_entry('nonexisting-repo', '7') is None, fail_description + assert handler.get_pesid_repo_entry('pesid1-repoid', '6', 'rhel') is None, fail_description + assert handler.get_pesid_repo_entry('pesid1-repoid', '8', 'rhel') is None, fail_description + assert handler.get_pesid_repo_entry('pesid1-repoid', '9', 'rhel') is None, fail_description + assert handler.get_pesid_repo_entry('nonexisting-repo', '7', 'rhel') is None, fail_description @pytest.mark.parametrize('distro', ('rhel', 'centos', 'almalinux')) @@ -117,13 +117,18 @@ def test_get_pesid_repo_entry_distro( Test for the RepoMapDataHandler.get_pesid_repo_entry method. Verifies that the method correctly retrieves PESIDRepositoryEntry that are - matching the OS major version, repoid and the distro. + matching the OS major version, repoid and the distro, regardless of the + actual distro. 
""" monkeypatch.setattr( api, "current_actor", CurrentActorMocked( - arch="x86_64", src_ver="9.6", dst_ver="10.2", release_id=distro + arch="x86_64", + src_ver="9.6", + dst_ver="10.2", + src_distro=distro, + dst_distro=distro, ), ) handler = RepoMapDataHandler(repomap_data_multiple_distros) @@ -138,7 +143,7 @@ def test_get_pesid_repo_entry_distro( ) for exp_repo in repositories: result_repo = handler.get_pesid_repo_entry( - exp_repo.repoid, exp_repo.major_version + exp_repo.repoid, exp_repo.major_version, exp_repo.distro ) assert result_repo == exp_repo, fail_description @@ -307,7 +312,7 @@ def test_get_target_pesid_repos(monkeypatch, repomap_data_for_pesid_repo_retriev have the same major version and distro as the source system. """ monkeypatch.setattr(api, 'current_actor', - CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4', release_id=distro)) + CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4', dst_distro=distro)) handler = RepoMapDataHandler(repomap_data_for_pesid_repo_retrieval) repositories = repomap_data_for_pesid_repo_retrieval.repositories @@ -324,7 +329,7 @@ def test_get_target_pesid_repos(monkeypatch, repomap_data_for_pesid_repo_retriev 'The get_target_pesid_repos method doesn\'t take into account the target system version correctly.' ) monkeypatch.setattr(api, 'current_actor', - CurrentActorMocked(arch='x86_64', src_ver='9.4', dst_ver='10.0', release_id=distro)) + CurrentActorMocked(arch='x86_64', src_ver='9.4', dst_ver='10.0', dst_distro=distro)) # Repeat the same test as above to make sure it respects the target OS major version assert [] == handler.get_target_pesid_repos('pesid3'), fail_description @@ -372,7 +377,7 @@ def test_find_repository_target_equivalent_fullmatch( pesid repo parameters exactly when such repository is available in the repository mapping data. 
""" monkeypatch.setattr(api, 'current_actor', - CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4', release_id=distro)) + CurrentActorMocked(arch='x86_64', src_ver='7.9', dst_ver='8.4', dst_distro=distro)) handler = RepoMapDataHandler(mapping_data_for_find_repository_equiv) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_setuptargetrepos.py b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_setuptargetrepos.py index 1f898e8f..c3ff5f49 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_setuptargetrepos.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/tests/test_setuptargetrepos.py @@ -1,6 +1,5 @@ import pytest -from leapp.libraries import stdlib from leapp.libraries.actor import setuptargetrepos from leapp.libraries.common.testutils import CurrentActorMocked, produce_mocked from leapp.libraries.stdlib import api @@ -15,8 +14,7 @@ from leapp.models import ( RepositoriesSetupTasks, RepositoryData, RepositoryFile, - RPM, - TargetRepositories + RPM ) RH_PACKAGER = 'Red Hat, Inc. ' @@ -100,109 +98,126 @@ def test_repositories_setup_tasks(monkeypatch): assert rhel_repos[0].repoid == 'rhel-8-server-rpms' -@pytest.mark.parametrize('distro_id', ['rhel', 'centos', 'almalinux']) -def test_repos_mapping_for_distro(monkeypatch, distro_id): +@pytest.mark.parametrize('src_distro', ['rhel', 'centos', 'almalinux']) +@pytest.mark.parametrize('dst_distro', ['rhel', 'centos', 'almalinux']) +def test_repos_mapping_for_distro(monkeypatch, src_distro, dst_distro): """ Tests whether actor correctly determines what repositories should be enabled on target based on the information about what repositories are enabled on the source system using - the RepositoriesMapping information for a specific distro. + the RepositoriesMapping information for a specific source and target distro pair. 
""" repos_data = [ - RepositoryData(repoid='{}-7-server-rpms'.format(distro_id), name='{} 7 Server'.format(distro_id)), - RepositoryData(repoid='{}-7-blacklisted-rpms'.format(distro_id), name='{} 7 Blacklisted'.format(distro_id))] + RepositoryData(repoid='{}-8-server-rpms'.format(src_distro), name='{} 8 Server'.format(src_distro)), + RepositoryData(repoid='{}-8-blacklisted-rpms'.format(src_distro), name='{} 8 Blacklisted'.format(src_distro))] repos_files = [RepositoryFile(file='/etc/yum.repos.d/redhat.repo', data=repos_data)] facts = RepositoriesFacts(repositories=repos_files) installed_rpms = InstalledRPM( - items=[mock_package('foreman', '{}-7-for-x86_64-satellite-extras-rpms'.format(distro_id)), - mock_package('foreman-proxy', 'nosuch-{}-7-for-x86_64-satellite-extras-rpms'.format(distro_id))]) + items=[mock_package('foreman', '{}-8-for-x86_64-satellite-extras-rpms'.format(src_distro)), + mock_package('foreman-proxy', 'nosuch-{}-8-for-x86_64-satellite-extras-rpms'.format(src_distro))]) repomap = RepositoriesMapping( - mapping=[RepoMapEntry(source='{0}7-base'.format(distro_id), - target=['{0}8-baseos'.format(distro_id), - '{0}8-appstream'.format(distro_id), - '{0}8-blacklist'.format(distro_id)]), - RepoMapEntry(source='{0}7-satellite-extras'.format(distro_id), - target=['{0}8-satellite-extras'.format(distro_id)])], + mapping=[RepoMapEntry(source='{0}8-base'.format(src_distro), + target=['{0}9-baseos'.format(dst_distro), + '{0}9-appstream'.format(dst_distro), + '{0}9-blacklist'.format(dst_distro)]), + RepoMapEntry(source='{0}8-satellite-extras'.format(src_distro), + target=['{0}9-satellite-extras'.format(dst_distro)])], repositories=[ PESIDRepositoryEntry( - pesid='{0}7-base'.format(distro_id), - repoid='{0}-7-server-rpms'.format(distro_id), - major_version='7', + pesid='{0}8-base'.format(src_distro), + repoid='{0}-8-server-rpms'.format(src_distro), + major_version='8', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=src_distro, 
), PESIDRepositoryEntry( - pesid='{0}8-baseos'.format(distro_id), - repoid='{0}-8-for-x86_64-baseos-htb-rpms'.format(distro_id), - major_version='8', + pesid='{0}9-baseos'.format(dst_distro), + repoid='{0}-9-for-x86_64-baseos-htb-rpms'.format(dst_distro), + major_version='9', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=dst_distro, ), PESIDRepositoryEntry( - pesid='{0}8-appstream'.format(distro_id), - repoid='{0}-8-for-x86_64-appstream-htb-rpms'.format(distro_id), - major_version='8', + pesid='{0}9-appstream'.format(dst_distro), + repoid='{0}-9-for-x86_64-appstream-htb-rpms'.format(dst_distro), + major_version='9', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=dst_distro, ), PESIDRepositoryEntry( - pesid='{0}8-blacklist'.format(distro_id), - repoid='{0}-8-blacklisted-rpms'.format(distro_id), - major_version='8', + pesid='{0}9-blacklist'.format(dst_distro), + repoid='{0}-9-blacklisted-rpms'.format(dst_distro), + major_version='9', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=dst_distro, ), PESIDRepositoryEntry( - pesid='{0}7-satellite-extras'.format(distro_id), - repoid='{0}-7-for-x86_64-satellite-extras-rpms'.format(distro_id), - major_version='7', + pesid='{0}8-satellite-extras'.format(src_distro), + repoid='{0}-8-for-x86_64-satellite-extras-rpms'.format(src_distro), + major_version='8', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=src_distro, ), PESIDRepositoryEntry( - pesid='{0}8-satellite-extras'.format(distro_id), - repoid='{0}-8-for-x86_64-satellite-extras-rpms'.format(distro_id), - major_version='8', + pesid='{0}9-satellite-extras'.format(dst_distro), + repoid='{0}-9-for-x86_64-satellite-extras-rpms'.format(dst_distro), + major_version='9', arch='x86_64', repo_type='rpm', channel='ga', rhui='', - distro=distro_id, + distro=dst_distro, ), ] ) - repos_blacklisted = 
RepositoriesBlacklisted(repoids=['{}-8-blacklisted-rpms'.format(distro_id)]) + repos_blacklisted = RepositoriesBlacklisted(repoids=['{}-9-blacklisted-rpms'.format(dst_distro)]) msgs = [facts, repomap, repos_blacklisted, installed_rpms] - monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=msgs, release_id=distro_id)) + monkeypatch.setattr( + api, + 'current_actor', + CurrentActorMocked(msgs=msgs, src_distro=src_distro, dst_distro=dst_distro), + ) monkeypatch.setattr(api, 'produce', produce_mocked()) setuptargetrepos.process() assert api.produce.called + distro_repos = api.produce.model_instances[0].distro_repos rhel_repos = api.produce.model_instances[0].rhel_repos - assert len(rhel_repos) == 3 + assert len(distro_repos) == 3 + + produced_distro_repoids = {repo.repoid for repo in distro_repos} produced_rhel_repoids = {repo.repoid for repo in rhel_repos} - expected_rhel_repoids = {'{0}-8-for-x86_64-baseos-htb-rpms'.format(distro_id), - '{0}-8-for-x86_64-appstream-htb-rpms'.format(distro_id), - '{0}-8-for-x86_64-satellite-extras-rpms'.format(distro_id)} - assert produced_rhel_repoids == expected_rhel_repoids + + expected_repoids = { + "{0}-9-for-x86_64-baseos-htb-rpms".format(dst_distro), + "{0}-9-for-x86_64-appstream-htb-rpms".format(dst_distro), + "{0}-9-for-x86_64-satellite-extras-rpms".format(dst_distro), + } + + assert produced_distro_repoids == expected_repoids + if dst_distro == 'rhel': + assert len(rhel_repos) == 3 + assert produced_rhel_repoids == expected_repoids + else: + assert not rhel_repos diff --git a/repos/system_upgrade/common/actors/storagescanner/libraries/storagescanner.py b/repos/system_upgrade/common/actors/storagescanner/libraries/storagescanner.py index cae38731..e2d869da 100644 --- a/repos/system_upgrade/common/actors/storagescanner/libraries/storagescanner.py +++ b/repos/system_upgrade/common/actors/storagescanner/libraries/storagescanner.py @@ -35,7 +35,7 @@ def _is_file_readable(path): def _get_cmd_output(cmd, delim, 
expected_len): """ Verify if command exists and return output """ if not any(os.access(os.path.join(path, cmd[0]), os.X_OK) for path in os.environ['PATH'].split(os.pathsep)): - api.current_logger().warning("'%s': command not found" % cmd[0]) + api.current_logger().warning("'%s': command not found", cmd[0]) return try: @@ -45,7 +45,11 @@ def _get_cmd_output(cmd, delim, expected_len): output = subprocess.check_output(cmd, env={'LVM_SUPPRESS_FD_WARNINGS': '1', 'PATH': os.environ['PATH']}) except subprocess.CalledProcessError as e: - api.current_logger().debug("Command '%s' return non-zero exit status: %s" % (" ".join(cmd), e.returncode)) + api.current_logger().debug( + "Command '%s' returned non-zero exit status: %s", + " ".join(cmd), + e.returncode + ) return if bytes is not str: diff --git a/repos/system_upgrade/common/actors/storagescanner/tests/unit_test_storagescanner.py b/repos/system_upgrade/common/actors/storagescanner/tests/unit_test_storagescanner.py index 456e40ec..6879e52a 100644 --- a/repos/system_upgrade/common/actors/storagescanner/tests/unit_test_storagescanner.py +++ b/repos/system_upgrade/common/actors/storagescanner/tests/unit_test_storagescanner.py @@ -268,8 +268,7 @@ def test_get_lsblk_info(monkeypatch): 'crypt', '', '/dev/nvme0n1p1'], ['/dev/nvme0n1p1', '259:1', '0', str(39 * bytes_per_gb), '0', 'part', '', '/dev/nvme0n1'], ] - for output_line_parts in output_lines_split_on_whitespace: - yield output_line_parts + yield from output_lines_split_on_whitespace elif len(cmd) == 5 and cmd[:4] == ['lsblk', '-nr', '--output', 'NAME,KNAME,SIZE']: # We cannot have the output in a list, since the command is called per device. Therefore, we have to map # each device path to its output. 
@@ -460,7 +459,7 @@ def test_get_lvdisplay_info(monkeypatch): def test_get_systemd_mount_info(monkeypatch): - class UdevDeviceMocked(object): + class UdevDeviceMocked: def __init__(self, device_node, path, model, wwn, fs_type, label, uuid): self.device_node = device_node # Simulate udev device attributes that should be queried @@ -482,7 +481,7 @@ def test_get_systemd_mount_info(monkeypatch): return self.device_attributes[attribute] - class UdevContextMocked(object): + class UdevContextMocked: def __init__(self, mocked_devices): self.mocked_devices = mocked_devices diff --git a/repos/system_upgrade/common/actors/systemd/checksystemdbrokensymlinks/tests/test_checksystemdbrokensymlinks.py b/repos/system_upgrade/common/actors/systemd/checksystemdbrokensymlinks/tests/test_checksystemdbrokensymlinks.py index 2364f7a5..a4c0a657 100644 --- a/repos/system_upgrade/common/actors/systemd/checksystemdbrokensymlinks/tests/test_checksystemdbrokensymlinks.py +++ b/repos/system_upgrade/common/actors/systemd/checksystemdbrokensymlinks/tests/test_checksystemdbrokensymlinks.py @@ -20,7 +20,7 @@ def test_report_broken_symlinks(monkeypatch): checksystemdbrokensymlinks._report_broken_symlinks(symlinks) assert created_reports.called - assert all([s in created_reports.report_fields['summary'] for s in symlinks]) + assert all(s in created_reports.report_fields['summary'] for s in symlinks) def test_report_enabled_services_broken_symlinks(monkeypatch): @@ -35,10 +35,10 @@ def test_report_enabled_services_broken_symlinks(monkeypatch): checksystemdbrokensymlinks._report_enabled_services_broken_symlinks(symlinks) assert created_reports.called - assert all([s in created_reports.report_fields['summary'] for s in symlinks]) + assert all(s in created_reports.report_fields['summary'] for s in symlinks) -class ReportBrokenSymlinks(object): +class ReportBrokenSymlinks: def __init__(self): self.symlinks = [] diff --git 
a/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/libraries/repairsystemdsymlinks.py b/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/libraries/repairsystemdsymlinks.py index 3fcf4aa6..a8e801b9 100644 --- a/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/libraries/repairsystemdsymlinks.py +++ b/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/libraries/repairsystemdsymlinks.py @@ -7,7 +7,6 @@ from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import SystemdBrokenSymlinksSource, SystemdBrokenSymlinksTarget, SystemdServicesInfoSource _INSTALLATION_CHANGED = { - '8': ['rngd.service', 'sysstat.service'], '9': [], '10': [], } diff --git a/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/tests/test_repairsystemdsymlinks.py b/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/tests/test_repairsystemdsymlinks.py index 2394df5e..d52abdfa 100644 --- a/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/tests/test_repairsystemdsymlinks.py +++ b/repos/system_upgrade/common/actors/systemd/repairsystemdsymlinks/tests/test_repairsystemdsymlinks.py @@ -1,16 +1,11 @@ from leapp.libraries.actor import repairsystemdsymlinks from leapp.libraries.common import systemd -from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked -from leapp.libraries.stdlib import api, CalledProcessError, run -from leapp.models import ( - SystemdBrokenSymlinksSource, - SystemdBrokenSymlinksTarget, - SystemdServiceFile, - SystemdServicesInfoSource -) +from leapp.libraries.common.testutils import CurrentActorMocked +from leapp.libraries.stdlib import api +from leapp.models import SystemdServiceFile, SystemdServicesInfoSource -class MockedSystemdCmd(object): +class MockedSystemdCmd: def __init__(self): self.units = [] @@ -20,8 +15,16 @@ class MockedSystemdCmd(object): def test_bad_symslinks(monkeypatch): + # there is no _INSTALLATION_CHANGED 
service on RHEL 8 and RHEL 9, but it's + # possible such service will be discovered and added in the future as it + # was on RHEL 7, so let's add such case + monkeypatch.setitem( + repairsystemdsymlinks._INSTALLATION_CHANGED, + "9", ["some.service"], + ) + service_files = [ - SystemdServiceFile(name='rngd.service', state='enabled'), + SystemdServiceFile(name='some.service', state='enabled'), SystemdServiceFile(name='sysstat.service', state='disabled'), SystemdServiceFile(name='hello.service', state='enabled'), SystemdServiceFile(name='world.service', state='disabled'), @@ -36,11 +39,15 @@ def test_bad_symslinks(monkeypatch): monkeypatch.setattr(systemd, 'reenable_unit', reenable_mocked) service_info = SystemdServicesInfoSource(service_files=service_files) - monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(msgs=[service_info])) + monkeypatch.setattr( + api, + "current_actor", + CurrentActorMocked(src_ver="8.10", dst_ver="9.6", msgs=[service_info]), + ) repairsystemdsymlinks._handle_bad_symlinks(service_info.service_files) - assert reenable_mocked.units == ['rngd.service'] + assert reenable_mocked.units == ['some.service'] def test_handle_newly_broken_symlink(monkeypatch): diff --git a/repos/system_upgrade/common/actors/systemd/scansystemdsource/tests/test_scansystemdsource.py b/repos/system_upgrade/common/actors/systemd/scansystemdsource/tests/test_scansystemdsource.py index 7b95a2df..f92657d3 100644 --- a/repos/system_upgrade/common/actors/systemd/scansystemdsource/tests/test_scansystemdsource.py +++ b/repos/system_upgrade/common/actors/systemd/scansystemdsource/tests/test_scansystemdsource.py @@ -73,7 +73,7 @@ _CALL_PROC_ERR = CalledProcessError( ) -class GetOrRaise(object): +class GetOrRaise: def __init__(self, value): self.value = value diff --git a/repos/system_upgrade/common/actors/systemd/scansystemdtarget/tests/test_scansystemdtarget.py b/repos/system_upgrade/common/actors/systemd/scansystemdtarget/tests/test_scansystemdtarget.py index 
227ba61a..2e806e25 100644 --- a/repos/system_upgrade/common/actors/systemd/scansystemdtarget/tests/test_scansystemdtarget.py +++ b/repos/system_upgrade/common/actors/systemd/scansystemdtarget/tests/test_scansystemdtarget.py @@ -73,7 +73,7 @@ _CALL_PROC_ERR = CalledProcessError( ) -class GetOrRaise(object): +class GetOrRaise: def __init__(self, value): self.value = value diff --git a/repos/system_upgrade/common/actors/systemd/setsystemdservicesstates/tests/test_setsystemdservicesstate.py b/repos/system_upgrade/common/actors/systemd/setsystemdservicesstates/tests/test_setsystemdservicesstate.py index fa17a94c..976da6da 100644 --- a/repos/system_upgrade/common/actors/systemd/setsystemdservicesstates/tests/test_setsystemdservicesstate.py +++ b/repos/system_upgrade/common/actors/systemd/setsystemdservicesstates/tests/test_setsystemdservicesstate.py @@ -8,7 +8,7 @@ from leapp.libraries.stdlib import api, CalledProcessError from leapp.models import SystemdServicesTasks -class MockedSystemdCmd(object): +class MockedSystemdCmd: def __init__(self): self.units = [] diff --git a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py index 6964a65b..488b37d4 100644 --- a/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py +++ b/repos/system_upgrade/common/actors/systemd/transitionsystemdservicesstates/tests/test_transitionsystemdservicesstates.py @@ -205,9 +205,7 @@ def test_report_kept_enabled(monkeypatch, tasks, expect_extended_summary): assert created_reports.called if expect_extended_summary: assert extended_summary_str in created_reports.report_fields["summary"] - assert all( - [s in created_reports.report_fields["summary"] for s in tasks.to_enable] - ) + assert all(s in created_reports.report_fields['summary'] for s in 
tasks.to_enable) else: assert extended_summary_str not in created_reports.report_fields["summary"] @@ -238,7 +236,7 @@ def test_report_newly_enabled(monkeypatch): transitionsystemdservicesstates._report_newly_enabled(newly_enabled) assert created_reports.called - assert all([s in created_reports.report_fields["summary"] for s in newly_enabled]) + assert all(s in created_reports.report_fields["summary"] for s in newly_enabled) @pytest.mark.parametrize( diff --git a/repos/system_upgrade/common/actors/systemfacts/actor.py b/repos/system_upgrade/common/actors/systemfacts/actor.py index 59b12c87..85d4a09e 100644 --- a/repos/system_upgrade/common/actors/systemfacts/actor.py +++ b/repos/system_upgrade/common/actors/systemfacts/actor.py @@ -47,7 +47,7 @@ class SystemFactsActor(Actor): GrubCfgBios, Report ) - tags = (IPUWorkflowTag, FactsPhaseTag,) + tags = (IPUWorkflowTag, FactsPhaseTag.Before,) def process(self): self.produce(systemfacts.get_sysctls_status()) diff --git a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py index 5831b979..16405b15 100644 --- a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py +++ b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts.py @@ -61,7 +61,7 @@ def test_aslist(current_actor_libraries): ) def test_get_system_users(monkeypatch, etc_passwd_names, etc_passwd_directory, skipped_user_names): - class MockedPwdEntry(object): + class MockedPwdEntry: def __init__(self, pw_name, pw_uid, pw_gid, pw_dir): self.pw_name = pw_name self.pw_uid = pw_uid @@ -99,7 +99,7 @@ def test_get_system_users(monkeypatch, etc_passwd_names, etc_passwd_directory, s ) def test_get_system_groups(monkeypatch, etc_group_names, skipped_group_names): - class MockedGrpEntry(object): + class MockedGrpEntry: def __init__(self, gr_name, gr_gid, gr_mem): self.gr_name = gr_name self.gr_gid = gr_gid diff --git 
a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_grub.py b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_grub.py index 08552771..7fd774b5 100644 --- a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_grub.py +++ b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_grub.py @@ -4,7 +4,7 @@ from leapp.libraries.actor import systemfacts from leapp.models import DefaultGrub -class RunMocked(object): +class RunMocked: def __init__(self, cmd_result): self.called = 0 self.cmd_result = cmd_result diff --git a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py index b9e0c71d..d36900bd 100644 --- a/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py +++ b/repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py @@ -74,7 +74,7 @@ def test_selinux_disabled(monkeypatch): assert SELinuxFacts(**expected_data) == get_selinux_status() -class MockNoConfigFileOSError(object): +class MockNoConfigFileOSError: def __init__(self): raise OSError diff --git a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py index 55877d05..62a84a85 100644 --- a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py +++ b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py @@ -6,9 +6,9 @@ import shutil from leapp import reporting from leapp.exceptions import StopActorExecution, StopActorExecutionError from leapp.libraries.actor import constants -from leapp.libraries.common import dnfplugin, mounting, overlaygen, repofileutils, rhsm, utils -from leapp.libraries.common.config import get_distro_id, get_env, get_product_type -from leapp.libraries.common.config.version import 
get_target_major_version +from leapp.libraries.common import distro, dnfplugin, mounting, overlaygen, repofileutils, rhsm, utils +from leapp.libraries.common.config import get_env, get_product_type, get_source_distro_id, get_target_distro_id +from leapp.libraries.common.config.version import get_target_major_version, get_target_version from leapp.libraries.common.gpg import get_path_to_gpg_certs, is_nogpgcheck_set from leapp.libraries.stdlib import api, CalledProcessError, config, run from leapp.models import RequiredTargetUserspacePackages # deprecated @@ -17,6 +17,7 @@ from leapp.models import ( CustomTargetRepositoryFile, PkgManagerInfo, RepositoriesFacts, + RHELTargetRepository, RHSMInfo, RHUIInfo, StorageInfo, @@ -57,6 +58,7 @@ from leapp.utils.deprecation import suppress_deprecation PROD_CERTS_FOLDER = 'prod-certs' PERSISTENT_PACKAGE_CACHE_DIR = '/var/lib/leapp/persistent_package_cache' DEDICATED_LEAPP_PART_URL = 'https://access.redhat.com/solutions/7011704' +FMT_LIST_SEPARATOR = '\n - ' def _check_deprecated_rhsm_skip(): @@ -77,7 +79,7 @@ class BrokenSymlinkError(Exception): """Raised when we encounter a broken symlink where we weren't expecting it.""" -class _InputData(object): +class _InputData: def __init__(self): self._consume_data() @@ -149,12 +151,14 @@ def _backup_to_persistent_package_cache(userspace_dir): def _import_gpg_keys(context, install_root_dir, target_major_version): certs_path = get_path_to_gpg_certs() - # Import the RHEL X+1 GPG key to be able to verify the installation of initial packages + # Import the target distro target version GPG key to be able to verify the + # installation of initial packages try: # Import also any other keys provided by the customer in the same directory - for certname in os.listdir(certs_path): - cmd = ['rpm', '--root', install_root_dir, '--import', os.path.join(certs_path, certname)] - context.call(cmd, callback_raw=utils.logging_handler) + for trusted_dir in certs_path: + for certname in 
os.listdir(trusted_dir): + cmd = ['rpm', '--root', install_root_dir, '--import', os.path.join(trusted_dir, certname)] + context.call(cmd, callback_raw=utils.logging_handler) except CalledProcessError as exc: raise StopActorExecutionError( message=( @@ -247,7 +251,9 @@ def prepare_target_userspace(context, userspace_dir, enabled_repos, packages): try: context.call(cmd, callback_raw=utils.logging_handler) except CalledProcessError as exc: - message = 'Unable to install RHEL {} userspace packages.'.format(target_major_version) + message = 'Unable to install target \'{}\' {} userspace packages.'.format( + get_target_distro_id(), target_major_version + ) details = {'details': str(exc), 'stderr': exc.stderr} if 'more space needed on the' in exc.stderr: @@ -260,26 +266,40 @@ def prepare_target_userspace(context, userspace_dir, enabled_repos, packages): # failed since leapp does not support updates behind proxy yet. for manager_info in api.consume(PkgManagerInfo): if manager_info.configured_proxies: - details['details'] = ( - "DNF failed to install userspace packages, likely due to the proxy " - "configuration detected in the YUM/DNF configuration file. " - "Make sure the proxy is properly configured in /etc/dnf/dnf.conf. " - "It's also possible the proxy settings in the DNF configuration file are " - "incompatible with the target system. A compatible configuration can be " - "placed in /etc/leapp/files/dnf.conf which, if present, will be used during " - "the upgrade instead of /etc/dnf/dnf.conf. " - "In such case the configuration will also be applied to the target system." + details['hint'] = ( + 'DNF failed to install userspace packages, likely due to the proxy ' + 'configuration detected in the YUM/DNF configuration file. ' + 'Make sure the proxy is properly configured in /etc/dnf/dnf.conf. ' + 'It\'s also possible the proxy settings in the DNF configuration file are ' + 'incompatible with the target system. 
A compatible configuration can be ' + 'placed in /etc/leapp/files/dnf.conf which, if present, will be used during ' + 'the upgrade instead of /etc/dnf/dnf.conf. ' + 'In such case the configuration will also be applied to the target system.' ) # Similarly if a proxy was set specifically for one of the repositories. for repo_facts in api.consume(RepositoriesFacts): for repo_file in repo_facts.repositories: if any(repo_data.proxy and repo_data.enabled for repo_data in repo_file.data): - details['details'] = ( - "DNF failed to install userspace packages, likely due to the proxy " - "configuration detected in a repository configuration file." + details['hint'] = ( + 'DNF failed to install userspace packages, likely due to the proxy ' + 'configuration detected in a repository configuration file.' ) + if get_source_distro_id() == 'centos' and get_target_distro_id() == 'rhel': + check_rhel_release_hint = ( + 'When upgrading and converting from Centos Stream to Red Hat Enterprise Linux' + ' (RHEL), the automatically determined latest target version of RHEL \'{}\' might' + ' not yet have been released. If so, specify the latest released RHEL version' + ' manually using the --target-version commandline option.' 
+ ).format(get_target_version()) + + if details.get('hint'): + # keep the proxy hint, we don't know which one is the problem + details['hint'] = f"{details['hint']}\n\n{check_rhel_release_hint}" + else: + details['hint'] = check_rhel_release_hint + raise StopActorExecutionError(message=message, details=details) @@ -641,6 +661,7 @@ def _prep_repository_access(context, target_userspace): run(["chroot", target_userspace, "/bin/bash", "-c", "su - -c update-ca-trust"]) if not rhsm.skip_rhsm(): + _copy_certificates(context, target_userspace) run(['rm', '-rf', os.path.join(target_etc, 'rhsm')]) context.copytree_from('/etc/rhsm', os.path.join(target_etc, 'rhsm')) @@ -674,12 +695,13 @@ def _get_product_certificate_path(): """ Retrieve the required / used product certificate for RHSM. - Product certificates are only used on RHEL, on non-RHEL systems the function returns None. + Product certificates are only used for RHEL. Returns None if the target + distro is not RHEL. :return: The path to the product certificate or None on non-RHEL systems :raises: StopActorExecution if a certificate cannot be found """ - if get_distro_id() != 'rhel': + if get_target_distro_id() != 'rhel': return None architecture = api.current_actor().configuration.architecture @@ -777,7 +799,7 @@ def _inhibit_on_duplicate_repos(repofiles): list_separator_fmt = '\n - ' api.current_logger().warning( 'The following repoids are defined multiple times:{0}{1}' - .format(list_separator_fmt, list_separator_fmt.join(duplicates)) + .format(list_separator_fmt, list_separator_fmt.join(sorted(duplicates))) ) reporting.create_report([ @@ -785,7 +807,7 @@ def _inhibit_on_duplicate_repos(repofiles): reporting.Summary( 'The following repositories are defined multiple times inside the' ' "upgrade" container:{0}{1}' - .format(list_separator_fmt, list_separator_fmt.join(duplicates)) + .format(list_separator_fmt, list_separator_fmt.join(sorted(duplicates))) ), reporting.Severity(reporting.Severity.MEDIUM), 
reporting.Groups([reporting.Groups.REPOSITORY]), @@ -814,21 +836,19 @@ def _get_all_available_repoids(context): return set(repoids) -def _get_rhsm_available_repoids(context): - target_major_version = get_target_major_version() +def _inhibit_if_no_base_repos(distro_repoids): # FIXME: check that required repo IDs (baseos, appstream) # + or check that all required RHEL repo IDs are available. - if rhsm.skip_rhsm(): - return set() - # Get the RHSM repos available in the target RHEL container - # TODO: very similar thing should happens for all other repofiles in container - # - repoids = rhsm.get_available_repo_ids(context) + + target_major_version = get_target_major_version() # NOTE(ivasilev) For the moment at least AppStream and BaseOS repos are required. While we are still # contemplating on what can be a generic solution to checking this, let's introduce a minimal check for # at-least-one-appstream and at-least-one-baseos among present repoids - if not repoids or all("baseos" not in ri for ri in repoids) or all("appstream" not in ri for ri in repoids): + no_baseos = all("baseos" not in ri for ri in distro_repoids) + no_appstream = all("appstream" not in ri for ri in distro_repoids) + if no_baseos or no_appstream: reporting.create_report([ + # TODO: Make the report distro agnostic reporting.Title('Cannot find required basic RHEL target repositories.'), reporting.Summary( 'This can happen when a repository ID was entered incorrectly either while using the --enablerepo' @@ -860,21 +880,6 @@ def _get_rhsm_available_repoids(context): title='Preparing for the upgrade') ]) raise StopActorExecution() - return set(repoids) - - -def _get_rhui_available_repoids(context, cloud_repo): - repofiles = repofileutils.get_parsed_repofiles(context) - - # TODO: same refactoring as Issue #486? 
- _inhibit_on_duplicate_repos(repofiles) - repoids = [] - for rfile in repofiles: - if rfile.file == cloud_repo and rfile.data: - repoids = [repo.repoid for repo in rfile.data] - repoids.sort() - break - return set(repoids) def get_copy_location_from_copy_in_task(context_basepath, copy_task): @@ -885,88 +890,109 @@ def get_copy_location_from_copy_in_task(context_basepath, copy_task): return copy_task.dst -def _get_rh_available_repoids(context, indata): +def _get_rhui_available_repoids(context, rhui_info): """ - RH repositories are provided either by RHSM or are stored in the expected repo file provided by - RHUI special packages (every cloud provider has itw own rpm). + Get repoids provided by the RHUI target clients + + :rtype: set[str] """ + # If we are upgrading a RHUI system, check what repositories are provided by the (already installed) target clients + setup_info = rhui_info.target_client_setup_info + target_content_access_files = set() + if setup_info.bootstrap_target_client: + target_content_access_files = _query_rpm_for_pkg_files(context, rhui_info.target_client_pkg_names) - rh_repoids = _get_rhsm_available_repoids(context) + def is_repofile(path): + return os.path.dirname(path) == '/etc/yum.repos.d' and os.path.basename(path).endswith('.repo') - # If we are upgrading a RHUI system, check what repositories are provided by the (already installed) target clients - if indata and indata.rhui_info: - setup_info = indata.rhui_info.target_client_setup_info - target_content_access_files = set() - if setup_info.bootstrap_target_client: - target_content_access_files = _query_rpm_for_pkg_files(context, indata.rhui_info.target_client_pkg_names) + def extract_repoid_from_line(line): + return line.split(':', 1)[1].strip() - def is_repofile(path): - return os.path.dirname(path) == '/etc/yum.repos.d' and os.path.basename(path).endswith('.repo') + target_ver = api.current_actor().configuration.version.target + setup_tasks = 
rhui_info.target_client_setup_info.preinstall_tasks.files_to_copy_into_overlay - def extract_repoid_from_line(line): - return line.split(':', 1)[1].strip() + yum_repos_d = context.full_path('/etc/yum.repos.d') + all_repofiles = {os.path.join(yum_repos_d, path) for path in os.listdir(yum_repos_d) if path.endswith('.repo')} + api.current_logger().debug('(RHUI Setup) All available repofiles: {0}'.format(' '.join(all_repofiles))) - target_ver = api.current_actor().configuration.version.target - setup_tasks = indata.rhui_info.target_client_setup_info.preinstall_tasks.files_to_copy_into_overlay + target_access_repofiles = { + context.full_path(path) for path in target_content_access_files if is_repofile(path) + } - yum_repos_d = context.full_path('/etc/yum.repos.d') - all_repofiles = {os.path.join(yum_repos_d, path) for path in os.listdir(yum_repos_d) if path.endswith('.repo')} - api.current_logger().debug('(RHUI Setup) All available repofiles: {0}'.format(' '.join(all_repofiles))) + # Exclude repofiles used to setup the target rhui access as on some platforms the repos provided by + # the client are not sufficient to install the client into target userspace (GCP) + rhui_setup_repofile_tasks = [task for task in setup_tasks if task.src.endswith('repo')] + rhui_setup_repofiles = ( + get_copy_location_from_copy_in_task(context.base_dir, copy) for copy in rhui_setup_repofile_tasks + ) + rhui_setup_repofiles = {context.full_path(repofile) for repofile in rhui_setup_repofiles} - target_access_repofiles = { - context.full_path(path) for path in target_content_access_files if is_repofile(path) - } + foreign_repofiles = all_repofiles - target_access_repofiles - rhui_setup_repofiles - # Exclude repofiles used to setup the target rhui access as on some platforms the repos provided by - # the client are not sufficient to install the client into target userspace (GCP) - rhui_setup_repofile_tasks = [task for task in setup_tasks if task.src.endswith('repo')] - rhui_setup_repofiles = ( 
- get_copy_location_from_copy_in_task(context.base_dir, copy) for copy in rhui_setup_repofile_tasks - ) - rhui_setup_repofiles = {context.full_path(repofile) for repofile in rhui_setup_repofiles} + api.current_logger().debug( + 'The following repofiles are considered as unknown to' + ' the target RHUI content setup and will be ignored: {0}'.format(' '.join(foreign_repofiles)) + ) - foreign_repofiles = all_repofiles - target_access_repofiles - rhui_setup_repofiles + # Rename non-client repofiles so they will not be recognized when running dnf repolist + for foreign_repofile in foreign_repofiles: + os.rename(foreign_repofile, '{0}.back'.format(foreign_repofile)) - api.current_logger().debug( - 'The following repofiles are considered as unknown to' - ' the target RHUI content setup and will be ignored: {0}'.format(' '.join(foreign_repofiles)) + rhui_repoids = set() + try: + dnf_cmd = [ + 'dnf', 'repolist', + '--releasever', target_ver, '-v', + '--enablerepo', '*', + '--disablerepo', '*-source-*', + '--disablerepo', '*-debug-*', + ] + repolist_result = context.call(dnf_cmd)['stdout'] + repoid_lines = [line for line in repolist_result.split('\n') if line.startswith('Repo-id')] + rhui_repoids.update({extract_repoid_from_line(line) for line in repoid_lines}) + + except CalledProcessError as err: + details = {'err': err.stderr, 'details': str(err)} + raise StopActorExecutionError( + message='Failed to retrieve repoids provided by target RHUI clients.', + details=details ) - # Rename non-client repofiles so they will not be recognized when running dnf repolist + finally: + # Revert the renaming of non-client repofiles for foreign_repofile in foreign_repofiles: - os.rename(foreign_repofile, '{0}.back'.format(foreign_repofile)) + os.rename('{0}.back'.format(foreign_repofile), foreign_repofile) - try: - dnf_cmd = [ - 'dnf', 'repolist', - '--releasever', target_ver, '-v', - '--enablerepo', '*', - '--disablerepo', '*-source-*', - '--disablerepo', '*-debug-*', - ] - 
repolist_result = context.call(dnf_cmd)['stdout'] - repoid_lines = [line for line in repolist_result.split('\n') if line.startswith('Repo-id')] - rhui_repoids = {extract_repoid_from_line(line) for line in repoid_lines} - rh_repoids.update(rhui_repoids) - - except CalledProcessError as err: - details = {'err': err.stderr, 'details': str(err)} - raise StopActorExecutionError( - message='Failed to retrieve repoids provided by target RHUI clients.', - details=details - ) + return rhui_repoids - finally: - # Revert the renaming of non-client repofiles - for foreign_repofile in foreign_repofiles: - os.rename('{0}.back'.format(foreign_repofile), foreign_repofile) - api.current_logger().debug( - 'The following repofiles are considered as provided by RedHat: {0}'.format(' '.join(rh_repoids)) - ) - return rh_repoids +def _get_distro_available_repoids(context, indata): + """ + Get repoids provided by the distribution + + On RHEL: RH repositories are provided either by RHSM or are stored in the + expected repo file provided by RHUI special packages (every cloud + provider has its own rpm). + On other: Repositories are provided in specific repofiles (e.g. centos.repo + and centos-addons.repo on CS) + + :return: A set of repoids provided by distribution + :rtype: set[str] + """ + distro_repoids = distro.get_target_distro_repoids(context) + distro_id = get_target_distro_id() + rhel_and_rhsm = distro_id == 'rhel' and not rhsm.skip_rhsm() + if distro_id != 'rhel' or rhel_and_rhsm: + _inhibit_if_no_base_repos(distro_repoids) + + if indata and indata.rhui_info: + rhui_repoids = _get_rhui_available_repoids(context, indata.rhui_info) + distro_repoids.extend(rhui_repoids) + return set(distro_repoids) + +@suppress_deprecation(RHELTargetRepository) # member of TargetRepositories def gather_target_repositories(context, indata): """ Get available required target repositories and inhibit or raise error if basic checks do not pass. 
@@ -984,17 +1010,33 @@ def gather_target_repositories(context, indata): :param context: An instance of a mounting.IsolatedActions class :type context: mounting.IsolatedActions class :return: List of target system repoids - :rtype: List(string) + :rtype: set[str] """ - rh_available_repoids = _get_rh_available_repoids(context, indata) - all_available_repoids = _get_all_available_repoids(context) - target_repoids = [] - missing_custom_repoids = [] + distro_repoids = _get_distro_available_repoids(context, indata) + if distro_repoids: + api.current_logger().info( + "The following repoids are considered as provided by the '{}' distribution:{}{}".format( + get_target_distro_id(), + FMT_LIST_SEPARATOR, + FMT_LIST_SEPARATOR.join(sorted(distro_repoids)), + ) + ) + else: + api.current_logger().warning( + "No repoids provided by the {} distribution have been discovered".format( + get_target_distro_id() + ) + ) + + all_repoids = _get_all_available_repoids(context) + + target_repoids = set() + missing_custom_repoids = set() for target_repo in api.consume(TargetRepositories): - for rhel_repo in target_repo.rhel_repos: - if rhel_repo.repoid in rh_available_repoids: - target_repoids.append(rhel_repo.repoid) + for distro_repo in target_repo.distro_repos: + if distro_repo.repoid in distro_repoids: + target_repoids.add(distro_repo.repoid) else: # TODO: We shall report that the RHEL repos that we deem necessary for # the upgrade are not available; but currently it would just print bunch of @@ -1003,12 +1045,16 @@ def gather_target_repositories(context, indata): # of the upgrade. Let's skip it for now until it's clear how we will deal # with it. 
pass + for custom_repo in target_repo.custom_repos: - if custom_repo.repoid in all_available_repoids: - target_repoids.append(custom_repo.repoid) + if custom_repo.repoid in all_repoids: + target_repoids.add(custom_repo.repoid) else: - missing_custom_repoids.append(custom_repo.repoid) - api.current_logger().debug("Gathered target repositories: {}".format(', '.join(target_repoids))) + missing_custom_repoids.add(custom_repo.repoid) + api.current_logger().debug( + "Gathered target repositories: {}".format(", ".join(sorted(target_repoids))) + ) + if not target_repoids: target_major_version = get_target_major_version() reporting.create_report([ @@ -1054,7 +1100,7 @@ def gather_target_repositories(context, indata): ' while using the --enablerepo option of leapp, or in a third party actor that produces a' ' CustomTargetRepositoryMessage.\n' 'The following repositories IDs could not be found in the target configuration:\n' - '- {}\n'.format('\n- '.join(missing_custom_repoids)) + '- {}\n'.format('\n- '.join(sorted(missing_custom_repoids))) ), reporting.Groups([reporting.Groups.REPOSITORY]), reporting.Groups([reporting.Groups.INHIBITOR]), @@ -1071,7 +1117,7 @@ def gather_target_repositories(context, indata): )) ]) raise StopActorExecution() - return set(target_repoids) + return target_repoids def _install_custom_repofiles(context, custom_repofiles): @@ -1129,7 +1175,7 @@ def _gather_target_repositories(context, indata, prod_cert_path): rhsm.set_container_mode(context) rhsm.switch_certificate(context, indata.rhsm_info, prod_cert_path) - if api.current_actor().configuration.os_release.release_id == 'centos': + if get_target_distro_id() == 'centos': adjust_dnf_stream_variable(context) _install_custom_repofiles(context, indata.custom_repofiles) diff --git a/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py b/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py index 7853a7ad..d783843c 
100644 --- a/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py +++ b/repos/system_upgrade/common/actors/targetuserspacecreator/tests/unit_test_targetuserspacecreator.py @@ -11,9 +11,9 @@ import pytest from leapp import models, reporting from leapp.exceptions import StopActorExecution, StopActorExecutionError from leapp.libraries.actor import userspacegen -from leapp.libraries.common import overlaygen, repofileutils, rhsm +from leapp.libraries.common import distro, overlaygen, repofileutils, rhsm from leapp.libraries.common.config import architecture -from leapp.libraries.common.testutils import CurrentActorMocked, logger_mocked, produce_mocked +from leapp.libraries.common.testutils import create_report_mocked, CurrentActorMocked, logger_mocked, produce_mocked from leapp.libraries.stdlib import api, CalledProcessError from leapp.utils.deprecation import suppress_deprecation @@ -36,7 +36,7 @@ def adjust_cwd(): os.chdir(previous_cwd) -class MockedMountingBase(object): +class MockedMountingBase: def __init__(self, **dummy_kwargs): self.called_copytree_from = [] self.target = '' @@ -50,7 +50,8 @@ class MockedMountingBase(object): def __call__(self, **dummy_kwarg): yield self - def call(self, *args, **kwargs): + @staticmethod + def call(*args, **kwargs): return {'stdout': ''} def nspawn(self): @@ -95,8 +96,7 @@ def traverse_structure(structure, root=Path('/')): filepath = root / filename if isinstance(links_to, dict): - for pair in traverse_structure(links_to, filepath): - yield pair + yield from traverse_structure(links_to, root=filepath) else: yield (filepath, links_to) @@ -880,8 +880,9 @@ def test_get_product_certificate_path(monkeypatch, adjust_cwd, result, dst_ver, assert userspacegen._get_product_certificate_path() in result -def test_get_product_certificate_path_nonrhel(monkeypatch): - actor = CurrentActorMocked(release_id='notrhel') +@pytest.mark.parametrize('src_distro', ('rhel', 'centos')) +def 
test_get_product_certificate_path_nonrhel(monkeypatch, src_distro): + actor = CurrentActorMocked(src_distro=src_distro, dst_distro='notrhel') monkeypatch.setattr(userspacegen.api, 'current_actor', actor) path = userspacegen._get_product_certificate_path() assert path is None @@ -924,7 +925,7 @@ _SAEE = StopActorExecutionError _SAE = StopActorExecution -class MockedConsume(object): +class MockedConsume: def __init__(self, *args): self._msgs = [] for arg in args: @@ -1068,10 +1069,11 @@ def test_consume_data(monkeypatch, raised, no_rhsm, testdata): assert raised[1] in err.value.message else: assert userspacegen.api.current_logger.warnmsg - assert any([raised[1] in x for x in userspacegen.api.current_logger.warnmsg]) + assert any(raised[1] in x for x in userspacegen.api.current_logger.warnmsg) @pytest.mark.skip(reason="Currently not implemented in the actor. It's TODO.") +@suppress_deprecation(models.RHELTargetRepository) def test_gather_target_repositories(monkeypatch): monkeypatch.setattr(userspacegen.api, 'current_actor', CurrentActorMocked()) # The available RHSM repos @@ -1104,6 +1106,7 @@ def test_gather_target_repositories_none_available(monkeypatch): assert inhibitors[0].get('title', '') == 'Cannot find required basic RHEL target repositories.' 
+@suppress_deprecation(models.RHELTargetRepository) def test_gather_target_repositories_rhui(monkeypatch): indata = testInData( @@ -1113,7 +1116,9 @@ def test_gather_target_repositories_rhui(monkeypatch): monkeypatch.setattr(userspacegen.api, 'current_actor', CurrentActorMocked()) monkeypatch.setattr(userspacegen, '_get_all_available_repoids', lambda x: []) monkeypatch.setattr( - userspacegen, '_get_rh_available_repoids', lambda x, y: ['rhui-1', 'rhui-2', 'rhui-3'] + userspacegen, + "_get_distro_available_repoids", + lambda dummy_context, dummy_indata: {"rhui-1", "rhui-2", "rhui-3"}, ) monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: True) monkeypatch.setattr( @@ -1122,6 +1127,10 @@ def test_gather_target_repositories_rhui(monkeypatch): rhel_repos=[ models.RHELTargetRepository(repoid='rhui-1'), models.RHELTargetRepository(repoid='rhui-2') + ], + distro_repos=[ + models.DistroTargetRepository(repoid='rhui-1'), + models.DistroTargetRepository(repoid='rhui-2') ] ) ]) @@ -1130,6 +1139,7 @@ def test_gather_target_repositories_rhui(monkeypatch): assert target_repoids == set(['rhui-1', 'rhui-2']) +@suppress_deprecation(models.RHELTargetRepository) def test_gather_target_repositories_baseos_appstream_not_available(monkeypatch): # If the repos that Leapp identifies as required for the upgrade (based on the repo mapping and PES data) are not # available, an exception shall be raised @@ -1188,6 +1198,54 @@ def test_gather_target_repositories_baseos_appstream_not_available(monkeypatch): assert inhibitors[0].get('title', '') == 'Cannot find required basic RHEL target repositories.' +def test__get_distro_available_repoids_norhsm_norhui(monkeypatch): + """ + Empty set should be returned when on rhel and skip_rhsm == True. 
+ """ + monkeypatch.setattr( + userspacegen.api, "current_actor", CurrentActorMocked(release_id="rhel") + ) + monkeypatch.setattr(userspacegen.api.current_actor(), 'produce', produce_mocked()) + + monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: True) + monkeypatch.setattr(distro, 'get_target_distro_repoids', lambda ctx: []) + + indata = testInData(_PACKAGES_MSGS, None, None, _XFS_MSG, _STORAGEINFO_MSG, None) + # NOTE: context is not used without rhsm, for simplicity setting to None + repoids = userspacegen._get_distro_available_repoids(None, indata) + assert repoids == set() + + +@pytest.mark.parametrize( + "distro_id,skip_rhsm", [("rhel", False), ("centos", True), ("almalinux", True)] +) +def test__get_distro_available_repoids_nobaserepos_inhibit( + monkeypatch, distro_id, skip_rhsm +): + """ + Test that get_distro_available repoids reports and raises if there are no base repos. + """ + monkeypatch.setattr( + userspacegen.api, "current_actor", CurrentActorMocked(dst_distro=distro_id) + ) + monkeypatch.setattr(userspacegen.api.current_actor(), 'produce', produce_mocked()) + monkeypatch.setattr(reporting, "create_report", create_report_mocked()) + + monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: skip_rhsm) + monkeypatch.setattr(distro, 'get_target_distro_repoids', lambda ctx: []) + + indata = testInData(_PACKAGES_MSGS, None, None, _XFS_MSG, _STORAGEINFO_MSG, None) + with pytest.raises(StopActorExecution): + # NOTE: context is not used without rhsm, for simplicity setting to None + userspacegen._get_distro_available_repoids(None, indata) + + # TODO adjust the asserts when the report is made distro agnostic + assert reporting.create_report.called == 1 + report = reporting.create_report.reports[0] + assert "Cannot find required basic RHEL target repositories" in report["title"] + assert reporting.Groups.INHIBITOR in report["groups"] + + def mocked_consume_data(): packages = {'dnf', 'dnf-command(config-manager)', 'pkgA', 'pkgB'} rhsm_info = _RHSMINFO_MSG @@ -1333,7 
+1391,7 @@ def test__get_files_owned_by_rpms_recursive(monkeypatch): assert sorted(owned[0:4]) == sorted(out) def has_dbgmsg(substr): - return any([substr in log for log in logger.dbgmsg]) + return any(substr in log for log in logger.dbgmsg) # test a few assert has_dbgmsg( @@ -1368,8 +1426,11 @@ def test_failing_stream_varfile_write(monkeypatch): assert 'Failed to adjust dnf variable' in str(err.value) -@pytest.mark.parametrize("distro,should_adjust", [('rhel', False), ('centos', True)]) -def test_if_adjust_dnf_stream_variable_only_for_centos(monkeypatch, distro, should_adjust): +@pytest.mark.parametrize('src_distro', ('rhel', 'centos')) +@pytest.mark.parametrize("dst_distro,should_adjust", [('rhel', False), ('centos', True)]) +def test_if_adjust_dnf_stream_variable_only_for_centos( + monkeypatch, src_distro, dst_distro, should_adjust +): def do_nothing(*args, **kwargs): pass @@ -1379,7 +1440,11 @@ def test_if_adjust_dnf_stream_variable_only_for_centos(monkeypatch, distro, shou nonlocal adjust_called adjust_called = True - monkeypatch.setattr(userspacegen.api, 'current_actor', CurrentActorMocked(release_id=distro)) + monkeypatch.setattr( + userspacegen.api, + "current_actor", + CurrentActorMocked(src_distro=src_distro, dst_distro=dst_distro), + ) monkeypatch.setattr(userspacegen, 'get_target_major_version', lambda: '10') monkeypatch.setattr(rhsm, 'set_container_mode', do_nothing) monkeypatch.setattr(rhsm, 'switch_certificate', do_nothing) diff --git a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py b/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py index 6377f767..4c5420f6 100644 --- a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py +++ b/repos/system_upgrade/common/actors/trustedgpgkeysscanner/libraries/trustedgpgkeys.py @@ -13,13 +13,14 @@ def _get_pubkeys(installed_rpms): pubkeys = get_pubkeys_from_rpms(installed_rpms) db_pubkeys = [key.fingerprint for 
key in pubkeys] certs_path = get_path_to_gpg_certs() - for certname in os.listdir(certs_path): - key_file = os.path.join(certs_path, certname) - fps = get_gpg_fp_from_file(key_file) - for fp in fps: - if fp not in db_pubkeys: - pubkeys.append(GpgKey(fingerprint=fp, rpmdb=False, filename=key_file)) - db_pubkeys += fp + for trusted_dir in certs_path: + for certname in os.listdir(trusted_dir): + key_file = os.path.join(trusted_dir, certname) + fps = get_gpg_fp_from_file(key_file) + for fp in fps: + if fp not in db_pubkeys: + pubkeys.append(GpgKey(fingerprint=fp, rpmdb=False, filename=key_file)) + db_pubkeys += fp return pubkeys diff --git a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/tests/test_trustedgpgkeys.py b/repos/system_upgrade/common/actors/trustedgpgkeysscanner/tests/test_trustedgpgkeys.py index 0d98aad7..b8229d00 100644 --- a/repos/system_upgrade/common/actors/trustedgpgkeysscanner/tests/test_trustedgpgkeys.py +++ b/repos/system_upgrade/common/actors/trustedgpgkeysscanner/tests/test_trustedgpgkeys.py @@ -32,7 +32,7 @@ def _get_test_installed_rmps(fps): return InstalledRPM(items=rpms) -class MockedGetGpgFromFile(object): +class MockedGetGpgFromFile: def __init__(self, file_fps_tuples): # e.g. 
file_fps_tuple = [('/mydir/myfile', ['0000ff31', '0000ff32'])] self._data = {} @@ -40,7 +40,7 @@ class MockedGetGpgFromFile(object): self._data[fname] = fps def get_files(self): - return self._data.keys() # noqa: W1655; pylint: disable=dict-keys-not-iterating + return self._data.keys() def __call__(self, fname): return self._data.get(fname, []) diff --git a/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py b/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py index 27a4a245..93816103 100644 --- a/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py +++ b/repos/system_upgrade/common/actors/updategrubcore/tests/test_updategrubcore.py @@ -20,7 +20,7 @@ def raise_call_error(args=None): ) -class run_mocked(object): +class run_mocked: def __init__(self, raise_err=False, raise_callback=raise_call_error): self.called = 0 self.args = [] diff --git a/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py b/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py new file mode 100644 index 00000000..dbf86974 --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py @@ -0,0 +1,72 @@ +import os + +from leapp.actors import Actor +from leapp.models import VendorSignatures, ActiveVendorList +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +VENDORS_DIR = "/etc/leapp/files/vendors.d/" +SIGFILE_SUFFIX = ".sigs" + + +class VendorRepoSignatureScanner(Actor): + """ + Produce VendorSignatures messages for the vendor signature files inside the + VENDORS_DIR directory. + These messages are used to extend the list of packages Leapp will consider + signed and will attempt to upgrade. + + The messages are produced only if a "from" vendor repository + listed inside its map matched one of the repositories active on the system. 
+ """ + + name = 'vendor_repo_signature_scanner' + consumes = (ActiveVendorList) + produces = (VendorSignatures) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + if not os.path.isdir(VENDORS_DIR): + self.log.debug( + "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR) + ) + return + + active_vendors = [] + for vendor_list in self.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + self.log.debug( + "Active vendor list: {}".format(active_vendors) + ) + + for sigfile_name in os.listdir(VENDORS_DIR): + if not sigfile_name.endswith(SIGFILE_SUFFIX): + continue + # Cut the suffix part to get only the name. + vendor_name = sigfile_name[:-5] + + if vendor_name not in active_vendors: + self.log.debug( + "Vendor {} not in active list, skipping".format(vendor_name) + ) + continue + + self.log.debug( + "Vendor {} found in active list, processing file {}".format(vendor_name, sigfile_name) + ) + + full_sigfile_path = os.path.join(VENDORS_DIR, sigfile_name) + with open(full_sigfile_path) as f: + signatures = [line for line in f.read().splitlines() if line] + + self.produce( + VendorSignatures( + vendor=vendor_name, + sigs=signatures, + ) + ) + + self.log.info( + "The {} directory exists, vendor signatures loaded.".format(VENDORS_DIR) + ) diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py new file mode 100644 index 00000000..13256476 --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py @@ -0,0 +1,19 @@ +from leapp.actors import Actor +# from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR +from leapp.libraries.actor.vendorrepositoriesmapping import scan_vendor_repomaps +from leapp.models import VendorSourceRepos, RepositoriesMapping +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class VendorRepositoriesMapping(Actor): + """ + Scan the 
vendor repository mapping files and provide the data to other actors. + """ + + name = "vendor_repositories_mapping" + consumes = () + produces = (RepositoriesMapping, VendorSourceRepos,) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + scan_vendor_repomaps() diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py new file mode 100644 index 00000000..6a41d4e5 --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py @@ -0,0 +1,92 @@ +import os +import json + +from leapp.libraries.common import fetch +from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version +from leapp.libraries.common.repomaputils import RepoMapData +from leapp.libraries.stdlib import api +from leapp.models import VendorSourceRepos, RepositoriesMapping +from leapp.models.fields import ModelViolationError +from leapp.exceptions import StopActorExecutionError + + +VENDORS_DIR = "/etc/leapp/files/vendors.d" +"""The folder containing the vendor repository mapping files.""" + + +def inhibit_upgrade(msg): + raise StopActorExecutionError( + msg, + details={'hint': ('Read documentation at the following link for more' + ' information about how to retrieve the valid file:' + ' https://access.redhat.com/articles/3664871')}) + + +def read_repofile(repofile, repodir): + try: + return json.loads(fetch.read_or_fetch(repofile, directory=repodir, allow_download=False)) + except ValueError: + # The data does not contain a valid json + inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.') + return None + + +def read_repomap_file(repomap_file, read_repofile_func, vendor_name): + json_data = read_repofile_func(repomap_file, VENDORS_DIR) + try: + repomap_data = 
RepoMapData.load_from_dict(json_data) + + source_major = get_source_major_version() + target_major = get_target_major_version() + + api.produce(VendorSourceRepos( + vendor=vendor_name, + source_repoids=repomap_data.get_version_repoids(source_major) + )) + + mapping = repomap_data.get_mappings(source_major, target_major) + valid_major_versions = [source_major, target_major] + + api.produce(RepositoriesMapping( + mapping=mapping, + repositories=repomap_data.get_repositories(valid_major_versions), + vendor=vendor_name + )) + except ModelViolationError as err: + err_message = ( + 'The repository mapping file is invalid: ' + 'the JSON does not match required schema (wrong field type/value): {}. ' + 'Ensure that the current upgrade path is correct and is present in the mappings: {} -> {}' + .format(err, source_major, target_major) + ) + inhibit_upgrade(err_message) + except KeyError as err: + inhibit_upgrade( + 'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err)) + except ValueError as err: + # The error should contain enough information, so we do not need to clarify it further + inhibit_upgrade('The repository mapping file is invalid: {}'.format(err)) + + +def scan_vendor_repomaps(read_repofile_func=read_repofile): + """ + Scan the repository mapping file and produce RepositoriesMapping msg. + + See the description of the actor for more details. + """ + + map_json_suffix = "_map.json" + if os.path.isdir(VENDORS_DIR): + vendor_mapfiles = list(filter(lambda vfile: map_json_suffix in vfile, os.listdir(VENDORS_DIR))) + + for mapfile in vendor_mapfiles: + read_repomap_file(mapfile, read_repofile_func, mapfile[:-len(map_json_suffix)]) + else: + api.current_logger().debug( + "The {} directory doesn't exist. 
Nothing to do.".format(VENDORS_DIR) + ) + # vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR) + # if vendor_repomap_collection: + # self.produce(vendor_repomap_collection) + # for repomap in vendor_repomap_collection.maps: + # self.produce(repomap) diff --git a/repos/system_upgrade/common/actors/verifycheckresults/tests/unit_test_verifycheckresults.py b/repos/system_upgrade/common/actors/verifycheckresults/tests/unit_test_verifycheckresults.py index 6f459a69..6df9ff20 100644 --- a/repos/system_upgrade/common/actors/verifycheckresults/tests/unit_test_verifycheckresults.py +++ b/repos/system_upgrade/common/actors/verifycheckresults/tests/unit_test_verifycheckresults.py @@ -7,7 +7,7 @@ from leapp.libraries.actor import verifycheckresults from leapp.libraries.stdlib import api -class Report(object): +class Report: def __init__(self, message): self.message = message diff --git a/repos/system_upgrade/common/actors/xfsinfoscanner/tests/unit_test_xfsinfoscanner.py b/repos/system_upgrade/common/actors/xfsinfoscanner/tests/unit_test_xfsinfoscanner.py index 71f46b47..34e542a8 100644 --- a/repos/system_upgrade/common/actors/xfsinfoscanner/tests/unit_test_xfsinfoscanner.py +++ b/repos/system_upgrade/common/actors/xfsinfoscanner/tests/unit_test_xfsinfoscanner.py @@ -146,7 +146,7 @@ TEST_XFS_INFO_FTYPE0_MODEL = XFSInfo( ) -class run_mocked(object): +class run_mocked: def __init__(self): self.called = 0 self.args = None diff --git a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json index 24bc93ba..0629d123 100644 --- a/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/almalinux/gpg-signatures.json @@ -8,11 +8,19 @@ ], "obsoleted-keys": { "7": [], - "8": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], "9": [ + 
"gpg-pubkey-d4082792-5b32db75", "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071", "gpg-pubkey-ced7258b-6525146f" ], "10": ["gpg-pubkey-b86b3716-61e69f29"] } + } diff --git a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json index fe85e03c..6dfa5b0f 100644 --- a/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/centos/gpg-signatures.json @@ -2,9 +2,24 @@ "keys": [ "24c6a8a7f4a80eb5", "05b555b38483c65d", - "4eb84e71f2ee9d55" + "4eb84e71f2ee9d55", + "429785e181b961a5", + "d07bf2a08d50eb66", + "6c7cb6ef305d49d6" ], "obsoleted-keys": { + "7": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], "10": ["gpg-pubkey-8483c65d-5ccc5b19"] } } diff --git a/repos/system_upgrade/common/files/distro/cloudlinux/gpg-signatures.json b/repos/system_upgrade/common/files/distro/cloudlinux/gpg-signatures.json new file mode 100644 index 00000000..acad9006 --- /dev/null +++ b/repos/system_upgrade/common/files/distro/cloudlinux/gpg-signatures.json @@ -0,0 +1,22 @@ +{ + "keys": [ + "8c55a6628608cb71", + "d07bf2a08d50eb66", + "429785e181b961a5" + ], + "obsoleted-keys": { + "7": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], + "10": [] + } +} diff --git a/repos/system_upgrade/common/files/distro/ol/gpg-signatures.json b/repos/system_upgrade/common/files/distro/ol/gpg-signatures.json new file mode 100644 index 00000000..a53775cf --- /dev/null +++ 
b/repos/system_upgrade/common/files/distro/ol/gpg-signatures.json @@ -0,0 +1,24 @@ +{ + "keys": [ + "72f97b74ec551f03", + "82562ea9ad986da3", + "bc4d06a08d8b756f", + "429785e181b961a5", + "d07bf2a08d50eb66" + ], + "obsoleted-keys": { + "7": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], + "10": [] + } +} diff --git a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json index 3cc67f82..c1f4acf4 100644 --- a/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json +++ b/repos/system_upgrade/common/files/distro/rhel/gpg-signatures.json @@ -4,7 +4,9 @@ "5326810137017186", "938a80caf21541eb", "fd372689897da07a", - "45689c882fa658e0" + "45689c882fa658e0", + "429785e181b961a5", + "d07bf2a08d50eb66" ], "obsoleted-keys": { "7": [], @@ -13,7 +15,12 @@ "gpg-pubkey-37017186-45761324", "gpg-pubkey-db42a60e-37ea5438" ], - "9": ["gpg-pubkey-d4082792-5b32db75"], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], "10": ["gpg-pubkey-fd431d51-4ae0493b"] } } diff --git a/repos/system_upgrade/common/files/distro/rocky/gpg-signatures.json b/repos/system_upgrade/common/files/distro/rocky/gpg-signatures.json new file mode 100644 index 00000000..f1738e79 --- /dev/null +++ b/repos/system_upgrade/common/files/distro/rocky/gpg-signatures.json @@ -0,0 +1,23 @@ +{ + "keys": [ + "15af5dac6d745a60", + "702d426d350d275d", + "429785e181b961a5", + "d07bf2a08d50eb66" + ], + "obsoleted-keys": { + "7": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + 
"gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], + "10": [] + } +} diff --git a/repos/system_upgrade/common/files/distro/scientific/gpg-signatures.json b/repos/system_upgrade/common/files/distro/scientific/gpg-signatures.json new file mode 100644 index 00000000..df764b53 --- /dev/null +++ b/repos/system_upgrade/common/files/distro/scientific/gpg-signatures.json @@ -0,0 +1,22 @@ +{ + "keys": [ + "b0b4183f192a7d7d", + "429785e181b961a5", + "d07bf2a08d50eb66" + ], + "obsoleted-keys": { + "7": [], + "8": [ + "gpg-pubkey-2fa658e0-45700c69", + "gpg-pubkey-37017186-45761324", + "gpg-pubkey-db42a60e-37ea5438" + ], + "9": [ + "gpg-pubkey-d4082792-5b32db75", + "gpg-pubkey-3abb34f8-5ffd890e", + "gpg-pubkey-6275f250-5e26cb2e", + "gpg-pubkey-73e3b907-6581b071" + ], + "10": [] + } +} diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/279.pem b/repos/system_upgrade/common/files/prod-certs/10.2/279.pem new file mode 100644 index 00000000..76336f82 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/279.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGKDCCBBCgAwIBAgIJALDxRLt/tVBkMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQ0NloXDTQ1MDcw +ODExMjQ0NlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFsyOWJlMDI0 +My03NGU1LTRiNDctYjEwNy1iZjhkNjRjYmNjNDhdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo 
+QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBsTCBrjAJBgNVHRMEAjAAMEMGDCsGAQQBkggJAYIXAQQzDDFSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIFBvd2VyLCBsaXR0bGUgZW5kaWFuMBYGDCsG +AQQBkggJAYIXAgQGDAQxMC4yMBkGDCsGAQQBkggJAYIXAwQJDAdwcGM2NGxlMCkG +DCsGAQQBkggJAYIXBAQZDBdyaGVsLTEwLHJoZWwtMTAtcHBjNjRsZTANBgkqhkiG +9w0BAQsFAAOCAgEAGouNVN3DdCE2cqv3lKMNC3jit5mHi7QQt7fqN/KX4fQfArb6 +IQ9M0GaGJM8W9rj+9s+Q9LOFjys24Pcdb9qbQWpfwvn9FY60uQw3TIXAerJaVb98 +doxrFHjVptm0/VX2xnOa/dY97dmMT4Amwe5+y4RYlMEsYqY8dpJkVuKNdGtCg+Uf +f9hb6XjDqRevADgskHNprXrjF65Ib3a92qJRfttnVUfqqeDkTPntIPbau9hZwLeR +oMl8pn4kMIYLz1IolSAC8yBFe9sLxllGu8qIFqH4Efzx8BOtHkPUH/VqtgvUej+j +boJ0EEpwYjvYbz00mZmJHFNkUheW6cDUPWmMoTzYibPzRTrBcAIfvybpeuPjFGfl +gYZa/DpEG68hlEnSxB4TNpVCx9qfiqXvNcukmeX3Jr7DS1uC2ePBFDQKewx6WdAa +bAmuANmBUB+NX1WMuNTfxxIzxfIoShaChiFRVjsRTkLo1ZPuMkvXOXYfyfW1PKQN +PXHEdY9wprn8ZY2qhMwmE1sDdndNpSxB3boI9FQBUVDzbSG6KwbPfSdmrte+Wdrh +QCIGU+0x7ulF68yOkMkz1spPNgrTXt0efaCSWqUK0nqv1s1Gh2Q6iJaE0yETpSG7 +hFeHpENftckpmuKcJM0v/uBBeIX7X8plrL7Fkm4ND/e61tEiDwvnhxGhtBE= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/362.pem b/repos/system_upgrade/common/files/prod-certs/10.2/362.pem new file mode 100644 index 00000000..ebeb065c --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/362.pem @@ -0,0 +1,36 @@ +-----BEGIN CERTIFICATE----- +MIIGNzCCBB+gAwIBAgIJALDxRLt/tVBTMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQzMVoXDTQ1MDcw 
+ODExMjQzMVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFthMmU1N2Ix +MS03ZDBiLTRiNGYtOGE5ZC03MmRkNGM2NDA2NzJdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBwDCBvTAJBgNVHRMEAjAAMEgGDCsGAQQBkggJAYJqAQQ4DDZSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIFBvd2VyLCBsaXR0bGUgZW5kaWFuIEJldGEw +GwYMKwYBBAGSCAkBgmoCBAsMCTEwLjIgQmV0YTAZBgwrBgEEAZIICQGCagMECQwH +cHBjNjRsZTAuBgwrBgEEAZIICQGCagQEHgwccmhlbC0xMCxyaGVsLTEwLWJldGEt +cHBjNjRsZTANBgkqhkiG9w0BAQsFAAOCAgEAgQC6qqfeW79hSj2S+OfBT5ccNNZa +8blVYtVJU5iTfhX29VQTdGus/ROWGqfgDe8MMOCJyt0eNeqGO70KRJsT3pGeeKcM +UbDfdqzopgD7+/6IL1c1fw/8HijOorW+CMAzeOBdnjMwRjhZxcDlFSqxNCWtngnp +XlDMIlUR3m0rlBwzNfUMk7KYPUESmyEiBWMSKmqRDeiUg3BSP6Ci0x3Ufnf0xTBv +VPVKO/h3ta3+pAYzeFy/ageJ/sR9tLRZQZXzvxYvIY+8/EehafPJCHDHH3uCTpdZ +JAeXDLf2QcOBZnl8uONdev+KaE1JFRCRmqwhliUsARv/t24CY+UBoEzzaj/py2bR +RQqfE5WI1JSdj6HoQ6YHbtR6SF+UedfvMQoSF4zPiXAPNebiIiLkc1rtb/ycUi1f +bUjkRfgRqlDwUcgfHrKhSDp5/XhjgxVXiESNcDe2ltKvVr09qAaPBarLolWeIXkN +n2csdFxyiDZIhk6tFL8lUtpmXWpeEn/iBPwaiBIYoBnIbaqN4OZngwfi2QtTdl+s +9iCuYgbGQiEZnV3g7HLsYXrAagPuJxXs0FMYJZ8x6biREgUQATwTzZMQ8vWRMmYY +kteQBaOCDzNpb8OUgbPxgncl9kgr4NIBn+5oGeMitb+I1XvWqoCFsA7Uii6oygdk +iE+YZEA6e/4057M= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/363.pem 
b/repos/system_upgrade/common/files/prod-certs/10.2/363.pem new file mode 100644 index 00000000..865fbda6 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/363.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGKTCCBBGgAwIBAgIJALDxRLt/tVBSMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQzMVoXDTQ1MDcw +ODExMjQzMVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs0NjJhNDRj +ZC1jNWUzLTQzZGItYTExNy0zZjA5ZGU1ZDRmMzNdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBsjCBrzAJBgNVHRMEAjAAMDoGDCsGAQQBkggJAYJrAQQqDChSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIEFSTSA2NCBCZXRhMBsGDCsGAQQBkggJAYJr +AgQLDAkxMC4yIEJldGEwGQYMKwYBBAGSCAkBgmsDBAkMB2FhcmNoNjQwLgYMKwYB +BAGSCAkBgmsEBB4MHHJoZWwtMTAscmhlbC0xMC1iZXRhLWFhcmNoNjQwDQYJKoZI +hvcNAQELBQADggIBAC/KEEZ85rdWnL/CK9q3uT/d4reNZc1WD5oWYcpj+J31u4sw +pjAvmq/eA6DmzqGjhfEGhwu5MDbVg77OAPCcfm7qqGSDcnjqnO3ZogDjyzat1WS5 +J2uuRcPbF6DIk/LkgIc/FgvSFG8Vc93hM+P56wTzTbnPYSRyJq3BBm8ZjSiFO5jq +V9WOganzxsVKzifTK8RoSdWLyB0JpvL/LZKa4G97ahUctYVilhJBHCgd+uT6/IVn 
+ppETnw4xo6SXg0+O+fC1P+90+GZrWWzeHeHnEgmZ8B+RTDQbx/KHQHU4UhqU5qnT +6VngqL1453IxmlxVxwKlkwzV4SYrQnmEZPvugMhlenbx0T9pJvwg/xvWYJJTGjUy +1l9p0LtyUHmFJxtbq50++oooUdDtQ6RDD5jtxnvWMF5PFLYGxf6gXFFCJVSgwonP +BtqoBH2PWp8/nwumAOquzks41m+bqzaMALhp0GUGTKKTITrM4gsLVHqKh2WTCOPs +s6mdXOyVma/o5Jri8Ec12/HGyIRlQQleb6vcC68PK3X088LZi/zENi2Bq31W5Hip +R03YxVzmjZA3kJsA8Vim4zaG7e6puLGuXmQLawN7oScBFlvVLvZD2ycZsYLOesCz +VSxJkmqDMb6To9RRbSmN0csPFKWNkdD8D5iBei4IaGWXyOB3GGJJ2ME/Qv65 +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/419.pem b/repos/system_upgrade/common/files/prod-certs/10.2/419.pem new file mode 100644 index 00000000..42986ccc --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/419.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGGjCCBAKgAwIBAgIJALDxRLt/tVBjMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQ0NloXDTQ1MDcw +ODExMjQ0NlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs0MjIwNzhj +OS1mY2MzLTQwMWQtOGM2Yi0yZGUwNWRmZGEyN2NdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C 
+AwEAAaOBozCBoDAJBgNVHRMEAjAAMDUGDCsGAQQBkggJAYMjAQQlDCNSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIEFSTSA2NDAWBgwrBgEEAZIICQGDIwIEBgwE +MTAuMjAZBgwrBgEEAZIICQGDIwMECQwHYWFyY2g2NDApBgwrBgEEAZIICQGDIwQE +GQwXcmhlbC0xMCxyaGVsLTEwLWFhcmNoNjQwDQYJKoZIhvcNAQELBQADggIBAAvn +gY6rJIku70uFkod9Xc45wNgPNMkim2p+dCOQXl5GG7rSLqBp/e6MhBOu+VfvXddF +zEndO84pb7Evc3PjjtnBietvvcmcoJjMTrGF2oKcJWJP+x/SKMzN2qPPoQu4QoZj +OTuaemuHLCkA9cnvRj2qxW9ZpINmr6H72jCHPoYGWD8Omupnctyt3/uu/MG7KT4y +8B5hXLmFeuF1vgOkKnoqjZRgZ86xsJ4dig/vLWkAKdsWPlRlV0SICwgVALqFmTge +Hgrz0A6F2BM7f0vYNFUTRv0qQwHR7EA/jEHCQByNc73cvDtHZFyODTqvEBoLFVOw +2fad9K5EID1GKj9U1NGYAlAvEpbrgs2Xd2ugFyN5mtbSLon+VeXm5q9fB/Ca0j7z +vvfdoKsd89R822m2Y+HB0eei63zGE6Ykr4aaTQNjQyTu5K8pUNG/y5UGWIpSM1IR +YqOsdJvCyavBlQ98K7OfL9yqOiZFXB9VkmXPPiT1ljNgpYzK63ZWidjXkpG2I7g1 +YoCIT0JE5xX6x2U5Ia79OFug/g9SwQn6izVYrLCgqqNqeld0WokeFBPnyZkXSYt1 +pzY4HAjXjaDGbF1O4SmoCTtagB2vNmi1wUPazizA5SESifVcYfPeaWRk10PJT9MR +p3EFR/BSg/hvmehuGSEfRNFV8g9Deo3EN1LHEhTY +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/433.pem b/repos/system_upgrade/common/files/prod-certs/10.2/433.pem new file mode 100644 index 00000000..932dbf7a --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/433.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGLDCCBBSgAwIBAgIJALDxRLt/tVBUMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQzMVoXDTQ1MDcw +ODExMjQzMVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFsxNzhhMzJi +NC0xZWNjLTRmZDEtOTA2NS0wMGZkMjQzZDEzYzBdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB 
+RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBtTCBsjAJBgNVHRMEAjAAMEEGDCsGAQQBkggJAYMxAQQxDC9SZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIElCTSB6IFN5c3RlbXMgQmV0YTAbBgwrBgEE +AZIICQGDMQIECwwJMTAuMiBCZXRhMBcGDCsGAQQBkggJAYMxAwQHDAVzMzkweDAs +BgwrBgEEAZIICQGDMQQEHAwacmhlbC0xMCxyaGVsLTEwLWJldGEtczM5MHgwDQYJ +KoZIhvcNAQELBQADggIBAAUwQwSc0A1Q5SiC7N5xSS1ZegZQT1hER7SRDs5p6drK +Riayu5bER7kpQnJc/ww1/iTmHHH/H180pSP+EZEPqCLumqYmf1vW60SAR4BMklyh +QuYqVkJCxA7uloA59cLZcPnEu+xHLfnhSQdTIXhi1uLK960mEIiexCT8xMkQ5E5A +ZUajyEhdLp4ca8K+nUWzSzYQBpGYpkiQtniLZ/i4kzaYTfHpFGJNQQCrPlB2lMCa +vZKseaPlFzExXfq5MJ5IX1lc2RNqeaf22p49Bia6CgVLMagsFnAr909zZ9NAaZWV +kYqjLVMJ5EY25OJS21So0fI//lOsRVBxlfqOS7v9hYBnuLhPuiIiHEaNcQyNBI/7 +DgT5xCmL8IDzvsBJLZ/AqolO1fo5lSVOZ5PCbwIZj7bBZJwf8gTSUu2cuhbN2Gxi +s7R2QFVco+AAPcuoWOISG4cKwX4wDUR+rHqQMCKJM6mQGlnB2OXBwZX1fYo7k82d +b7BygRhEML6INaweUe2Do7v8phz6TXM2lFJCQYnja2lO6GxSlaXgRNb4Rnc6ty79 +O5S6K2g3uEc4Uc8F7echBFAudl9KQqu9il9cb3f0fI+kYX2j9ib4isdF8qIusZVp +F191fHyl1Y6pp4eWKA48uO8Op8uO320UIX8HQnNGi74eEOvCqvZtfKZE5+Za/YT+ +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/479.pem b/repos/system_upgrade/common/files/prod-certs/10.2/479.pem new file mode 100644 index 00000000..2c4b8db2 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/479.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGGDCCBACgAwIBAgIJALDxRLt/tVBmMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI 
+YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQ0NloXDTQ1MDcw +ODExMjQ0NlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs5OTUxYjVm +NC0yZTE4LTQ1OGEtYTc4ZC05NGNkZDhkN2I1ZWVdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBoTCBnjAJBgNVHRMEAjAAMDUGDCsGAQQBkggJAYNfAQQlDCNSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIHg4Nl82NDAWBgwrBgEEAZIICQGDXwIEBgwE +MTAuMjAYBgwrBgEEAZIICQGDXwMECAwGeDg2XzY0MCgGDCsGAQQBkggJAYNfBAQY +DBZyaGVsLTEwLHJoZWwtMTAteDg2XzY0MA0GCSqGSIb3DQEBCwUAA4ICAQDUoyHm +MM07NncLVGO9xO6qa3cxy8NyJO95LScKCAzlx3dHrgW+v1CcxZEzMsV6Rtpch0kH +De61DOCn50JgQ6s7e6Cxk6nMovcxYMi5q4JxYoIlCFTbeRRGdTQv8SmndmSd7rh2 +6mDUWoDxbw1fpeNxiWAOq8IQXrcmrEnVIpOQP4Fc+yNw/Sdsqz23o9VBlP0yBJ4W +a6zGCwRzcisLsNOc+8mRtuirG11Zqm07V0xt2YVXlV13Wu/Dy0qKW49tPJD8WceO +hCC/alSRh1s4YV50gVlA0IRyyezAwU/0Al+lMKfMeqqedg81QGMBiy6qzDjXllcK +XfKYsWC2egkofpvxb5jVU0EXdl0kE+RGQfK3fVq09YwNim41n9qgJTlA1vIBrq8o +1NMwyrbQdfndyGZLSpzWxLHpYUCe2lJomgJTNvrA6+xTnlpfEPOn2zDUxJ7CSfoQ +ZkPhdO4UsrvJOPLt5oY5R5Q6tXLVR7xL24WeUw5FXtzFMibOaE3kT9ib0o8zluMS +ly290tfnl8Wq7fgjFT8mt0NIH/rXC4COBw87EjLbhxUCbEHnbJiOj+JT2QRxKjWg +9icCBbU5TEY0V8rC+vx54JCcx8NGaJDDKDmv6tgEOA0u9YEpGw44fk6RxqeNaysW 
+glkF2dUoSBDKWSqiroYrjEgaFWvdSaalOSJQuA== +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/486.pem b/repos/system_upgrade/common/files/prod-certs/10.2/486.pem new file mode 100644 index 00000000..181b7a98 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/486.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGJzCCBA+gAwIBAgIJALDxRLt/tVBVMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQzMVoXDTQ1MDcw +ODExMjQzMVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs2OWQ5ZGY5 +Yy1mMGFmLTRjY2UtYTRhMi0zZDA4MDM1YjJmYjFdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBsDCBrTAJBgNVHRMEAjAAMDoGDCsGAQQBkggJAYNmAQQqDChSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIHg4Nl82NCBCZXRhMBsGDCsGAQQBkggJAYNm +AgQLDAkxMC4yIEJldGEwGAYMKwYBBAGSCAkBg2YDBAgMBng4Nl82NDAtBgwrBgEE +AZIICQGDZgQEHQwbcmhlbC0xMCxyaGVsLTEwLWJldGEteDg2XzY0MA0GCSqGSIb3 +DQEBCwUAA4ICAQA00Q5BALEU5y1CJwF7ru1xNujrtjZvwOdXeNl+esZIqlZQqITP +Rr6Xww0/mcMcvqEHu/PlJ2xyWC8VYrhZ+/LC6EtTbPEKSDEAHE914MU934pC02tP 
+QE+a7BKsHPGhh4SyvMrZ0vWoxnwcug5g8V5uXNOQYSgnOAHdNQxMeMh8LCHO76np +fjWL7en5dUMWHOB9W1kyZO87f2WBGhFrTyNnFTcg99G/MNMkMD5rLc+Qg8GhY1Zt +8+AN4c5HprFI1cUz8/4osj2ZBW1xxH+mcps2oy3L8UNFceiAdewVpTmwlBN0HEUk +3+NB64+QXLf13EowJnAunJrVms+bQbB1Y2zOL1ymiCLF6iQu4mIdEP2yqzk7lowa +RmuxEOI/S279n+YtilUuWKoeaLcGqPd0rPS5B01M049+KXW0Vv/6OOakA0rltB76 ++RBeE4UTnPCOIBfyVCHdoCTDFaI5GavVZGTr1bLQR9FdIRzQs+nx3VUYf6o2ZHOW +R1I794GHADaLwNfD5b5oo1XwIkuDxcvrF5kFlhnI3X9cVFDhk6uvMTzKEHPsdoYY +Oe2PdTNfyaiAZs5RzE7If+DAK1zCHrO3GHN4tRyQEwG5p/1F91iw2/Kj67zosH38 +Wvm4FSL0ENRPIIUt+p0zT4FBPXOr4YwQGBn0PuaIob5mymAdbUI6Q3CHqA== +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/10.2/72.pem b/repos/system_upgrade/common/files/prod-certs/10.2/72.pem new file mode 100644 index 00000000..3d15c146 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/10.2/72.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGGTCCBAGgAwIBAgIJALDxRLt/tVBlMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjQ0NloXDTQ1MDcw +ODExMjQ0NlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFtmM2M4ZTQ0 +OC1lYmY4LTQxN2MtOTI5My01ZmE5NjU2YTI4YjJdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v 
+5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBojCBnzAJBgNVHRMEAjAAMDsGCysGAQQBkggJAUgBBCwMKlJlZCBIYXQg +RW50ZXJwcmlzZSBMaW51eCBmb3IgSUJNIHogU3lzdGVtczAVBgsrBgEEAZIICQFI +AgQGDAQxMC4yMBYGCysGAQQBkggJAUgDBAcMBXMzOTB4MCYGCysGAQQBkggJAUgE +BBcMFXJoZWwtMTAscmhlbC0xMC1zMzkweDANBgkqhkiG9w0BAQsFAAOCAgEAMT/B +VjMEkJIvwAShBe9XhWj1lNvd58VQNaet8eMebCOy2CN32v50zquH9QgQH4Sf/aBm +X8HfQWl23zAQApCjMr2Sawmjmy05Oj7UGghkbANDvwHV2VKg1nOIoy4oaRvQj86m +Hn7g0t4Iz1/kTCioZkRgj1PULeDKa7u/GKiYgpc1HVjxUUwJsC2JQwjZ1CwRsNPc +AV6sDLveJn0doggYrxbC/+9oGYSxxUrkvaPzMmuvHa5F50NHuwgcNTL47uVkglIV ++GBQaBaOq9c/8yWbqLVVDbXu1JD6zgzGj6BYiziJEpU7cqYfCOF9qPIYTD9AnZLx +43LHz33E6dRRCD9yTuMQEHE3uUoFi/G+yQvf/paSddE5FBX2d35jPSKk5um/x30g +EiFhQKSuHqWIz/cfucwFBQJRHIPj/yN93RqE9u+uJQrSk8KorEg3fVTumBT6bTYh +QprOvJBrV6UZg7oHnUC9byiyHzHRHktHv2HOPGbywbIZd0TM5R0KWaEQEVg0OAJG +KgwEeuiEufQZGq29EZTEtyDpDIP9wNiC4pBHe9B1UpE6EdzfoZWlJb6wbUMRtTqw +RS1ijNAFzvYy2Yuz0/aRi163qek95YwoXeeZn2QbDN+YgFjJZq6pHjNxYTyDthos +uWfveDk3xJRFp+Ja5WbgEK9FxzdFz34OZKFlre4= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/279.pem b/repos/system_upgrade/common/files/prod-certs/9.8/279.pem new file mode 100644 index 00000000..8757b9b0 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/279.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGJTCCBA2gAwIBAgIJALDxRLt/tVAaMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjI0MloXDTQ1MDcw +ODExMjI0MlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFtkZDJiN2Jk +OS01NmJkLTQ3YzctOWQxOS1jNWYyNmE5YWQwZTJdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk 
+sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBrjCBqzAJBgNVHRMEAjAAMEMGDCsGAQQBkggJAYIXAQQzDDFSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIFBvd2VyLCBsaXR0bGUgZW5kaWFuMBUGDCsG +AQQBkggJAYIXAgQFDAM5LjgwGQYMKwYBBAGSCAkBghcDBAkMB3BwYzY0bGUwJwYM +KwYBBAGSCAkBghcEBBcMFXJoZWwtOSxyaGVsLTktcHBjNjRsZTANBgkqhkiG9w0B +AQsFAAOCAgEAEzlRfJCOY32tUEuMY4Z/cBUoJjM3/IoKY+7tA1xjzNWzL0hPWXcP +Ifb2iYXG7rINxRkGk56PiFYReGjxlkhkUWRiciAZc6oHfbpw3IyeWnCgSHJXQqTc +kUEWcUG0CJOlE9OBegvqK+PSydRx2JYhaAFza3zMvIYrdtmOvqhP3/GvuN+nnI4G +F7GgJkOyalbaSTOWlH2+wxuXeAnlEtUTytRFBEsBywuyi6NIpBL6Oj+QoBFQdCOE +Ot2Q3v0N4Q5+aiu5UsYPHs97NV8DPkuA0I2qDZr9j/PgxwftbMt14QHG+G9LW3Cz +DSRIXeKfXGo0GbR7E4ZZBLpp/3LMmH5w/K13skoGtnfWC5x/yoHFRPGmSb1Rrzx2 +kre8EMrXrFFZn4hXu/huQwLTxpg8Hn5pPzDphEksTKQxLeUF0lRj5b3NtqJbQ4he +NDBAA9cgpifdfaFO8Ax/zppiUeoEizAyst4FFGMDC5u4EFPNQJLjh6vc/2rvP1bk +KwH2FRxd/jyCcu6bEF4Fv/O/dpddkYtmSPQs3DLX9g9N30uOdOp9TM3W9lt/HFQE +VpqG7mXTu+f4hx5LFqJXR1pSLzCjVPl03sVi05rjD0Tjkt//pRybpzf/66wMQ1wE +LWoT869L+7EiL5aSPE3dX7D6IsNzqHvIPKuFAO8T2ZXdiwidAlpXlyA= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/362.pem b/repos/system_upgrade/common/files/prod-certs/9.8/362.pem new file mode 100644 index 00000000..cb1b7c00 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/362.pem @@ -0,0 +1,36 @@ +-----BEGIN CERTIFICATE----- 
+MIIGNDCCBBygAwIBAgIJALDxRLt/tVAGMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjIxOVoXDTQ1MDcw +ODExMjIxOVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFswMGIwYzc0 +MS0xMDQyLTRiZGUtOTYyYy1kZjRjOGVlMmNiNjBdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBvTCBujAJBgNVHRMEAjAAMEgGDCsGAQQBkggJAYJqAQQ4DDZSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIFBvd2VyLCBsaXR0bGUgZW5kaWFuIEJldGEw +GgYMKwYBBAGSCAkBgmoCBAoMCDkuOCBCZXRhMBkGDCsGAQQBkggJAYJqAwQJDAdw +cGM2NGxlMCwGDCsGAQQBkggJAYJqBAQcDBpyaGVsLTkscmhlbC05LWJldGEtcHBj +NjRsZTANBgkqhkiG9w0BAQsFAAOCAgEAvtSvgFTBFe30H/WQcgvDjJe2ucROmr6B +AW3OF3hvamcwciLMjzMgVyf4dwRDCsKL0q9cRmFXlMR0H36iNQnYkZU1p/sWfCIB +HtPDPlSr3miELB6FTvod/L4zn+CqbjgN2D3wJJKVfldbQzOTV3kEFed96yB8exTV +ObdCIzyadhtULog9mtUCe+8IxG8oDzpjAaaYfwkyq6tY3VzbvRS76292yFVQe6rG +wc9kxhwCfprnvzH7+dTlbMJlvk7PQB7xH1CvSmrIf7C5tfLf/BrsygFtqnq8KLTx +v644hMGkOvMBdEw5Ry3jMPAlmL+Eyc5751XkN3b5yujXA+T71t1/F0i99DM8XTO8 +WovLAH4KjX+gvHugdsEQs0ujRpxPDgkv9/RFWs0kkBgzhUlFqOGBsi3HyGoqq770 +/e4Fvnj/XxHzs4G3FgiyGnsKLOaKm7eFTwhePsscIckGr/6oq7U0VQF1xOc77I7n 
+uPFdSXso5TUUO2UVhqmeq71hhj000wpw4vKQ71rEfgTtMiC7Et93hpk4y4iwuk9w +mDGTksyr50QNgS9ZNWGLu2JejT3s9RcjROEJ6VOWJxorDWxEY/LXl683FtRXPEM2 +UjHyhx8twhxbIlcD3a8S0R4BfcWCLvhtpdnmOtFGACYMaYd9TAdOG/AZoc/jBOpy +s2OKIQwKXPY= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/363.pem b/repos/system_upgrade/common/files/prod-certs/9.8/363.pem new file mode 100644 index 00000000..fa09ec7c --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/363.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGJjCCBA6gAwIBAgIJALDxRLt/tVAFMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjIxOVoXDTQ1MDcw +ODExMjIxOVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs0MTFmZDc4 +NC00ZTc4LTQ5YWQtOTNiOC0zNjc2OWY0ZDFlZTVdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBrzCBrDAJBgNVHRMEAjAAMDoGDCsGAQQBkggJAYJrAQQqDChSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIEFSTSA2NCBCZXRhMBoGDCsGAQQBkggJAYJr +AgQKDAg5LjggQmV0YTAZBgwrBgEEAZIICQGCawMECQwHYWFyY2g2NDAsBgwrBgEE +AZIICQGCawQEHAwacmhlbC05LHJoZWwtOS1iZXRhLWFhcmNoNjQwDQYJKoZIhvcN 
+AQELBQADggIBAFzGv13LCdYf3XkOt3TjwVwY2ldFVMxf2Sx/uNjLFG4I1mhYwZZ9 +0Pyz7J771yMxyhyKb8rc8XMAYxi8lOKfOpp1PpPRVC+NtKo2pdrbZhWy2qKomfyL +S6jN/hEgg7P6LHGEnvT1Bm9e+BoED3gmOVAmupL4xKv2eRxgXuwuPHrvE6oo63SB +xtrYIo/pmYgVFgl/d7X5vXqerF4pwLR2DwtK6O84DSyVRf35ghNET09GYm6G+URQ +eGWi1/h0YCpS9LCXOOOv/J4MM8zr+NLbDyJWxmaG83/zvAQhX65bzJ0bBtb0avJ0 +cgos6LBCDxt+kmipnAMqz5Cb+HVifgdBz1ep3EcoxHwmwBDpHewq0zNtPgMyjzhi +uwB0inlcCk7JKdjdO36H7RdUYvrM7WEDUKAXtMgOXxr3o6h9v9jZKTfbk5Af91/D +epoMULy0sErnEuzHAq9sdh3HTmDTHsMNcUpxwC+93VGaCGGrbyM2yQtdLg7dhHQK +7d9Z9BJEzKReIy+R354M1jQsLGLQ3B8uY476dmP0G0Q01m86rsJ/gjxa8vrJpafO +t1Up9YexwbVtEtKG7koCz4fwxPv2cauGncuUTdyHJDoS5FpPLMlaWXAfwD0Udbiv +gZke/PD+39I+UPrxtM+XIXGoJPeZdM5Kv0+3/suvKHGqtkFa8YiK2EHA +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/419.pem b/repos/system_upgrade/common/files/prod-certs/9.8/419.pem new file mode 100644 index 00000000..9ad33fd1 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/419.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGFzCCA/+gAwIBAgIJALDxRLt/tVAZMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjI0MloXDTQ1MDcw +ODExMjI0MlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFtiOWY4OWYx +OC0xNjAzLTRlZDUtYTFmOS0xN2YyMmEwNDdlODNdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI 
+yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBoDCBnTAJBgNVHRMEAjAAMDUGDCsGAQQBkggJAYMjAQQlDCNSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIEFSTSA2NDAVBgwrBgEEAZIICQGDIwIEBQwD +OS44MBkGDCsGAQQBkggJAYMjAwQJDAdhYXJjaDY0MCcGDCsGAQQBkggJAYMjBAQX +DBVyaGVsLTkscmhlbC05LWFhcmNoNjQwDQYJKoZIhvcNAQELBQADggIBAFu1LxAU +wPUs7/e6mFAzT/JIzq1fQ6kMHxbrf9nS+g9pO0G0z7Imwj85Rk7qOpJpEjKaLVDy +OJ9fXV20TNidKDvjZuL+F209NLam64MEpy7q40LgOaDYsr9tvGfJJFAQRKRszbrC +/CGj7l/q09ym1FB3LXgpn4vHR8jD7LloGFEWq19UpRLe+L+1Frh16apy03WNYTnk +JLALo274I3+kgO69sEemXZF+WD/O+4ySugY/ImbrIlrY1SAeAWTd/kudLMLVLYnN +JlmB7OPUGE2ZAR0aOTvTeoDBZPz1EGItbJg2hlx4vrhrnGG9kKu+/cDOOAJ7+bgx +fgc64NOoLTSc+9QIgKKhDt5jShXHfFjpwWbJ08/U29bTZmntcRO0h6F0WBS3ptgW +hocfN2nDN7pPvivnrUUo+kRY7jKE57im3+mznHHw97em6YCREuvc/NwLIxi4LSiU +cJgOQ3ltljrFSMKlv4p6evMxlX/QOwgeE+hf/QYjCODoHe/66h5bnKkLGnFdPHxk +6btQfVePn8UpMUO64OgIcPuGyAEXu1m9N/PFL3S5JUVmfjF9COhmZQEW1x5HBF/u +mAfwI79++xKH1nmVsgDUjm5HMVZ3qj0y3miAKtC3Ses9Ym6JawpvPSld3xFGF5Mc +BiYQsv12swSrLy3IzdvJViXRFUFE3dWuVdG1 +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/433.pem b/repos/system_upgrade/common/files/prod-certs/9.8/433.pem new file mode 100644 index 00000000..eaf93d90 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/433.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGKTCCBBGgAwIBAgIJALDxRLt/tVAHMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjIxOVoXDTQ1MDcw +ODExMjIxOVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFsyYjk5MzMx 
+OC0xZjFiLTRlY2UtODJiMS0wODEzYmFjOGFkNGJdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBsjCBrzAJBgNVHRMEAjAAMEEGDCsGAQQBkggJAYMxAQQxDC9SZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIElCTSB6IFN5c3RlbXMgQmV0YTAaBgwrBgEE +AZIICQGDMQIECgwIOS44IEJldGEwFwYMKwYBBAGSCAkBgzEDBAcMBXMzOTB4MCoG +DCsGAQQBkggJAYMxBAQaDBhyaGVsLTkscmhlbC05LWJldGEtczM5MHgwDQYJKoZI +hvcNAQELBQADggIBACjKAgygB5V2mVD3S5snrHjFTpP2x1ODX1GdSgwrXFZ/ehom +hf1SI9zIsmm+s96ns/Gc5BDrqS2TSil083OqG5YZ98ap+NkQPI/XqIhMRSw2z+QK +p1i7e/Si6VZyiitnutCrbX/b1RzWCSOAfIU2m41iptvT7HATw0y/6CQpQNrhZ3wR +TubEIEypmxO5stJt4CO/bqkU3vX+U3FdQdSJWJn3qpvErJ4qNFdwl8zX9WGoaueh +gNbYrz2EWARVbvedp+ylB1VNdpYXQ+LUI/KwHI4Sxizgg16+IxcFoKJVCYNOH7Wh +IoMZc7eW91oAzm57yS36RF/Z50S1x8JHHg2hgev+2czDG9dgRTsLvvAXqsnrUHuD +lRPMDjgaSooUWJmKwIXQ7yJDAPHoxZAXWtMEc1kNLZGEPVDQbT73j4eDOxzZDZrr +agWGoWJ3kuY9AVvv/RTi6z5VWs7ySJER7RxQcGhH8TctysW7gIMjHfgnTGN2bW5U +mV5Ds+/i9AiA9/V+rWWsv8riz+MfEa23/J/EvOdBBCd5MuzsqkXn2gde8WP3cjes +sgqUKQzOy7Rqr5LHT1IQl5SkyYr1QV1InghJ8dh7BjRLvWUaw0uqPRvxX1c6K1l/ +NFsCie9RwuhdE8OBwHuBjB28k3Zs9SPaVzYRe70qwi0epbCrhwcGOkTNfCcz +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/479.pem b/repos/system_upgrade/common/files/prod-certs/9.8/479.pem new file mode 100644 index 00000000..a0ff7061 --- /dev/null +++ 
b/repos/system_upgrade/common/files/prod-certs/9.8/479.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGFTCCA/2gAwIBAgIJALDxRLt/tVAcMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjI0MloXDTQ1MDcw +ODExMjI0MlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFs4ZmM5YTM4 +Ni0wYzkzLTQ0MjctYTlhOC1kMTdkMzAwZGJmZDZdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBnjCBmzAJBgNVHRMEAjAAMDUGDCsGAQQBkggJAYNfAQQlDCNSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIHg4Nl82NDAVBgwrBgEEAZIICQGDXwIEBQwD +OS44MBgGDCsGAQQBkggJAYNfAwQIDAZ4ODZfNjQwJgYMKwYBBAGSCAkBg18EBBYM +FHJoZWwtOSxyaGVsLTkteDg2XzY0MA0GCSqGSIb3DQEBCwUAA4ICAQC6vt8EEay9 +CjdRGHmseXosuo03ub+bUt61uYVpf15IoVUV+7XT6ZHff8cPZbKBjoRbuWNILvR2 +rCdl11bm3prCxfLNJh5u8hqNXv+iIB4k4qhCSrhPFQEf3HNJma2J67U/8Mt7oM4B +RqpZ1CCw9VTHQSB+iraKzE+BFr9kNlQfZu75Clsgv5dZaT1WK5hKiuQy8kc2CBKy +CuiL6i0PK2tzNtNH4ON/tMU3AM+edIiUFV6C376kewwO/omArY6FYmJVcPLKWh3h +TSUt81CmaHmyW+XKJ2pM3f2hfHdq1Lf7lInjgw5Rolyhm/Xqrrj8j19SrUSru/tw +WcmLMhhEyU2/jwfipbbzB9AC3tIXZjKv8539e4omsBmHwHQno1NAjq0+alGxr9pK 
+AZywsuMhiGyznbYdIANGZyMUN3sULIsG649UcEsmzM5q9g1TVyuJH9m+OJSK2PGk +UnorgDlGs1AiJhsqZuW8zxzy3nfQmniO/o/6wZbqlKiyLjQY7Fxa4Rb0hXbBJkZ7 +TkHkjlAObUEkcjg0jUHb8sFRQ7hXx+Tk4tGk549crSZCCg951SITV5By9bAxm7fu +DHGXgY7tOwHII51sfBfryuvIKs+JmzF9Evzssf3kLBSXylyS6pr/8dKN6sF7Pw4M +Fe/gvJ3J/pARSVP41wR6tI0zYvqkO/ULQg== +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/486.pem b/repos/system_upgrade/common/files/prod-certs/9.8/486.pem new file mode 100644 index 00000000..84461ed8 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/486.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGJDCCBAygAwIBAgIJALDxRLt/tVAIMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjIxOVoXDTQ1MDcw +ODExMjIxOVowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFtkZjUwNWIw +ZS02Y2E4LTRkODQtOTY0Mi0wNGRlYTg5NjY0MzNdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa +xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBrTCBqjAJBgNVHRMEAjAAMDoGDCsGAQQBkggJAYNmAQQqDChSZWQgSGF0 +IEVudGVycHJpc2UgTGludXggZm9yIHg4Nl82NCBCZXRhMBoGDCsGAQQBkggJAYNm 
+AgQKDAg5LjggQmV0YTAYBgwrBgEEAZIICQGDZgMECAwGeDg2XzY0MCsGDCsGAQQB +kggJAYNmBAQbDBlyaGVsLTkscmhlbC05LWJldGEteDg2XzY0MA0GCSqGSIb3DQEB +CwUAA4ICAQCNeDKnDjuvHBE0Fa0cMAFM96OWn3b3wTtAx7i9BCaCe4NsRTjEMJTw +jO4KwkfzCQWpOJuFHnOKmVZBXVZZ8aOLCfYTyQZ6nZ1UsiekL7l9sdIFCHrbgKkL +zBFe+8cVtUMwb9f7fORPp960ImtYpvt8ERihHf2LvIBUI+BGRz/D/3NW1ehVggGI +0VCe270sgLSl3y8lR5BrNXtKbigeI4mNMasndf/TDMFeCk5OH4FJ+DyiY0zma2IT +x0PwQmEeI4my1KTmQSUDgIOmHtKbq1tCR2wMIh/ju/HOrvVeOnzEsBgQTiTh92qJ +U7/++7Ayqw/6PfPCd+gOMqIPS1+Aef7w54I+zWyYeHQcXXCxcffPwO7sZV3zQJyh +clfLJv13Oe6G5mB7ZCH+tB4LdaVBURz9G0MkLwXGfTWfnU5N61Kne8hjOriSBWP4 +2FZEP+2BQ/1Z7aIssbQKegdRvvMd/KqJjIeiFtrz9AVSodEUZgJlxiZ9KDSysG18 +hmZcPuk2mc9nwWQ9gHZWzatGs+uONS92QqFvXxlY7TWMDIdlscubcjV/bbDHm69P ++pqGilb3zJz8msBwFpdO+h4l8eUMMMsLzdUdH499q/enZrH3VSdmNtWtoVm9R7rp +khFJ4DdORE9/P5lfqAObt8KNO72BQ2/KcK0FZ1lLxKWG/4dZ5oAdGw== +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/prod-certs/9.8/72.pem b/repos/system_upgrade/common/files/prod-certs/9.8/72.pem new file mode 100644 index 00000000..724e0a62 --- /dev/null +++ b/repos/system_upgrade/common/files/prod-certs/9.8/72.pem @@ -0,0 +1,35 @@ +-----BEGIN CERTIFICATE----- +MIIGFjCCA/6gAwIBAgIJALDxRLt/tVAbMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD +VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI +YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk +IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ +ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTI1MDcwODExMjI0MloXDTQ1MDcw +ODExMjI0MlowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFtlYjMyYzY1 +Ny00OGY0LTRiZjUtYmY3Yy1mYjMwNWU1YjgyMDFdMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk +sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x +8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB +RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I +5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa 
+xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo +QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI +yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl +1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v +5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ +ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C +AwEAAaOBnzCBnDAJBgNVHRMEAjAAMDsGCysGAQQBkggJAUgBBCwMKlJlZCBIYXQg +RW50ZXJwcmlzZSBMaW51eCBmb3IgSUJNIHogU3lzdGVtczAUBgsrBgEEAZIICQFI +AgQFDAM5LjgwFgYLKwYBBAGSCAkBSAMEBwwFczM5MHgwJAYLKwYBBAGSCAkBSAQE +FQwTcmhlbC05LHJoZWwtOS1zMzkweDANBgkqhkiG9w0BAQsFAAOCAgEADCKqieee +Hvj06J4U23K/Wr5zn+d6AtA2vfhpicAYh5jzYqLAJHmB9T5Ql6pFqJ9lMLI2EGSg +jhLD+lzDP9A2vk+rFWK0BEGnqlPrQtM5atTBeihRVRci1ymspPBrLwu+Zu3jromg +I14r86EZwSXpPZLaNUsOjOi4Euc50Q3wsUJGvXCpoU4SgnnAIER3lq9HSNFDZkmp +AjW+VHAhPIOTujm9PhCFIn5bB0jsygHHYyqV7KvQSmxoPTaLMxFpva+Xy0QNKlwg +NXKw/JYAHX1yaskeZviqwZzhKpnvycyEgWF9f7cBD6O8Adxx9qkqXqer7YsQ/wgR +cHjGCAKbV2OTIgyQEDie1gdPLdSUPzrbzJ9C1I85tSJH3ujdACiGG/aHPtspLb3Z +M6265fbXDbXOqjFuP/njDUqal3WgUgw34w4Xi2JLCcqLvHLQhTmZSKiD0SJbRDL1 +smcle/yKhTc4+7zJqQV8faR9LVEAkaLzjG3ZRiTUDq4RASr9tN/A0AfXqggG9nGL +06m6QcXRxHM0OVgLHLksKsj3rG3VX0v3aQm353GW1sxxX0hqFnoOnGWA410GUG9S +rg897hshyti1pn045uhhFjbpxYRKu/JY9VNNyRW0KqL1hyz4TY7OQxJxGDAPX7uJ +7NGSWW9EsYMZNMxEee6br9lWVwGWnc8DWhA= +-----END CERTIFICATE----- diff --git a/repos/system_upgrade/common/files/rhel_upgrade.py b/repos/system_upgrade/common/files/rhel_upgrade.py index 4f76a61d..4e8b380d 100644 --- a/repos/system_upgrade/common/files/rhel_upgrade.py +++ b/repos/system_upgrade/common/files/rhel_upgrade.py @@ -40,7 +40,7 @@ class RhelUpgradeCommand(dnf.cli.Command): summary = 'Plugin for upgrading to the next RHEL major release' def __init__(self, cli): - super(RhelUpgradeCommand, self).__init__(cli) + super().__init__(cli) self.plugin_data = {} @staticmethod @@ -49,7 +49,8 @@ class RhelUpgradeCommand(dnf.cli.Command): metavar="[%s]" % "|".join(CMDS)) 
parser.add_argument('filename') - def _process_entities(self, entities, op, entity_name): + @staticmethod + def _process_entities(entities, op, entity_name): """ Adds list of packages for given operation to the transaction """ @@ -73,7 +74,8 @@ class RhelUpgradeCommand(dnf.cli.Command): with open(self.opts.filename, 'w+') as fo: json.dump(self.plugin_data, fo, sort_keys=True, indent=2) - def _read_aws_region(self, repo): + @staticmethod + def _read_aws_region(repo): region = None if repo.baseurl: # baseurl is tuple (changed by Amazon-id plugin) @@ -86,7 +88,8 @@ class RhelUpgradeCommand(dnf.cli.Command): sys.exit(1) return region - def _fix_rhui_url(self, repo, region): + @staticmethod + def _fix_rhui_url(repo, region): if repo.baseurl: repo.baseurl = tuple( url.replace('REGION', region, 1) for url in repo.baseurl @@ -185,6 +188,7 @@ class RhelUpgradeCommand(dnf.cli.Command): to_install = self.plugin_data['pkgs_info']['to_install'] to_remove = self.plugin_data['pkgs_info']['to_remove'] to_upgrade = self.plugin_data['pkgs_info']['to_upgrade'] + to_reinstall = self.plugin_data['pkgs_info']['to_reinstall'] # Modules to enable self._process_entities(entities=[available_modules_to_enable], @@ -197,6 +201,9 @@ class RhelUpgradeCommand(dnf.cli.Command): self._process_entities(entities=to_install, op=self.base.install, entity_name='Package') # Packages to be upgraded self._process_entities(entities=to_upgrade, op=self.base.upgrade, entity_name='Package') + # Packages to be reinstalled + self._process_entities(entities=to_reinstall, op=self.base.reinstall, entity_name='Package') + self.base.distro_sync() if self.opts.tid[0] == 'check': @@ -222,6 +229,6 @@ class RhelUpgradePlugin(dnf.Plugin): name = 'rhel-upgrade' def __init__(self, base, cli): - super(RhelUpgradePlugin, self).__init__(base, cli) + super().__init__(base, cli) if cli: cli.register_command(RhelUpgradeCommand) diff --git a/repos/system_upgrade/common/files/upgrade_paths.json 
b/repos/system_upgrade/common/files/upgrade_paths.json index 22e0fd7d..d2e893d8 100644 --- a/repos/system_upgrade/common/files/upgrade_paths.json +++ b/repos/system_upgrade/common/files/upgrade_paths.json @@ -2,9 +2,10 @@ "rhel": { "default": { "7.9": ["8.10"], - "8.10": ["9.4", "9.6", "9.7"], + "8.10": ["9.4", "9.6", "9.7", "9.8"], "9.6": ["10.0"], "9.7": ["10.1"], + "9.8": ["10.2"], "7": ["8.10"], "8": ["9.4", "9.6"], "9": ["10.0"] @@ -15,6 +16,7 @@ "8.10": ["9.6", "9.4"], "8": ["9.6", "9.4"], "9.6": ["10.0"], + "9.8": ["10.2"], "9": ["10.0"] } }, @@ -25,14 +27,17 @@ }, "_virtual_versions": { "8": "8.10", - "9": "9.7", - "10": "10.1" + "9": "9.8", + "10": "10.2" } }, "almalinux": { "default": { - "8.10": ["9.0", "9.1", "9.2", "9.3", "9.4", "9.5", "9.6", "9.7"], - "9.7": ["10.0", "10.1"] + "8.10": ["9.0", "9.1", "9.2", "9.3", "9.4", "9.5", "9.6", "9.7", "9.8"], + "9.6": ["10.0", "10.1"], + "9.7": ["10.0", "10.1"], + "8": ["9.0", "9.1", "9.2", "9.3", "9.4", "9.5", "9.6", "9.7", "9.8"], + "9": ["10.0", "10.1"] } } } diff --git a/repos/system_upgrade/common/libraries/config/__init__.py b/repos/system_upgrade/common/libraries/config/__init__.py index 0c737f93..396c524a 100644 --- a/repos/system_upgrade/common/libraries/config/__init__.py +++ b/repos/system_upgrade/common/libraries/config/__init__.py @@ -1,5 +1,6 @@ from leapp.exceptions import StopActorExecutionError from leapp.libraries.stdlib import api +from leapp.utils.deprecation import deprecated # The devel variable for target product channel can also contain 'beta' SUPPORTED_TARGET_CHANNELS = {'ga', 'e4s', 'eus', 'aus'} @@ -99,9 +100,13 @@ def get_consumed_data_stream_id(): return CONSUMED_DATA_STREAM_ID +@deprecated( + since="2025-10-27", + message="Use get_source_distro_id or get_target_distro_id instead.", +) def get_distro_id(): """ - Retrieve the distro ID. + Retrieve the distro ID of the source system. This is the ID string from /etc/os_release. E.g. 
"rhel" for Red Hat Enterprise Linux @@ -109,4 +114,30 @@ def get_distro_id(): :return: The ID string from /etc/os_release :rtype: str """ - return api.current_actor().configuration.os_release.release_id + return api.current_actor().configuration.distro.source + + +def get_source_distro_id(): + """ + Retrieve the distro ID of the source system. + + This is the ID string from /etc/os_release. + E.g. "rhel" for Red Hat Enterprise Linux + + :return: The ID string from /etc/os_release + :rtype: str + """ + return api.current_actor().configuration.distro.source + + +def get_target_distro_id(): + """ + Retrieve the distro ID for the target system. + + The ID follows the naming convention that is used in /etc/os_release files. + E.g. "rhel" for Red Hat Enterprise Linux, "centos" for Centos (Stream), etc. + + :return: The ID for the target system + :rtype: str + """ + return api.current_actor().configuration.distro.target diff --git a/repos/system_upgrade/common/libraries/config/mock_configs.py b/repos/system_upgrade/common/libraries/config/mock_configs.py index a7ee0000..a0daac74 100644 --- a/repos/system_upgrade/common/libraries/config/mock_configs.py +++ b/repos/system_upgrade/common/libraries/config/mock_configs.py @@ -6,7 +6,7 @@ The library is supposed to be used only for testing purposes. Import of the library is expected only inside test files. 
""" -from leapp.models import EnvVar, IPUConfig, IPUSourceToPossibleTargets, OSRelease, Version +from leapp.models import Distro, EnvVar, IPUConfig, IPUSourceToPossibleTargets, OSRelease, Version CONFIG = IPUConfig( leapp_env_vars=[EnvVar(name='LEAPP_DEVEL', value='0')], @@ -27,7 +27,11 @@ CONFIG = IPUConfig( kernel='3.10.0-957.43.1.el7.x86_64', supported_upgrade_paths=[ IPUSourceToPossibleTargets(source_version='7.6', target_versions=['8.0']) - ] + ], + distro=Distro( + source='rhel', + target='rhel', + ), ) CONFIG_NO_NETWORK_RENAMING = IPUConfig( @@ -49,7 +53,11 @@ CONFIG_NO_NETWORK_RENAMING = IPUConfig( kernel='3.10.0-957.43.1.el7.x86_64', supported_upgrade_paths=[ IPUSourceToPossibleTargets(source_version='7.6', target_versions=['8.0']) - ] + ], + distro=Distro( + source='rhel', + target='rhel', + ), ) CONFIG_ALL_SIGNED = IPUConfig( @@ -71,7 +79,11 @@ CONFIG_ALL_SIGNED = IPUConfig( kernel='3.10.0-957.43.1.el7.x86_64', supported_upgrade_paths=[ IPUSourceToPossibleTargets(source_version='7.6', target_versions=['8.0']) - ] + ], + distro=Distro( + source='rhel', + target='rhel', + ), ) CONFIG_S390X = IPUConfig( @@ -92,5 +104,9 @@ CONFIG_S390X = IPUConfig( kernel='3.10.0-957.43.1.el7.x86_64', supported_upgrade_paths=[ IPUSourceToPossibleTargets(source_version='7.6', target_versions=['8.0']) - ] + ], + distro=Distro( + source='rhel', + target='rhel', + ), ) diff --git a/repos/system_upgrade/common/libraries/config/tests/test_version.py b/repos/system_upgrade/common/libraries/config/tests/test_version.py index d51f8098..f36dbc5f 100644 --- a/repos/system_upgrade/common/libraries/config/tests/test_version.py +++ b/repos/system_upgrade/common/libraries/config/tests/test_version.py @@ -94,7 +94,7 @@ def test_matches_source_version(monkeypatch, result, version_list): (False, ['8.2', '8.0']), ]) def test_matches_target_version(monkeypatch, result, version_list): - monkeypatch.setattr(api, 'current_actor', CurrentActorMocked(src_ver='7.6')) + monkeypatch.setattr(api, 
'current_actor', CurrentActorMocked(src_ver='7.6', dst_ver='8.1')) assert version.matches_target_version(*version_list) == result diff --git a/repos/system_upgrade/common/libraries/config/version.py b/repos/system_upgrade/common/libraries/config/version.py index 00ce3ec8..84cbd753 100644 --- a/repos/system_upgrade/common/libraries/config/version.py +++ b/repos/system_upgrade/common/libraries/config/version.py @@ -106,8 +106,7 @@ class _SupportedVersionsDict(dict): def __iter__(self): self._feed_supported_versions() - for d in self.data: - yield d + yield from self.data def __repr__(self): self._feed_supported_versions() @@ -199,9 +198,12 @@ def matches_version(match_list, detected): raise TypeError("Detected version has to be a string " "but provided was {}: '{}'".format(type(detected), detected)) - # If we are on CentOS, and we are provided with a version of the form MAJOR, try to correct - # the version into MAJOR.MINOR using virtual versions - if api.current_actor().configuration.os_release.release_id == 'centos': + # If we are on CentOS, or the target is CentOS and we are provided with a + # version of the form MAJOR, try to correct the version into MAJOR.MINOR + # using virtual versions + # Cannot use get_source_distro_id and get_target_distro_id here because of circular imports + distro_config = api.current_actor().configuration.distro + if distro_config.source == 'centos' or distro_config.target == 'centos': new_detected = _autocorrect_centos_version(detected) # We might have a matchlist ['> 8', '<= 9'] that, e.g., results from blindly using source/target versions # to make a matchlist. Our `detected` version might be some fixed string, e.g., `9.1`. 
So we need to diff --git a/repos/system_upgrade/common/libraries/distro.py b/repos/system_upgrade/common/libraries/distro.py index 2ed5eacd..a5042769 100644 --- a/repos/system_upgrade/common/libraries/distro.py +++ b/repos/system_upgrade/common/libraries/distro.py @@ -2,7 +2,12 @@ import json import os from leapp.exceptions import StopActorExecutionError +from leapp.libraries.common import repofileutils, rhsm +from leapp.libraries.common.config import get_target_distro_id +from leapp.libraries.common.config.architecture import ARCH_ACCEPTED, ARCH_X86_64 +from leapp.libraries.common.config.version import get_target_major_version from leapp.libraries.stdlib import api +from leapp.models import VendorSignatures def get_distribution_data(distribution): @@ -11,8 +16,202 @@ def get_distribution_data(distribution): distribution_config = os.path.join(distributions_path, distribution, 'gpg-signatures.json') if os.path.exists(distribution_config): with open(distribution_config) as distro_config_file: - return json.load(distro_config_file) + distro_config_json = json.load(distro_config_file) else: raise StopActorExecutionError( 'Cannot find distribution signature configuration.', details={'Problem': 'Distribution {} was not found in {}.'.format(distribution, distributions_path)}) + + # Extend with Vendors signatures + for siglist in api.consume(VendorSignatures): + distro_config_json["keys"].extend(siglist.sigs) + + return distro_config_json + +# distro -> major_version -> repofile -> tuple of architectures where it's present +_DISTRO_REPOFILES_MAP = { + 'rhel': { + '8': {'/etc/yum.repos.d/redhat.repo': ARCH_ACCEPTED}, + '9': {'/etc/yum.repos.d/redhat.repo': ARCH_ACCEPTED}, + '10': {'/etc/yum.repos.d/redhat.repo': ARCH_ACCEPTED}, + }, + 'centos': { + '8': { + # TODO is this true on all archs? 
+ 'CentOS-Linux-AppStream.repo': ARCH_ACCEPTED, + 'CentOS-Linux-BaseOS.repo': ARCH_ACCEPTED, + 'CentOS-Linux-ContinuousRelease.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Debuginfo.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Devel.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Extras.repo': ARCH_ACCEPTED, + 'CentOS-Linux-FastTrack.repo': ARCH_ACCEPTED, + 'CentOS-Linux-HighAvailability.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Media.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Plus.repo': ARCH_ACCEPTED, + 'CentOS-Linux-PowerTools.repo': ARCH_ACCEPTED, + 'CentOS-Linux-Sources.repo': ARCH_ACCEPTED, + }, + '9': { + '/etc/yum.repos.d/centos.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/centos-addons.repo': ARCH_ACCEPTED, + }, + '10': { + '/etc/yum.repos.d/centos.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/centos-addons.repo': ARCH_ACCEPTED, + }, + }, + 'almalinux': { + '8': { + # TODO is this true on all archs? + '/etc/yum.repos.d/almalinux-ha.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-nfv.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-plus.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-powertools.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-resilientstorage.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-rt.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-sap.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-saphana.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux.repo': ARCH_ACCEPTED, + }, + '9': { + '/etc/yum.repos.d/almalinux.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-appstream.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-baseos.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-crb.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-extras.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-highavailability.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-plus.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-resilientstorage.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-sap.repo': ARCH_ACCEPTED, + 
'/etc/yum.repos.d/almalinux-saphana.repo': ARCH_ACCEPTED, + # RT and NFV are only on x86_64 on almalinux 9 + '/etc/yum.repos.d/almalinux-nfv.repo': (ARCH_X86_64,), + '/etc/yum.repos.d/almalinux-rt.repo': (ARCH_X86_64,), + }, + '10': { + # no resilientstorage on 10 + '/etc/yum.repos.d/almalinux-appstream.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-baseos.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-crb.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-extras.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-highavailability.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-plus.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-sap.repo': ARCH_ACCEPTED, + '/etc/yum.repos.d/almalinux-saphana.repo': ARCH_ACCEPTED, + # RT and NFV are only on x86_64 on almalinux 10 + '/etc/yum.repos.d/almalinux-nfv.repo': (ARCH_X86_64,), + '/etc/yum.repos.d/almalinux-rt.repo': (ARCH_X86_64,), + }, + }, +} + + +def _get_distro_repofiles(distro, major_version, arch): + """ + Get distribution provided repofiles. + + Note that this does not perform any validation, the caller must check + whether the files exist. + + :param distro: The distribution to get repofiles for. + :type distro: str + :param major_version: The major version to get repofiles for. + :type major_version: str + :param arch: The architecture to get repofiles for. + :type arch: str + :return: A list of paths to repofiles provided by distribution + :rtype: list[str] or None if no repofiles are mapped for the arguments + """ + + distro_repofiles = _DISTRO_REPOFILES_MAP.get(distro) + if not distro_repofiles: + return None + + version_repofiles = distro_repofiles.get(major_version, {}) + if not version_repofiles: + return None + + return [repofile for repofile, archs in version_repofiles.items() if arch in archs] + + +def get_target_distro_repoids(context): + """ + Get repoids defined in distro provided repofiles + + See the generic :func:`_get_distro_repoids` for more details. 
+ + :param context: An instance of mounting.IsolatedActions class + :type context: mounting.IsolatedActions + :return: Repoids of distribution provided repositories + :type: list[str] + """ + + return get_distro_repoids( + context, + get_target_distro_id(), + get_target_major_version(), + api.current_actor().configuration.architecture + ) + + +def get_distro_repoids(context, distro, major_version, arch): + """ + Get repoids defined in distro provided repofiles + + On RHEL with RHSM this delegates to rhsm.get_available_repo_ids. + + Repofiles installed by RHUI client packages are not covered by this + function. + + :param context: An instance of mounting.IsolatedActions class + :type context: mounting.IsolatedActions + :param distro: The distro whose repoids to return + :type distro: str + :param major_version: The major version to get distro repoids for. + :type major_version: str + :param arch: The architecture to get distro repoids for. + :type arch: str + :return: Repoids of distribution provided repositories + :type: list[str] + """ + + if distro == 'rhel': + if rhsm.skip_rhsm(): + return [] + # Kept this todo here from the original code from + # userspacegen._get_rh_available_repoids: + # Get the RHSM repos available in the target RHEL container + # TODO: very similar thing should happens for all other repofiles in container + return rhsm.get_available_repo_ids(context) + + repofiles = repofileutils.get_parsed_repofiles(context) + distro_repofiles = _get_distro_repofiles(distro, major_version, arch) + if not distro_repofiles: + # TODO: a different way of signaling an error would be preferred (e.g. 
returning None), + # but since rhsm.get_available_repo_ids also raises StopActorExecutionError, + # let's make it easier for the caller for now and use it too + raise StopActorExecutionError( + "No known distro provided repofiles mapped", + details={ + "details": "distro: {}, major version: {}, architecture: {}".format( + distro, major_version, arch + ) + }, + ) + + distro_repoids = [] + for rfile in repofiles: + if rfile.file in distro_repofiles: + + if not os.path.exists(context.full_path(rfile.file)): + api.current_logger().debug( + "Expected distribution provided repofile does not exists: {}".format( + rfile + ) + ) + continue + + if rfile.data: + distro_repoids.extend([repo.repoid for repo in rfile.data]) + + return sorted(distro_repoids) diff --git a/repos/system_upgrade/common/libraries/dnfconfig.py b/repos/system_upgrade/common/libraries/dnfconfig.py index 4b5afeb5..9f1902b6 100644 --- a/repos/system_upgrade/common/libraries/dnfconfig.py +++ b/repos/system_upgrade/common/libraries/dnfconfig.py @@ -43,8 +43,11 @@ def _get_main_dump(context, disable_plugins): output_data = {} for line in data[main_start:]: + if not line.strip(): + continue try: key, val = _strip_split(line, '=', 1) + output_data[key] = val except ValueError: # This is not expected to happen, but call it a seatbelt in case # the dnf dump implementation will change and we will miss it @@ -54,7 +57,6 @@ def _get_main_dump(context, disable_plugins): api.current_logger().warning( 'Cannot parse the dnf dump correctly, line: {}'.format(line)) pass - output_data[key] = val return output_data diff --git a/repos/system_upgrade/common/libraries/dnfplugin.py b/repos/system_upgrade/common/libraries/dnfplugin.py index 4f0c3a99..66b89aed 100644 --- a/repos/system_upgrade/common/libraries/dnfplugin.py +++ b/repos/system_upgrade/common/libraries/dnfplugin.py @@ -90,6 +90,7 @@ def build_plugin_data(target_repoids, debug, test, tasks, on_aws): 'to_install': sorted(tasks.to_install), 'to_remove': 
sorted(tasks.to_remove), 'to_upgrade': sorted(tasks.to_upgrade), + 'to_reinstall': sorted(tasks.to_reinstall), 'modules_to_enable': sorted(['{}:{}'.format(m.name, m.stream) for m in tasks.modules_to_enable]), }, 'dnf_conf': { @@ -461,9 +462,10 @@ def perform_transaction_install(target_userspace_info, storage_info, used_repos, @contextlib.contextmanager def _prepare_perform(used_repos, target_userspace_info, xfs_info, storage_info, target_iso=None): - # noqa: W0135; pylint: disable=contextmanager-generator-missing-cleanup + # noqa: W0135; pylint: disable=bad-option-value,contextmanager-generator-missing-cleanup # NOTE(pstodulk): the pylint check is not valid in this case - finally is covered # implicitly + # noqa: W0135 reserve_space = overlaygen.get_recommended_leapp_free_space(target_userspace_info.path) with _prepare_transaction(used_repos=used_repos, target_userspace_info=target_userspace_info diff --git a/repos/system_upgrade/common/libraries/fetch.py b/repos/system_upgrade/common/libraries/fetch.py index 82bf4ff3..44abe66b 100644 --- a/repos/system_upgrade/common/libraries/fetch.py +++ b/repos/system_upgrade/common/libraries/fetch.py @@ -56,8 +56,8 @@ def _request_data(service_path, cert, proxies, timeout=REQUEST_TIMEOUT): timeout = (timeout[0], timeout[1] + 10) if attempt > MAX_ATTEMPTS: logger.warning( - 'Attempt {} of {} to get {} failed: {}.' - .format(MAX_ATTEMPTS, MAX_ATTEMPTS, service_path, etype_msg) + 'Attempt {max} of {max} to get {service} failed: {error}.' + .format(max=MAX_ATTEMPTS, service=service_path, error=etype_msg) ) raise @@ -146,7 +146,8 @@ def load_data_asset(actor_requesting_asset, asset_filename, asset_fulltext_name, docs_url, - docs_title): + docs_title, + asset_directory="/etc/leapp/files"): """ Load the content of the data asset with given asset_filename and produce :class:`leapp.model.ConsumedDataAsset` message. 
@@ -183,7 +184,7 @@ def load_data_asset(actor_requesting_asset, try: # The asset family ID has the form (major, minor), include only `major` in the URL - raw_asset_contents = read_or_fetch(asset_filename, data_stream=data_stream_major, allow_download=False) + raw_asset_contents = read_or_fetch(asset_filename, directory=asset_directory, data_stream=data_stream_major, allow_download=False) asset_contents = json.loads(raw_asset_contents) except ValueError: msg = 'The {0} file (at {1}) does not contain a valid JSON object.'.format(asset_fulltext_name, asset_filename) diff --git a/repos/system_upgrade/common/libraries/gpg.py b/repos/system_upgrade/common/libraries/gpg.py index c9c3f1fc..0c83a889 100644 --- a/repos/system_upgrade/common/libraries/gpg.py +++ b/repos/system_upgrade/common/libraries/gpg.py @@ -105,6 +105,8 @@ def get_gpg_fp_from_file(key_path): return fp +# TODO when a need for the same function for source arises, or when there is +# reason to deprecate this (re)name this to include "target" def get_path_to_gpg_certs(): """ Get path to the directory with trusted target gpg keys in the common leapp repository. 
@@ -121,13 +123,16 @@ def get_path_to_gpg_certs(): # only beta is special in regards to the GPG signing keys if target_product_type == 'beta': certs_dir = '{}beta'.format(target_major_version) - distro = api.current_actor().configuration.os_release.release_id - return os.path.join( - api.get_common_folder_path('distro'), - distro, - GPG_CERTS_FOLDER, - certs_dir - ) + distro = config.get_target_distro_id() + return [ + "/etc/leapp/files/vendors.d/rpm-gpg/", + os.path.join( + api.get_common_folder_path('distro'), + distro, + GPG_CERTS_FOLDER, + certs_dir + ) + ] def is_nogpgcheck_set(): diff --git a/repos/system_upgrade/common/libraries/grub.py b/repos/system_upgrade/common/libraries/grub.py index cd960ea4..77679d01 100644 --- a/repos/system_upgrade/common/libraries/grub.py +++ b/repos/system_upgrade/common/libraries/grub.py @@ -30,11 +30,10 @@ def canonical_path_to_efi_format(canonical_path): return canonical_path.replace(EFI_MOUNTPOINT[:-1], "").replace("/", "\\") -class EFIBootLoaderEntry(object): +class EFIBootLoaderEntry: """ Representation of an UEFI boot loader entry. """ - # pylint: disable=eq-without-hash def __init__(self, boot_number, label, active, efi_bin_source): self.boot_number = boot_number @@ -102,7 +101,7 @@ class EFIBootLoaderEntry(object): return EFIBootLoaderEntry._efi_path_to_canonical(match.groups('path')[0]) -class EFIBootInfo(object): +class EFIBootInfo: """ Data about the current UEFI boot configuration. 
@@ -163,7 +162,8 @@ class EFIBootInfo(object): # it's not expected that no entry exists raise StopActorExecution('UEFI: Unable to detect any UEFI bootloader entry.') - def _parse_key_value(self, bootmgr_output, key): + @staticmethod + def _parse_key_value(bootmgr_output, key): # e.g.: : for line in bootmgr_output.splitlines(): if line.startswith(key + ':'): diff --git a/repos/system_upgrade/common/libraries/guards.py b/repos/system_upgrade/common/libraries/guards.py index c8001817..ea2bf4dd 100644 --- a/repos/system_upgrade/common/libraries/guards.py +++ b/repos/system_upgrade/common/libraries/guards.py @@ -34,7 +34,7 @@ def guarded_execution(*guards): def connection_guard(url='https://example.com'): def closure(): try: - urlopen(url) + urlopen(url) # pylint: disable=consider-using-with return None except URLError as e: cause = '''Failed to open url '{url}' with error: {error}'''.format(url=url, error=e) diff --git a/repos/system_upgrade/common/libraries/kernel.py b/repos/system_upgrade/common/libraries/kernel.py index dac21b06..fc8c1167 100644 --- a/repos/system_upgrade/common/libraries/kernel.py +++ b/repos/system_upgrade/common/libraries/kernel.py @@ -9,7 +9,7 @@ KernelPkgInfo = namedtuple('KernelPkgInfo', ('name', 'version', 'release', 'arch KERNEL_UNAME_R_PROVIDES = ['kernel-uname-r', 'kernel-rt-uname-r'] -class KernelType(object): +class KernelType: ORDINARY = 'ordinary' REALTIME = 'realtime' diff --git a/repos/system_upgrade/common/libraries/mounting.py b/repos/system_upgrade/common/libraries/mounting.py index 2eb19d31..279d31dc 100644 --- a/repos/system_upgrade/common/libraries/mounting.py +++ b/repos/system_upgrade/common/libraries/mounting.py @@ -16,7 +16,7 @@ ALWAYS_BIND = [] ErrorData = namedtuple('ErrorData', ['summary', 'details']) -class MountingMode(object): +class MountingMode: """ MountingMode are types of mounts supported by the library """ @@ -46,13 +46,13 @@ class MountError(Exception): """ Exception that is thrown when a mount related 
operation failed """ def __init__(self, message, details): - super(MountError, self).__init__(message) + super().__init__(message) self.details = details -class IsolationType(object): +class IsolationType: """ Implementations for the different isolated actions types """ - class _Implementation(object): + class _Implementation: """ Base class for all isolated actions """ def __init__(self, target, **kwargs): @@ -66,7 +66,8 @@ class IsolationType(object): """ Release the isolation context """ pass - def make_command(self, cmd): + @staticmethod + def make_command(cmd): """ Transform the given command to the isolated environment """ return cmd @@ -74,7 +75,7 @@ class IsolationType(object): """ systemd-nspawn implementation """ def __init__(self, target, binds=(), env_vars=None): - super(IsolationType.NSPAWN, self).__init__(target=target) + super().__init__(target=target) self.binds = list(binds) + ALWAYS_BIND self.env_vars = env_vars or get_all_envs() @@ -97,7 +98,7 @@ class IsolationType(object): """ chroot implementation """ def __init__(self, target): - super(IsolationType.CHROOT, self).__init__(target) + super().__init__(target) self.context = None def create(self): @@ -129,7 +130,7 @@ class IsolationType(object): """ Execute the given commands and perform the given operations on the real system and not isolated. 
""" -class IsolatedActions(object): +class IsolatedActions: """ This class allows to perform actions in a manner as if the given base_dir would be the current root """ _isolated = True @@ -261,14 +262,14 @@ class ChrootActions(IsolatedActions): """ Isolation with chroot """ def __init__(self, base_dir): - super(ChrootActions, self).__init__(base_dir=base_dir, implementation=IsolationType.CHROOT) + super().__init__(base_dir=base_dir, implementation=IsolationType.CHROOT) class NspawnActions(IsolatedActions): """ Isolation with systemd-nspawn """ def __init__(self, base_dir, binds=(), env_vars=None): - super(NspawnActions, self).__init__( + super().__init__( base_dir=base_dir, implementation=IsolationType.NSPAWN, binds=binds, env_vars=env_vars) @@ -277,10 +278,10 @@ class NotIsolatedActions(IsolatedActions): _isolated = False def __init__(self, base_dir): - super(NotIsolatedActions, self).__init__(base_dir=base_dir, implementation=IsolationType.NONE) + super().__init__(base_dir=base_dir, implementation=IsolationType.NONE) -class MountConfig(object): +class MountConfig: """ Options for Mount """ _Options = namedtuple('_Options', ('should_create', 'should_cleanup')) AttachOnly = _Options(should_create=False, should_cleanup=False) @@ -293,7 +294,7 @@ class MountConfig(object): """ Create all necessary directories and perform mount calls and cleanup afterwards """ -class MountingBase(object): +class MountingBase: """ Base class for all mount operations """ def __init__(self, source, target, mode, config=MountConfig.Mount): @@ -374,7 +375,7 @@ class NullMount(MountingBase): """ This is basically a NoOp for compatibility with other mount operations, in case a mount is optional """ def __init__(self, target, config=MountConfig.AttachOnly): - super(NullMount, self).__init__(source=target, target=target, mode=MountingMode.NONE, config=config) + super().__init__(source=target, target=target, mode=MountingMode.NONE, config=config) def __enter__(self): return self @@ -387,21 
+388,21 @@ class LoopMount(MountingBase): """ Performs loop mounts """ def __init__(self, source, target, config=MountConfig.Mount): - super(LoopMount, self).__init__(source=source, target=target, mode=MountingMode.LOOP, config=config) + super().__init__(source=source, target=target, mode=MountingMode.LOOP, config=config) class BindMount(MountingBase): """ Performs bind mounts """ def __init__(self, source, target, config=MountConfig.Mount): - super(BindMount, self).__init__(source=source, target=target, mode=MountingMode.BIND, config=config) + super().__init__(source=source, target=target, mode=MountingMode.BIND, config=config) class TypedMount(MountingBase): """ Performs a typed mounts """ def __init__(self, fstype, source, target, config=MountConfig.Mount): - super(TypedMount, self).__init__(source=source, target=target, mode=MountingMode.FSTYPE, config=config) + super().__init__(source=source, target=target, mode=MountingMode.FSTYPE, config=config) self.fstype = fstype def _mount_options(self): @@ -415,8 +416,12 @@ class OverlayMount(MountingBase): """ Performs an overlayfs mount """ def __init__(self, name, source, workdir, config=MountConfig.Mount): - super(OverlayMount, self).__init__(source=source, target=os.path.join(workdir, name), - mode=MountingMode.OVERLAY, config=config) + super().__init__( + source=source, + target=os.path.join(workdir, name), + mode=MountingMode.OVERLAY, + config=config + ) self._upper_dir = os.path.join(workdir, 'upper') self._work_dir = os.path.join(workdir, 'work') self.additional_directories = (self._upper_dir, self._work_dir) diff --git a/repos/system_upgrade/common/libraries/multipathutil.py b/repos/system_upgrade/common/libraries/multipathutil.py index 47d0d86e..cb5c3693 100644 --- a/repos/system_upgrade/common/libraries/multipathutil.py +++ b/repos/system_upgrade/common/libraries/multipathutil.py @@ -38,7 +38,7 @@ def write_config(path, contents): ) -class LineData(object): +class LineData: TYPE_BLANK = 0 TYPE_SECTION_START 
= 1 TYPE_SECTION_END = 2 diff --git a/repos/system_upgrade/common/libraries/overlaygen.py b/repos/system_upgrade/common/libraries/overlaygen.py index 867e3559..83dc33b8 100644 --- a/repos/system_upgrade/common/libraries/overlaygen.py +++ b/repos/system_upgrade/common/libraries/overlaygen.py @@ -185,7 +185,7 @@ def _get_fspace(path, convert_to_mibs=False, coefficient=1): coefficient = min(coefficient, 1) fspace_bytes = int(stat.f_frsize * stat.f_bavail * coefficient) if convert_to_mibs: - return int(fspace_bytes / 1024 / 1024) # noqa: W1619; pylint: disable=old-division + return int(fspace_bytes / 1024 / 1024) return fspace_bytes @@ -325,7 +325,7 @@ def _prepare_required_mounts(scratch_dir, mounts_dir, storage_info, scratch_rese @contextlib.contextmanager def _build_overlay_mount(root_mount, mounts): - # noqa: W0135; pylint: disable=contextmanager-generator-missing-cleanup + # noqa: W0135; pylint: disable=bad-option-value,contextmanager-generator-missing-cleanup # NOTE(pstodulk): the pylint check is not valid in this case - finally is covered # implicitly if not root_mount: @@ -480,8 +480,8 @@ def _create_mount_disk_image(disk_images_directory, path, disk_size): # NOTE(pstodulk): In case the formatting params are modified, # the minimal required size could be different api.current_logger().warning( - 'The apparent size for the disk image representing {path}' - ' is too small ({disk_size} MiBs) for a formatting. Setting 130 MiBs instead.' + 'The apparent size for the disk image representing {path} ' + 'is too small ({disk_size} MiBs) for a formatting. Setting 130 MiBs instead.' 
.format(path=path, disk_size=disk_size) ) disk_size = 130 @@ -489,12 +489,11 @@ def _create_mount_disk_image(disk_images_directory, path, disk_size): cmd = [ '/bin/dd', 'if=/dev/zero', 'of={}'.format(diskimage_path), - 'bs=1M', 'count=0', 'seek={}'.format(disk_size) + 'bs=1M', 'count=0', 'seek={}'.format(disk_size), ] hint = ( 'Please ensure that there is enough diskspace on the partition hosting' - 'the {} directory.' - .format(disk_images_directory) + 'the {} directory.'.format(disk_images_directory) ) api.current_logger().debug('Attempting to create disk image at %s', diskimage_path) @@ -540,7 +539,9 @@ def _create_mounts_dir(scratch_dir, mounts_dir): utils.makedirs(mounts_dir) api.current_logger().debug('Done creating mount directories.') except OSError: - api.current_logger().error('Failed to create mounting directories %s', mounts_dir, exc_info=True) + api.current_logger().error( + 'Failed to create mounting directories %s', mounts_dir, exc_info=True + ) # This is an attempt for giving the user a chance to resolve it on their own raise StopActorExecutionError( @@ -556,17 +557,25 @@ def _mount_dnf_cache(overlay_target): """ Convenience context manager to ensure bind mounted /var/cache/dnf and removal of the mount. 
""" - # noqa: W0135; pylint: disable=contextmanager-generator-missing-cleanup + # noqa: W0135; pylint: disable=bad-option-value,contextmanager-generator-missing-cleanup # NOTE(pstodulk): the pylint check is not valid in this case - finally is covered # implicitly with mounting.BindMount( - source='/var/cache/dnf', - target=os.path.join(overlay_target, 'var', 'cache', 'dnf')) as cache_mount: + source='/var/cache/dnf', + target=os.path.join(overlay_target, 'var', 'cache', 'dnf'), + ) as cache_mount: yield cache_mount @contextlib.contextmanager -def create_source_overlay(mounts_dir, scratch_dir, xfs_info, storage_info, mount_target=None, scratch_reserve=0): +def create_source_overlay( + mounts_dir, + scratch_dir, + xfs_info, + storage_info, + mount_target=None, + scratch_reserve=0, +): """ Context manager that prepares the source system overlay and yields the mount. @@ -610,7 +619,7 @@ def create_source_overlay(mounts_dir, scratch_dir, xfs_info, storage_info, mount :type scratch_reserve: Optional[int] :rtype: mounting.BindMount or mounting.NullMount """ - # noqa: W0135; pylint: disable=contextmanager-generator-missing-cleanup + # noqa: W0135; pylint: disable=bad-option-value,contextmanager-generator-missing-cleanup # NOTE(pstodulk): the pylint check is not valid in this case - finally is covered # implicitly api.current_logger().debug('Creating source overlay in {scratch_dir} with mounts in {mounts_dir}'.format( @@ -710,7 +719,7 @@ def _create_mount_disk_image_old(disk_images_directory, path): try: utils.call_with_oserror_handled(cmd=['/sbin/mkfs.ext4', '-F', diskimage_path]) except CalledProcessError as e: - api.current_logger().error('Failed to create ext4 filesystem in %s', exc_info=True) + api.current_logger().error('Failed to create ext4 filesystem in %s', diskimage_path, exc_info=True) raise StopActorExecutionError( message=str(e) ) diff --git a/repos/system_upgrade/common/libraries/persistentnetnames.py 
b/repos/system_upgrade/common/libraries/persistentnetnames.py index 8769712c..7fdf7eaa 100644 --- a/repos/system_upgrade/common/libraries/persistentnetnames.py +++ b/repos/system_upgrade/common/libraries/persistentnetnames.py @@ -50,7 +50,7 @@ def interfaces(): except Exception as e: # pylint: disable=broad-except # FIXME(msekleta): We should probably handle errors more granularly # Maybe we should inhibit upgrade process at this point - api.current_logger().warning('Failed to gather information about network interface: ' + str(e)) + api.current_logger().warning('Failed to gather information about network interface: %s', e) continue yield Interface(**attrs) diff --git a/repos/system_upgrade/common/libraries/repofileutils.py b/repos/system_upgrade/common/libraries/repofileutils.py index cab3c42b..376473a4 100644 --- a/repos/system_upgrade/common/libraries/repofileutils.py +++ b/repos/system_upgrade/common/libraries/repofileutils.py @@ -16,7 +16,7 @@ class InvalidRepoDefinition(Exception): def __init__(self, msg, repofile, repoid): message = 'Invalid repository definition: {repoid} in: {repofile}: {msg}'.format( repoid=repoid, repofile=repofile, msg=msg) - super(InvalidRepoDefinition, self).__init__(message) + super().__init__(message) self.repofile = repofile self.repoid = repoid diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py new file mode 100644 index 00000000..40a6f001 --- /dev/null +++ b/repos/system_upgrade/common/libraries/repomaputils.py @@ -0,0 +1,141 @@ +from collections import defaultdict +from leapp.models import PESIDRepositoryEntry, RepoMapEntry, RepositoriesMapping + +class RepoMapData: + VERSION_FORMAT = '1.3.0' + + def __init__(self): + self.repositories = [] + self.mapping = {} + + def add_repository(self, data, pesid): + """ + Add new PESIDRepositoryEntry with given pesid from the provided dictionary. + + :param data: A dict containing the data of the added repository. 
The dictionary structure corresponds + to the repositories entries in the repository mapping JSON schema. + :type data: Dict[str, str] + :param pesid: PES id of the repository family that the newly added repository belongs to. + :type pesid: str + """ + self.repositories.append(PESIDRepositoryEntry( + repoid=data['repoid'], + channel=data['channel'], + rhui=data.get('rhui', ''), + repo_type=data['repo_type'], + arch=data['arch'], + major_version=data['major_version'], + pesid=pesid, + distro=data['distro'], + )) + + def get_repositories(self, valid_major_versions): + """ + Return the list of PESIDRepositoryEntry object matching the specified major versions. + """ + return [repo for repo in self.repositories if repo.major_version in valid_major_versions] + + def get_version_repoids(self, major_version): + """ + Return the list of repository ID strings for repositories matching the specified major version. + """ + return [repo.repoid for repo in self.repositories if repo.major_version == major_version] + + def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid): + """ + Add a new mapping entry that is mapping the source pesid to the destination pesid(s), + relevant in an IPU from the supplied source major version to the supplied target + major version. + + :param str source_major_version: Specifies the major version of the source system + for which the added mapping applies. + :param str target_major_version: Specifies the major version of the target system + for which the added mapping applies. + :param str source_pesid: PESID of the source repository. + :param Union[str|List[str]] target_pesid: A single target PESID or a list of target + PESIDs of the added mapping. + """ + # NOTE: it could be more simple, but I prefer to be sure the input data + # contains just one map per source PESID. 
+ key = '{}:{}'.format(source_major_version, target_major_version) + rmap = self.mapping.get(key, defaultdict(set)) + self.mapping[key] = rmap + if isinstance(target_pesid, list): + rmap[source_pesid].update(target_pesid) + else: + rmap[source_pesid].add(target_pesid) + + def get_mappings(self, src_major_version, dst_major_version): + """ + Return the list of RepoMapEntry objects for the specified upgrade path. + + IOW, the whole mapping for specified IPU. + """ + key = '{}:{}'.format(src_major_version, dst_major_version) + rmap = self.mapping.get(key, None) + if not rmap: + return None + map_list = [] + for src_pesid in sorted(rmap.keys()): + map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid]))) + return map_list + + @staticmethod + def load_from_dict(data): + if data['version_format'] != RepoMapData.VERSION_FORMAT: + raise ValueError( + 'The obtained repomap data has unsupported version of format.' + ' Get {} required {}' + .format(data['version_format'], RepoMapData.VERSION_FORMAT) + ) + + repomap = RepoMapData() + + # Load reposiories + existing_pesids = set() + for repo_family in data['repositories']: + existing_pesids.add(repo_family['pesid']) + for repo in repo_family['entries']: + repomap.add_repository(repo, repo_family['pesid']) + + # Load mappings + for mapping in data['mapping']: + for entry in mapping['entries']: + if not isinstance(entry['target'], list): + raise ValueError( + 'The target field of a mapping entry is not a list: {}' + .format(entry) + ) + + for pesid in [entry['source']] + entry['target']: + if pesid not in existing_pesids: + raise ValueError( + 'The {} pesid is not related to any repository.' 
+ .format(pesid) + ) + repomap.add_mapping( + source_major_version=mapping['source_major_version'], + target_major_version=mapping['target_major_version'], + source_pesid=entry['source'], + target_pesid=entry['target'], + ) + return repomap + +def combine_repomap_messages(mapping_list): + """ + Combine multiple RepositoryMapping messages into one. + Needed because we might get more than one message if there are vendors present. + """ + combined_mapping = [] + combined_repositories = [] + # Depending on whether there are any vendors present, we might get more than one message. + for msg in mapping_list: + combined_mapping.extend(msg.mapping) + combined_repositories.extend(msg.repositories) + + combined_repomapping = RepositoriesMapping( + mapping=combined_mapping, + repositories=combined_repositories + ) + + return combined_repomapping diff --git a/repos/system_upgrade/common/libraries/rhsm.py b/repos/system_upgrade/common/libraries/rhsm.py index 79164cca..2112ca3d 100644 --- a/repos/system_upgrade/common/libraries/rhsm.py +++ b/repos/system_upgrade/common/libraries/rhsm.py @@ -7,7 +7,7 @@ import time from leapp import reporting from leapp.exceptions import StopActorExecutionError from leapp.libraries.common import repofileutils -from leapp.libraries.common.config import get_distro_id, get_env +from leapp.libraries.common.config import get_env, get_target_distro_id from leapp.libraries.stdlib import api, CalledProcessError from leapp.models import RHSMInfo @@ -337,8 +337,8 @@ def set_container_mode(context): :param context: An instance of a mounting.IsolatedActions class :type context: mounting.IsolatedActions class """ - # this has to happen even with skip_rhsm, but only on RHEL - if get_distro_id() != 'rhel': + # this has to happen even with skip_rhsm, but only on RHEL target + if get_target_distro_id() != 'rhel': api.current_logger().info( 'Skipping setting RHSM into container mode on non-RHEL systems.' 
) diff --git a/repos/system_upgrade/common/libraries/rhui.py b/repos/system_upgrade/common/libraries/rhui.py index 5c293304..c90c8c14 100644 --- a/repos/system_upgrade/common/libraries/rhui.py +++ b/repos/system_upgrade/common/libraries/rhui.py @@ -21,7 +21,7 @@ RHUI_PKI_PRIVATE_DIR = os.path.join(RHUI_PKI_DIR, 'private') AWS_DNF_PLUGIN_NAME = 'amazon-id.py' -class ContentChannel(object): +class ContentChannel: GA = 'ga' TUV = 'tuv' E4S = 'e4s' @@ -30,14 +30,14 @@ class ContentChannel(object): BETA = 'beta' -class RHUIVariant(object): +class RHUIVariant: ORDINARY = 'ordinary' # Special value - not displayed in report/errors SAP = 'sap' SAP_APPS = 'sap-apps' SAP_HA = 'sap-ha' -class RHUIProvider(object): +class RHUIProvider: GOOGLE = 'Google' AZURE = 'Azure' AWS = 'AWS' @@ -74,7 +74,7 @@ RHUISetup = namedtuple( """ -class RHUIFamily(object): +class RHUIFamily: def __init__(self, provider, client_files_folder='', variant=RHUIVariant.ORDINARY, arch=arch.ARCH_X86_64,): self.provider = provider self.client_files_folder = client_files_folder diff --git a/repos/system_upgrade/common/libraries/rpms.py b/repos/system_upgrade/common/libraries/rpms.py index bd3a2961..8f98c1a4 100644 --- a/repos/system_upgrade/common/libraries/rpms.py +++ b/repos/system_upgrade/common/libraries/rpms.py @@ -3,7 +3,7 @@ from leapp.libraries.common.config.version import get_source_major_version from leapp.models import InstalledRPM -class LeappComponents(object): +class LeappComponents: """ Supported component values to be used with get_packages_function: * FRAMEWORK - the core of the leapp project: the leapp executable and diff --git a/repos/system_upgrade/common/libraries/tests/test_distro.py b/repos/system_upgrade/common/libraries/tests/test_distro.py new file mode 100644 index 00000000..13e782e6 --- /dev/null +++ b/repos/system_upgrade/common/libraries/tests/test_distro.py @@ -0,0 +1,200 @@ +import json +import os + +import pytest + +from leapp.actors import StopActorExecutionError +from 
leapp.libraries.common import distro, repofileutils, rhsm +from leapp.libraries.common.config.architecture import ARCH_ACCEPTED, ARCH_ARM64, ARCH_PPC64LE, ARCH_S390X, ARCH_X86_64 +from leapp.libraries.common.distro import _get_distro_repofiles, get_distribution_data, get_distro_repoids +from leapp.libraries.common.testutils import CurrentActorMocked +from leapp.libraries.stdlib import api +from leapp.models import RepositoryData, RepositoryFile + +_RHEL_REPOFILES = ['/etc/yum.repos.d/redhat.repo'] +_CENTOS_REPOFILES = [ + "/etc/yum.repos.d/centos.repo", "/etc/yum.repos.d/centos-addons.repo" +] + +_CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + + +@pytest.mark.parametrize('distro', ['rhel', 'centos']) +def test_get_distribution_data(monkeypatch, distro): + common_path = os.path.join(_CUR_DIR, "../../files/", 'distro') + monkeypatch.setattr( + api, + "get_common_folder_path", + lambda folder: common_path + ) + data_path = os.path.join(common_path, distro, "gpg-signatures.json") + + def exists_mocked(path): + assert path == data_path + return True + + monkeypatch.setattr(os.path, 'exists', exists_mocked) + ret = get_distribution_data(distro) + + with open(data_path) as fp: + assert ret == json.load(fp) + + +@pytest.mark.parametrize('distro', ['rhel', 'centos']) +def test_get_distribution_data_not_exists(monkeypatch, distro): + common_path = os.path.join(_CUR_DIR, "../../files/", 'distro') + monkeypatch.setattr( + api, + "get_common_folder_path", + lambda folder: common_path + ) + data_path = os.path.join(common_path, distro, "gpg-signatures.json") + + def exists_mocked(path): + assert path == data_path + return False + + monkeypatch.setattr(os.path, 'exists', exists_mocked) + + with pytest.raises(StopActorExecutionError) as err: + get_distribution_data(distro) + assert 'Cannot find distribution signature configuration.' in err + + +def test_get_distro_repofiles(monkeypatch): + """ + Test the functionality, not the data. 
+ """ + test_map = { + 'distro1': { + '8': { + 'repofile1': ARCH_ACCEPTED, + 'repofile2': [ARCH_X86_64], + }, + '9': { + 'repofile3': ARCH_ACCEPTED, + }, + }, + 'distro2': { + '8': {}, + '9': { + 'repofile2': [ARCH_X86_64], + 'repofile3': [ARCH_ARM64, ARCH_S390X, ARCH_PPC64LE], + }, + }, + } + monkeypatch.setattr(distro, '_DISTRO_REPOFILES_MAP', test_map) + + # mix of all and specific arch + repofiles = _get_distro_repofiles('distro1', '8', ARCH_X86_64) + assert repofiles == ['repofile1', 'repofile2'] + + # match all but not x86_64 + repofiles = _get_distro_repofiles('distro1', '8', ARCH_ARM64) + assert repofiles == ['repofile1'] + + repofiles = _get_distro_repofiles('distro2', '9', ARCH_X86_64) + assert repofiles == ['repofile2'] + repofiles = _get_distro_repofiles('distro2', '9', ARCH_ARM64) + assert repofiles == ['repofile3'] + repofiles = _get_distro_repofiles('distro2', '9', ARCH_S390X) + assert repofiles == ['repofile3'] + repofiles = _get_distro_repofiles('distro2', '9', ARCH_PPC64LE) + assert repofiles == ['repofile3'] + + # version not mapped + repofiles = _get_distro_repofiles('distro2', '8', ARCH_X86_64) + assert repofiles is None + + # distro not mapped + repofiles = _get_distro_repofiles('distro42', '8', ARCH_X86_64) + assert repofiles is None + + +def _make_repo(repoid): + return RepositoryData(repoid=repoid, name='name {}'.format(repoid)) + + +def _make_repofile(rfile, data=None): + if data is None: + data = [_make_repo("{}-{}".format(rfile.split("/")[-1], i)) for i in range(3)] + return RepositoryFile(file=rfile, data=data) + + +def _make_repofiles(rfiles): + return [_make_repofile(rfile) for rfile in rfiles] + + +@pytest.mark.parametrize('other_rfiles', [ + [], + [_make_repofile("foo")], + _make_repofiles(["foo", "bar"]), +]) +@pytest.mark.parametrize( + "distro_id,skip_rhsm,distro_rfiles", + [ + ("rhel", True, []), + ("rhel", True, _make_repofiles(_RHEL_REPOFILES)), + ("rhel", False, _make_repofiles(_RHEL_REPOFILES)), + ("centos", True, []), + 
("centos", True, _make_repofiles(_CENTOS_REPOFILES)), + ] +) +def test_get_distro_repoids( + monkeypatch, distro_id, skip_rhsm, distro_rfiles, other_rfiles +): + """ + Tests that the correct repoids are returned + + This is a little ugly because on RHEL the get_distro_repoids function still + delegates to rhsm.get_available_repo_ids and also has different behavior + with skip_rhsm + """ + current_actor = CurrentActorMocked(release_id=distro_id if distro_id else 'rhel') + monkeypatch.setattr(api, 'current_actor', current_actor) + monkeypatch.setattr(rhsm, 'skip_rhsm', lambda: skip_rhsm) + + repofiles = other_rfiles + if distro_rfiles: + repofiles.extend(distro_rfiles) + monkeypatch.setattr(repofileutils, 'get_parsed_repofiles', lambda x: repofiles) + + distro_repoids = [] + for rfile in distro_rfiles: + distro_repoids.extend([repo.repoid for repo in rfile.data] if rfile else []) + distro_repoids.sort() + + monkeypatch.setattr(rhsm, 'get_available_repo_ids', lambda _: distro_repoids) + monkeypatch.setattr(os.path, 'exists', lambda f: f in _CENTOS_REPOFILES) + + class MockedContext: + @staticmethod + def full_path(path): + return path + + repoids = get_distro_repoids(MockedContext(), distro_id, '9', 'x86_64') + + if distro_id == 'rhel' and skip_rhsm: + assert repoids == [] + else: + assert sorted(repoids) == distro_repoids + + +@pytest.mark.parametrize('other_rfiles', [ + [], + [_make_repofile("foo")], + _make_repofiles(["foo", "bar"]), +]) +def test_get_distro_repoids_no_distro_repofiles(monkeypatch, other_rfiles): + """ + Test that exception is thrown when there are no known distro provided repofiles. 
+ """ + + def mocked_get_distro_repofiles(*args): + return [] + + monkeypatch.setattr(distro, '_get_distro_repofiles', mocked_get_distro_repofiles) + monkeypatch.setattr(repofileutils, "get_parsed_repofiles", lambda x: other_rfiles) + + with pytest.raises(StopActorExecutionError): + get_distro_repoids(None, 'somedistro', '8', 'x86_64') diff --git a/repos/system_upgrade/common/libraries/tests/test_gpg.py b/repos/system_upgrade/common/libraries/tests/test_gpg.py index 47617ad8..1394e60d 100644 --- a/repos/system_upgrade/common/libraries/tests/test_gpg.py +++ b/repos/system_upgrade/common/libraries/tests/test_gpg.py @@ -22,7 +22,7 @@ from leapp.models import GpgKey, InstalledRPM, RPM ('10.0', 'ga', 'almalinux', '../../files/distro/almalinux/rpm-gpg/10'), ]) def test_get_path_to_gpg_certs(monkeypatch, target, product_type, distro, exp): - current_actor = CurrentActorMocked(dst_ver=target, release_id=distro, + current_actor = CurrentActorMocked(dst_ver=target, dst_distro=distro, envars={'LEAPP_DEVEL_TARGET_PRODUCT_TYPE': product_type}) monkeypatch.setattr(api, 'current_actor', current_actor) diff --git a/repos/system_upgrade/common/libraries/tests/test_grub.py b/repos/system_upgrade/common/libraries/tests/test_grub.py index 9bc9f682..08dc6895 100644 --- a/repos/system_upgrade/common/libraries/tests/test_grub.py +++ b/repos/system_upgrade/common/libraries/tests/test_grub.py @@ -23,7 +23,7 @@ INVALID_DD = b'Nothing to see here!' 
CUR_DIR = os.path.dirname(os.path.abspath(__file__)) -# pylint: disable=E501 +# pylint: disable=line-too-long # flake8: noqa: E501 EFIBOOTMGR_OUTPUT = r""" BootCurrent: 0006 @@ -98,7 +98,7 @@ def raise_call_error(args=None): ) -class RunMocked(object): +class RunMocked: def __init__(self, raise_err=False, boot_on_raid=False): self.called = 0 diff --git a/repos/system_upgrade/common/libraries/tests/test_mdraid.py b/repos/system_upgrade/common/libraries/tests/test_mdraid.py index d536beec..8668be8c 100644 --- a/repos/system_upgrade/common/libraries/tests/test_mdraid.py +++ b/repos/system_upgrade/common/libraries/tests/test_mdraid.py @@ -20,7 +20,7 @@ def raise_call_error(args=None): ) -class RunMocked(object): +class RunMocked: def __init__(self, raise_err=False): self.called = 0 diff --git a/repos/system_upgrade/common/libraries/tests/test_persistentnetnames_library.py b/repos/system_upgrade/common/libraries/tests/test_persistentnetnames_library.py index 2c399888..74aa08fa 100644 --- a/repos/system_upgrade/common/libraries/tests/test_persistentnetnames_library.py +++ b/repos/system_upgrade/common/libraries/tests/test_persistentnetnames_library.py @@ -3,7 +3,7 @@ from leapp.libraries.common.testutils import produce_mocked from leapp.libraries.stdlib import api -class AttributesTest(object): +class AttributesTest: def __init__(self): self.attributes = { 'address': b'fa:16:3e:cd:26:5a' @@ -15,7 +15,7 @@ class AttributesTest(object): raise KeyError -class DeviceTest(object): +class DeviceTest: def __init__(self): self.dict_data = { 'ID_NET_DRIVER': 'virtio_net', diff --git a/repos/system_upgrade/common/libraries/tests/test_rhsm.py b/repos/system_upgrade/common/libraries/tests/test_rhsm.py index b643cd0d..b0b7df79 100644 --- a/repos/system_upgrade/common/libraries/tests/test_rhsm.py +++ b/repos/system_upgrade/common/libraries/tests/test_rhsm.py @@ -62,7 +62,7 @@ RHSM_ENABLED_REPOS = [ ] -class IsolatedActionsMocked(object): +class IsolatedActionsMocked: def 
__init__(self, call_stdout=None, raise_err=False): self.commands_called = [] self.call_return = {'stdout': call_stdout, 'stderr': None} @@ -73,7 +73,8 @@ class IsolatedActionsMocked(object): # A map from called commands to their mocked output self.mocked_command_call_outputs = dict() - def is_isolated(self): + @staticmethod + def is_isolated(): return True def call(self, cmd, *args, **dummy_kwargs): @@ -93,7 +94,8 @@ class IsolatedActionsMocked(object): 'exit_code': exit_code } - def full_path(self, path): + @staticmethod + def full_path(path): return path def remove(self, path): @@ -423,7 +425,7 @@ def test_is_registered_error(context_mocked): def test_set_container_mode(monkeypatch, context_mocked): - actor = CurrentActorMocked(release_id='rhel') + actor = CurrentActorMocked(dst_distro='rhel') monkeypatch.setattr(api, 'current_actor', actor) monkeypatch.setattr( os.path, "exists", lambda path: path in ("/etc/rhsm", "/etc/pki/entitlement") @@ -438,7 +440,7 @@ def test_set_container_mode(monkeypatch, context_mocked): def test_set_container_mode_nonrhel_skip(monkeypatch, context_mocked): - actor = CurrentActorMocked(release_id='notrhel') + actor = CurrentActorMocked(dst_distro='notrhel') monkeypatch.setattr(api, 'current_actor', actor) rhsm.set_container_mode(context_mocked) diff --git a/repos/system_upgrade/common/libraries/testutils.py b/repos/system_upgrade/common/libraries/testutils.py index 3e145d91..0a56d698 100644 --- a/repos/system_upgrade/common/libraries/testutils.py +++ b/repos/system_upgrade/common/libraries/testutils.py @@ -10,7 +10,7 @@ from leapp.models import EnvVar, IPUSourceToPossibleTargets from leapp.utils.deprecation import deprecated -class produce_mocked(object): +class produce_mocked: def __init__(self): self.called = 0 self.model_instances = [] @@ -20,7 +20,7 @@ class produce_mocked(object): self.model_instances.extend(list(model_instances)) -class create_report_mocked(object): +class create_report_mocked: def __init__(self): self.called = 0 
self.reports = [] @@ -38,7 +38,7 @@ class create_report_mocked(object): return {} -class logger_mocked(object): +class logger_mocked: def __init__(self): self.dbgmsg = [] self.infomsg = [] @@ -77,13 +77,28 @@ def _make_default_config(actor_config_schema): # Note: The constructor of the following class takes in too many arguments (R0913). A builder-like # pattern would be nice here. Ideally, the builder should actively prevent the developer from setting fields # that do not affect actor's behavior in __setattr__. -class CurrentActorMocked(object): # pylint:disable=R0904 - def __init__(self, arch=architecture.ARCH_X86_64, envars=None, # pylint:disable=R0913 - kernel='3.10.0-957.43.1.el7.x86_64', - release_id='rhel', src_ver='7.8', dst_ver='8.1', msgs=None, flavour='default', config=None, - virtual_source_version=None, virtual_target_version=None, - supported_upgrade_paths=None): +class CurrentActorMocked: # pylint:disable=R0904 + + def __init__( # pylint: disable=too-many-arguments + self, + arch=architecture.ARCH_X86_64, + envars=None, # pylint:disable=R0913 + kernel="3.10.0-957.43.1.el7.x86_64", + release_id="rhel", + src_ver="8.10", + dst_ver="9.6", + msgs=None, + flavour="default", + config=None, + virtual_source_version=None, + virtual_target_version=None, + supported_upgrade_paths=None, + src_distro=None, + dst_distro=None, + ): """ + Note: src_distro and release_id specify the same thing, but src_distro takes priority. + :param List[IPUSourceToPossibleTargets] supported_upgrade_paths: List of supported upgrade paths. 
""" envarsList = [EnvVar(name=k, value=v) for k, v in envars.items()] if envars else [] @@ -92,7 +107,11 @@ class CurrentActorMocked(object): # pylint:disable=R0904 version_values = [src_ver, dst_ver, virtual_source_version or src_ver, virtual_target_version or dst_ver] version = namedtuple('Version', version_fields)(*version_values) - release = namedtuple('OS_release', ['release_id', 'version_id'])(release_id, src_ver) + release = namedtuple('OS_release', ['release_id', 'version_id'])(src_distro or release_id, src_ver) + + distro = namedtuple("Distro", ["source", "target"])( + src_distro or release_id, dst_distro or release_id + ) self._common_folder = '../../files' self._common_tools_folder = '../../tools' @@ -103,9 +122,18 @@ class CurrentActorMocked(object): # pylint:disable=R0904 supported_upgrade_paths = [IPUSourceToPossibleTargets(source_version=src_ver, target_versions=[dst_ver])] ipu_conf_fields = ['architecture', 'kernel', 'leapp_env_vars', 'os_release', - 'version', 'flavour', 'supported_upgrade_paths'] + 'version', 'flavour', 'supported_upgrade_paths', 'distro'] config_type = namedtuple('configuration', ipu_conf_fields) - self.configuration = config_type(arch, kernel, envarsList, release, version, flavour, supported_upgrade_paths) + self.configuration = config_type( + arch, + kernel, + envarsList, + release, + version, + flavour, + supported_upgrade_paths, + distro, + ) self._msgs = msgs or [] self.config = {} if config is None else config @@ -120,7 +148,7 @@ class CurrentActorMocked(object): # pylint:disable=R0904 return os.path.join(self._common_tools_folder, name) def consume(self, model): - return iter(filter( # pylint:disable=W0110,W1639 + return iter(filter( lambda msg: isinstance(msg, model), self._msgs )) diff --git a/repos/system_upgrade/common/models/activevendorlist.py b/repos/system_upgrade/common/models/activevendorlist.py new file mode 100644 index 00000000..de4056fb --- /dev/null +++ 
b/repos/system_upgrade/common/models/activevendorlist.py @@ -0,0 +1,7 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class ActiveVendorList(Model): + topic = VendorTopic + data = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/models/dnfpluginpathdetected.py b/repos/system_upgrade/common/models/dnfpluginpathdetected.py new file mode 100644 index 00000000..c5474857 --- /dev/null +++ b/repos/system_upgrade/common/models/dnfpluginpathdetected.py @@ -0,0 +1,14 @@ +from leapp.models import fields, Model +from leapp.topics import SystemInfoTopic + + +class DnfPluginPathDetected(Model): + """ + This model contains information about whether DNF pluginpath option is configured in /etc/dnf/dnf.conf. + """ + topic = SystemInfoTopic + + is_pluginpath_detected = fields.Boolean() + """ + True if pluginpath option is found in /etc/dnf/dnf.conf, False otherwise. + """ diff --git a/repos/system_upgrade/common/models/ipuconfig.py b/repos/system_upgrade/common/models/ipuconfig.py index 379ac13f..a787e4d5 100644 --- a/repos/system_upgrade/common/models/ipuconfig.py +++ b/repos/system_upgrade/common/models/ipuconfig.py @@ -64,6 +64,16 @@ class IPUSourceToPossibleTargets(Model): """List of defined target system versions for the `source_version` system.""" +class Distro(Model): + topic = SystemInfoTopic + + source = fields.String() + """Release id of the source system (e.g. rhel, centos, almalinux).""" + + target = fields.String() + """Release id of the target system (e.g. rhel, centos, almalinux).""" + + class IPUConfig(Model): """ IPU workflow configuration model @@ -96,3 +106,6 @@ class IPUConfig(Model): The list contains only upgrade paths for the `flavour` of the source system. 
""" + + distro = fields.Model(Distro) + """Release IDs of the source and target system.""" diff --git a/repos/system_upgrade/common/models/repositoriesmap.py b/repos/system_upgrade/common/models/repositoriesmap.py index 842cd807..fc740606 100644 --- a/repos/system_upgrade/common/models/repositoriesmap.py +++ b/repos/system_upgrade/common/models/repositoriesmap.py @@ -96,3 +96,4 @@ class RepositoriesMapping(Model): mapping = fields.List(fields.Model(RepoMapEntry), default=[]) repositories = fields.List(fields.Model(PESIDRepositoryEntry), default=[]) + vendor = fields.Nullable(fields.String()) diff --git a/repos/system_upgrade/common/models/rpmtransactiontasks.py b/repos/system_upgrade/common/models/rpmtransactiontasks.py index 7e2870d0..05d4e941 100644 --- a/repos/system_upgrade/common/models/rpmtransactiontasks.py +++ b/repos/system_upgrade/common/models/rpmtransactiontasks.py @@ -10,6 +10,7 @@ class RpmTransactionTasks(Model): to_keep = fields.List(fields.String(), default=[]) to_remove = fields.List(fields.String(), default=[]) to_upgrade = fields.List(fields.String(), default=[]) + to_reinstall = fields.List(fields.String(), default=[]) modules_to_enable = fields.List(fields.Model(Module), default=[]) modules_to_reset = fields.List(fields.Model(Module), default=[]) diff --git a/repos/system_upgrade/common/models/targetrepositories.py b/repos/system_upgrade/common/models/targetrepositories.py index 02c6c5e5..e1d44d80 100644 --- a/repos/system_upgrade/common/models/targetrepositories.py +++ b/repos/system_upgrade/common/models/targetrepositories.py @@ -1,4 +1,5 @@ from leapp.models import fields, Model +from leapp.reporting import deprecated from leapp.topics import TransactionTopic @@ -11,33 +12,66 @@ class UsedTargetRepository(TargetRepositoryBase): pass +@deprecated( + since="2025-07-23", + message="This model is deprecated, use DistroTargetRepository instead.", +) class RHELTargetRepository(TargetRepositoryBase): pass +class 
DistroTargetRepository(TargetRepositoryBase): + pass + + class CustomTargetRepository(TargetRepositoryBase): name = fields.Nullable(fields.String()) baseurl = fields.Nullable(fields.String()) enabled = fields.Boolean(default=True) +class VendorCustomTargetRepositoryList(Model): + topic = TransactionTopic + vendor = fields.String() + repos = fields.List(fields.Model(CustomTargetRepository)) + + class TargetRepositories(Model): """ Repositories supposed to be used during the IPU process The list of the actually used repositories could be just subset - of these repositoies. In case of `custom_repositories`, all such repositories + of these repositories. In case of `custom_repositories`, all such repositories must be available otherwise the upgrade is inhibited. But in case of - `rhel_repos`, only BaseOS and Appstream repos are required now. If others + `distro_repos`, only BaseOS and Appstream repos are required now. If others are missing, upgrade can still continue. + + Note: `rhel_repos` are deprecated, use `distro_repos` instead. """ topic = TransactionTopic + + # DEPRECATED: this has been superseded by distro_repos rhel_repos = fields.List(fields.Model(RHELTargetRepository)) """ Expected target YUM RHEL repositories provided via RHSM + DEPRECATED - use distro_repos instead. + + These repositories are stored inside /etc/yum.repos.d/redhat.repo and + are expected to be used based on the provided repositories mapping. + """ + + distro_repos = fields.List(fields.Model(DistroTargetRepository)) + """ + Expected target DNF repositories provided by the distribution. + + On RHEL these are the repositories provided via RHSM. These repositories are stored inside /etc/yum.repos.d/redhat.repo and are expected to be used based on the provided repositories mapping. + + On other distributions, such as Centos Stream these are repositories + in /etc/yum.repos.d/ that are provided by the distribution and are expected + to be used based on the provided repositories mapping. 
""" custom_repos = fields.List(fields.Model(CustomTargetRepository), default=[]) diff --git a/repos/system_upgrade/common/models/thirdpartytagetpythonmodules.py b/repos/system_upgrade/common/models/thirdpartytagetpythonmodules.py new file mode 100644 index 00000000..105e9f2c --- /dev/null +++ b/repos/system_upgrade/common/models/thirdpartytagetpythonmodules.py @@ -0,0 +1,25 @@ +from leapp.models import fields, Model +from leapp.topics import SystemInfoTopic + + +class ThirdPartyTargetPythonModules(Model): + """ + Information about third-party target Python modules found on system. + + """ + topic = SystemInfoTopic + + target_python = fields.String() + """ + Target system Python version. + """ + + third_party_modules = fields.List(fields.String(), default=[]) + """ + List of third-party target Python modules found on the source system. Empty list if no modules found. + """ + + third_party_rpm_names = fields.List(fields.String(), default=[]) + """ + List of third-party RPMs found on the source system. Empty list if no modules found. + """ diff --git a/repos/system_upgrade/common/models/vendorsignatures.py b/repos/system_upgrade/common/models/vendorsignatures.py new file mode 100644 index 00000000..f456aec5 --- /dev/null +++ b/repos/system_upgrade/common/models/vendorsignatures.py @@ -0,0 +1,8 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class VendorSignatures(Model): + topic = VendorTopic + vendor = fields.String() + sigs = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/models/vendorsourcerepos.py b/repos/system_upgrade/common/models/vendorsourcerepos.py new file mode 100644 index 00000000..b7a219b4 --- /dev/null +++ b/repos/system_upgrade/common/models/vendorsourcerepos.py @@ -0,0 +1,12 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class VendorSourceRepos(Model): + """ + This model contains the data on all source repositories associated with a specific vendor. 
+ Its data is used to determine whether the vendor should be included into the upgrade process. + """ + topic = VendorTopic + vendor = fields.String() + source_repoids = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/topics/vendortopic.py b/repos/system_upgrade/common/topics/vendortopic.py new file mode 100644 index 00000000..014b7afb --- /dev/null +++ b/repos/system_upgrade/common/topics/vendortopic.py @@ -0,0 +1,5 @@ +from leapp.topics import Topic + + +class VendorTopic(Topic): + name = 'vendor_topic' diff --git a/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/libraries/addupgradebootloader.py b/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/libraries/addupgradebootloader.py index c076fe6b..2455a2f6 100644 --- a/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/libraries/addupgradebootloader.py +++ b/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/libraries/addupgradebootloader.py @@ -14,6 +14,22 @@ from leapp.libraries.common.grub import ( from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import ArmWorkaroundEFIBootloaderInfo, EFIBootEntry, TargetUserSpaceInfo +dirname = { + 'AlmaLinux': 'almalinux', + 'CentOS Linux': 'centos', + 'CentOS Stream': 'centos', + 'Oracle Linux Server': 'redhat', + 'Red Hat Enterprise Linux': 'redhat', + 'Rocky Linux': 'rocky', + 'Scientific Linux': 'redhat', +} + +with open('/etc/system-release', 'r') as sr: + release_line = next(line for line in sr if 'release' in line) + distro = release_line.split(' release ', 1)[0] + +distro_dir = dirname.get(distro, 'default') + UPGRADE_EFI_ENTRY_LABEL = 'Leapp Upgrade' ARM_SHIM_PACKAGE_NAME = 'shim-aa64' @@ -21,7 +37,7 @@ ARM_GRUB_PACKAGE_NAME = 'grub2-efi-aa64' EFI_MOUNTPOINT = '/boot/efi/' LEAPP_EFIDIR_CANONICAL_PATH = os.path.join(EFI_MOUNTPOINT, 'EFI/leapp/') -RHEL_EFIDIR_CANONICAL_PATH = os.path.join(EFI_MOUNTPOINT, 'EFI/redhat/') +RHEL_EFIDIR_CANONICAL_PATH = 
os.path.join(EFI_MOUNTPOINT, 'EFI/', distro_dir) UPGRADE_BLS_DIR = '/boot/upgrade-loader' CONTAINER_DOWNLOAD_DIR = '/tmp_pkg_download_dir' diff --git a/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/tests/test_addarmbootloaderworkaround.py b/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/tests/test_addarmbootloaderworkaround.py index 4f990e00..8c41cc7b 100644 --- a/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/tests/test_addarmbootloaderworkaround.py +++ b/repos/system_upgrade/el8toel9/actors/addarmbootloaderworkaround/tests/test_addarmbootloaderworkaround.py @@ -35,7 +35,7 @@ class MockEFIBootInfo: } -class IsolatedActionsMocked(object): +class IsolatedActionsMocked: def __init__(self): self.copytree_from_calls = [] diff --git a/repos/system_upgrade/el8toel9/actors/checkvdo/tests/unit_test_checkvdo.py b/repos/system_upgrade/el8toel9/actors/checkvdo/tests/unit_test_checkvdo.py index 865e036f..d7cfb4fb 100644 --- a/repos/system_upgrade/el8toel9/actors/checkvdo/tests/unit_test_checkvdo.py +++ b/repos/system_upgrade/el8toel9/actors/checkvdo/tests/unit_test_checkvdo.py @@ -15,13 +15,15 @@ from leapp.utils.report import is_inhibitor # Mock actor base for CheckVdo tests. class MockedActorCheckVdo(CurrentActorMocked): - def get_vdo_answer(self): + @staticmethod + def get_vdo_answer(): return False # Mock actor for all_vdo_converted dialog response. 
class MockedActorAllVdoConvertedTrue(MockedActorCheckVdo): - def get_vdo_answer(self): + @staticmethod + def get_vdo_answer(): return True diff --git a/repos/system_upgrade/el8toel9/actors/cloud/ensurevalidgrubcfghybrid/tests/test_ensurevalidgrubcfghybrid.py b/repos/system_upgrade/el8toel9/actors/cloud/ensurevalidgrubcfghybrid/tests/test_ensurevalidgrubcfghybrid.py index 3ba46cb5..72356172 100644 --- a/repos/system_upgrade/el8toel9/actors/cloud/ensurevalidgrubcfghybrid/tests/test_ensurevalidgrubcfghybrid.py +++ b/repos/system_upgrade/el8toel9/actors/cloud/ensurevalidgrubcfghybrid/tests/test_ensurevalidgrubcfghybrid.py @@ -19,7 +19,7 @@ def raise_call_error(args=None): ) -class run_mocked(object): +class run_mocked: def __init__(self, raise_err=False): self.called = 0 self.args = [] diff --git a/repos/system_upgrade/el8toel9/actors/grub2mkconfigonppc64/tests/test_grub2mkconfigonppc64.py b/repos/system_upgrade/el8toel9/actors/grub2mkconfigonppc64/tests/test_grub2mkconfigonppc64.py index afa999d0..9d5da4b2 100644 --- a/repos/system_upgrade/el8toel9/actors/grub2mkconfigonppc64/tests/test_grub2mkconfigonppc64.py +++ b/repos/system_upgrade/el8toel9/actors/grub2mkconfigonppc64/tests/test_grub2mkconfigonppc64.py @@ -12,7 +12,7 @@ from leapp.models import DefaultGrub, DefaultGrubInfo, FirmwareFacts CUR_DIR = os.path.dirname(os.path.abspath(__file__)) -class MockedRun(object): +class MockedRun: def __init__(self): self.commands = [] @@ -36,7 +36,7 @@ def test_run_grub2mkconfig(monkeypatch, cmd_issued): 'menuentry "Red Hat Enterprise Linux Server (3.10.0-1160.45.1.el7.x86_64) 7.9 (Maipo)"' ) - class _mock_open(object): + class _mock_open: def __init__(self, path, mode): input_ = grub2_cfg_non_bls_excerpt if cmd_issued else grub2_cfg_bls_excerpt self._fp = StringIO(input_) diff --git a/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py b/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py index 
e5b3f06c..5b1cef50 100644 --- a/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py +++ b/repos/system_upgrade/el8toel9/actors/multipathconfread/libraries/multipathconfread.py @@ -68,12 +68,16 @@ def _parse_config_dir(config_dir): res.append(conf) except OSError as e: if e.errno == errno.ENOENT: - api.current_logger().debug('Multipath conf directory ' + - '"{}" doesn\'t exist'.format(config_dir)) + api.current_logger().debug( + 'Multipath conf directory "%s" doesn\'t exist', + config_dir + ) else: - api.current_logger().warning('Failed to read multipath config ' + - 'directory ' + - '"{}": {}'.format(config_dir, e)) + api.current_logger().warning( + 'Failed to read multipath config directory "%s": %s', + config_dir, + e + ) return res diff --git a/repos/system_upgrade/el8toel9/actors/networkmanagerconnectionscanner/tests/unit_test_networkmanagerconnectionscanner.py b/repos/system_upgrade/el8toel9/actors/networkmanagerconnectionscanner/tests/unit_test_networkmanagerconnectionscanner.py index 46af07c1..7558b307 100644 --- a/repos/system_upgrade/el8toel9/actors/networkmanagerconnectionscanner/tests/unit_test_networkmanagerconnectionscanner.py +++ b/repos/system_upgrade/el8toel9/actors/networkmanagerconnectionscanner/tests/unit_test_networkmanagerconnectionscanner.py @@ -1,4 +1,5 @@ import errno +import sys import textwrap import pytest @@ -57,7 +58,16 @@ def test_no_conf(monkeypatch): assert not api.produce.called -@pytest.mark.skipif(not nmconnscanner.libnm_available, reason="NetworkManager g-ir not installed") +@pytest.mark.skipif( + sys.version_info.major != 3 or sys.version_info.minor != 6, + # On Python > 3.6 the GLib and NM libraries apparently behave differently and + # the test fails. 
Let's skip it since the actor it's only ever run with + # Python3.6 (el8toel9 repo and FactsPhase) + reason="Only runs on Python 3.6", +) +@pytest.mark.skipif( + not nmconnscanner.libnm_available, reason="NetworkManager g-ir not installed" +) def test_nm_conn(monkeypatch): """ Check a basic keyfile diff --git a/repos/system_upgrade/el8toel9/actors/nisscanner/libraries/nisscan.py b/repos/system_upgrade/el8toel9/actors/nisscanner/libraries/nisscan.py index 9910f748..ae51c69d 100644 --- a/repos/system_upgrade/el8toel9/actors/nisscanner/libraries/nisscan.py +++ b/repos/system_upgrade/el8toel9/actors/nisscanner/libraries/nisscan.py @@ -14,7 +14,8 @@ class NISScanLibrary: Helper library for NISScan actor. """ - def client_has_non_default_configuration(self): + @staticmethod + def client_has_non_default_configuration(): """ Check for any significant ypbind configuration lines in .conf file. """ @@ -31,7 +32,8 @@ class NISScanLibrary: return True return False - def server_has_non_default_configuration(self): + @staticmethod + def server_has_non_default_configuration(): """ Check for any additional (not default) files in ypserv DIR. 
""" diff --git a/repos/system_upgrade/el8toel9/actors/opensshdropindirectory/tests/test_opensshdropindirectory_prepend.py b/repos/system_upgrade/el8toel9/actors/opensshdropindirectory/tests/test_opensshdropindirectory_prepend.py index bccadf4b..a6665313 100644 --- a/repos/system_upgrade/el8toel9/actors/opensshdropindirectory/tests/test_opensshdropindirectory_prepend.py +++ b/repos/system_upgrade/el8toel9/actors/opensshdropindirectory/tests/test_opensshdropindirectory_prepend.py @@ -3,7 +3,7 @@ import pytest from leapp.libraries.actor.opensshdropindirectory import prepend_string_if_not_present -class MockFile(object): +class MockFile: def __init__(self, path, content=None): self.path = path self.content = content diff --git a/repos/system_upgrade/el8toel9/actors/opensslconfigcheck/libraries/opensslconfigcheck.py b/repos/system_upgrade/el8toel9/actors/opensslconfigcheck/libraries/opensslconfigcheck.py index f36a62e1..07c1b22f 100644 --- a/repos/system_upgrade/el8toel9/actors/opensslconfigcheck/libraries/opensslconfigcheck.py +++ b/repos/system_upgrade/el8toel9/actors/opensslconfigcheck/libraries/opensslconfigcheck.py @@ -115,7 +115,8 @@ def _openssl_reachable_key(config, key, value=None): return False -# pylint: disable=too-many-return-statements -- could not simplify more +# pylint: disable=too-many-return-statements +# could not simplify more def _openssl_reachable_path(config, path, value=None): """ Check if the given path is reachable in OpenSSL configuration diff --git a/repos/system_upgrade/el8toel9/actors/opensslproviders/tests/test_add_provider.py b/repos/system_upgrade/el8toel9/actors/opensslproviders/tests/test_add_provider.py index 1c1cd209..78f2e9c6 100644 --- a/repos/system_upgrade/el8toel9/actors/opensslproviders/tests/test_add_provider.py +++ b/repos/system_upgrade/el8toel9/actors/opensslproviders/tests/test_add_provider.py @@ -64,7 +64,7 @@ def test_append(lines, add, comment, expected): assert r == expected -class MockFile(object): +class MockFile: 
def __init__(self, content=None): self.content = content self.error = False diff --git a/repos/system_upgrade/el8toel9/actors/removeupgradeefientry/libraries/removeupgradeefientry.py b/repos/system_upgrade/el8toel9/actors/removeupgradeefientry/libraries/removeupgradeefientry.py index daa7b2ca..dd604d8b 100644 --- a/repos/system_upgrade/el8toel9/actors/removeupgradeefientry/libraries/removeupgradeefientry.py +++ b/repos/system_upgrade/el8toel9/actors/removeupgradeefientry/libraries/removeupgradeefientry.py @@ -5,9 +5,25 @@ from leapp.exceptions import StopActorExecutionError from leapp.libraries.stdlib import api, CalledProcessError, run from leapp.models import ArmWorkaroundEFIBootloaderInfo +dirname = { + 'AlmaLinux': 'almalinux', + 'CentOS Linux': 'centos', + 'CentOS Stream': 'centos', + 'Oracle Linux Server': 'redhat', + 'Red Hat Enterprise Linux': 'redhat', + 'Rocky Linux': 'rocky', + 'Scientific Linux': 'redhat', +} + +with open('/etc/system-release', 'r') as sr: + release_line = next(line for line in sr if 'release' in line) + distro = release_line.split(' release ', 1)[0] + +distro_dir = dirname.get(distro, 'default') + EFI_MOUNTPOINT = '/boot/efi/' LEAPP_EFIDIR_CANONICAL_PATH = os.path.join(EFI_MOUNTPOINT, 'EFI/leapp/') -RHEL_EFIDIR_CANONICAL_PATH = os.path.join(EFI_MOUNTPOINT, 'EFI/redhat/') +RHEL_EFIDIR_CANONICAL_PATH = os.path.join(EFI_MOUNTPOINT, 'EFI/', distro_dir) def get_workaround_efi_info(): diff --git a/repos/system_upgrade/el8toel9/actors/rocecheck/tests/unit_test_rocecheck.py b/repos/system_upgrade/el8toel9/actors/rocecheck/tests/unit_test_rocecheck.py index a36cc8ed..b5511d17 100644 --- a/repos/system_upgrade/el8toel9/actors/rocecheck/tests/unit_test_rocecheck.py +++ b/repos/system_upgrade/el8toel9/actors/rocecheck/tests/unit_test_rocecheck.py @@ -91,7 +91,10 @@ def test_roce_old_rhel(monkeypatch, msgs, version): monkeypatch.setattr(reporting, "create_report", create_report_mocked()) rocecheck.process() assert reporting.create_report.called - 
assert any(['version of RHEL' in report['title'] for report in reporting.create_report.reports]) + assert any( + 'version of RHEL' in report['title'] + for report in reporting.create_report.reports + ) # NOTE: what about the situation when net.naming-scheme is configured multiple times??? @@ -113,4 +116,7 @@ def test_roce_wrong_configuration(monkeypatch, msgs, version): monkeypatch.setattr(reporting, "create_report", create_report_mocked()) rocecheck.process() assert reporting.create_report.called - assert any(['RoCE configuration' in report['title'] for report in reporting.create_report.reports]) + assert any( + 'RoCE configuration' in report['title'] + for report in reporting.create_report.reports + ) diff --git a/repos/system_upgrade/el8toel9/actors/scanblacklistca/tests/unit_test_scanblacklistca.py b/repos/system_upgrade/el8toel9/actors/scanblacklistca/tests/unit_test_scanblacklistca.py index 4eab6df5..c2ae4639 100644 --- a/repos/system_upgrade/el8toel9/actors/scanblacklistca/tests/unit_test_scanblacklistca.py +++ b/repos/system_upgrade/el8toel9/actors/scanblacklistca/tests/unit_test_scanblacklistca.py @@ -14,7 +14,7 @@ TESTLINK = "linkca.cert" SUBDIR = "casdir" -class MockedGetFiles(object): +class MockedGetFiles: def __init__(self, files=None, error=None): self.called = 0 self.files = files @@ -33,7 +33,7 @@ class MockedGetFiles(object): return ret -class MockedGetDirs(object): +class MockedGetDirs: def __init__(self, dirs): self.called = 0 self.dirs = dirs diff --git a/repos/system_upgrade/el8toel9/actors/sssdfacts/libraries/sssdfacts8to9.py b/repos/system_upgrade/el8toel9/actors/sssdfacts/libraries/sssdfacts8to9.py index a2537eeb..1482373e 100644 --- a/repos/system_upgrade/el8toel9/actors/sssdfacts/libraries/sssdfacts8to9.py +++ b/repos/system_upgrade/el8toel9/actors/sssdfacts/libraries/sssdfacts8to9.py @@ -1,7 +1,7 @@ from leapp.models import SSSDConfig8to9 -class SSSDFactsLibrary(object): +class SSSDFactsLibrary: """ Helper library from SSSDFacts actor to 
allow unit testing. """ diff --git a/repos/system_upgrade/el9toel10/actors/sssd/sssdchecks/libraries/sssdchecks.py b/repos/system_upgrade/el9toel10/actors/sssd/sssdchecks/libraries/sssdchecks.py index 0a86fa7b..cb95026c 100644 --- a/repos/system_upgrade/el9toel10/actors/sssd/sssdchecks/libraries/sssdchecks.py +++ b/repos/system_upgrade/el9toel10/actors/sssd/sssdchecks/libraries/sssdchecks.py @@ -15,8 +15,8 @@ def check_config(model): 'SSSD\'s sss_ssh_knownhostsproxy tool is replaced by the more ' 'reliable sss_ssh_knownhosts tool. SSH\'s configuration will be updated ' 'to reflect this by updating every mention of sss_ssh_knownhostsproxy by ' - 'the corresponding mention of sss_ssh_knownhosts, even those commented out.\n' - 'SSSD\'s ssh service will be enabled if not already done.\n' + 'the corresponding mention of sss_ssh_knownhosts, even those commented out. ' + 'SSSD\'s ssh service will be enabled if not already done.\n\n' 'The following files will be updated:{}{}'.format( FMT_LIST_SEPARATOR, FMT_LIST_SEPARATOR.join(model.sssd_config_files + model.ssh_config_files) diff --git a/repos/system_upgrade/el9toel10/actors/sssd/sssdfacts/libraries/sssdfacts.py b/repos/system_upgrade/el9toel10/actors/sssd/sssdfacts/libraries/sssdfacts.py index 0ae9d93f..7d343229 100644 --- a/repos/system_upgrade/el9toel10/actors/sssd/sssdfacts/libraries/sssdfacts.py +++ b/repos/system_upgrade/el9toel10/actors/sssd/sssdfacts/libraries/sssdfacts.py @@ -19,7 +19,10 @@ def _does_file_contain_expression(file_path, expression): ) return False except OSError as e: - raise StopActorExecutionError('Could not open file ' + file_path, details={'details': str(e)}) + raise StopActorExecutionError( + 'Could not open configuration file', + details={'details': 'Couldn\'t open {} file with error: {}.'.format(file_path, str(e))} + ) def _look_for_files(expression: str, path_list: list[str]) -> list[str]: diff --git a/repos/system_upgrade/el9toel10/actors/sssd/sssdupdate/libraries/sssdupdate.py
b/repos/system_upgrade/el9toel10/actors/sssd/sssdupdate/libraries/sssdupdate.py index 6d745ead..5b96bcc6 100644 --- a/repos/system_upgrade/el9toel10/actors/sssd/sssdupdate/libraries/sssdupdate.py +++ b/repos/system_upgrade/el9toel10/actors/sssd/sssdupdate/libraries/sssdupdate.py @@ -1,7 +1,7 @@ import os import re -from leapp.exceptions import StopActorExecutionError +from leapp.libraries.stdlib import api def _process_knownhosts(line: str) -> str: @@ -29,30 +29,26 @@ def _process_enable_svc(line: str) -> str: def _update_file(filename, process_function): - newname = filename + '.new' - oldname = filename + '.old' + newname = '{}.leappnew'.format(filename) + oldname = '{}.leappsave'.format(filename) try: - with open(filename, 'r') as input_file, open(newname, 'x') as output_file: + with open(filename, 'r') as input_file, open(newname, 'w') as output_file: istat = os.fstat(input_file.fileno()) os.fchmod(output_file.fileno(), istat.st_mode) for line in input_file: try: output_file.write(process_function(line)) except OSError as e: - raise StopActorExecutionError('Failed to write to {}'.format(newname), - details={'details': str(e)}) + api.current_logger().warning('Failed to write to {}'.format(newname), details={'details': str(e)}) - except FileExistsError as e: - raise StopActorExecutionError('Temporary file already exists: {}'.format(newname), - details={'details': str(e)}) except OSError as e: try: os.unlink(newname) except FileNotFoundError: pass - raise StopActorExecutionError('Failed to access the required files', details={'details': str(e)}) + api.current_logger().error('Failed to access the required files', details={'details': str(e)}) - # Let's make sure the old configuration is preserverd if something goes wrong + # Let's make sure the old configuration is preserved if something goes wrong os.replace(filename, oldname) os.replace(newname, filename) os.unlink(oldname) diff --git a/requirements.txt b/requirements.txt index a1bb4725..3c79b23d 100644 --- 
a/requirements.txt +++ b/requirements.txt @@ -14,3 +14,4 @@ git+https://github.com/oamg/leapp requests # pinning a py27 troublemaking transitive dependency lazy-object-proxy==1.5.2; python_version < '3' +rpm diff --git a/utils/actor_path.py b/utils/actor_path.py index 5c53a16a..3c61ce79 100755 --- a/utils/actor_path.py +++ b/utils/actor_path.py @@ -1,7 +1,10 @@ import logging +import os import sys -from leapp.repository.scan import find_and_scan_repositories +from leapp.repository.manager import RepositoryManager +from leapp.repository.scan import _resolve_repository_links, find_and_scan_repositories, scan_repo + def err_exit(): # We want to be sure that `make test` (test_no_lint) will stop when expected @@ -10,22 +13,36 @@ def err_exit(): sys.stdout.write('ERROR:__read_error_messages_above_this_one_on_stderr__') sys.exit(1) + def main(): logging.basicConfig(level=logging.INFO, filename='/dev/null') logger = logging.getLogger('run_pytest.py') BASE_REPO = 'repos' - repos = find_and_scan_repositories(BASE_REPO, include_locals=True) - repos.load() - if len(sys.argv) > 1: - actors = repos._lookup_actors(sys.argv[1]) - if not actors: - sys.stderr.write('ERROR: No actor found for search "{}"\n'.format(sys.argv[1])) - err_exit() - print(' '.join([actor.full_path for actor in actors])) + SYSUPG_REPO = os.path.join(BASE_REPO, 'system_upgrade') + + if len(sys.argv) == 2: + manager = find_and_scan_repositories(BASE_REPO, include_locals=True) + manager.load() + elif len(sys.argv) == 3: + repos = sys.argv[2].split(',') + # TODO: it would be nicer to have some function in the framework for + # the scanning and resolving done below + manager = RepositoryManager() + for repo in repos: + manager.add_repo(scan_repo(os.path.join(SYSUPG_REPO, repo))) + _resolve_repository_links(manager=manager, include_locals=True) + manager.load() else: sys.stderr.write('ERROR: Missing commandline argument\n') + sys.stderr.write('Usage: actor_path.py [repositories]\n') + err_exit() + + actors = 
manager._lookup_actors(sys.argv[1]) + if not actors: + sys.stderr.write('ERROR: No actor found for search "{}"\n'.format(sys.argv[1])) err_exit() + print(' '.join([actor.full_path for actor in actors])) if __name__ == '__main__': diff --git a/utils/container-builds/Containerfile.centos7 b/utils/container-builds/Containerfile.centos7 deleted file mode 100644 index af00eddb..00000000 --- a/utils/container-builds/Containerfile.centos7 +++ /dev/null @@ -1,15 +0,0 @@ -FROM centos:7 - -VOLUME /repo - -# mirror.centos.org is dead, comment out mirrorlist and set baseurl to vault.centos.org -RUN sed -i s/mirror.centos.org/vault.centos.org/ /etc/yum.repos.d/CentOS-*.repo -RUN sed -i s/^#\s*baseurl=http/baseurl=http/ /etc/yum.repos.d/CentOS-*.repo -RUN sed -i s/^mirrorlist=http/#mirrorlist=http/ /etc/yum.repos.d/CentOS-*.repo - -RUN yum update -y && \ - yum install -y rpm-build python-devel make git - -WORKDIR /repo -ENV DIST_VERSION 7 -ENTRYPOINT make _build_local diff --git a/utils/container-builds/Containerfile.ubi8 b/utils/container-builds/Containerfile.el8 similarity index 100% rename from utils/container-builds/Containerfile.ubi8 rename to utils/container-builds/Containerfile.el8 diff --git a/utils/container-builds/Containerfile.ubi9 b/utils/container-builds/Containerfile.el9 similarity index 100% rename from utils/container-builds/Containerfile.ubi9 rename to utils/container-builds/Containerfile.el9 diff --git a/utils/container-tests/Containerfile.el8 b/utils/container-tests/Containerfile.el8 new file mode 100644 index 00000000..b92e8742 --- /dev/null +++ b/utils/container-tests/Containerfile.el8 @@ -0,0 +1,24 @@ +FROM centos:8 + +RUN sed -i s/mirror.centos.org/vault.centos.org/ /etc/yum.repos.d/CentOS-*.repo +RUN sed -i s/^#\s*baseurl=http/baseurl=http/ /etc/yum.repos.d/CentOS-*.repo +RUN sed -i s/^mirrorlist=http/#mirrorlist=http/ /etc/yum.repos.d/CentOS-*.repo + +VOLUME /repo + +RUN dnf update -y && \ + dnf install -y git make rsync \ + python3-virtualenv 
python3-setuptools python3-pip \ + python3-gobject NetworkManager-libnm + +ENV PYTHON_VENV python3.6 + +COPY . /repocopy + +WORKDIR /repocopy + +RUN rm -rf tut* + +RUN make clean && make install-deps-fedora + +WORKDIR / diff --git a/utils/container-tests/Containerfile.rhel9 b/utils/container-tests/Containerfile.el9 similarity index 100% rename from utils/container-tests/Containerfile.rhel9 rename to utils/container-tests/Containerfile.el9 diff --git a/utils/container-tests/Containerfile.f34 b/utils/container-tests/Containerfile.f42 similarity index 84% rename from utils/container-tests/Containerfile.f34 rename to utils/container-tests/Containerfile.f42 index a9346635..46f0f63a 100644 --- a/utils/container-tests/Containerfile.f34 +++ b/utils/container-tests/Containerfile.f42 @@ -1,11 +1,11 @@ -FROM fedora:34 +FROM fedora:42 VOLUME /repo RUN dnf update -y && \ dnf install -y findutils make rsync python3-gobject-base NetworkManager-libnm -ENV PYTHON_VENV python3.9 +ENV PYTHON_VENV python3.13 COPY . /repocopy diff --git a/utils/container-tests/Containerfile.rhel7 b/utils/container-tests/Containerfile.rhel7 deleted file mode 100644 index 0a0c384a..00000000 --- a/utils/container-tests/Containerfile.rhel7 +++ /dev/null @@ -1,24 +0,0 @@ -FROM registry.access.redhat.com/ubi7/ubi:7.9 - -VOLUME /repo - -RUN yum update -y && \ - yum install -y python-virtualenv python-setuptools make git rsync - -# see ./Containerfile.ubi7 for explanation -RUN yum -y install python27-python-pip && \ - scl enable python27 -- pip install -U --target /usr/lib/python2.7/site-packages/ pip==20.3.0 && \ - python -m pip install --ignore-installed pip==20.3.4 ipaddress virtualenv - -ENV PYTHON_VENV python2.7 - -COPY . 
/repocopy - -WORKDIR /repocopy - -RUN rm -rf tut* - -RUN make clean && make install-deps - -WORKDIR / - diff --git a/utils/container-tests/Containerfile.rhel8 b/utils/container-tests/Containerfile.rhel8 deleted file mode 100644 index 6f21839b..00000000 --- a/utils/container-tests/Containerfile.rhel8 +++ /dev/null @@ -1,18 +0,0 @@ -FROM registry.access.redhat.com/ubi8/ubi:latest - -VOLUME /repo - -RUN dnf update -y && \ - dnf install -y python3-virtualenv python3-setuptools python3-pip make git rsync - -ENV PYTHON_VENV python3.6 - -COPY . /repocopy - -WORKDIR /repocopy - -RUN rm -rf tut* - -RUN make clean && make install-deps-fedora - -WORKDIR / diff --git a/utils/container-tests/Containerfile.ubi7 b/utils/container-tests/Containerfile.ubi7 deleted file mode 100644 index 44625a76..00000000 --- a/utils/container-tests/Containerfile.ubi7 +++ /dev/null @@ -1,25 +0,0 @@ -FROM registry.access.redhat.com/ubi7/ubi:7.9 - -VOLUME /payload - -RUN yum update -y && \ - yum install python-virtualenv python-setuptools make git -y - -# NOTE(ivasilev,pstodulk) We need at least pip v10.0.1, however centos:7 -# provides just v8.1.2 (via EPEL). So do this: install epel repos -> install -# python2-pip -> use pip to update to specific pip version we require. period -# NOTE(pstodulk) I see we take care about pip for py3 inside the Makefile, -# however I am afraid of additional possible troubles in future because of the -# archaic pip3 version (v9.0.1). As we want to run tests for Py2 and Py3 in ci -# always anyway, let's put py3 installation here as well.. -# Dropped Python3 as it is now added in its own container on RHEL8 - -# This is some trickery: We install python27-python-pip from the scl, use the scl to bootstrap the python -# module of pip version 20.3.0 and then make it update to 20.3.4 resulting the 'pip' command to be available. 
-# The --target approach doesn't add it, but at least we now have pip 20.3.4 installed ;-) -RUN yum -y install python27-python-pip && \ - scl enable python27 -- pip install -U --target /usr/lib/python2.7/site-packages/ pip==20.3.0 && \ - python -m pip install --ignore-installed pip==20.3.4 ipaddress virtualenv - -WORKDIR /payload -ENTRYPOINT make install-deps && make test_no_lint diff --git a/utils/container-tests/Containerfile.ubi7-lint b/utils/container-tests/Containerfile.ubi7-lint deleted file mode 100644 index ed548985..00000000 --- a/utils/container-tests/Containerfile.ubi7-lint +++ /dev/null @@ -1,25 +0,0 @@ -FROM registry.access.redhat.com/ubi7/ubi:7.9 - -VOLUME /payload - -RUN yum update -y && \ - yum install python-virtualenv python-setuptools make git -y - -# NOTE(ivasilev,pstodulk) We need at least pip v10.0.1, however centos:7 -# provides just v8.1.2 (via EPEL). So do this: install epel repos -> install -# python2-pip -> use pip to update to specific pip version we require. period -# NOTE(pstodulk) I see we take care about pip for py3 inside the Makefile, -# however I am afraid of additional possible troubles in future because of the -# archaic pip3 version (v9.0.1). As we want to run tests for Py2 and Py3 in ci -# always anyway, let's put py3 installation here as well.. -# Dropped Python3 as it is now added in its own container on RHEL8 - -# This is some trickery: We install python27-python-pip from the scl, use the scl to bootstrap the python -# module of pip version 20.3.0 and then make it update to 20.3.4 resulting the 'pip' command to be available. 
-# The --target approach doesn't add it, but at least we now have pip 20.3.4 installed ;-) -RUN yum -y install python27-python-pip && \ - scl enable python27 -- pip install -U --target /usr/lib/python2.7/site-packages/ pip==20.3.0 && \ - python -m pip install --ignore-installed pip==20.3.4 ipaddress virtualenv - -WORKDIR /payload -ENTRYPOINT make install-deps && make lint diff --git a/utils/container-tests/Containerfile.ubi8 b/utils/container-tests/Containerfile.ubi8 deleted file mode 100644 index 4da60c18..00000000 --- a/utils/container-tests/Containerfile.ubi8 +++ /dev/null @@ -1,9 +0,0 @@ -FROM registry.access.redhat.com/ubi8/ubi:latest - -VOLUME /payload - -RUN dnf update -y && \ - dnf install python3-virtualenv python3-setuptools python3-pip make git -y - -WORKDIR /payload -ENTRYPOINT make install-deps && make test_no_lint diff --git a/utils/container-tests/ci/Containerfile.el8 b/utils/container-tests/ci/Containerfile.el8 new file mode 100644 index 00000000..4a19092e --- /dev/null +++ b/utils/container-tests/ci/Containerfile.el8 @@ -0,0 +1,15 @@ +FROM centos:8 + +RUN sed -i s/mirror.centos.org/vault.centos.org/ /etc/yum.repos.d/CentOS-*.repo +RUN sed -i s/^#\s*baseurl=http/baseurl=http/ /etc/yum.repos.d/CentOS-*.repo +RUN sed -i s/^mirrorlist=http/#mirrorlist=http/ /etc/yum.repos.d/CentOS-*.repo + +VOLUME /payload + +RUN dnf update -y && \ + dnf install -y make git \ + python3-virtualenv python3-setuptools python3-pip \ + python3-gobject NetworkManager-libnm + +WORKDIR /payload +ENTRYPOINT make install-deps && make test_no_lint diff --git a/utils/container-tests/Containerfile.ubi8-lint b/utils/container-tests/ci/Containerfile.el8-lint similarity index 100% rename from utils/container-tests/Containerfile.ubi8-lint rename to utils/container-tests/ci/Containerfile.el8-lint diff --git a/utils/container-tests/Containerfile.ubi9 b/utils/container-tests/ci/Containerfile.el9 similarity index 100% rename from utils/container-tests/Containerfile.ubi9 rename to 
utils/container-tests/ci/Containerfile.el9 diff --git a/utils/container-tests/Containerfile.ubi9-lint b/utils/container-tests/ci/Containerfile.el9-lint similarity index 100% rename from utils/container-tests/Containerfile.ubi9-lint rename to utils/container-tests/ci/Containerfile.el9-lint