Import from CS git

parent 6e448a755d
commit ef02e9e53b

.gitignore (vendored): 4 lines changed
@@ -1,2 +1,2 @@
-SOURCES/deps-pkgs-10.tar.gz
-SOURCES/leapp-repository-0.20.0.tar.gz
+SOURCES/deps-pkgs-11.tar.gz
+SOURCES/leapp-repository-0.21.0.tar.gz
@@ -1,2 +1,2 @@
-d520ada12294e4dd8837c81f92d4c184ab403d51 SOURCES/deps-pkgs-10.tar.gz
-185bbb040dba48e1ea2d6c627133af594378afd4 SOURCES/leapp-repository-0.20.0.tar.gz
+8b3fe3a7b52d2e144d374623aa5b0b0add7ab0c7 SOURCES/deps-pkgs-11.tar.gz
+9327be3720ccb3f7b285d2199463d7df0c38dfae SOURCES/leapp-repository-0.21.0.tar.gz
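The checksum lines above pair a SHA-1 digest with each source tarball in the lookaside cache. As a small illustration (not part of the commit), a downloaded tarball can be checked against its recorded digest with Python's hashlib; the local path below is hypothetical:

import hashlib

def sha1_of(path, chunk_size=1 << 20):
    """Return the hex SHA-1 digest of a file, reading it in chunks."""
    digest = hashlib.sha1()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Compare against the digest recorded above for the tarball.
expected = '9327be3720ccb3f7b285d2199463d7df0c38dfae'
print(sha1_of('SOURCES/leapp-repository-0.21.0.tar.gz') == expected)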
@@ -0,0 +1,44 @@
From fbc38d4ad1d828e0553579e3719c0e4ed4a2a6bd Mon Sep 17 00:00:00 2001
From: jinkangkang <1547182170@qq.com>
Date: Mon, 19 Aug 2024 18:46:08 +0800
Subject: [PATCH 01/40] rhui(alibaba): add ARM RHEL8 and RHEL9 setup entries
 (#1277)

Since leapp's RHUI mechanism filters setups based on the architecture of the source system,
it was not possible to upgrade ARM-based RHEL systems on Alibaba cloud, as there
were no ARM entries in RHUI_SETUPS. This patch adds these entries, making it possible
for EL 8 -> 9 upgrades of ARM systems on Alibaba cloud.
---
 repos/system_upgrade/common/libraries/rhui.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/repos/system_upgrade/common/libraries/rhui.py b/repos/system_upgrade/common/libraries/rhui.py
index 51694ac2..30de0275 100644
--- a/repos/system_upgrade/common/libraries/rhui.py
+++ b/repos/system_upgrade/common/libraries/rhui.py
@@ -348,6 +348,22 @@ RHUI_SETUPS = {
                           ('content.crt', RHUI_PKI_PRODUCT_DIR)
                       ],
                       os_version='9'),
+    ],
+    RHUIFamily(RHUIProvider.ALIBABA, arch=arch.ARCH_ARM64, client_files_folder='alibaba'): [
+        mk_rhui_setup(clients={'aliyun_rhui_rhel8'}, leapp_pkg='leapp-rhui-alibaba',
+                      mandatory_files=[('leapp-alibaba.repo', YUM_REPOS_PATH)],
+                      optional_files=[
+                          ('key.pem', RHUI_PKI_DIR),
+                          ('content.crt', RHUI_PKI_PRODUCT_DIR)
+                      ],
+                      os_version='8'),
+        mk_rhui_setup(clients={'aliyun_rhui_rhel9'}, leapp_pkg='leapp-rhui-alibaba',
+                      mandatory_files=[('leapp-alibaba.repo', YUM_REPOS_PATH)],
+                      optional_files=[
+                          ('key.pem', RHUI_PKI_DIR),
+                          ('content.crt', RHUI_PKI_PRODUCT_DIR)
+                      ],
+                      os_version='9'),
     ]
 }

--
2.47.0
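The patch above works because leapp picks the RHUI setup entries whose family matches the source system's provider and architecture; with no aarch64 family registered for Alibaba, nothing matched and the upgrade could not proceed. A simplified sketch of that kind of keyed lookup, with stand-in names rather than the actual rhui.py implementation:

from collections import namedtuple

# Simplified stand-ins for leapp's RHUIFamily / RHUI_SETUPS structures.
RHUIFamily = namedtuple('RHUIFamily', ['provider', 'arch', 'client_files_folder'])

RHUI_SETUPS = {
    RHUIFamily('alibaba', 'x86_64', 'alibaba'): ['aliyun_rhui_rhel8', 'aliyun_rhui_rhel9'],
    RHUIFamily('alibaba', 'aarch64', 'alibaba'): ['aliyun_rhui_rhel8', 'aliyun_rhui_rhel9'],
}

def setups_for(provider, source_arch):
    """Return setup entries whose family matches the source system."""
    return [setup
            for family, setups in RHUI_SETUPS.items()
            if family.provider == provider and family.arch == source_arch
            for setup in setups]

# Non-empty only once an aarch64 family exists for the provider.
print(setups_for('alibaba', 'aarch64'))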
@@ -1,251 +0,0 @@
From 921c06892f7550a3a8e2b3fe941c6272bdacf88d Mon Sep 17 00:00:00 2001
From: mhecko <mhecko@redhat.com>
Date: Thu, 15 Feb 2024 09:56:27 +0100
Subject: [PATCH] rhui: do not bootstrap target client on aws

Bootstrapping target RHUI client now requires installing the entire
RHEL8 RPM stack. Therefore, do not try installing target client
and instead rely only on the files from our leapp-rhui-aws package.
---
 .../cloud/checkrhui/libraries/checkrhui.py | 6 +-
 .../libraries/userspacegen.py | 104 ++++++++++++++----
 .../system_upgrade/common/models/rhuiinfo.py | 7 ++
 3 files changed, 92 insertions(+), 25 deletions(-)

diff --git a/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py b/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py
|
||||
index 84ab40e3..e1c158c7 100644
|
||||
--- a/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py
|
||||
+++ b/repos/system_upgrade/common/actors/cloud/checkrhui/libraries/checkrhui.py
|
||||
@@ -142,7 +142,11 @@ def customize_rhui_setup_for_aws(rhui_family, setup_info):
|
||||
|
||||
target_version = version.get_target_major_version()
|
||||
if target_version == '8':
|
||||
- return # The rhel8 plugin is packed into leapp-rhui-aws as we need python2 compatible client
|
||||
+ # RHEL8 rh-amazon-rhui-client depends on amazon-libdnf-plugin that depends
|
||||
+ # essentially on the entire RHEL8 RPM stack, so we cannot just swap the clients
|
||||
+ # The leapp-rhui-aws will provide all necessary files to access entire RHEL8 content
|
||||
+ setup_info.bootstrap_target_client = False
|
||||
+ return
|
||||
|
||||
amazon_plugin_copy_task = CopyFile(src='/usr/lib/python3.9/site-packages/dnf-plugins/amazon-id.py',
|
||||
dst='/usr/lib/python3.6/site-packages/dnf-plugins/')
|
||||
diff --git a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py
|
||||
index d917bfd5..d60bc75f 100644
|
||||
--- a/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py
|
||||
+++ b/repos/system_upgrade/common/actors/targetuserspacecreator/libraries/userspacegen.py
|
||||
@@ -853,9 +853,9 @@ def _get_rhui_available_repoids(context, cloud_repo):
|
||||
return set(repoids)
|
||||
|
||||
|
||||
-def get_copy_location_from_copy_in_task(context, copy_task):
|
||||
+def get_copy_location_from_copy_in_task(context_basepath, copy_task):
|
||||
basename = os.path.basename(copy_task.src)
|
||||
- dest_in_container = context.full_path(copy_task.dst)
|
||||
+ dest_in_container = os.path.join(context_basepath, copy_task.dst)
|
||||
if os.path.isdir(dest_in_container):
|
||||
return os.path.join(copy_task.dst, basename)
|
||||
return copy_task.dst
|
||||
@@ -871,7 +871,10 @@ def _get_rh_available_repoids(context, indata):
|
||||
|
||||
# If we are upgrading a RHUI system, check what repositories are provided by the (already installed) target clients
|
||||
if indata and indata.rhui_info:
|
||||
- files_provided_by_clients = _query_rpm_for_pkg_files(context, indata.rhui_info.target_client_pkg_names)
|
||||
+ setup_info = indata.rhui_info.target_client_setup_info
|
||||
+ target_content_access_files = set()
|
||||
+ if setup_info.bootstrap_target_client:
|
||||
+ target_content_access_files = _query_rpm_for_pkg_files(context, indata.rhui_info.target_client_pkg_names)
|
||||
|
||||
def is_repofile(path):
|
||||
return os.path.dirname(path) == '/etc/yum.repos.d' and os.path.basename(path).endswith('.repo')
|
||||
@@ -884,24 +887,33 @@ def _get_rh_available_repoids(context, indata):
|
||||
|
||||
yum_repos_d = context.full_path('/etc/yum.repos.d')
|
||||
all_repofiles = {os.path.join(yum_repos_d, path) for path in os.listdir(yum_repos_d) if path.endswith('.repo')}
|
||||
- client_repofiles = {context.full_path(path) for path in files_provided_by_clients if is_repofile(path)}
|
||||
+ api.current_logger().debug('(RHUI Setup) All available repofiles: {0}'.format(' '.join(all_repofiles)))
|
||||
+
|
||||
+ target_access_repofiles = {
|
||||
+ context.full_path(path) for path in target_content_access_files if is_repofile(path)
|
||||
+ }
|
||||
|
||||
# Exclude repofiles used to setup the target rhui access as on some platforms the repos provided by
|
||||
# the client are not sufficient to install the client into target userspace (GCP)
|
||||
rhui_setup_repofile_tasks = [task for task in setup_tasks if task.src.endswith('repo')]
|
||||
rhui_setup_repofiles = (
|
||||
- get_copy_location_from_copy_in_task(context, copy_task) for copy_task in rhui_setup_repofile_tasks
|
||||
+ get_copy_location_from_copy_in_task(context.base_dir, copy) for copy in rhui_setup_repofile_tasks
|
||||
)
|
||||
rhui_setup_repofiles = {context.full_path(repofile) for repofile in rhui_setup_repofiles}
|
||||
|
||||
- foreign_repofiles = all_repofiles - client_repofiles - rhui_setup_repofiles
|
||||
+ foreign_repofiles = all_repofiles - target_access_repofiles - rhui_setup_repofiles
|
||||
+
|
||||
+ api.current_logger().debug(
|
||||
+ 'The following repofiles are considered as unknown to'
|
||||
+ ' the target RHUI content setup and will be ignored: {0}'.format(' '.join(foreign_repofiles))
|
||||
+ )
|
||||
|
||||
# Rename non-client repofiles so they will not be recognized when running dnf repolist
|
||||
for foreign_repofile in foreign_repofiles:
|
||||
os.rename(foreign_repofile, '{0}.back'.format(foreign_repofile))
|
||||
|
||||
try:
|
||||
- dnf_cmd = ['dnf', 'repolist', '--releasever', target_ver, '-v']
|
||||
+ dnf_cmd = ['dnf', 'repolist', '--releasever', target_ver, '-v', '--enablerepo', '*']
|
||||
repolist_result = context.call(dnf_cmd)['stdout']
|
||||
repoid_lines = [line for line in repolist_result.split('\n') if line.startswith('Repo-id')]
|
||||
rhui_repoids = {extract_repoid_from_line(line) for line in repoid_lines}
|
||||
@@ -919,6 +931,9 @@ def _get_rh_available_repoids(context, indata):
|
||||
for foreign_repofile in foreign_repofiles:
|
||||
os.rename('{0}.back'.format(foreign_repofile), foreign_repofile)
|
||||
|
||||
+ api.current_logger().debug(
|
||||
+ 'The following repofiles are considered as provided by RedHat: {0}'.format(' '.join(rh_repoids))
|
||||
+ )
|
||||
return rh_repoids
|
||||
|
||||
|
||||
@@ -1086,7 +1101,7 @@ def _get_target_userspace():
|
||||
return constants.TARGET_USERSPACE.format(get_target_major_version())
|
||||
|
||||
|
||||
-def _create_target_userspace(context, packages, files, target_repoids):
|
||||
+def _create_target_userspace(context, indata, packages, files, target_repoids):
|
||||
"""Create the target userspace."""
|
||||
target_path = _get_target_userspace()
|
||||
prepare_target_userspace(context, target_path, target_repoids, list(packages))
|
||||
@@ -1096,12 +1111,57 @@ def _create_target_userspace(context, packages, files, target_repoids):
|
||||
_copy_files(target_context, files)
|
||||
dnfplugin.install(_get_target_userspace())
|
||||
|
||||
+ # If we used only repofiles from leapp-rhui-<provider> then remove these as they provide
|
||||
+ # duplicit definitions as the target clients already installed in the target container
|
||||
+ if indata.rhui_info:
|
||||
+ api.current_logger().debug(
|
||||
+ 'Target container should have access to content. '
|
||||
+ 'Removing repofiles from leapp-rhui-<provider> from the target..'
|
||||
+ )
|
||||
+ setup_info = indata.rhui_info.target_client_setup_info
|
||||
+ if not setup_info.bootstrap_target_client:
|
||||
+ target_userspace_path = _get_target_userspace()
|
||||
+ for copy in setup_info.preinstall_tasks.files_to_copy_into_overlay:
|
||||
+ dst_in_container = get_copy_location_from_copy_in_task(target_userspace_path, copy)
|
||||
+ dst_in_container = dst_in_container.strip('/')
|
||||
+ dst_in_host = os.path.join(target_userspace_path, dst_in_container)
|
||||
+ if os.path.isfile(dst_in_host) and dst_in_host.endswith('.repo'):
|
||||
+ api.current_logger().debug('Removing repofile: {0}'.format(dst_in_host))
|
||||
+ os.remove(dst_in_host)
|
||||
+
|
||||
# and do not forget to set the rhsm into the container mode again
|
||||
with mounting.NspawnActions(_get_target_userspace()) as target_context:
|
||||
rhsm.set_container_mode(target_context)
|
||||
|
||||
|
||||
-def install_target_rhui_client_if_needed(context, indata):
|
||||
+def _apply_rhui_access_preinstall_tasks(context, rhui_setup_info):
|
||||
+ if rhui_setup_info.preinstall_tasks:
|
||||
+ api.current_logger().debug('Applying RHUI preinstall tasks.')
|
||||
+ preinstall_tasks = rhui_setup_info.preinstall_tasks
|
||||
+
|
||||
+ for file_to_remove in preinstall_tasks.files_to_remove:
|
||||
+ api.current_logger().debug('Removing {0} from the scratch container.'.format(file_to_remove))
|
||||
+ context.remove(file_to_remove)
|
||||
+
|
||||
+ for copy_info in preinstall_tasks.files_to_copy_into_overlay:
|
||||
+ api.current_logger().debug(
|
||||
+ 'Copying {0} in {1} into the scratch container.'.format(copy_info.src, copy_info.dst)
|
||||
+ )
|
||||
+ context.makedirs(os.path.dirname(copy_info.dst), exists_ok=True)
|
||||
+ context.copy_to(copy_info.src, copy_info.dst)
|
||||
+
|
||||
+
|
||||
+def _apply_rhui_access_postinstall_tasks(context, rhui_setup_info):
|
||||
+ if rhui_setup_info.postinstall_tasks:
|
||||
+ api.current_logger().debug('Applying RHUI postinstall tasks.')
|
||||
+ for copy_info in rhui_setup_info.postinstall_tasks.files_to_copy:
|
||||
+ context.makedirs(os.path.dirname(copy_info.dst), exists_ok=True)
|
||||
+ debug_msg = 'Copying {0} to {1} (inside the scratch container).'
|
||||
+ api.current_logger().debug(debug_msg.format(copy_info.src, copy_info.dst))
|
||||
+ context.call(['cp', copy_info.src, copy_info.dst])
|
||||
+
|
||||
+
|
||||
+def setup_target_rhui_access_if_needed(context, indata):
|
||||
if not indata.rhui_info:
|
||||
return
|
||||
|
||||
@@ -1110,15 +1170,14 @@ def install_target_rhui_client_if_needed(context, indata):
|
||||
_create_target_userspace_directories(userspace_dir)
|
||||
|
||||
setup_info = indata.rhui_info.target_client_setup_info
|
||||
- if setup_info.preinstall_tasks:
|
||||
- preinstall_tasks = setup_info.preinstall_tasks
|
||||
+ _apply_rhui_access_preinstall_tasks(context, setup_info)
|
||||
|
||||
- for file_to_remove in preinstall_tasks.files_to_remove:
|
||||
- context.remove(file_to_remove)
|
||||
-
|
||||
- for copy_info in preinstall_tasks.files_to_copy_into_overlay:
|
||||
- context.makedirs(os.path.dirname(copy_info.dst), exists_ok=True)
|
||||
- context.copy_to(copy_info.src, copy_info.dst)
|
||||
+ if not setup_info.bootstrap_target_client:
|
||||
+ # Installation of the target RHUI client is not possible and we bundle all necessary
|
||||
+ # files into the leapp-rhui-<provider> packages.
|
||||
+ api.current_logger().debug('Bootstrapping target RHUI client is disabled, leapp will rely '
|
||||
+ 'only on files budled in leapp-rhui-<provider> package.')
|
||||
+ return
|
||||
|
||||
cmd = ['dnf', '-y']
|
||||
|
||||
@@ -1149,16 +1208,13 @@ def install_target_rhui_client_if_needed(context, indata):
|
||||
|
||||
context.call(cmd, callback_raw=utils.logging_handler, stdin='\n'.join(dnf_transaction_steps))
|
||||
|
||||
- if setup_info.postinstall_tasks:
|
||||
- for copy_info in setup_info.postinstall_tasks.files_to_copy:
|
||||
- context.makedirs(os.path.dirname(copy_info.dst), exists_ok=True)
|
||||
- context.call(['cp', copy_info.src, copy_info.dst])
|
||||
+ _apply_rhui_access_postinstall_tasks(context, setup_info)
|
||||
|
||||
# Do a cleanup so there are not duplicit repoids
|
||||
files_owned_by_clients = _query_rpm_for_pkg_files(context, indata.rhui_info.target_client_pkg_names)
|
||||
|
||||
for copy_task in setup_info.preinstall_tasks.files_to_copy_into_overlay:
|
||||
- dest = get_copy_location_from_copy_in_task(context, copy_task)
|
||||
+ dest = get_copy_location_from_copy_in_task(context.base_dir, copy_task)
|
||||
can_be_cleaned_up = copy_task.src not in setup_info.files_supporting_client_operation
|
||||
if dest not in files_owned_by_clients and can_be_cleaned_up:
|
||||
context.remove(dest)
|
||||
@@ -1184,10 +1240,10 @@ def perform():
|
||||
target_iso = next(api.consume(TargetOSInstallationImage), None)
|
||||
with mounting.mount_upgrade_iso_to_root_dir(overlay.target, target_iso):
|
||||
|
||||
- install_target_rhui_client_if_needed(context, indata)
|
||||
+ setup_target_rhui_access_if_needed(context, indata)
|
||||
|
||||
target_repoids = _gather_target_repositories(context, indata, prod_cert_path)
|
||||
- _create_target_userspace(context, indata.packages, indata.files, target_repoids)
|
||||
+ _create_target_userspace(context, indata, indata.packages, indata.files, target_repoids)
|
||||
# TODO: this is tmp solution as proper one needs significant refactoring
|
||||
target_repo_facts = repofileutils.get_parsed_repofiles(context)
|
||||
api.produce(TMPTargetRepositoriesFacts(repositories=target_repo_facts))
|
||||
diff --git a/repos/system_upgrade/common/models/rhuiinfo.py b/repos/system_upgrade/common/models/rhuiinfo.py
|
||||
index 3eaa4826..0a2e45af 100644
|
||||
--- a/repos/system_upgrade/common/models/rhuiinfo.py
|
||||
+++ b/repos/system_upgrade/common/models/rhuiinfo.py
|
||||
@@ -36,6 +36,13 @@ class TargetRHUISetupInfo(Model):
|
||||
files_supporting_client_operation = fields.List(fields.String(), default=[])
|
||||
"""A subset of files copied in preinstall tasks that should not be cleaned up."""
|
||||
|
||||
+ bootstrap_target_client = fields.Boolean(default=True)
|
||||
+ """
|
||||
+ Swap the current RHUI client for the target one to facilitate access to the target content.
|
||||
+
|
||||
+ When False, only files from the leapp-rhui-<provider> will be used to access target content.
|
||||
+ """
|
||||
+
|
||||
|
||||
class RHUIInfo(Model):
|
||||
"""
|
||||
--
|
||||
2.43.0
|
||||
|
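The removed patch above introduces the bootstrap_target_client switch: when it is False, leapp skips installing the target RHUI client and relies only on the files shipped in the leapp-rhui-<provider> package. A condensed sketch of that control flow follows; the helper callables are assumptions standing in for the real preinstall/postinstall task handling, not the actor's code:

def _log_debug(msg):
    print('DEBUG: {0}'.format(msg))

def setup_target_rhui_access(context, rhui_info,
                             apply_preinstall_tasks,
                             install_target_client,
                             apply_postinstall_tasks):
    """Condensed control flow around the bootstrap_target_client switch."""
    setup_info = rhui_info.target_client_setup_info

    # The preinstall tasks always run: they copy the leapp-rhui-<provider>
    # files (repo file, keys, certs) into the scratch container.
    apply_preinstall_tasks(context, setup_info)

    if not setup_info.bootstrap_target_client:
        # Installing the target client would drag in the whole target RPM
        # stack (e.g. rh-amazon-rhui-client on RHEL8), so rely only on the
        # files bundled in the leapp-rhui-<provider> package.
        _log_debug('Bootstrapping of the target RHUI client is disabled.')
        return

    install_target_client(context, rhui_info)
    apply_postinstall_tasks(context, setup_info)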
@@ -0,0 +1,41 @@
From 7e0fb44bb673893d0409903f6a441d0eb2829d22 Mon Sep 17 00:00:00 2001
From: Evgeni Golov <evgeni@golov.de>
Date: Tue, 20 Aug 2024 15:11:02 +0200
Subject: [PATCH 02/40] don't require all versions to be defined for obsoleted
 keys

In releases where we do not have any obsoleted keys, we still had to
define an entry (with an empty list), as otherwise the code would fail.

Instead, we can catch the KeyError and carry on as if nothing happened.
---
 .../libraries/removeobsoleterpmgpgkeys.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
index 6e84c2e9..bda7efa3 100644
--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
@@ -12,11 +12,14 @@ def _get_obsolete_keys():
     distribution = api.current_actor().configuration.os_release.release_id
     obsoleted_keys_map = get_distribution_data(distribution).get('obsoleted-keys', {})
     keys = []
-    for version in range(7, int(get_target_major_version()) + 1):
-        for key in obsoleted_keys_map[str(version)]:
-            name, version, release = key.rsplit("-", 2)
-            if has_package(InstalledRPM, name, version=version, release=release):
-                keys.append(key)
+    try:
+        for version in range(7, int(get_target_major_version()) + 1):
+            for key in obsoleted_keys_map[str(version)]:
+                name, version, release = key.rsplit("-", 2)
+                if has_package(InstalledRPM, name, version=version, release=release):
+                    keys.append(key)
+    except KeyError:
+        pass

     return keys

--
2.47.0
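The patch above tolerates releases with no obsoleted-keys entry by catching KeyError (a follow-up patch later in this import narrows the try block to the map lookup). Purely as an illustration, the same tolerance can also be expressed with dict.get and a default; this is standalone sketch code, not the project's actor, and is_installed stands in for leapp's has_package check:

def get_obsolete_keys(obsoleted_keys_map, target_major_version, is_installed):
    """Collect obsoleted gpg-pubkey NVRs that are actually installed.

    obsoleted_keys_map maps a major version (as a string) to a list of
    'gpg-pubkey-<version>-<release>' names; missing versions simply
    contribute nothing.
    """
    keys = []
    for version in range(7, target_major_version + 1):
        for key in obsoleted_keys_map.get(str(version), []):
            name, key_version, release = key.rsplit("-", 2)
            if is_installed(name, key_version, release):
                keys.append(key)
    return keys

# Example: no entry for "10" in the map, nothing breaks.
print(get_obsolete_keys({"9": ["gpg-pubkey-999-def"]}, 10, lambda *a: True))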
SOURCES/0003-Add-RHEL-10.0-prod-certs.patch (new file, 226 lines added)
@@ -0,0 +1,226 @@
From 9f2f1726d8a5bdd12309a3a3111984f1666b903f Mon Sep 17 00:00:00 2001
From: Matej Matuska <mmatuska@redhat.com>
Date: Thu, 22 Aug 2024 15:52:19 +0200
Subject: [PATCH 03/40] Add RHEL 10.0 prod-certs

Previously we temporarily used the RHEL 9 x86_64 prod cert; for other
archs it was missing completely.

Jira: OAMG-11138
---
 .../common/files/prod-certs/10.0/279.pem | 37 ++++++++++
 .../common/files/prod-certs/10.0/419.pem | 37 ++++++++++
 .../common/files/prod-certs/10.0/479.pem | 68 ++++++++++---------
 .../common/files/prod-certs/10.0/72.pem | 37 ++++++++++
 4 files changed, 146 insertions(+), 33 deletions(-)
 create mode 100644 repos/system_upgrade/common/files/prod-certs/10.0/279.pem
 create mode 100644 repos/system_upgrade/common/files/prod-certs/10.0/419.pem
 create mode 100644 repos/system_upgrade/common/files/prod-certs/10.0/72.pem

diff --git a/repos/system_upgrade/common/files/prod-certs/10.0/279.pem b/repos/system_upgrade/common/files/prod-certs/10.0/279.pem
|
||||
new file mode 100644
|
||||
index 00000000..f62340fc
|
||||
--- /dev/null
|
||||
+++ b/repos/system_upgrade/common/files/prod-certs/10.0/279.pem
|
||||
@@ -0,0 +1,37 @@
|
||||
+-----BEGIN CERTIFICATE-----
|
||||
+MIIGczCCBFugAwIBAgIUfZodBQY+YRSlyRRiFX1dx4vQ5y4wDQYJKoZIhvcNAQEL
|
||||
+BQAwga4xCzAJBgNVBAYTAlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEWMBQG
|
||||
+A1UECgwNUmVkIEhhdCwgSW5jLjEYMBYGA1UECwwPUmVkIEhhdCBOZXR3b3JrMS4w
|
||||
+LAYDVQQDDCVSZWQgSGF0IEVudGl0bGVtZW50IFByb2R1Y3QgQXV0aG9yaXR5MSQw
|
||||
+IgYJKoZIhvcNAQkBFhVjYS1zdXBwb3J0QHJlZGhhdC5jb20wHhcNMjQwODE1MDYx
|
||||
+NjQ5WhcNNDQwODE1MDYxNjQ5WjBEMUIwQAYDVQQDDDlSZWQgSGF0IFByb2R1Y3Qg
|
||||
+SUQgWzA0YTU4NDFkLTVlNmUtNDU1Yy1hZWYwLTdhOTQ0NTBiNjg3Nl0wggIiMA0G
|
||||
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDGP0nTjP4TN3LHVTfeQV+0u/Se01LU
|
||||
+FJ66GhksOGzXzKSx6kbuFde0eHYIwV8tmZOMDIv2LVezHKRClVB1dMalQXfcLaoF
|
||||
+AcHmCViz353vzXHynybzMXFs9xbzZMglduBbcStWHy+TmoJsbVwIAAdv4NYyrQQD
|
||||
+LLVuX8mACCFg0YFG8ok5tN0Kt2liHTYpSoEuRI9ke+joNQkU3fsxcOlV5Cr1W2pG
|
||||
+OkosvC4R9dvRjsjnEQ6tHeRhs5oEBZW3eZhnW3Qv8p9jaNU51TlYXLIH0+Fsx0uL
|
||||
+XETzTWP4YmvBwtrGaq+PhRogJHNw8BM/zrNUzUEFBr6WKWRFB6zkfKNnNkOIZi52
|
||||
+deFuqYuj+fRy5ehAFVWOHNFMzHvUSKJqGaLD5TW8aqQeFA3FvXce03WVwCFQIOvH
|
||||
+F4y+sCNh1aliWkjJbc2yw9a3VhQeJ0wFIAngpy0h/3V3IT3dpK2XHAL9CfIWxk6Z
|
||||
+wSwHNUKfP0aZYyXX/pfMFLXINSoHKSXHRMsf7P+wr0D47atkDLWYHIJjBXG9s5mG
|
||||
+eobEC5OghL4DzW/mEKOwKI5JxUH5yKXfRgG7RwfzlFnQgs2Qd0p2sstZbjCOmEra
|
||||
+cGfaDaLf7O1/6dAQPalCpn+uG5bv2NzIJmX2Rep7XA50XQLBqHg3r/cvMhcQQrIQ
|
||||
+nE2pDC01zYhUTwIDAQABo4HxMIHuMAkGA1UdEwQCMAAwQwYMKwYBBAGSCAkBghcB
|
||||
+BDMMMVJlZCBIYXQgRW50ZXJwcmlzZSBMaW51eCBmb3IgUG93ZXIsIGxpdHRsZSBl
|
||||
+bmRpYW4wFgYMKwYBBAGSCAkBghcCBAYMBDEwLjAwGQYMKwYBBAGSCAkBghcDBAkM
|
||||
+B3BwYzY0bGUwKQYMKwYBBAGSCAkBghcEBBkMF3JoZWwtMTAscmhlbC0xMC1wcGM2
|
||||
+NGxlMB0GA1UdDgQWBBRh6iC1NXyvZ2Q6/2sI5hB40M0flTAfBgNVHSMEGDAWgBSW
|
||||
+/bscQED/QIStsh8LJsHDam/WfDANBgkqhkiG9w0BAQsFAAOCAgEAv6ySsgygc2z2
|
||||
+kQJeu9sdvBNFKe+gEtXbPu6+rZKPPosW3cggMJCnsZgki3nUogovz0Z3MPkbmRz+
|
||||
+GJwVjiVBnfUQLoORSDYwqYZB4WRoqszW/dytd7/64IehvD/JZo3Oa8BNYRSG/Ukh
|
||||
+7iUIT8ryFIH1DTUIersVObINN2gk3hC2JJXoTfNqIYG+4OAEUE7/F4CptRAGbgH/
|
||||
+4/9vfe2KNXvPMoWvILpXpD5w8t9Xh0Wl97N1W7+FLVRwQHAQ2/yBTu/sY27FvVSl
|
||||
+0o+SBSvjTKIi+9QslRpi0QCVza5WxHTiO8nzYgzFjfMkt6lzK74puf3VJavpqkQ9
|
||||
+dVfyp36A3Fh6vDsiNxhsfKrp8z2JnKA3vdslsH7cOHCIFYHXiqeaP654t4oGeESD
|
||||
+EPfS6PpXSyi47Kd/qjA2srgpXNQl2yMd0ih6NoHaoSYXFfb4LX6cWFGcT/AWZsaC
|
||||
+xv2pN9J0KhF2loLp8SK19FESc0rJShkAacTcxeYjuDYbvLtJi4Z5aWWVU421rMSs
|
||||
+X9IdiWa4WL70ZaDK5cP54S4zZNsVDKniUzNXwPltDCpqefy8ka4o5QlWNreBrXXW
|
||||
+6cy8I6L2om7xZ5hAZ3CB7nUZe9QE/LXnHqK3cQetvd5Q2LMnp6gVtgQ4a+7vD9xz
|
||||
+ExLtbBZjvGJFudimMmOxvn/J5+GMmm4=
|
||||
+-----END CERTIFICATE-----
|
||||
diff --git a/repos/system_upgrade/common/files/prod-certs/10.0/419.pem b/repos/system_upgrade/common/files/prod-certs/10.0/419.pem
|
||||
new file mode 100644
|
||||
index 00000000..08cb5b02
|
||||
--- /dev/null
|
||||
+++ b/repos/system_upgrade/common/files/prod-certs/10.0/419.pem
|
||||
@@ -0,0 +1,37 @@
|
||||
+-----BEGIN CERTIFICATE-----
|
||||
+MIIGZTCCBE2gAwIBAgIUWARL99TkK+hxtTJkE5icdHXLfY0wDQYJKoZIhvcNAQEL
|
||||
+BQAwga4xCzAJBgNVBAYTAlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEWMBQG
|
||||
+A1UECgwNUmVkIEhhdCwgSW5jLjEYMBYGA1UECwwPUmVkIEhhdCBOZXR3b3JrMS4w
|
||||
+LAYDVQQDDCVSZWQgSGF0IEVudGl0bGVtZW50IFByb2R1Y3QgQXV0aG9yaXR5MSQw
|
||||
+IgYJKoZIhvcNAQkBFhVjYS1zdXBwb3J0QHJlZGhhdC5jb20wHhcNMjQwODE1MDYx
|
||||
+NjQ5WhcNNDQwODE1MDYxNjQ5WjBEMUIwQAYDVQQDDDlSZWQgSGF0IFByb2R1Y3Qg
|
||||
+SUQgW2Y3ZWFmNGU2LTYwZGYtNDMyNC04N2I0LTdhNGUzZGVkZmViNV0wggIiMA0G
|
||||
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDGP0nTjP4TN3LHVTfeQV+0u/Se01LU
|
||||
+FJ66GhksOGzXzKSx6kbuFde0eHYIwV8tmZOMDIv2LVezHKRClVB1dMalQXfcLaoF
|
||||
+AcHmCViz353vzXHynybzMXFs9xbzZMglduBbcStWHy+TmoJsbVwIAAdv4NYyrQQD
|
||||
+LLVuX8mACCFg0YFG8ok5tN0Kt2liHTYpSoEuRI9ke+joNQkU3fsxcOlV5Cr1W2pG
|
||||
+OkosvC4R9dvRjsjnEQ6tHeRhs5oEBZW3eZhnW3Qv8p9jaNU51TlYXLIH0+Fsx0uL
|
||||
+XETzTWP4YmvBwtrGaq+PhRogJHNw8BM/zrNUzUEFBr6WKWRFB6zkfKNnNkOIZi52
|
||||
+deFuqYuj+fRy5ehAFVWOHNFMzHvUSKJqGaLD5TW8aqQeFA3FvXce03WVwCFQIOvH
|
||||
+F4y+sCNh1aliWkjJbc2yw9a3VhQeJ0wFIAngpy0h/3V3IT3dpK2XHAL9CfIWxk6Z
|
||||
+wSwHNUKfP0aZYyXX/pfMFLXINSoHKSXHRMsf7P+wr0D47atkDLWYHIJjBXG9s5mG
|
||||
+eobEC5OghL4DzW/mEKOwKI5JxUH5yKXfRgG7RwfzlFnQgs2Qd0p2sstZbjCOmEra
|
||||
+cGfaDaLf7O1/6dAQPalCpn+uG5bv2NzIJmX2Rep7XA50XQLBqHg3r/cvMhcQQrIQ
|
||||
+nE2pDC01zYhUTwIDAQABo4HjMIHgMAkGA1UdEwQCMAAwNQYMKwYBBAGSCAkBgyMB
|
||||
+BCUMI1JlZCBIYXQgRW50ZXJwcmlzZSBMaW51eCBmb3IgQVJNIDY0MBYGDCsGAQQB
|
||||
+kggJAYMjAgQGDAQxMC4wMBkGDCsGAQQBkggJAYMjAwQJDAdhYXJjaDY0MCkGDCsG
|
||||
+AQQBkggJAYMjBAQZDBdyaGVsLTEwLHJoZWwtMTAtYWFyY2g2NDAdBgNVHQ4EFgQU
|
||||
+YeogtTV8r2dkOv9rCOYQeNDNH5UwHwYDVR0jBBgwFoAUlv27HEBA/0CErbIfCybB
|
||||
+w2pv1nwwDQYJKoZIhvcNAQELBQADggIBAIpdcHN7RN18pg5ELfc55Sj58ivL5N25
|
||||
+19KprqbM7aVum32abw7/Qksfs6maGQpU6Hh/UqhJlGQ2bN48jZ/kdMKor4agSQ/T
|
||||
+iwr3b8RBJFPVCuqQJXIe4g3iRbHfnIjGxgoMgv36j58PENoEnpPtR7ZtHMyqQ2SO
|
||||
+m1WRQhY5tJ4Fk/Zkx/trxlNvmsTAjNRa530kqG4TfiMVvWNaVdxHsjMv0lXLJRXx
|
||||
+KT6+iHt2QBs2No5O8cjlXr/CzfGrB5TlBNrsHqhO0Llmw28KpcWGYGdexKdIHrDG
|
||||
+A/K0Pr21yRstUWN39jz/tdEqt1q8T7/it3oM976keQmFAxBa/CpyEG5Y6FKw9+F0
|
||||
+LtkAyI3XGHK7LbCOE67s7u0/BfgQvww1FqztVnVZ4sXlagj/IuYPJBhfGDe/6tik
|
||||
+laqP8FtR6xJdSra2YQMBc0kZb0Sv1uy7pGofNSvLM5L76XqiwKoDVo/eAcl60OWY
|
||||
+rF86pEDLGDmdJBLJKX2/77pzpQpZ9Yvc4vWwoZrP4gRKBuWF28aLH0OsWzdsfdMG
|
||||
+9+DrcO/58slMbWng1ZzOQyEjp7x1kto5sa5m2q8LMo06ETYT8ps5A0hyltBz1yAt
|
||||
+JEBS4Y14YlF6Px67aTak07MNo7AaaphuD47D2Sy3pwHa+vOx4nv/G33+G0iOm3Lr
|
||||
+zVAjwlfLIUB9
|
||||
+-----END CERTIFICATE-----
|
||||
diff --git a/repos/system_upgrade/common/files/prod-certs/10.0/479.pem b/repos/system_upgrade/common/files/prod-certs/10.0/479.pem
|
||||
index 1ea1cd3d..d89f6188 100644
|
||||
--- a/repos/system_upgrade/common/files/prod-certs/10.0/479.pem
|
||||
+++ b/repos/system_upgrade/common/files/prod-certs/10.0/479.pem
|
||||
@@ -1,35 +1,37 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
-MIIGFTCCA/2gAwIBAgIJALDxRLt/tVDQMA0GCSqGSIb3DQEBCwUAMIGuMQswCQYD
|
||||
-VQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xpbmExFjAUBgNVBAoMDVJlZCBI
|
||||
-YXQsIEluYy4xGDAWBgNVBAsMD1JlZCBIYXQgTmV0d29yazEuMCwGA1UEAwwlUmVk
|
||||
-IEhhdCBFbnRpdGxlbWVudCBQcm9kdWN0IEF1dGhvcml0eTEkMCIGCSqGSIb3DQEJ
|
||||
-ARYVY2Etc3VwcG9ydEByZWRoYXQuY29tMB4XDTIzMDcxOTE2MzQwOFoXDTQzMDcx
|
||||
-OTE2MzQwOFowRDFCMEAGA1UEAww5UmVkIEhhdCBQcm9kdWN0IElEIFsxZDg0ZDQ5
|
||||
-Ny1jZmNmLTQxNjEtOTM0YS0zNzk2MDU4M2ZmZGZdMIICIjANBgkqhkiG9w0BAQEF
|
||||
-AAOCAg8AMIICCgKCAgEAxj9J04z+Ezdyx1U33kFftLv0ntNS1BSeuhoZLDhs18yk
|
||||
-sepG7hXXtHh2CMFfLZmTjAyL9i1XsxykQpVQdXTGpUF33C2qBQHB5glYs9+d781x
|
||||
-8p8m8zFxbPcW82TIJXbgW3ErVh8vk5qCbG1cCAAHb+DWMq0EAyy1bl/JgAghYNGB
|
||||
-RvKJObTdCrdpYh02KUqBLkSPZHvo6DUJFN37MXDpVeQq9VtqRjpKLLwuEfXb0Y7I
|
||||
-5xEOrR3kYbOaBAWVt3mYZ1t0L/KfY2jVOdU5WFyyB9PhbMdLi1xE801j+GJrwcLa
|
||||
-xmqvj4UaICRzcPATP86zVM1BBQa+lilkRQes5HyjZzZDiGYudnXhbqmLo/n0cuXo
|
||||
-QBVVjhzRTMx71Eiiahmiw+U1vGqkHhQNxb13HtN1lcAhUCDrxxeMvrAjYdWpYlpI
|
||||
-yW3NssPWt1YUHidMBSAJ4KctIf91dyE93aStlxwC/QnyFsZOmcEsBzVCnz9GmWMl
|
||||
-1/6XzBS1yDUqByklx0TLH+z/sK9A+O2rZAy1mByCYwVxvbOZhnqGxAuToIS+A81v
|
||||
-5hCjsCiOScVB+cil30YBu0cH85RZ0ILNkHdKdrLLWW4wjphK2nBn2g2i3+ztf+nQ
|
||||
-ED2pQqZ/rhuW79jcyCZl9kXqe1wOdF0Cwah4N6/3LzIXEEKyEJxNqQwtNc2IVE8C
|
||||
-AwEAAaOBnjCBmzAJBgNVHRMEAjAAMDUGDCsGAQQBkggJAYNfAQQlDCNSZWQgSGF0
|
||||
-IEVudGVycHJpc2UgTGludXggZm9yIHg4Nl82NDAVBgwrBgEEAZIICQGDXwIEBQwD
|
||||
-OS40MBgGDCsGAQQBkggJAYNfAwQIDAZ4ODZfNjQwJgYMKwYBBAGSCAkBg18EBBYM
|
||||
-FHJoZWwtOSxyaGVsLTkteDg2XzY0MA0GCSqGSIb3DQEBCwUAA4ICAQCGUDPFBrLs
|
||||
-sK/RITJothRhKhKNX3zu9TWRG0WKxszCx/y7c4yEfH1TV/yd7BNB2RubaoayWz8E
|
||||
-TQjcRW8BnVu9JrlbdpWJm4eN+dOOpcESPilLnkz4Tr0WYDsT1/jk/uiorK4h21S0
|
||||
-EwMicuSuEmm0OUEX0zj2X/IyveFRtpJpH/JktznCkvexysc1JRzqMCbal8GipRX9
|
||||
-Xf7Oko6QiaUpu5GDLN2OXhizYHdR2f3l+Sn2cScsbi3fSVv+DLsnaz6J0kZ4U8q3
|
||||
-lYk/ZYifJjG+/7cv3e+usixpmK/qYlpOvunUDnqOkDfUs4/4bZjH8e8CdqJk4YvU
|
||||
-RRtLr7muXEJsaqF7lxAViXnKxT/z/+1kOgN/+Oyzjs4QDsk2HQpWHFgNYSSG9Mmz
|
||||
-PUS8tk2T0j5sN55X7QRRl5c0oqrBU5XaWyL26QcfONYcR8dBaKawjxg8CI9KzsYY
|
||||
-sb2jjS+fBkB1OI2c6z4OZRd+0N6FQ6gq++KiXOLFvi/QSFNi9Veb56c5tR2l6fBk
|
||||
-0pSH06Gg2s0aQg20NdMIr+HaYsVdJRsE1FgQ2tlfFx9rGkcqhgwV3Za/abgtRb2o
|
||||
-YVwps28DLm41DXf5DnXK+BXFHrtR/3YAZtga+R7OL/RvcF0kc2kudlxqd/8Y33uL
|
||||
-nqnoATy31FTW4J4rEfanJTQgTpatZmbaLQ==
|
||||
+MIIGYzCCBEugAwIBAgIUL5D34AcwqLAbqlUcxntHUCtEVxQwDQYJKoZIhvcNAQEL
|
||||
+BQAwga4xCzAJBgNVBAYTAlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEWMBQG
|
||||
+A1UECgwNUmVkIEhhdCwgSW5jLjEYMBYGA1UECwwPUmVkIEhhdCBOZXR3b3JrMS4w
|
||||
+LAYDVQQDDCVSZWQgSGF0IEVudGl0bGVtZW50IFByb2R1Y3QgQXV0aG9yaXR5MSQw
|
||||
+IgYJKoZIhvcNAQkBFhVjYS1zdXBwb3J0QHJlZGhhdC5jb20wHhcNMjQwODE1MDYx
|
||||
+NjQ5WhcNNDQwODE1MDYxNjQ5WjBEMUIwQAYDVQQDDDlSZWQgSGF0IFByb2R1Y3Qg
|
||||
+SUQgWzk5NDZhMmY5LTI4NDMtNDJhOS1iNzhlLTIzM2E5ODIwYjVhZV0wggIiMA0G
|
||||
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDGP0nTjP4TN3LHVTfeQV+0u/Se01LU
|
||||
+FJ66GhksOGzXzKSx6kbuFde0eHYIwV8tmZOMDIv2LVezHKRClVB1dMalQXfcLaoF
|
||||
+AcHmCViz353vzXHynybzMXFs9xbzZMglduBbcStWHy+TmoJsbVwIAAdv4NYyrQQD
|
||||
+LLVuX8mACCFg0YFG8ok5tN0Kt2liHTYpSoEuRI9ke+joNQkU3fsxcOlV5Cr1W2pG
|
||||
+OkosvC4R9dvRjsjnEQ6tHeRhs5oEBZW3eZhnW3Qv8p9jaNU51TlYXLIH0+Fsx0uL
|
||||
+XETzTWP4YmvBwtrGaq+PhRogJHNw8BM/zrNUzUEFBr6WKWRFB6zkfKNnNkOIZi52
|
||||
+deFuqYuj+fRy5ehAFVWOHNFMzHvUSKJqGaLD5TW8aqQeFA3FvXce03WVwCFQIOvH
|
||||
+F4y+sCNh1aliWkjJbc2yw9a3VhQeJ0wFIAngpy0h/3V3IT3dpK2XHAL9CfIWxk6Z
|
||||
+wSwHNUKfP0aZYyXX/pfMFLXINSoHKSXHRMsf7P+wr0D47atkDLWYHIJjBXG9s5mG
|
||||
+eobEC5OghL4DzW/mEKOwKI5JxUH5yKXfRgG7RwfzlFnQgs2Qd0p2sstZbjCOmEra
|
||||
+cGfaDaLf7O1/6dAQPalCpn+uG5bv2NzIJmX2Rep7XA50XQLBqHg3r/cvMhcQQrIQ
|
||||
+nE2pDC01zYhUTwIDAQABo4HhMIHeMAkGA1UdEwQCMAAwNQYMKwYBBAGSCAkBg18B
|
||||
+BCUMI1JlZCBIYXQgRW50ZXJwcmlzZSBMaW51eCBmb3IgeDg2XzY0MBYGDCsGAQQB
|
||||
+kggJAYNfAgQGDAQxMC4wMBgGDCsGAQQBkggJAYNfAwQIDAZ4ODZfNjQwKAYMKwYB
|
||||
+BAGSCAkBg18EBBgMFnJoZWwtMTAscmhlbC0xMC14ODZfNjQwHQYDVR0OBBYEFGHq
|
||||
+ILU1fK9nZDr/awjmEHjQzR+VMB8GA1UdIwQYMBaAFJb9uxxAQP9AhK2yHwsmwcNq
|
||||
+b9Z8MA0GCSqGSIb3DQEBCwUAA4ICAQAa+c2/Usg6JToULhYTdLhf15Hk6xxdlwT7
|
||||
+zZlnZLbuAKtaDqP1NiSiX0Z/lMJzFfW0B/zyWLy8uiXLYmF5V28f8yWK0Nksx2v7
|
||||
+I7u6ZZN2dKDQZKsEoP0g3ptvVRWn9h5otS7yPkOK4Dzj04yJqOSGP9bp6OHEhm1S
|
||||
+x4ErITkN/3MXOf9vT+I6wydVKsw4fdlWgVjmBd90bzVTnv4dWtJio+le+9ad9RSf
|
||||
+M3aD5ufiELeRKMp6ExnC/cnoWtuH+b4BJ37TQ3Kpn3fDtbrzVvQH/dpqZ7P33yqg
|
||||
+PnBEXOiLimDnnmDJ9ImQ1pVTrKJMxaj1Mk6onERe36n/iAsj+BwZvBiv7UaLPMnW
|
||||
+nJGg+LQ4iUZrGWYD4N9Ou++nvsR8dCWRhXSuXensfli3lL/W0P62yzfYCyqOYeL1
|
||||
+msDcCmBEWJUtAaeAbASUIVx02JWPPmMSUqWs8xOecQjzoGuCQg4JM/UfsZzxepw0
|
||||
+bs9YSUVw8J9R2d4kuze65qDTMRg+cK2LX1xg1KkR/UWZOGxHHJAfwGWdPwSkiOPQ
|
||||
+MVJ7LJjvozebHWSuiSxk+GWWr+NdxIJrFRGbivXyAkmqMRrPe1VLVxWwCdyud9o8
|
||||
+b2WbFgrNS2jOnHwldtM2ZAhrF5W4ckvVL7hLp2JoQnJfCcWson9NK6Y2M4bNwQnC
|
||||
+ihxphLzOAw==
|
||||
-----END CERTIFICATE-----
|
||||
diff --git a/repos/system_upgrade/common/files/prod-certs/10.0/72.pem b/repos/system_upgrade/common/files/prod-certs/10.0/72.pem
|
||||
new file mode 100644
|
||||
index 00000000..e0274f9c
|
||||
--- /dev/null
|
||||
+++ b/repos/system_upgrade/common/files/prod-certs/10.0/72.pem
|
||||
@@ -0,0 +1,37 @@
|
||||
+-----BEGIN CERTIFICATE-----
|
||||
+MIIGZDCCBEygAwIBAgIUSTvcD4Wsduixh8PFmwk6aI0KTEcwDQYJKoZIhvcNAQEL
|
||||
+BQAwga4xCzAJBgNVBAYTAlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEWMBQG
|
||||
+A1UECgwNUmVkIEhhdCwgSW5jLjEYMBYGA1UECwwPUmVkIEhhdCBOZXR3b3JrMS4w
|
||||
+LAYDVQQDDCVSZWQgSGF0IEVudGl0bGVtZW50IFByb2R1Y3QgQXV0aG9yaXR5MSQw
|
||||
+IgYJKoZIhvcNAQkBFhVjYS1zdXBwb3J0QHJlZGhhdC5jb20wHhcNMjQwODE1MDYx
|
||||
+NjQ5WhcNNDQwODE1MDYxNjQ5WjBEMUIwQAYDVQQDDDlSZWQgSGF0IFByb2R1Y3Qg
|
||||
+SUQgW2VjN2EwZDQyLTgzNjItNDg2YS04ZjcyLTc3YThiOWU2MjM0YV0wggIiMA0G
|
||||
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDGP0nTjP4TN3LHVTfeQV+0u/Se01LU
|
||||
+FJ66GhksOGzXzKSx6kbuFde0eHYIwV8tmZOMDIv2LVezHKRClVB1dMalQXfcLaoF
|
||||
+AcHmCViz353vzXHynybzMXFs9xbzZMglduBbcStWHy+TmoJsbVwIAAdv4NYyrQQD
|
||||
+LLVuX8mACCFg0YFG8ok5tN0Kt2liHTYpSoEuRI9ke+joNQkU3fsxcOlV5Cr1W2pG
|
||||
+OkosvC4R9dvRjsjnEQ6tHeRhs5oEBZW3eZhnW3Qv8p9jaNU51TlYXLIH0+Fsx0uL
|
||||
+XETzTWP4YmvBwtrGaq+PhRogJHNw8BM/zrNUzUEFBr6WKWRFB6zkfKNnNkOIZi52
|
||||
+deFuqYuj+fRy5ehAFVWOHNFMzHvUSKJqGaLD5TW8aqQeFA3FvXce03WVwCFQIOvH
|
||||
+F4y+sCNh1aliWkjJbc2yw9a3VhQeJ0wFIAngpy0h/3V3IT3dpK2XHAL9CfIWxk6Z
|
||||
+wSwHNUKfP0aZYyXX/pfMFLXINSoHKSXHRMsf7P+wr0D47atkDLWYHIJjBXG9s5mG
|
||||
+eobEC5OghL4DzW/mEKOwKI5JxUH5yKXfRgG7RwfzlFnQgs2Qd0p2sstZbjCOmEra
|
||||
+cGfaDaLf7O1/6dAQPalCpn+uG5bv2NzIJmX2Rep7XA50XQLBqHg3r/cvMhcQQrIQ
|
||||
+nE2pDC01zYhUTwIDAQABo4HiMIHfMAkGA1UdEwQCMAAwOwYLKwYBBAGSCAkBSAEE
|
||||
+LAwqUmVkIEhhdCBFbnRlcnByaXNlIExpbnV4IGZvciBJQk0geiBTeXN0ZW1zMBUG
|
||||
+CysGAQQBkggJAUgCBAYMBDEwLjAwFgYLKwYBBAGSCAkBSAMEBwwFczM5MHgwJgYL
|
||||
+KwYBBAGSCAkBSAQEFwwVcmhlbC0xMCxyaGVsLTEwLXMzOTB4MB0GA1UdDgQWBBRh
|
||||
+6iC1NXyvZ2Q6/2sI5hB40M0flTAfBgNVHSMEGDAWgBSW/bscQED/QIStsh8LJsHD
|
||||
+am/WfDANBgkqhkiG9w0BAQsFAAOCAgEAsj4qPVsDkFrfuVDn8JCJ7tIH5WhaOzL6
|
||||
+3GBsQIKGd8a1WscPfSpr/phNSBPWFyvV2b+0HzblYzBZbx6ExykTDLh5L01nPM0s
|
||||
++hqPxZgF/kcTbLWmAanl32R9+Gs2P2JN1CaCclXgM4USEagBWYeMhJSmQR3bOnSe
|
||||
+Jjm3tjvhnbIQd6xgPpTjrqZ35z1BW0P0qQFdBbB0k+MfPkhYKEr+Vfn0rU8vk4UP
|
||||
+F9sY9HkZLqIBxlXeTUerNZvHSuOy2KgoS4l25/QwUutHnnSGZZpARiU1XYNcynVL
|
||||
+r5COHlb6TYkeRhSAm6RVM4XPYoFgN6cbhY1orwFC2/0i30EnsTMB6ctnLKCf7qgM
|
||||
+GDG2W7ct0m6koA7s2TGmgp33DPw9adX7qgIV0OjLzBYJ1fyVv3sYlOKRuyDz0l+N
|
||||
+u6Rnv1ecNUspWn+5ogBbdgwU6yah6oo/fJIWm62U38UGH5ic+/7sBnga8q5sDI90
|
||||
++h+nlTIAnD0ICzjEDASiLlYft+hQ9pOt/rgEIrPeKTe+fbefUIXJ5h343E51POnY
|
||||
+uZRXcirc33QL/PgBRce1taIXjsRD+FSJM0tx/vf8H9j0rzSAxDoXJNsdq4/32scy
|
||||
+6Zk2fgtm80xxIzju84jXVUrSBRMpWD9I+FZId4IE7tQhwKNi1b7DdNeaQLfaoq8U
|
||||
+1PEea/tQDSA=
|
||||
+-----END CERTIFICATE-----
|
||||
--
|
||||
2.47.0
|
||||
|
@@ -0,0 +1,100 @@
From bf302fc794957a88bc4785f4dd2505b8d71012e0 Mon Sep 17 00:00:00 2001
From: Evgeni Golov <evgeni@golov.de>
Date: Wed, 21 Aug 2024 07:52:02 +0200
Subject: [PATCH 04/40] properly scope try/except when loading obsoleted keys

We want to load all possible keys, even *after* a KeyError happened.

Fixes: 7e0fb44bb673893d0409903f6a441d0eb2829d22
---
 .../libraries/removeobsoleterpmgpgkeys.py | 8 +--
 .../tests/test_removeobsoleterpmgpgkeys.py | 50 +++++++++++++++++++
 2 files changed, 54 insertions(+), 4 deletions(-)

diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
|
||||
index bda7efa3..198c4368 100644
|
||||
--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
|
||||
+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/libraries/removeobsoleterpmgpgkeys.py
|
||||
@@ -12,14 +12,14 @@ def _get_obsolete_keys():
|
||||
distribution = api.current_actor().configuration.os_release.release_id
|
||||
obsoleted_keys_map = get_distribution_data(distribution).get('obsoleted-keys', {})
|
||||
keys = []
|
||||
- try:
|
||||
- for version in range(7, int(get_target_major_version()) + 1):
|
||||
+ for version in range(7, int(get_target_major_version()) + 1):
|
||||
+ try:
|
||||
for key in obsoleted_keys_map[str(version)]:
|
||||
name, version, release = key.rsplit("-", 2)
|
||||
if has_package(InstalledRPM, name, version=version, release=release):
|
||||
keys.append(key)
|
||||
- except KeyError:
|
||||
- pass
|
||||
+ except KeyError:
|
||||
+ pass
|
||||
|
||||
return keys
|
||||
|
||||
diff --git a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py
|
||||
index 4d9a0e84..b78174cc 100644
|
||||
--- a/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py
|
||||
+++ b/repos/system_upgrade/common/actors/removeobsoletegpgkeys/tests/test_removeobsoleterpmgpgkeys.py
|
||||
@@ -76,6 +76,56 @@ def test_get_obsolete_keys(monkeypatch, version, expected):
|
||||
assert set(keys) == set(expected)
|
||||
|
||||
|
||||
+@pytest.mark.parametrize(
|
||||
+ "version, obsoleted_keys, expected",
|
||||
+ [
|
||||
+ (10, None, []),
|
||||
+ (10, {}, []),
|
||||
+ (10, {"8": ["gpg-pubkey-888-abc"], "10": ["gpg-pubkey-10-10"]}, ["gpg-pubkey-888-abc", "gpg-pubkey-10-10"]),
|
||||
+ (9, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-999-def", "gpg-pubkey-888-abc"]),
|
||||
+ (8, {"8": ["gpg-pubkey-888-abc"], "9": ["gpg-pubkey-999-def"]}, ["gpg-pubkey-888-abc"])
|
||||
+ ]
|
||||
+)
|
||||
+def test_get_obsolete_keys_incomplete_data(monkeypatch, version, obsoleted_keys, expected):
|
||||
+ def get_target_major_version_mocked():
|
||||
+ return version
|
||||
+
|
||||
+ def get_distribution_data_mocked(_distro):
|
||||
+ if obsoleted_keys is None:
|
||||
+ return {}
|
||||
+ return {'obsoleted-keys': obsoleted_keys}
|
||||
+
|
||||
+ def has_package_mocked(*args, **kwargs):
|
||||
+ return True
|
||||
+
|
||||
+ monkeypatch.setattr(
|
||||
+ removeobsoleterpmgpgkeys,
|
||||
+ "get_target_major_version",
|
||||
+ get_target_major_version_mocked,
|
||||
+ )
|
||||
+
|
||||
+ monkeypatch.setattr(
|
||||
+ removeobsoleterpmgpgkeys,
|
||||
+ "get_distribution_data",
|
||||
+ get_distribution_data_mocked,
|
||||
+ )
|
||||
+
|
||||
+ monkeypatch.setattr(
|
||||
+ removeobsoleterpmgpgkeys,
|
||||
+ "has_package",
|
||||
+ has_package_mocked,
|
||||
+ )
|
||||
+
|
||||
+ monkeypatch.setattr(
|
||||
+ api,
|
||||
+ "current_actor",
|
||||
+ CurrentActorMocked(),
|
||||
+ )
|
||||
+
|
||||
+ keys = removeobsoleterpmgpgkeys._get_obsolete_keys()
|
||||
+ assert set(keys) == set(expected)
|
||||
+
|
||||
+
|
||||
@pytest.mark.parametrize(
|
||||
"keys, should_register",
|
||||
[
|
||||
--
|
||||
2.47.0
|
||||
|
SOURCES/0005-Update-references-from-master-branch-to-main.patch (new file, 283 lines added)
@@ -0,0 +1,283 @@
From 9d49f4675c2b7b18ba7b344bb0032a5538782560 Mon Sep 17 00:00:00 2001
From: Vojtech Sokol <vsokol@redhat.com>
Date: Mon, 2 Sep 2024 17:21:36 +0200
Subject: [PATCH 05/40] Update references from master branch to main

Focus was on making the CI and GitHub actions work after the default
branch was switched from master to main.

See: OAMG-4907
---
 .github/workflows/codespell.yml | 4 ++--
 .github/workflows/differential-shellcheck.yml | 4 ++--
 .github/workflows/pr-welcome-msg.yml | 2 +-
 .github/workflows/tmt-tests.yml | 16 ++++++++--------
 .github/workflows/unit-tests.yml | 12 ++++++------
 .packit.yaml | 10 +++++-----
 Makefile | 14 +++++++-------
 7 files changed, 31 insertions(+), 31 deletions(-)

diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
|
||||
index 673cef17..1195d8d1 100644
|
||||
--- a/.github/workflows/codespell.yml
|
||||
+++ b/.github/workflows/codespell.yml
|
||||
@@ -3,10 +3,10 @@ name: Codespell
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- - master
|
||||
+ - main
|
||||
pull_request:
|
||||
branches:
|
||||
- - master
|
||||
+ - main
|
||||
|
||||
jobs:
|
||||
codespell:
|
||||
diff --git a/.github/workflows/differential-shellcheck.yml b/.github/workflows/differential-shellcheck.yml
|
||||
index f1ed5f6a..e1bafb93 100644
|
||||
--- a/.github/workflows/differential-shellcheck.yml
|
||||
+++ b/.github/workflows/differential-shellcheck.yml
|
||||
@@ -4,7 +4,7 @@
|
||||
name: Differential ShellCheck
|
||||
on:
|
||||
pull_request:
|
||||
- branches: [master]
|
||||
+ branches: [main]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
security-events: write
|
||||
pull-requests: write
|
||||
|
||||
- steps:
|
||||
+ steps:
|
||||
- name: Repository checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
diff --git a/.github/workflows/pr-welcome-msg.yml b/.github/workflows/pr-welcome-msg.yml
|
||||
index ff9414d2..0102c41f 100644
|
||||
--- a/.github/workflows/pr-welcome-msg.yml
|
||||
+++ b/.github/workflows/pr-welcome-msg.yml
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
However, here are additional useful commands for packit:
|
||||
- **`/packit test`** to re-run manually the default tests
|
||||
- **`/packit retest-failed`** to re-run failed tests manually
|
||||
- - **`/packit test oamg/leapp#42`** to run tests with leapp builds for the leapp PR#42 (default is latest upstream - master - build)
|
||||
+ - **`/packit test oamg/leapp#42`** to run tests with leapp builds for the leapp PR#42 (default is latest upstream - main - build)
|
||||
|
||||
Note that first time contributors cannot run tests automatically - they need to be started by a reviewer.
|
||||
|
||||
diff --git a/.github/workflows/tmt-tests.yml b/.github/workflows/tmt-tests.yml
|
||||
index 7e9fd706..1fa00e60 100644
|
||||
--- a/.github/workflows/tmt-tests.yml
|
||||
+++ b/.github/workflows/tmt-tests.yml
|
||||
@@ -12,7 +12,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_79to88_integration:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_79to86_integration:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -40,7 +40,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_79to88_sst:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_7to8_aws:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-7to8.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_86to90_integration:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -85,7 +85,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_88to92_integration:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_86to90_sst:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
@@ -116,7 +116,7 @@ jobs:
|
||||
|
||||
call_workflow_tests_86to90_aws:
|
||||
needs: call_workflow_copr_build
|
||||
- uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@master
|
||||
+ uses: oamg/leapp/.github/workflows/reuse-tests-8to9.yml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
copr_artifacts: ${{ needs.call_workflow_copr_build.outputs.artifacts }}
|
||||
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
|
||||
index 2a05106e..42b72b8d 100644
|
||||
--- a/.github/workflows/unit-tests.yml
|
||||
+++ b/.github/workflows/unit-tests.yml
|
||||
@@ -2,10 +2,10 @@ name: Unit Tests
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- - master
|
||||
+ - main
|
||||
pull_request:
|
||||
branches:
|
||||
- - master
|
||||
+ - main
|
||||
|
||||
jobs:
|
||||
test:
|
||||
@@ -74,10 +74,10 @@ jobs:
|
||||
# NOTE(ivasilev) fetch-depth 0 is critical here as leapp deps discovery depends on specific substring in
|
||||
# commit message and default 1 option will get us just merge commit which has an unrelevant message.
|
||||
fetch-depth: '0'
|
||||
- # NOTE(ivasilev) master -> origin/master is used for leapp deps discovery in Makefile via git log master..HEAD
|
||||
- - name: Set master to origin/master
|
||||
- if: github.ref != 'refs/heads/master'
|
||||
+ # NOTE(ivasilev) main -> origin/main is used for leapp deps discovery in Makefile via git log main..HEAD
|
||||
+ - name: Set main to origin/main
|
||||
+ if: github.ref != 'refs/heads/main'
|
||||
run: |
|
||||
- git branch -f master origin/master
|
||||
+ git branch -f main origin/main
|
||||
- name: ${{matrix.scenarios.name}}
|
||||
run: script -e -c /bin/bash -c 'TERM=xterm podman build --security-opt=seccomp=unconfined -t leapp-tests -f utils/container-tests/Containerfile.${{matrix.scenarios.container}} utils/container-tests && PYTHON_VENV=${{matrix.scenarios.python}} REPOSITORIES=${{matrix.scenarios.repos}} podman run --security-opt=seccomp=unconfined --rm -ti -v ${PWD}:/payload --env=PYTHON_VENV --env=REPOSITORIES leapp-tests'
|
||||
diff --git a/.packit.yaml b/.packit.yaml
|
||||
index d91a47e5..fbfd0eea 100644
|
||||
--- a/.packit.yaml
|
||||
+++ b/.packit.yaml
|
||||
@@ -22,7 +22,7 @@ actions:
|
||||
fix-spec-file:
|
||||
- bash -c "sed -i -r \"0,/Release:/ s/Release:(\s*)\S*/Release:\1${PACKIT_RPMSPEC_RELEASE}%{?dist}/\" packaging/leapp-repository.spec"
|
||||
post-upstream-clone:
|
||||
- # builds from PRs should have lower NVR than those from master branch
|
||||
+ # builds from PRs should have lower NVR than those from main branch
|
||||
- bash -c "sed -i \"s/1%{?dist}/0%{?dist}/g\" packaging/leapp-repository.spec"
|
||||
|
||||
jobs:
|
||||
@@ -44,12 +44,12 @@ jobs:
|
||||
fix-spec-file:
|
||||
- bash -c "sed -i -r \"0,/Release:/ s/Release:(\s*)\S*/Release:\1${PACKIT_RPMSPEC_RELEASE}%{?dist}/\" packaging/leapp-repository.spec"
|
||||
post-upstream-clone:
|
||||
- # builds from PRs should have lower NVR than those from master branch
|
||||
+ # builds from PRs should have lower NVR than those from main branch
|
||||
- bash -c "sed -i \"s/1%{?dist}/0%{?dist}/g\" packaging/leapp-repository.spec"
|
||||
- job: copr_build
|
||||
trigger: commit
|
||||
metadata:
|
||||
- branch: master
|
||||
+ branch: main
|
||||
owner: "@oamg"
|
||||
project: leapp
|
||||
targets:
|
||||
@@ -65,7 +65,7 @@ jobs:
|
||||
fix-spec-file:
|
||||
- bash -c "sed -i -r \"0,/Release:/ s/Release:(\s*)\S*/Release:\1${PACKIT_RPMSPEC_RELEASE}%{?dist}/\" packaging/leapp-repository.spec"
|
||||
post-upstream-clone:
|
||||
- # builds from master branch should start with 100 release, to have high priority
|
||||
+ # builds from main branch should start with 100 release, to have high priority
|
||||
- bash -c "sed -i \"s/1%{?dist}/100%{?dist}/g\" packaging/leapp-repository.spec"
|
||||
- job: copr_build
|
||||
trigger: release
|
||||
@@ -85,7 +85,7 @@ jobs:
|
||||
fix-spec-file:
|
||||
- bash -c "sed -i -r \"0,/Release:/ s/Release:(\s*)\S*/Release:\1${PACKIT_RPMSPEC_RELEASE}%{?dist}/\" packaging/leapp-repository.spec"
|
||||
post-upstream-clone:
|
||||
- # builds from master branch should start with 100 release, to have high priority
|
||||
+ # builds from main branch should start with 100 release, to have high priority
|
||||
- bash -c "sed -i \"s/1%{?dist}/100%{?dist}/g\" packaging/leapp-repository.spec"
|
||||
|
||||
|
||||
diff --git a/Makefile b/Makefile
|
||||
index 5b2bc4d2..8aeef77d 100644
|
||||
--- a/Makefile
|
||||
+++ b/Makefile
|
||||
@@ -64,7 +64,7 @@ endif
|
||||
|
||||
# just to reduce number of unwanted builds mark as the upstream one when
|
||||
# someone will call copr_build without additional parameters
|
||||
-MASTER_BRANCH=master
|
||||
+MASTER_BRANCH=main
|
||||
|
||||
# In case the PR or MR is defined or in case build is not coming from the
|
||||
# MATER_BRANCH branch, N_REL=0; (so build is not update of the approved
|
||||
@@ -76,10 +76,10 @@ SHORT_SHA=`git rev-parse --short HEAD`
|
||||
BRANCH=`git rev-parse --abbrev-ref HEAD | tr -- '-/' '_'`
|
||||
|
||||
# The dependent framework PR connection will be taken from the top commit's depends-on message.
|
||||
-REQ_LEAPP_PR=$(shell git log master..HEAD | grep -m1 -iE '^[[:space:]]*Depends-On:[[:space:]]*.*[[:digit:]]+[[:space:]]*$$' | grep -Eo '*[[:digit:]]*')
|
||||
+REQ_LEAPP_PR=$(shell git log main..HEAD | grep -m1 -iE '^[[:space:]]*Depends-On:[[:space:]]*.*[[:digit:]]+[[:space:]]*$$' | grep -Eo '*[[:digit:]]*')
|
||||
# NOTE(ivasilev) In case of travis relying on top commit is a no go as a top commit will be a merge commit.
|
||||
ifdef CI
|
||||
- REQ_LEAPP_PR=$(shell git log master..HEAD | grep -m1 -iE '^[[:space:]]*Depends-On:[[:space:]]*.*[[:digit:]]+[[:space:]]*$$' | grep -Eo '[[:digit:]]*')
|
||||
+ REQ_LEAPP_PR=$(shell git log main..HEAD | grep -m1 -iE '^[[:space:]]*Depends-On:[[:space:]]*.*[[:digit:]]+[[:space:]]*$$' | grep -Eo '[[:digit:]]*')
|
||||
endif
|
||||
|
||||
# In case anyone would like to add any other suffix, just make it possible
|
||||
@@ -92,8 +92,8 @@ REQUEST=`if test -n "$$PR"; then echo ".PR$${PR}"; elif test -n "$$MR"; then ech
|
||||
# Examples:
|
||||
# 0.201810080027Z.4078402.packaging.PR2
|
||||
# 0.201810080027Z.4078402.packaging
|
||||
-# 0.201810080027Z.4078402.master.MR2
|
||||
-# 1.201810080027Z.4078402.master
|
||||
+# 0.201810080027Z.4078402.main.MR2
|
||||
+# 1.201810080027Z.4078402.main
|
||||
RELEASE="$(N_REL).$(TIMESTAMP).$(SHORT_SHA).$(BRANCH)$(REQUEST)$(_SUFFIX)"
|
||||
|
||||
all: help
|
||||
@@ -302,7 +302,7 @@ install-deps:
|
||||
pip install --upgrade setuptools; \
|
||||
pip install --upgrade -r requirements.txt; \
|
||||
./utils/install_commands.sh $(_PYTHON_VENV); \
|
||||
- # In case the top commit Depends-On some yet unmerged framework patch - override master leapp with the proper version
|
||||
+ # In case the top commit Depends-On some yet unmerged framework patch - override main leapp with the proper version
|
||||
if [[ ! -z "$(REQ_LEAPP_PR)" ]] ; then \
|
||||
echo "Leapp-repository depends on the yet unmerged pr of the framework #$(REQ_LEAPP_PR), installing it.." && \
|
||||
$(VENVNAME)/bin/pip install -I "git+https://github.com/oamg/leapp.git@refs/pull/$(REQ_LEAPP_PR)/head"; \
|
||||
@@ -332,7 +332,7 @@ install-deps-fedora:
|
||||
pip install --upgrade setuptools; \
|
||||
pip install --upgrade -r requirements.txt; \
|
||||
./utils/install_commands.sh $(_PYTHON_VENV); \
|
||||
- # In case the top commit Depends-On some yet unmerged framework patch - override master leapp with the proper version
|
||||
+ # In case the top commit Depends-On some yet unmerged framework patch - override main leapp with the proper version
|
||||
if [[ ! -z "$(REQ_LEAPP_PR)" ]] ; then \
|
||||
echo "Leapp-repository depends on the yet unmerged pr of the framework #$(REQ_LEAPP_PR), installing it.." && \
|
||||
$(VENVNAME)/bin/pip install -I "git+https://github.com/oamg/leapp.git@refs/pull/$(REQ_LEAPP_PR)/head"; \
|
||||
--
|
||||
2.47.0
|
||||
|
SOURCES/0006-ReadOfKernelArgsError-fix-the-error.patch (new file, 43 lines added)
@@ -0,0 +1,43 @@
From 41e32e3aa6394b8397bef9b797892d9fa119d608 Mon Sep 17 00:00:00 2001
From: Yuriy Kohut <yura.kohut@gmail.com>
Date: Thu, 29 Aug 2024 12:36:23 +0300
Subject: [PATCH 06/40] ReadOfKernelArgsError: fix the error: - AttributeError:
 module 'leapp.reporting' has no attribute 'Hints'

---
 .../kernelcmdlineconfig/libraries/kernelcmdlineconfig.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py b/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
index 238a8aa6..6b261c3b 100644
--- a/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
+++ b/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
@@ -175,14 +175,14 @@ def entrypoint(configs=None):
         api.current_logger().error(str(e))

         if use_cmdline_file():
-            report_hint = reporting.Hints(
+            report_hint = (
                 'After the system has been rebooted into the new version of RHEL, you'
                 ' should take the kernel cmdline arguments from /proc/cmdline (Everything'
                 ' except the BOOT_IMAGE entry and initrd entries) and copy them into'
                 ' /etc/kernel/cmdline before installing any new kernels.'
             )
         else:
-            report_hint = reporting.Hints(
+            report_hint = (
                 'After the system has been rebooted into the new version of RHEL, you'
                 ' should take the kernel cmdline arguments from /proc/cmdline (Everything'
                 ' except the BOOT_IMAGE entry and initrd entries) and then use the'
@@ -204,7 +204,7 @@ def entrypoint(configs=None):
                 ' not able to set the arguments as the default for kernels installed in'
                 ' the future.'
             ),
-            report_hint,
+            reporting.Remediation(hint=report_hint),
             reporting.Severity(reporting.Severity.HIGH),
             reporting.Groups([
                 reporting.Groups.BOOT,
--
2.47.0
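The fix above wraps the hint text in reporting.Remediation instead of the non-existent reporting.Hints. For orientation, a report entry of that shape is assembled roughly as follows; the title and summary strings are made up, and the exact field set is inferred from the diff context rather than taken from the actor:

from leapp import reporting

def process():
    # Sketch of a report entry using Remediation for the hint; meant to run
    # inside a leapp actor, where create_report produces the Report message.
    report_hint = (
        'After the system has been rebooted into the new version of RHEL,'
        ' copy the kernel cmdline arguments from /proc/cmdline into'
        ' /etc/kernel/cmdline before installing any new kernels.'
    )
    reporting.create_report([
        reporting.Title('Kernel cmdline arguments were not set for future kernels'),
        reporting.Summary('Illustrative summary text only.'),
        reporting.Remediation(hint=report_hint),
        reporting.Severity(reporting.Severity.HIGH),
        reporting.Groups([reporting.Groups.BOOT]),
    ])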
@@ -0,0 +1,44 @@
From 88e13fb0545e0d42df2777538a0c6921bab91e33 Mon Sep 17 00:00:00 2001
From: Petr Stodulka <pstodulk@redhat.com>
Date: Fri, 27 Sep 2024 14:53:01 +0200
Subject: [PATCH 07/40] pylint: exclude rule: too-many-positional-arguments
 (code: R0917)

New versions of Pylint have a rule checking positional arguments,
complaining when more than 4 positional arguments exist. We do not want
to refactor the code to make it happy, and the default value cannot be
set right now - that's planned for future Pylint versions. So excluding
this rule.

For more info:
* https://pylint.readthedocs.io/en/latest/user_guide/messages/refactor/too-many-positional-arguments.html
---
 .pylintrc | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index f78c1c3f..5d75df40 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -41,6 +41,8 @@ disable=
     consider-using-from-import,
     use-list-literal,
     use-dict-literal,
+    too-many-lines, # we do not want to take care about that one
+    too-many-positional-arguments, # we cannot set yet max-possitional-arguments unfortunately
     # new for python3 version of pylint
     useless-object-inheritance,
     consider-using-set-comprehension, # pylint3 force to use comprehension in place we don't want (py2 doesnt have these options, for inline skip)
@@ -57,8 +59,7 @@ disable=
     redundant-u-string-prefix, # still have py2 to support
     logging-format-interpolation,
     logging-not-lazy,
-    use-yield-from, # yield from cannot be used until we require python 3.3 or greater
-    too-many-lines # we do not want to take care about that one
+    use-yield-from # yield from cannot be used until we require python 3.3 or greater

 [FORMAT]
 # Maximum number of characters on a single line.
--
2.47.0
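For context on R0917: the check fires when a function declares more positional parameters than Pylint's limit. A tiny standalone illustration follows, including one way to avoid the warning without a global disable; this example is not taken from the repository:

# A signature like this trips R0917 (too-many-positional-arguments)
# on Pylint releases that ship the check:
def build_report(title, summary, severity, groups, remediation, audience):
    return {'title': title, 'summary': summary, 'severity': severity,
            'groups': groups, 'remediation': remediation, 'audience': audience}


# Keyword-only parameters are one refactor that silences the check;
# callers must then pass the trailing arguments by name:
def build_report_kwonly(title, summary, *, severity, groups, remediation, audience):
    return build_report(title, summary, severity, groups, remediation, audience)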
SOURCES/0008-pam_userdb-migrate-backend-database.patch (new file, 534 lines added)
@@ -0,0 +1,534 @@
From 658700d6424e852917b62c190dd23cbb3026b67d Mon Sep 17 00:00:00 2001
From: Iker Pedrosa <ipedrosa@redhat.com>
Date: Mon, 5 Aug 2024 15:15:44 +0200
Subject: [PATCH 08/40] pam_userdb: migrate backend database

The pam_userdb module changed its backend database technology from libdb to
gdbm for RHEL10. This requires a set of leapp actors to perform the
database migration automatically when upgrading to RHEL10:

* ScanPamUserDB takes care of scanning the PAM service folder to detect
  whether pam_userdb is used and the location of the database in use.
  This information is stored in a model.

* CheckPamUserDB checks the databases reported by ScanPamUserDB and
  prints a report about them.

* ConvertPamUserDB checks the databases reported by ScanPamUserDB and
  converts them to GDBM format.

* RemoveOldPamUserDB checks the databases reported by ScanPamUserDB and
  removes them.

All these actors include unit-tests.

Finally, there's also a spec file change to add a `libdb-utils` dependency,
as it is required to convert pam_userdb databases from BerkeleyDB to
GDBM.

Signed-off-by: Iker Pedrosa <ipedrosa@redhat.com>
---
packaging/leapp-repository.spec | 6 +++
|
||||
.../actors/pamuserdb/checkpamuserdb/actor.py | 18 ++++++++
|
||||
.../libraries/checkpamuserdb.py | 28 ++++++++++++
|
||||
.../tests/test_checkpamuserdb.py | 43 +++++++++++++++++++
|
||||
.../pamuserdb/convertpamuserdb/actor.py | 18 ++++++++
|
||||
.../libraries/convertpamuserdb.py | 27 ++++++++++++
|
||||
.../tests/test_convertpamuserdb.py | 39 +++++++++++++++++
|
||||
.../pamuserdb/removeoldpamuserdb/actor.py | 18 ++++++++
|
||||
.../libraries/removeoldpamuserdb.py | 25 +++++++++++
|
||||
.../tests/test_removeoldpamuserdb.py | 38 ++++++++++++++++
|
||||
.../actors/pamuserdb/scanpamuserdb/actor.py | 18 ++++++++
|
||||
.../scanpamuserdb/libraries/scanpamuserdb.py | 29 +++++++++++++
|
||||
.../tests/files/pam_userdb_basic | 1 +
|
||||
.../tests/files/pam_userdb_complete | 9 ++++
|
||||
.../tests/files/pam_userdb_missing | 1 +
|
||||
.../scanpamuserdb/tests/test_scanpamuserdb.py | 27 ++++++++++++
|
||||
.../el9toel10/models/pamuserdblocation.py | 14 ++++++
|
||||
17 files changed, 359 insertions(+)
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/actor.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/libraries/checkpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/tests/test_checkpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/actor.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/libraries/convertpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/tests/test_convertpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/actor.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/libraries/removeoldpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/tests/test_removeoldpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/actor.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/libraries/scanpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_basic
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_complete
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_missing
|
||||
create mode 100644 repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/test_scanpamuserdb.py
|
||||
create mode 100644 repos/system_upgrade/el9toel10/models/pamuserdblocation.py
|
||||
|
||||
diff --git a/packaging/leapp-repository.spec b/packaging/leapp-repository.spec
index 146afc45..0d63ba02 100644
--- a/packaging/leapp-repository.spec
+++ b/packaging/leapp-repository.spec
@@ -211,6 +211,12 @@ Requires: dracut
Requires: NetworkManager-libnm
Requires: python3-gobject-base

+%endif
+
+%if 0%{?rhel} && 0%{?rhel} == 9
+############# RHEL 9 dependencies (when the source system is RHEL 9) ##########
+# Required to convert pam_userdb database from BerkeleyDB to GDBM
+Requires: libdb-utils
%endif
##################################################
# end requirement
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/actor.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/actor.py
new file mode 100644
index 00000000..8fada645
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/actor.py
@@ -0,0 +1,18 @@
+from leapp.actors import Actor
+from leapp.libraries.actor import checkpamuserdb
+from leapp.models import PamUserDbLocation, Report
+from leapp.tags import ChecksPhaseTag, IPUWorkflowTag
+
+
+class CheckPamUserDb(Actor):
+ """
+ Create report with the location of pam_userdb databases
+ """
+
+ name = 'check_pam_user_db'
+ consumes = (PamUserDbLocation,)
+ produces = (Report,)
+ tags = (ChecksPhaseTag, IPUWorkflowTag)
+
+ def process(self):
+ checkpamuserdb.process()
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/libraries/checkpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/libraries/checkpamuserdb.py
new file mode 100644
index 00000000..05cc71a9
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/libraries/checkpamuserdb.py
@@ -0,0 +1,28 @@
+from leapp import reporting
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.stdlib import api
+from leapp.models import PamUserDbLocation
+
+FMT_LIST_SEPARATOR = "\n - "
+
+
+def process():
+ msg = next(api.consume(PamUserDbLocation), None)
+ if not msg:
+ raise StopActorExecutionError('Expected PamUserDbLocation, but got None')
+
+ if msg.locations:
+ reporting.create_report([
+ reporting.Title('pam_userdb databases will be converted to GDBM'),
+ reporting.Summary(
+ 'On RHEL 10, GDMB is used by pam_userdb as it\'s backend database,'
+ ' replacing BerkeleyDB. Existing pam_userdb databases will be'
+ ' converted to GDBM. The following databases will be converted:'
+ '{sep}{locations}'.format(sep=FMT_LIST_SEPARATOR, locations=FMT_LIST_SEPARATOR.join(msg.locations))),
+ reporting.Severity(reporting.Severity.INFO),
+ reporting.Groups([reporting.Groups.SECURITY, reporting.Groups.AUTHENTICATION])
+ ])
+ else:
+ api.current_logger().debug(
+ 'No pam_userdb databases were located, thus nothing will be converted'
+ )
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/tests/test_checkpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/tests/test_checkpamuserdb.py
new file mode 100644
index 00000000..2e11106b
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/checkpamuserdb/tests/test_checkpamuserdb.py
@@ -0,0 +1,43 @@
+import pytest
+
+from leapp import reporting
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.actor import checkpamuserdb
+from leapp.libraries.common.testutils import create_report_mocked, logger_mocked
+from leapp.libraries.stdlib import api
+from leapp.models import PamUserDbLocation
+
+
+def test_process_no_msg(monkeypatch):
+ def consume_mocked(*args, **kwargs):
+ yield None
+
+ monkeypatch.setattr(api, 'consume', consume_mocked)
+
+ with pytest.raises(StopActorExecutionError):
+ checkpamuserdb.process()
+
+
+def test_process_no_location(monkeypatch):
+ def consume_mocked(*args, **kwargs):
+ yield PamUserDbLocation(locations=[])
+
+ monkeypatch.setattr(api, 'current_logger', logger_mocked())
+ monkeypatch.setattr(api, 'consume', consume_mocked)
+
+ checkpamuserdb.process()
+ assert (
+ 'No pam_userdb databases were located, thus nothing will be converted'
+ in api.current_logger.dbgmsg
+ )
+
+
+def test_process_locations(monkeypatch):
+ def consume_mocked(*args, **kwargs):
+ yield PamUserDbLocation(locations=['/tmp/db1', '/tmp/db2'])
+
+ monkeypatch.setattr(reporting, "create_report", create_report_mocked())
+ monkeypatch.setattr(api, 'consume', consume_mocked)
+
+ checkpamuserdb.process()
+ assert reporting.create_report.called == 1
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/actor.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/actor.py
new file mode 100644
index 00000000..5f8525b6
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/actor.py
@@ -0,0 +1,18 @@
+from leapp.actors import Actor
+from leapp.libraries.actor import convertpamuserdb
+from leapp.models import PamUserDbLocation
+from leapp.tags import IPUWorkflowTag, PreparationPhaseTag
+
+
+class ConvertPamUserDb(Actor):
+ """
+ Convert the pam_userdb databases to GDBM
+ """
+
+ name = 'convert_pam_user_db'
+ consumes = (PamUserDbLocation,)
+ produces = ()
+ tags = (PreparationPhaseTag, IPUWorkflowTag)
+
+ def process(self):
+ convertpamuserdb.process()
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/libraries/convertpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/libraries/convertpamuserdb.py
new file mode 100644
index 00000000..e55b4102
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/libraries/convertpamuserdb.py
@@ -0,0 +1,27 @@
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.stdlib import api, CalledProcessError, run
+from leapp.models import PamUserDbLocation
+
+
+def _convert_db(db_path):
+ cmd = ['db_converter', '--src', f'{db_path}.db', '--dest', f'{db_path}.gdbm']
+ try:
+ run(cmd)
+ except (CalledProcessError, OSError) as e:
+ # As the db_converter does not remove the original DB after conversion or upon failure,
+ # interrupt the upgrade, keeping the original DBs.
+ # If all DBs are successfully converted, the leftover DBs are removed in the removeoldpamuserdb actor.
+ raise StopActorExecutionError(
+ 'Cannot convert pam_userdb database.',
+ details={'details': '{}: {}'.format(str(e), e.stderr)}
+ )
+
+
+def process():
+ msg = next(api.consume(PamUserDbLocation), None)
+ if not msg:
+ raise StopActorExecutionError('Expected PamUserDbLocation, but got None')
+
+ if msg.locations:
+ for location in msg.locations:
+ _convert_db(location)
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/tests/test_convertpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/tests/test_convertpamuserdb.py
new file mode 100644
index 00000000..46505492
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/convertpamuserdb/tests/test_convertpamuserdb.py
@@ -0,0 +1,39 @@
+import os
+
+import pytest
+
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.actor import convertpamuserdb
+from leapp.libraries.common.testutils import logger_mocked
+from leapp.libraries.stdlib import api, CalledProcessError
+
+CUR_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def test_convert_db_success(monkeypatch):
+ location = os.path.join(CUR_DIR, '/files/db1')
+
+ def run_mocked(cmd, **kwargs):
+ assert cmd == ['db_converter', '--src', f'{location}.db', '--dest', f'{location}.gdbm']
+
+ monkeypatch.setattr(api, 'current_logger', logger_mocked())
+ monkeypatch.setattr(convertpamuserdb, 'run', run_mocked)
+ convertpamuserdb._convert_db(location)
+ assert len(api.current_logger.errmsg) == 0
+
+
+def test_convert_db_failure(monkeypatch):
+ location = os.path.join(CUR_DIR, '/files/db1')
+
+ def run_mocked(cmd, **kwargs):
+ raise CalledProcessError(
+ message='A Leapp Command Error occurred.',
+ command=cmd,
+ result={'exit_code': 1}
+ )
+
+ monkeypatch.setattr(api, 'current_logger', logger_mocked())
+ monkeypatch.setattr(convertpamuserdb, 'run', run_mocked)
+ with pytest.raises(StopActorExecutionError) as err:
+ convertpamuserdb._convert_db(location)
+ assert str(err.value) == 'Cannot convert pam_userdb database.'
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/actor.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/actor.py
new file mode 100644
index 00000000..39a00855
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/actor.py
@@ -0,0 +1,18 @@
+from leapp.actors import Actor
+from leapp.libraries.actor import removeoldpamuserdb
+from leapp.models import PamUserDbLocation
+from leapp.tags import ApplicationsPhaseTag, IPUWorkflowTag
+
+
+class RemoveOldPamUserDb(Actor):
+ """
+ Remove old pam_userdb databases
+ """
+
+ name = 'remove_old_pam_user_db'
+ consumes = (PamUserDbLocation,)
+ produces = ()
+ tags = (ApplicationsPhaseTag, IPUWorkflowTag)
+
+ def process(self):
+ removeoldpamuserdb.process()
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/libraries/removeoldpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/libraries/removeoldpamuserdb.py
new file mode 100644
index 00000000..5fc4cb4d
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/libraries/removeoldpamuserdb.py
@@ -0,0 +1,25 @@
+from leapp.exceptions import StopActorExecutionError
+from leapp.libraries.stdlib import api, CalledProcessError, run
+from leapp.models import PamUserDbLocation
+
+
+def _remove_db(db_path):
+ cmd = ['rm', '-f', f'{db_path}.db']
+ try:
+ run(cmd)
+ except (CalledProcessError, OSError) as e:
+ api.current_logger().error(
+ 'Failed to remove {}.db: {}'.format(
+ db_path, e
+ )
+ )
+
+
+def process():
+ msg = next(api.consume(PamUserDbLocation), None)
+ if not msg:
+ raise StopActorExecutionError('Expected PamUserDbLocation, but got None')
+
+ if msg.locations:
+ for location in msg.locations:
+ _remove_db(location)
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/tests/test_removeoldpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/tests/test_removeoldpamuserdb.py
new file mode 100644
index 00000000..2c1d5c75
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/removeoldpamuserdb/tests/test_removeoldpamuserdb.py
@@ -0,0 +1,38 @@
+import os
+
+from leapp.libraries.actor import removeoldpamuserdb
+from leapp.libraries.common.testutils import logger_mocked
+from leapp.libraries.stdlib import api, CalledProcessError
+
+CUR_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def test_remove_db_success(monkeypatch):
+ location = os.path.join(CUR_DIR, '/files/db1')
+
+ def run_mocked(cmd, **kwargs):
+ assert cmd == ['rm', '-f', f'{location}.db']
+
+ monkeypatch.setattr(api, 'current_logger', logger_mocked())
+ monkeypatch.setattr(removeoldpamuserdb, 'run', run_mocked)
+ removeoldpamuserdb._remove_db(location)
+ assert len(api.current_logger.errmsg) == 0
+
+
+def test_remove_db_failure(monkeypatch):
+ location = os.path.join(CUR_DIR, '/files/db1')
+
+ def run_mocked(cmd, **kwargs):
+ raise CalledProcessError(
+ message='A Leapp Command Error occurred.',
+ command=cmd,
+ result={'exit_code': 1}
+ )
+
+ monkeypatch.setattr(api, 'current_logger', logger_mocked())
+ monkeypatch.setattr(removeoldpamuserdb, 'run', run_mocked)
+ removeoldpamuserdb._remove_db(location)
+ assert (
+ 'Failed to remove /files/db1.db'
+ not in api.current_logger.errmsg
+ )
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/actor.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/actor.py
new file mode 100644
index 00000000..b6b35f1a
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/actor.py
@@ -0,0 +1,18 @@
+from leapp.actors import Actor
+from leapp.libraries.actor import scanpamuserdb
+from leapp.models import PamUserDbLocation
+from leapp.tags import FactsPhaseTag, IPUWorkflowTag
+
+
+class ScanPamUserDb(Actor):
+ """
+ Scan the PAM service folder for the location of pam_userdb databases
+ """
+
+ name = 'scan_pam_user_db'
+ consumes = ()
+ produces = (PamUserDbLocation,)
+ tags = (FactsPhaseTag, IPUWorkflowTag)
+
+ def process(self):
+ self.produce(scanpamuserdb.parse_pam_config_folder('/etc/pam.d/'))
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/libraries/scanpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/libraries/scanpamuserdb.py
new file mode 100644
index 00000000..0f668c02
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/libraries/scanpamuserdb.py
@@ -0,0 +1,29 @@
+import os
+import re
+
+from leapp.models import PamUserDbLocation
+
+
+def _parse_pam_config_file(conf_file):
+ with open(conf_file, 'r') as file:
+ for line in file:
+ if 'pam_userdb' in line:
+ match = re.search(r'db=(\S+)', line)
+ if match:
+ return match.group(1)
+
+ return None
+
+
+def parse_pam_config_folder(conf_folder):
+ locations = set()
+
+ for file_name in os.listdir(conf_folder):
+ file_path = os.path.join(conf_folder, file_name)
+
+ if os.path.isfile(file_path):
+ location = _parse_pam_config_file(file_path)
+ if location is not None:
+ locations.add(location)
+
+ return PamUserDbLocation(locations=list(locations))
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_basic b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_basic
new file mode 100644
index 00000000..f115147b
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_basic
@@ -0,0 +1 @@
+auth required pam_userdb.so db=/tmp/db1
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_complete b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_complete
new file mode 100644
index 00000000..84e40b48
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_complete
@@ -0,0 +1,9 @@
+auth required pam_env.so
+auth required pam_faildelay.so delay=2000000
+auth sufficient pam_fprintd.so
+auth [default=1 ignore=ignore success=ok] pam_usertype.so isregular
+auth [default=1 ignore=ignore success=ok] pam_localuser.so
+auth required pam_userdb.so db=/tmp/db2
+auth [default=1 ignore=ignore success=ok] pam_usertype.so isregular
+auth sufficient pam_sss.so forward_pass
+auth required pam_deny.so
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_missing b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_missing
new file mode 100644
index 00000000..764947fc
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/files/pam_userdb_missing
@@ -0,0 +1 @@
+auth sufficient pam_unix.so nullok
diff --git a/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/test_scanpamuserdb.py b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/test_scanpamuserdb.py
new file mode 100644
index 00000000..3b752d87
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/actors/pamuserdb/scanpamuserdb/tests/test_scanpamuserdb.py
@@ -0,0 +1,27 @@
+import os
+
+import pytest
+
+from leapp.libraries.actor import scanpamuserdb
+
+CUR_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+@pytest.mark.parametrize(
+ "inp,exp_out",
+ [
+ ("files/pam_userdb_missing", None),
+ ("files/pam_userdb_basic", "/tmp/db1"),
+ ("files/pam_userdb_complete", "/tmp/db2"),
+ ],
+)
+def test_parse_pam_config_file(inp, exp_out):
+ file = scanpamuserdb._parse_pam_config_file(os.path.join(CUR_DIR, inp))
+ assert file == exp_out
+
+
+def test_parse_pam_config_folder():
+ msg = scanpamuserdb.parse_pam_config_folder(os.path.join(CUR_DIR, "files/"))
+ assert len(msg.locations) == 2
+ assert "/tmp/db1" in msg.locations
+ assert "/tmp/db2" in msg.locations
diff --git a/repos/system_upgrade/el9toel10/models/pamuserdblocation.py b/repos/system_upgrade/el9toel10/models/pamuserdblocation.py
new file mode 100644
index 00000000..d15b2041
--- /dev/null
+++ b/repos/system_upgrade/el9toel10/models/pamuserdblocation.py
@@ -0,0 +1,14 @@
+from leapp.models import fields, Model
+from leapp.topics import SystemInfoTopic
+
+
+class PamUserDbLocation(Model):
+ """
+ Provides a list of all database files for pam_userdb
+ """
+ topic = SystemInfoTopic
+
+ locations = fields.List(fields.String(), default=[])
+ """
+ The list with the full path to the database files.
+ """
--
2.47.0

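The four actors above hand work to each other through the PamUserDbLocation message: scan_pam_user_db records every db= path found under /etc/pam.d, check_pam_user_db reports them, convert_pam_user_db converts each one, and remove_old_pam_user_db deletes the leftovers. A rough sketch of the conversion step for a single database, using the db_converter invocation from the patch; the path is an invented example, the real ones come from the scanner:

from leapp.libraries.stdlib import run

db_path = '/etc/pam_userdb/logins'  # example path, not one the scanner necessarily finds
# Convert the BerkeleyDB file to a GDBM file next to it; db_converter leaves the
# source .db behind, which is why a separate actor removes it ('rm -f <path>.db')
# only after all conversions succeed.
run(['db_converter', '--src', db_path + '.db', '--dest', db_path + '.gdbm'])
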
@@ -0,0 +1,31 @@
From d6e57eec3ded2887008055442ba906a92c572a01 Mon Sep 17 00:00:00 2001
From: Matej Matuska <mmatuska@redhat.com>
Date: Thu, 10 Oct 2024 14:03:36 +0200
Subject: [PATCH 09/40] Replace mirror.centos.org with vault.centos.org Centos
7 Containerfile

As mirror.centos.org is dead, replace mirrorlist with baseurl pointing
to vault.centos.org in utils/container-builds/Containerfile.centos7.
---
utils/container-builds/Containerfile.centos7 | 5 +++++
1 file changed, 5 insertions(+)

diff --git a/utils/container-builds/Containerfile.centos7 b/utils/container-builds/Containerfile.centos7
index 70ac3df1..af00eddb 100644
--- a/utils/container-builds/Containerfile.centos7
+++ b/utils/container-builds/Containerfile.centos7
@@ -2,6 +2,11 @@ FROM centos:7

VOLUME /repo

+# mirror.centos.org is dead, comment out mirrorlist and set baseurl to vault.centos.org
+RUN sed -i s/mirror.centos.org/vault.centos.org/ /etc/yum.repos.d/CentOS-*.repo
+RUN sed -i s/^#\s*baseurl=http/baseurl=http/ /etc/yum.repos.d/CentOS-*.repo
+RUN sed -i s/^mirrorlist=http/#mirrorlist=http/ /etc/yum.repos.d/CentOS-*.repo
+
RUN yum update -y && \
yum install -y rpm-build python-devel make git

--
2.47.0

@@ -0,0 +1,35 @@
From b997e4eeb835809d1fbfd1a0b9a6114c133bf0b4 Mon Sep 17 00:00:00 2001
From: Matej Matuska <mmatuska@redhat.com>
Date: Thu, 10 Oct 2024 15:28:48 +0200
Subject: [PATCH 10/40] kernelcmdlineconfig: Add Report to produces tuple

The missing `leapp.reporting.Report` class is added to
kernelcmdlineconfig actor `produces` tuple.
---
.../system_upgrade/common/actors/kernelcmdlineconfig/actor.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/repos/system_upgrade/common/actors/kernelcmdlineconfig/actor.py b/repos/system_upgrade/common/actors/kernelcmdlineconfig/actor.py
index b44fd835..3585a14e 100644
--- a/repos/system_upgrade/common/actors/kernelcmdlineconfig/actor.py
+++ b/repos/system_upgrade/common/actors/kernelcmdlineconfig/actor.py
@@ -4,6 +4,7 @@ from leapp.actors import Actor
from leapp.exceptions import StopActorExecutionError
from leapp.libraries.actor import kernelcmdlineconfig
from leapp.models import FirmwareFacts, InstalledTargetKernelInfo, KernelCmdlineArg, TargetKernelCmdlineArgTasks
+from leapp.reporting import Report
from leapp.tags import FinalizationPhaseTag, IPUWorkflowTag


@@ -14,7 +15,7 @@ class KernelCmdlineConfig(Actor):

name = 'kernelcmdlineconfig'
consumes = (KernelCmdlineArg, InstalledTargetKernelInfo, FirmwareFacts, TargetKernelCmdlineArgTasks)
- produces = ()
+ produces = (Report,)
tags = (FinalizationPhaseTag, IPUWorkflowTag)

def process(self):
--
2.47.0

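The one-line change above matters because leapp checks the messages an actor produces against its declared produces tuple, so an actor that emits a report without listing Report there may be flagged. A minimal sketch of the expected declaration, with an invented actor name and placeholder report fields:

from leapp import reporting
from leapp.actors import Actor
from leapp.reporting import Report
from leapp.tags import FinalizationPhaseTag, IPUWorkflowTag


class ExampleReportingActor(Actor):
    """
    Hypothetical actor that creates a report during finalization
    """

    name = 'example_reporting_actor'
    consumes = ()
    produces = (Report,)  # required whenever reporting.create_report() is called
    tags = (FinalizationPhaseTag, IPUWorkflowTag)

    def process(self):
        reporting.create_report([
            reporting.Title('Placeholder title'),
            reporting.Summary('Placeholder summary.'),
        ])
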
@@ -0,0 +1,204 @@
From c2c96affa7b20c82969419ce49b65cbf646a0c32 Mon Sep 17 00:00:00 2001
From: Matej Matuska <mmatuska@redhat.com>
Date: Fri, 18 Oct 2024 12:43:19 +0200
Subject: [PATCH 11/40] kernelcmdlineconfig: Use args from first entry when
multiple entries are listed

Instead of erroring out when grubby lists multiple entries for the
default kernel, always use the `args=` and `root=` from the first one and create
a post-upgrade report. The report instruct user to ensure those are the
correct ones or to correct them.

This can happen, for example, if MAKEDEBUG=yes is set in
/etc/sysconfing/kernel.

Jira: RHEL-46911
---
.../libraries/kernelcmdlineconfig.py | 79 ++++++++++++++++---
.../tests/test_kernelcmdlineconfig.py | 48 ++++++++++-
2 files changed, 116 insertions(+), 11 deletions(-)

diff --git a/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py b/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
index 6b261c3b..19c50f3c 100644
--- a/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
+++ b/repos/system_upgrade/common/actors/kernelcmdlineconfig/libraries/kernelcmdlineconfig.py
@@ -109,10 +109,55 @@ def _extract_grubby_value(record):
return matches.group(2)


+def report_multple_entries_for_default_kernel():
+ if use_cmdline_file():
+ report_hint = (
+ 'After the system has been rebooted into the new version of RHEL,'
+ ' check that configured default kernel cmdline arguments in /etc/kernel/cmdline '
+ ' are correct. In case that different