diff --git a/0003-ipatests-Check-Default-PAC-type-is-added-to-config.patch b/0003-ipatests-Check-Default-PAC-type-is-added-to-config.patch new file mode 100644 index 0000000..c8f898e --- /dev/null +++ b/0003-ipatests-Check-Default-PAC-type-is-added-to-config.patch @@ -0,0 +1,92 @@ +From ad4b7f6cedaed54acf279033b650010c65face10 Mon Sep 17 00:00:00 2001 +From: Sudhir Menon +Date: Tue, 20 Aug 2024 14:52:03 +0530 +Subject: [PATCH] ipatests: Check Default PAC type is added to config + +This patch checks that the default PAC type +is added to configuration i.e ipaKrbAuthzData: MS-PAC +during ipa-server-installation + +The patch also checks that if 'ipaKrbAuthzData: MS-PAC' +attribute is deleted and then when we run 'ipa-server-upgrade' +command the attribute is added back. + +Related: https://pagure.io/freeipa/issue/9632 + +Signed-off-by: Sudhir Menon +Reviewed-By: Florence Blanc-Renaud +--- + .../test_integration/test_installation.py | 15 +++++++++++ + ipatests/test_integration/test_upgrade.py | 26 ++++++++++++++++++- + 2 files changed, 40 insertions(+), 1 deletion(-) + +diff --git a/ipatests/test_integration/test_installation.py b/ipatests/test_integration/test_installation.py +index ada43e33fe173ea3c315178c37e2a664b05b905b..c5565c452010f23f038ddf329454b591ef09f6af 100644 +--- a/ipatests/test_integration/test_installation.py ++++ b/ipatests/test_integration/test_installation.py +@@ -1190,6 +1190,21 @@ class TestInstallMaster(IntegrationTest): + expected_stdout=f'href="https://{self.master.hostname}/' + ) + ++ def test_pac_configuration_enabled(self): ++ """ ++ This testcase checks that the default PAC type ++ is added to configuration. ++ """ ++ base_dn = str(self.master.domain.basedn) ++ dn = DN( ++ ("cn", "ipaConfig"), ++ ("cn", "etc"), ++ base_dn ++ ) ++ result = tasks.ldapsearch_dm(self.master, str(dn), ++ ["ipaKrbAuthzData"]) ++ assert 'ipaKrbAuthzData: MS-PAC' in result.stdout_text ++ + def test_hostname_parameter(self, server_cleanup): + """ + Test that --hostname parameter is respected in interactive mode. +diff --git a/ipatests/test_integration/test_upgrade.py b/ipatests/test_integration/test_upgrade.py +index 011de939e92790734d63da2f85be1c25349116a8..a0f393780ccc25774466992976532c876aa876da 100644 +--- a/ipatests/test_integration/test_upgrade.py ++++ b/ipatests/test_integration/test_upgrade.py +@@ -165,7 +165,6 @@ class TestUpgrade(IntegrationTest): + ldap.update_entry(location_krb_rec) + + yield _setup_locations +- + ldap = self.master.ldap_connect() + + modified = False +@@ -491,3 +490,28 @@ class TestUpgrade(IntegrationTest): + tasks.reinstall_packages(self.master, ['*ipa-client']) + assert not self.master.transport.file_exists( + paths.SSH_CONFIG + ".orig") ++ ++ def test_mspac_attribute_set(self): ++ """ ++ This testcase deletes the already existing attribute ++ 'ipaKrbAuthzData: MS-PAC'. ++ The test then runs ipa-server-upgrade and checks that ++ the attribute 'ipaKrbAuthzData: MS-PAC' is added again. 
++ """ ++ base_dn = str(self.master.domain.basedn) ++ dn = DN( ++ ("cn", "ipaConfig"), ++ ("cn", "etc"), ++ base_dn ++ ) ++ ldif = textwrap.dedent(""" ++ dn: cn=ipaConfig,cn=etc,{} ++ changetype: modify ++ delete: ipaKrbAuthzData ++ """).format(base_dn) ++ tasks.ldapmodify_dm(self.master, ldif) ++ tasks.kinit_admin(self.master) ++ self.master.run_command(['ipa-server-upgrade']) ++ result = tasks.ldapsearch_dm(self.master, str(dn), ++ ["ipaKrbAuthzData"]) ++ assert 'ipaKrbAuthzData: MS-PAC' in result.stdout_text +-- +2.46.0 + diff --git a/0004-selinux-add-all-IPA-log-files-to-ipa_log_t-file-cont.patch b/0004-selinux-add-all-IPA-log-files-to-ipa_log_t-file-cont.patch new file mode 100644 index 0000000..2972dc1 --- /dev/null +++ b/0004-selinux-add-all-IPA-log-files-to-ipa_log_t-file-cont.patch @@ -0,0 +1,86 @@ +From 42eb97ee6bd8011b590aef321d4386ea9352933d Mon Sep 17 00:00:00 2001 +From: Alexander Bokovoy +Date: Wed, 28 Aug 2024 10:02:19 +0300 +Subject: [PATCH] selinux: add all IPA log files to ipa_log_t file context + +We have multiple log files that produced by IPA components. Some of them +are written by the tools that run as root and inherit their file context +from /var/log -> var_log_t. However, increasingly we get tools that were +run through oddjob helpers. These supposed to be run within ipa_helper_t +SELinux context which has write permissions for ipa_log_t file context. + +Add all known log files from the base platform. The following script was +used to generate them: +$ git grep '_LOG = .*ipa.*\.log' ipaplatform/base/paths.py | cut -d= -f2 | \ + xargs -I% echo -e "%\t--\tgen_context(system_u:object_r:ipa_log_t,s0)" + +/var/log/ipabackup.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaclient-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaclient-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaclientsamba-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaclientsamba-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipareplica-ca-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipareplica-conncheck.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipareplica-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/iparestore.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-enable-sid.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-adtrust-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-dns-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-kra-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaserver-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaupgrade.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipatrust-enable-agent.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipaepn.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipa-custodia.audit.log -- gen_context(system_u:object_r:ipa_log_t,s0) +/var/log/ipa-migrate.log -- gen_context(system_u:object_r:ipa_log_t,s0) + +ipa-custodia.audit.log was already in the present list. + +Additionally, ipa-migrate-conflict.ldif is used by the ipa-migrate tool +but is not provided through the ipaplatform mechanism. It is added +explicitly. 
+ +Fixes: https://pagure.io/freeipa/issue/9654 + +Signed-off-by: Alexander Bokovoy +Reviewed-By: Florence Blanc-Renaud +--- + selinux/ipa.fc | 21 ++++++++++++++++++++- + 1 file changed, 20 insertions(+), 1 deletion(-) + +diff --git a/selinux/ipa.fc b/selinux/ipa.fc +index 700e3a14a11fcd403a2e6f57ec781c58dae77660..47bd19ba77418cad1f0904dc4a9a35ce9d6ff9d2 100644 +--- a/selinux/ipa.fc ++++ b/selinux/ipa.fc +@@ -24,7 +24,26 @@ + + /var/log/ipa(/.*)? gen_context(system_u:object_r:ipa_log_t,s0) + +-/var/log/ipareplica-conncheck.log.* -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipabackup.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaclient-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaclient-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaclientsamba-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaclientsamba-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipareplica-ca-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipareplica-conncheck.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipareplica-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/iparestore.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-enable-sid.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-adtrust-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-dns-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-kra-install.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaserver-uninstall.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaupgrade.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipatrust-enable-agent.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipaepn.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipa-migrate.log -- gen_context(system_u:object_r:ipa_log_t,s0) ++/var/log/ipa-migrate-conflict.ldif -- gen_context(system_u:object_r:ipa_log_t,s0) + + /var/run/ipa(/.*)? gen_context(system_u:object_r:ipa_var_run_t,s0) + +-- +2.46.0 + diff --git a/0005-Add-ipa-idrange-fix.patch b/0005-Add-ipa-idrange-fix.patch new file mode 100644 index 0000000..9948fa9 --- /dev/null +++ b/0005-Add-ipa-idrange-fix.patch @@ -0,0 +1,1501 @@ +From aa4651526e6697e15ce4960bf1d15d1389889c7f Mon Sep 17 00:00:00 2001 +From: "asharov@redhat.com" +Date: Mon, 24 Jun 2024 15:33:34 +0200 +Subject: [PATCH] Add ipa-idrange-fix + +ipa-idrange-fix is a tool for analysis of existing IPA ranges, users +and groups outside of those ranges, and functionality to propose +and apply remediations to make sure as much users and groups as +possible end up in the IPA-managed ranges. 
+ +Fixes: https://pagure.io/freeipa/issue/9612 + +Signed-off-by: Aleksandr Sharov +Reviewed-By: Rob Crittenden +Reviewed-By: Rob Crittenden +--- + freeipa.spec.in | 2 + + install/tools/Makefile.am | 2 + + install/tools/ipa-idrange-fix.in | 8 + + install/tools/man/Makefile.am | 1 + + install/tools/man/ipa-idrange-fix.1 | 111 ++ + ipaserver/install/ipa_idrange_fix.py | 1085 +++++++++++++++++ + .../test_integration/test_ipa_idrange_fix.py | 189 +++ + 7 files changed, 1398 insertions(+) + create mode 100644 install/tools/ipa-idrange-fix.in + create mode 100644 install/tools/man/ipa-idrange-fix.1 + create mode 100644 ipaserver/install/ipa_idrange_fix.py + create mode 100644 ipatests/test_integration/test_ipa_idrange_fix.py + +diff --git a/freeipa.spec.in b/freeipa.spec.in +index e370290bc74d92ab239bf11e88b3fa7e4faef415..171b6ad27b57553fdd46c7d041715949bb00b163 100755 +--- a/freeipa.spec.in ++++ b/freeipa.spec.in +@@ -1517,6 +1517,7 @@ fi + %{_sbindir}/ipa-pkinit-manage + %{_sbindir}/ipa-crlgen-manage + %{_sbindir}/ipa-cert-fix ++%{_sbindir}/ipa-idrange-fix + %{_sbindir}/ipa-acme-manage + %{_sbindir}/ipa-migrate + %if 0%{?fedora} >= 38 +@@ -1596,6 +1597,7 @@ fi + %{_mandir}/man1/ipa-pkinit-manage.1* + %{_mandir}/man1/ipa-crlgen-manage.1* + %{_mandir}/man1/ipa-cert-fix.1* ++%{_mandir}/man1/ipa-idrange-fix.1* + %{_mandir}/man1/ipa-acme-manage.1* + %{_mandir}/man1/ipa-migrate.1* + +diff --git a/install/tools/Makefile.am b/install/tools/Makefile.am +index c454fad9795c79f88e1d72688f1d15c5234cc113..ca484ec37969c9c06ae7b408b55fa30cd4e8e4fe 100644 +--- a/install/tools/Makefile.am ++++ b/install/tools/Makefile.am +@@ -31,6 +31,7 @@ dist_noinst_DATA = \ + ipa-pkinit-manage.in \ + ipa-crlgen-manage.in \ + ipa-cert-fix.in \ ++ ipa-idrange-fix.in \ + ipa-custodia.in \ + ipa-custodia-check.in \ + ipa-httpd-kdcproxy.in \ +@@ -68,6 +69,7 @@ nodist_sbin_SCRIPTS = \ + ipa-pkinit-manage \ + ipa-crlgen-manage \ + ipa-cert-fix \ ++ ipa-idrange-fix \ + ipa-acme-manage \ + ipa-migrate \ + $(NULL) +diff --git a/install/tools/ipa-idrange-fix.in b/install/tools/ipa-idrange-fix.in +new file mode 100644 +index 0000000000000000000000000000000000000000..5994bd28b15e247c5a086238f36b16cc75ff24c3 +--- /dev/null ++++ b/install/tools/ipa-idrange-fix.in +@@ -0,0 +1,8 @@ ++#!/usr/bin/python3 ++# ++# Copyright (C) 2024 FreeIPA Contributors see COPYING for license ++# ++ ++from ipaserver.install.ipa_idrange_fix import IPAIDRangeFix ++ ++IPAIDRangeFix.run_cli() +diff --git a/install/tools/man/Makefile.am b/install/tools/man/Makefile.am +index 34f359863afca7b6c1e792a53afc25bb8eb41fd3..e9542a77bbbb88054eae1e64311d6e9ec5bee499 100644 +--- a/install/tools/man/Makefile.am ++++ b/install/tools/man/Makefile.am +@@ -29,6 +29,7 @@ dist_man1_MANS = \ + ipa-pkinit-manage.1 \ + ipa-crlgen-manage.1 \ + ipa-cert-fix.1 \ ++ ipa-idrange-fix.1 \ + ipa-acme-manage.1 \ + ipa-migrate.1 \ + $(NULL) +diff --git a/install/tools/man/ipa-idrange-fix.1 b/install/tools/man/ipa-idrange-fix.1 +new file mode 100644 +index 0000000000000000000000000000000000000000..178d2e88779e135a65f3285de62d2dc3b19c175a +--- /dev/null ++++ b/install/tools/man/ipa-idrange-fix.1 +@@ -0,0 +1,111 @@ ++.\" ++.\" Copyright (C) 2024 FreeIPA Contributors see COPYING for license ++.\" ++.TH "ipa-idrange-fix" "1" "May 26 2024" "IPA" "IPA Manual Pages" ++.SH "NAME" ++ipa\-idrange\-fix \- Analyse and fix IPA ID ranges ++.SH "SYNOPSIS" ++ipa\-idrange\-fix [options] ++.SH "DESCRIPTION" ++ ++\fIipa-idrange-fix\fR is a tool for analysis of existing IPA ranges, users and ++groups outside of those 
ranges, and functionality to propose and apply ++remediations to make sure as many users and groups as possible end up in the ++IPA-managed ranges. Before any changes are applied, a full backup of the system ++is \fBSTRONGLY RECOMMENDED\fR. ++ ++Do not use this program in unattended mode unless you are absolutely sure ++you are consenting to the tool's proposals. ++ ++You can apply the proposals manually via \fIipa idrange(1)\fR commands. ++ ++This tool requires it to be run as \fBroot\fR and does not require a kerberos ++ticket. The directory server needs to be running. ++ ++\fIipa-idrange-fix\fR will read current ranges from LDAP, then check their ++basic constraints, RID bases, etc. If it finds critical issues with ranges, ++manual adjustment will be required. ++ ++After analyzing existing ranges, the tool will search for users and groups that ++are outside of ipa-local ranges. Then it will attempt to propose new ipa-local ++ranges in order to cover users and groups found. ++ ++Finally, the tool will summarize the analysis, and, if there are proposed ++changes, will ask if the user wants to apply those. Please read the ++proposals carefully before proceeding with changes! ++ ++Important note: By default, \fIipa-idrange-fix\fR will not cover the users and ++groups that have IDs under 1000 as these IDs are reserved for system and ++service users and groups. We \fBdon't recommend\fR using IDs under 1000 for ++IPA users and groups as they can possibly overlap with local ones. Please ++consider moving those users out of the range 1..1000, unless they are ++absolutely needed. ++ ++.SH "OPTIONS" ++.TP ++\fB\-\-version\fR ++Show the program's version and exit. ++.TP ++\fB\-h\fR, \fB\-\-help\fR ++Show the help for this program. ++.TP ++\fB\-\-ridoffset \fIINT\fR ++An offset for newly proposed base RIDs for ranges. We introduce offset in order ++to have an ability to increase ranges in the future, increase to more than ++offset will result in RID bases overlapping, and will be denied. If set to 0, ++there will be no offset, proposed RID ranges will start directly one after ++another. ++ ++Default - \fI100000\fR, allowed values - from \fI0\fR to \fI2^31\fR. ++.TP ++\fB\-\-rangegap \fIINT\fR ++A number of IDs between out of ranges IDs to be considered too big to be inside ++a proposed range. If the gap is bigger than this attribute, a new range will be ++started. If set to 0, every entity will get its own range, if allowed by ++\fI--minrange\fR. ++ ++Default - \fI200000\fR, allowed values - from \fI0\fR to \fI2^31\fR. ++.TP ++\fB\-\-minrange \fIINT\fR ++A minimal amount of IDs the tool considers to be a valid range. All IDs that ++would form a range with less than this number will be considered outliers, not ++worth creating an IDrange for, and will be listed explicitly to be moved ++manually. If set to 1, a range will be proposed for every entity, even if the ++entity is single in the middle of an empty space. ++ ++Default - \fI10\fR, allowed values - from \fI1\fR to \fI2^31\fR. ++.TP ++\fB\-\-allowunder1000\fR ++A flag to allow proposing ranges that start with IDs lower than \fI1000\fR. ++Remember, this is not recommended - IDs under 1000 are reserved for system and ++service users and groups. IDranges with these low IDs may result with ++overlapping of IPA and system local users and groups, which can be a serious ++security issue and generally produce a lot of issues around these entities' ++resolution. 
++.TP ++\fB\-\-norounding\fR ++A flag to turn off idrange starting id and size rounding - e.g. if we find ++ID 1234, and the size 567, it will stay that way, the proposed range will ++start at ID 1234, and have a 567 size. If not specified, basic rounding to ++outer margins will be applied. Rounding will be 10^size of the proposed range. ++.TP ++\fB\-\-unattended\fR ++Run the tool in unattended mode, if any changes would be proposed, they will ++be applied automatically. ++.TP ++\fB\-v\fR, \fB\-\-verbose\fR ++Print debugging information. ++.TP ++\fB\-q\fR, \fB\-\-quiet\fR ++Output only errors (output from child processes may still be shown). ++.TP ++\fB\-\-log\-file\fR=\fIFILE\fR ++Log to the given file. ++.SH "EXIT STATUS" ++0 if the command was successful ++ ++1 if an error occurred ++ ++.SH "SEE ALSO" ++.BR ipa\ idrange-mod(1) ++.BR ipa\ idrange-add(1) +diff --git a/ipaserver/install/ipa_idrange_fix.py b/ipaserver/install/ipa_idrange_fix.py +new file mode 100644 +index 0000000000000000000000000000000000000000..c6c67ae9330e2d0184efc09d09a84216ef0772a6 +--- /dev/null ++++ b/ipaserver/install/ipa_idrange_fix.py +@@ -0,0 +1,1085 @@ ++"""Tool to analyze and fix IPA ID ranges""" ++# ++# Copyright (C) 2024 FreeIPA Contributors see COPYING for license ++# ++ ++import logging ++import ldap ++ ++from ipalib import api, errors ++from ipapython.admintool import AdminTool ++from ipapython.dn import DN ++from ipapython import ipautil ++from typing import List, Tuple ++ ++logger = logging.getLogger(__name__) ++ ++ ++class IDRange: ++ """Class for ID range entity""" ++ ++ def __init__(self): ++ self.last_id: int = None ++ self.last_base_rid: int = None ++ self.last_secondary_rid: int = None ++ self.name: str = None ++ self.size: int = None ++ self.first_id: int = None ++ self.base_rid: int = None ++ self.secondary_base_rid: int = None ++ self.type: str = None ++ self.suffix: str = None ++ self.dn: str = None ++ ++ def _count(self) -> None: ++ """Function to calculate last IDs for the range""" ++ self.last_id = self.first_id + self.size - 1 ++ if self.type == "ipa-local": ++ self.last_base_rid = ( ++ self.base_rid + self.size ++ if self.base_rid is not None ++ else None ++ ) ++ self.last_secondary_rid = ( ++ self.secondary_base_rid + self.size ++ if self.secondary_base_rid is not None ++ else None ++ ) ++ ++ def __repr__(self): ++ return ( ++ f"IDRange(name='{self.name}', " ++ f"type={self.type}, " ++ f"size={self.size}, " ++ f"first_id={self.first_id}, " ++ f"base_rid={self.base_rid}, " ++ f"secondary_base_rid={self.secondary_base_rid})" ++ ) ++ ++ def __eq__(self, other): ++ return self.first_id == other.first_id ++ ++ ++class IDentity: ++ """A generic class for ID entity - users or groups""" ++ ++ def __init__(self, **kwargs): ++ self.dn: str = kwargs.get('dn') ++ self.name: str = kwargs.get('name') ++ self.user: str = kwargs.get('user') ++ self.number: int = kwargs.get('number') ++ ++ def __str__(self): ++ if self.user: ++ return (f"user '{self.name}', uid={self.number}") ++ return (f"group '{self.name}', gid={self.number}") ++ ++ def debug(self): ++ if self.user: ++ return ( ++ f"user(username='{self.name}', " ++ f"uid={self.number}, " ++ f"{self.dn})" ++ ) ++ return ( ++ f"group(groupname='{self.name}', " ++ f"gid={self.number}, " ++ f"{self.dn})" ++ ) ++ ++ def __eq__(self, other): ++ return self.number == other.number and self.user == other.user ++ ++ ++class IPAIDRangeFix(AdminTool): ++ """Tool to analyze and fix IPA ID ranges""" ++ ++ command_name = "ipa-idrange-fix" ++ log_file_name = 
"/var/log/ipa-idrange-fix.log" ++ usage = "%prog" ++ description = "Analyze and fix IPA ID ranges" ++ ++ @classmethod ++ def add_options(cls, parser, debug_option=False): ++ super(IPAIDRangeFix, cls).add_options(parser) ++ parser.add_option( ++ "--ridoffset", ++ dest="ridoffset", ++ type=int, ++ default=100000, ++ metavar=100000, ++ help="Offset for a next base RID from previous RID range. \ ++Needed for future range size expansions. Has to be > 0", ++ ) ++ parser.add_option( ++ "--rangegap", ++ dest="rangegap", ++ type=int, ++ default=200000, ++ metavar=200000, ++ help="Threshold for a gap between out-of-range IDs to be \ ++considered a different range. Has to be > 0", ++ ) ++ parser.add_option( ++ "--minrange", ++ dest="minrange", ++ type=int, ++ default=10, ++ metavar=10, ++ help="Minimal considered range size for out-of-range IDs.\ ++All ranges with amount of IDs lower than this number will be discarded and \ ++IDs will be listed to be moved manually. Has to be > 1", ++ ) ++ parser.add_option( ++ "--allowunder1000", ++ dest="allowunder1000", ++ action="store_true", ++ default=False, ++ help="Allow idranges to start below 1000. Be careful to not \ ++overlap IPA users/groups with existing system-local ones!", ++ ) ++ parser.add_option( ++ "--norounding", ++ dest="norounding", ++ action="store_true", ++ default=False, ++ help="Disable IDrange rounding attempt in order to get ranges \ ++exactly covering just IDs provided", ++ ) ++ parser.add_option( ++ "--unattended", ++ dest="unattended", ++ action="store_true", ++ default=False, ++ help="Automatically fix all range issues found without asking \ ++for confirmation", ++ ) ++ ++ def __init__(self, *args, **kwargs): ++ super().__init__(*args, **kwargs) ++ self.realm: str = None ++ self.suffix: DN = None ++ self.proposals_rid: List[IDRange] = [] ++ self.proposals_new: List[IDRange] = [] ++ self.outliers: List[IDentity] = [] ++ self.under1000: List[IDentity] = [] ++ self.id_ranges: List[IDRange] = [] ++ ++ def validate_options(self, needs_root=True): ++ super().validate_options(needs_root) ++ ++ def run(self): ++ api.bootstrap(in_server=True) ++ api.finalize() ++ ++ self.realm = api.env.realm ++ self.suffix = ipautil.realm_to_suffix(self.realm) ++ try: ++ api.Backend.ldap2.connect() ++ ++ # Reading range data ++ self.id_ranges = read_ranges(self.suffix) ++ ++ # Evaluating existing ranges, if something is off, exit ++ if self.evaluate_ranges() != 0: ++ return 1 ++ ++ # reading out of range IDs ++ ids_out_of_range = read_outofrange_identities( ++ self.suffix, self.id_ranges ++ ) ++ ++ # Evaluating out of range IDs ++ self.evaluate_identities(ids_out_of_range) ++ ++ # Print the proposals ++ self.print_intentions() ++ ++ # If there are no proposals, we have nothing to do, exiting ++ if (len(self.proposals_rid) == 0 ++ and len(self.proposals_new) == 0): ++ logger.info("\nNo changes proposed, nothing to do.") ++ return 0 ++ ++ logger.info("\nID ranges table after proposed changes:") ++ draw_ascii_table(self.id_ranges) ++ ++ if self.options.unattended: ++ logger.info( ++ "Unattended mode, proceeding with applying changes!" 
++ ) ++ else: ++ response = ipautil.user_input('Enter "yes" to proceed') ++ if response.lower() != "yes": ++ logger.info("Not proceeding.") ++ return 0 ++ logger.info("Proceeding.") ++ ++ # Applying changes ++ for id_range in self.proposals_rid: ++ apply_ridbases(id_range) ++ ++ for id_range in self.proposals_new: ++ create_range(id_range) ++ ++ logger.info("All changes applied successfully!") ++ ++ finally: ++ if api.Backend.ldap2.isconnected(): ++ api.Backend.ldap2.disconnect() ++ ++ return 0 ++ ++ def evaluate_ranges(self) -> int: ++ """Function to evaluate existing ID ranges""" ++ if len(self.id_ranges) == 0: ++ logger.error("No ID ranges found!") ++ return 1 ++ ++ draw_ascii_table(self.id_ranges) ++ ++ if not ranges_overlap_check(self.id_ranges): ++ logger.error( ++ "Ranges overlap detected, cannot proceed! Please adjust \ ++existing ranges manually." ++ ) ++ return 1 ++ ++ # Checking RID bases for existing ranges ++ id_ranges_nobase = get_ranges_no_base(self.id_ranges) ++ ++ if len(id_ranges_nobase) > 0: ++ logger.info( ++ "Found %s ranges without base RIDs", len(id_ranges_nobase) ++ ) ++ for id_range in id_ranges_nobase: ++ logger.debug( ++ "Range '%s' has RID base %s and secondary RID base %s", ++ id_range.name, ++ id_range.base_rid, ++ id_range.secondary_base_rid, ++ ) ++ propose_rid_ranges( ++ self.id_ranges, ++ self.options.ridoffset, ++ self.proposals_rid ++ ) ++ else: ++ logger.info( ++ "All ID ranges have base RIDs set, RID adjustments are \ ++not needed." ++ ) ++ return 0 ++ ++ def evaluate_identities(self, ids_out_of_range: List[IDentity]) -> None: ++ """Function to evaluate out of range IDs""" ++ if len(ids_out_of_range) == 0: ++ logger.info("No out of range IDs found!") ++ else: ++ logger.info( ++ "Found overall %s IDs out of existing ID ranges.\n", ++ len(ids_out_of_range), ++ ) ++ # ruling out IDs under 1000 if flag is not set ++ if not self.options.allowunder1000: ++ self.under1000, ids_out_of_range = separate_under1000( ++ ids_out_of_range ++ ) ++ if len(self.under1000) > 0: ++ logger.info( ++ "Found IDs under 1000, which is not recommeneded \ ++(if you definitely need ranges proposed for those, use --allowunder1000):" ++ ) ++ for identity in self.under1000: ++ logger.info("%s", identity) ++ ++ # Get initial divide of IDs into groups ++ groups = group_identities_by_threshold( ++ ids_out_of_range, self.options.rangegap ++ ) ++ ++ # Get outliers from too small groups and clean groups for ++ # further processing ++ self.outliers, cleangroups = separate_ranges_and_outliers( ++ groups, self.options.minrange ++ ) ++ ++ # Print the outliers, they have to be moved manually ++ if len(self.outliers) > 0: ++ logger.info( ++ "\nIdentities that don't fit the criteria to get a new " ++ "range found! 
Current attributes:\n" ++ "Minimal range size: %s\n" ++ "Maximum gap between IDs: %s\n" ++ "Try adjusting --minrange, --rangegap or move the " ++ "following identities into already existing ranges:", ++ self.options.minrange, ++ self.options.rangegap ++ ) ++ for identity in self.outliers: ++ logger.info("%s", identity) ++ ++ if len(cleangroups) > 0: ++ # Get IDrange name base ++ basename = get_rangename_base(self.id_ranges) ++ ++ # Create proposals for new ranges from groups ++ for group in cleangroups: ++ newrange = propose_range( ++ group, ++ self.id_ranges, ++ self.options.ridoffset, ++ basename, ++ self.options.norounding, ++ self.options.allowunder1000 ++ ) ++ if newrange is not None: ++ self.proposals_new.append(newrange) ++ self.id_ranges.append(newrange) ++ self.id_ranges.sort(key=lambda x: x.first_id) ++ else: ++ logger.info( ++ "\nNo IDs fit the criteria for a new ID range to propose!" ++ ) ++ ++ def print_intentions(self) -> None: ++ """Function to print out the summary of the proposed changes""" ++ logger.info("\nSummary:") ++ ++ if len(self.outliers) > 0: ++ logger.info("Outlier IDs that are too far away to get a range:") ++ for identity in self.outliers: ++ logger.info("%s", identity) ++ ++ if len(self.under1000) > 0: ++ if self.options.allowunder1000: ++ logger.info("IDs under 1000 were treated like normal IDs.") ++ else: ++ logger.info("IDs under 1000:") ++ for identity in self.under1000: ++ logger.info("%s", identity) ++ else: ++ logger.info("No IDs under 1000 found.") ++ ++ if len(self.proposals_rid) > 0: ++ logger.info("Proposed changes to existing ranges:") ++ for id_range in self.proposals_rid: ++ logger.info( ++ "Range '%s' - base RID: %s, secondary base RID: %s", ++ id_range.name, ++ id_range.base_rid, ++ id_range.secondary_base_rid, ++ ) ++ else: ++ logger.info("No changes proposed for existing ranges.") ++ ++ if len(self.proposals_new) > 0: ++ logger.info("Proposed new ranges:") ++ for id_range in self.proposals_new: ++ logger.info("%s", id_range) ++ else: ++ logger.info("No new ranges proposed.") ++ ++# Working with output ++# region ++ ++ ++def draw_ascii_table(id_ranges: List[IDRange], stdout: bool = False) -> None: ++ """Function to draw a table with ID ranges in ASCII""" ++ table: str = "\n" ++ # Calculate the maximum width required for each column using column names ++ max_widths = { ++ column: max( ++ len(str(column)), ++ max( ++ ( ++ len(str(getattr(id_range, column))) ++ if getattr(id_range, column) is not None ++ else 0 ++ ) ++ for id_range in id_ranges ++ ), ++ ) ++ for column in [ ++ "name", ++ "type", ++ "size", ++ "first_id", ++ "last_id", ++ "base_rid", ++ "last_base_rid", ++ "secondary_base_rid", ++ "last_secondary_rid", ++ ] ++ } ++ ++ # Draw the table header ++ header = "| " ++ for column, width in max_widths.items(): ++ header += f"{column.ljust(width)} | " ++ horizontal_line = "-" * (len(header) - 1) ++ table += horizontal_line + "\n" ++ table += header + "\n" ++ table += horizontal_line + "\n" ++ ++ # Draw the table rows ++ for id_range in id_ranges: ++ row = "| " ++ for column, width in max_widths.items(): ++ value = getattr(id_range, column) ++ if value is not None: ++ row += f"{str(value).rjust(width)} | " ++ else: ++ # Adding the separator ++ row += " " * (width + 1) + "| " ++ table += row + "\n" ++ table += horizontal_line + "\n" ++ if stdout: ++ print(table) ++ else: ++ logger.info(table) ++# endregion ++# Reading from LDAP ++# region ++ ++ ++def read_ranges(suffix) -> List[IDRange]: ++ """Function to read ID ranges from LDAP""" ++ 
id_ranges: IDRange = [] ++ try: ++ ranges = api.Backend.ldap2.get_entries( ++ DN(api.env.container_ranges, suffix), ++ ldap.SCOPE_ONELEVEL, ++ "(objectclass=ipaIDRange)", ++ ) ++ except errors.NotFound: ++ logger.error("LDAPError: No ranges found!") ++ except errors.ExecutionError as e: ++ logger.error("Exception while reading users: %s", e) ++ else: ++ for entry in ranges: ++ sv = entry.single_value ++ id_range = IDRange() ++ id_range.name = sv.get("cn") ++ id_range.size = int(sv.get("ipaidrangesize")) ++ id_range.first_id = int(sv.get("ipabaseid")) ++ id_range.base_rid = ( ++ int(sv.get("ipabaserid")) if sv.get("ipabaserid") else None ++ ) ++ id_range.secondary_base_rid = ( ++ int(sv.get("ipasecondarybaserid")) ++ if sv.get("ipasecondarybaserid") ++ else None ++ ) ++ id_range.suffix = suffix ++ id_range.type = sv.get("iparangetype") ++ id_range.dn = entry.dn ++ ++ id_range._count() ++ logger.debug("ID range found: %s", id_range) ++ ++ id_ranges.append(id_range) ++ ++ id_ranges.sort(key=lambda x: x.first_id) ++ return id_ranges ++ ++ ++def read_outofrange_identities(suffix, id_ranges) -> List[IDentity]: ++ """Function to read out of range users and groups from LDAP""" ++ users_outofrange = read_ldap_ids( ++ DN(api.env.container_user, suffix), ++ True, ++ id_ranges ++ ) ++ logger.info("Users out of range found: %s", len(users_outofrange)) ++ del_outofrange = read_ldap_ids( ++ DN(api.env.container_deleteuser, suffix), ++ True, ++ id_ranges ++ ) ++ logger.info("Preserved users out of range found: %s", len(del_outofrange)) ++ groups_outofrange = read_ldap_ids( ++ DN(api.env.container_group, suffix), ++ False, ++ id_ranges ++ ) ++ logger.info("Groups out of range found: %s", len(groups_outofrange)) ++ outofrange = users_outofrange + del_outofrange + groups_outofrange ++ outofrange.sort(key=lambda x: x.number) ++ return outofrange ++ ++ ++def read_ldap_ids(container_dn, user: bool, id_ranges) -> List[IDentity]: ++ """Function to read IDs from containter in LDAP""" ++ id_entities = [] ++ if user: ++ id_name = "user" ++ ldap_filter = get_outofrange_filter( ++ id_ranges, ++ "posixaccount", ++ "uidNumber" ++ ) ++ else: ++ id_name = "group" ++ ldap_filter = get_outofrange_filter( ++ id_ranges, ++ "posixgroup", ++ "gidNumber" ++ ) ++ ++ logger.debug("Searching %ss in %s with filter: %s", id_name, container_dn, ++ ldap_filter) ++ try: ++ identities = api.Backend.ldap2.get_entries( ++ container_dn, ++ ldap.SCOPE_ONELEVEL, ++ ldap_filter, ++ ) ++ for entry in identities: ++ id_entities.append(read_identity(entry, user)) ++ except errors.NotFound: ++ logger.debug("No out of range %ss found in %s!", id_name, container_dn) ++ except errors.ExecutionError as e: ++ logger.error("Exception while reading %s: %s", container_dn, e) ++ return id_entities ++ ++ ++def read_identity(ldapentry, user: bool = True) -> IDentity: ++ """Function to convert LDAP entry to IDentity object""" ++ sv = ldapentry.single_value ++ id_entity = IDentity() ++ id_entity.dn = ldapentry.dn ++ id_entity.name = sv.get("cn") ++ id_entity.number = ( ++ int(sv.get("uidNumber")) if user else int(sv.get("gidNumber")) ++ ) ++ id_entity.user = user ++ logger.debug("Out of range found: %s", id_entity.debug()) ++ return id_entity ++ ++ ++def get_outofrange_filter( ++ id_ranges_all: List[IDRange], object_class: str, posix_id: str ++) -> str: ++ """Function to create LDAP filter for out of range users and groups""" ++ # we need to look only for ipa-local ranges ++ id_ranges = get_ipa_local_ranges(id_ranges_all) ++ ++ ldap_filter = 
f"(&(objectClass={object_class})(|" ++ ++ # adding gaps in ranges to the filter ++ for i in range(len(id_ranges) + 1): ++ if i == 0: ++ start_condition = f"({posix_id}>=1)" ++ else: ++ start_condition = f"({posix_id}>={id_ranges[i - 1].last_id + 1})" ++ ++ if i < len(id_ranges): ++ end_condition = f"({posix_id}<={id_ranges[i].first_id - 1})" ++ else: ++ end_condition = f"({posix_id}<=2147483647)" ++ ++ ldap_filter += f"(&{start_condition}{end_condition})" ++ ++ ldap_filter += "))" ++ ++ return ldap_filter ++# endregion ++# Writing to LDAP ++# region ++ ++ ++def apply_ridbases(id_range: IDRange) -> None: ++ """Funtion to apply RID bases to the range in LDAP""" ++ try: ++ api.Backend.ldap2.modify_s( ++ id_range.dn, ++ [ ++ (ldap.MOD_ADD, "ipaBaseRID", str(id_range.base_rid)), ++ ( ++ ldap.MOD_ADD, ++ "ipaSecondaryBaseRID", ++ str(id_range.secondary_base_rid), ++ ), ++ ], ++ ) ++ logger.info("RID bases updated for range '%s'", id_range.name) ++ ++ except ldap.CONSTRAINT_VIOLATION as e: ++ logger.error( ++ "Failed to add RID bases to the range '%s': %s", ++ id_range.name, ++ e ++ ) ++ raise RuntimeError("Constraint violation.\n") from e ++ ++ except Exception as e: ++ logger.error( ++ "Exception while updating RID bases for range '%s': %s", ++ id_range.name, ++ e, ++ ) ++ raise RuntimeError("Failed to update RID bases.\n") from e ++ ++ ++def create_range(id_range: IDRange) -> None: ++ """Function to create a new range in LDAP""" ++ try: ++ logger.info("Creating range '%s'...", id_range.name) ++ ++ entry = api.Backend.ldap2.make_entry( ++ DN(id_range.dn), ++ objectclass=["ipaIDRange", "ipaDomainIDRange"], ++ ipaidrangesize=[str(id_range.size)], ++ ipabaseid=[str(id_range.first_id)], ++ ipabaserid=[str(id_range.base_rid)], ++ ipasecondarybaserid=[str(id_range.secondary_base_rid)], ++ iparangetype=[id_range.type], ++ ) ++ ++ api.Backend.ldap2.add_entry(entry) ++ logger.info("Range '%s' created successfully", id_range.name) ++ except Exception as e: ++ logger.error( ++ "Exception while creating range '%s': %s", ++ id_range.name, ++ e ++ ) ++ raise RuntimeError("Failed to create range.\n") from e ++# endregion ++# Working with ranges ++# region ++ ++ ++def get_ipa_local_ranges(id_ranges: List[IDRange]) -> List[IDRange]: ++ """Function to get only ipa-local ranges from the list of ranges""" ++ ipa_local_ranges = [] ++ ++ for id_range in id_ranges: ++ if id_range.type == "ipa-local": ++ ipa_local_ranges.append(id_range) ++ ++ return ipa_local_ranges ++ ++ ++def range_overlap_check( ++ range1_start: int, range1_end: int, range2_start: int, range2_end: int ++) -> bool: ++ """Function to check if two ranges overlap""" ++ # False when overlapping ++ return not (range1_start <= range2_end and range2_start <= range1_end) ++ ++ ++def range_overlap_check_idrange(range1: IDRange, range2: IDRange) -> bool: ++ """Function to check if two ranges overlap""" ++ # False when overlapping ++ return range_overlap_check( ++ range1.first_id, range1.last_id, range2.first_id, range2.last_id) ++ ++ ++def newrange_overlap_check( ++ id_ranges: List[IDRange], newrange: IDRange ++) -> bool: ++ """Function to check if proposed range overlaps with existing ones""" ++ for id_range in id_ranges: ++ if not range_overlap_check_idrange(id_range, newrange): ++ return False ++ return True ++ ++ ++def ranges_overlap_check(id_ranges: List[IDRange]) -> bool: ++ """Function to check if any of the existing ranges overlap""" ++ if len(id_ranges) < 2: ++ return True ++ for i in range(len(id_ranges) - 1): ++ for j in range(i + 1, 
len(id_ranges)): ++ if not range_overlap_check_idrange(id_ranges[i], id_ranges[j]): ++ logger.error( ++ "Ranges '%s' and '%s' overlap!", ++ id_ranges[i].name, ++ id_ranges[j].name, ++ ) ++ return False ++ return True ++# endregion ++# Working with RID bases ++# region ++ ++ ++def propose_rid_ranges( ++ id_ranges: List[IDRange], delta: int, proposals: List[IDRange] ++) -> None: ++ """ ++ Function to propose RID bases for ranges that don't have them set. ++ ++ - delta represents how far we start new base off existing range, ++ used in order to allow for future expansion of existing ranges up ++ to [delta] IDs. ++ """ ++ ipa_local_ranges = get_ipa_local_ranges(id_ranges) ++ ++ for id_range in ipa_local_ranges: ++ proposed_base_rid = 0 ++ proposed_secondary_base_rid = 0 ++ ++ # Calculate proposed base RID and secondary base RID ++ if id_range.base_rid is None: ++ result, proposed_base_rid = propose_rid_base( ++ id_range, ipa_local_ranges, delta, True ++ ) ++ if result: ++ id_range.base_rid = proposed_base_rid ++ id_range.last_base_rid = proposed_base_rid + id_range.size ++ else: ++ # if this fails too, we print the warning and abandon the idea ++ logger.warning( ++ "Warning: Proposed base RIDs %s for '%s' both failed, \ ++please adjust manually", ++ proposed_base_rid, ++ id_range.name, ++ ) ++ continue ++ ++ if id_range.secondary_base_rid is None: ++ result, proposed_secondary_base_rid = propose_rid_base( ++ id_range, ipa_local_ranges, delta, False, proposed_base_rid ++ ) ++ if result: ++ id_range.secondary_base_rid = proposed_secondary_base_rid ++ id_range.last_secondary_rid = ( ++ proposed_secondary_base_rid + id_range.size ++ ) ++ else: ++ # if this fails too, we print the warning and abandon the idea ++ logger.warning( ++ "Warning: Proposed secondary base RIDs %s for '%s' \ ++both failed, please adjust manually", ++ proposed_secondary_base_rid, ++ id_range.name, ++ ) ++ continue ++ ++ # Add range to the proposals if we changed something successfully ++ if proposed_base_rid > 0 or proposed_secondary_base_rid > 0: ++ logger.debug( ++ "Proposed RIDs for range '%s': pri %s, sec %s", ++ id_range.name, ++ proposed_base_rid, ++ proposed_secondary_base_rid, ++ ) ++ proposals.append(id_range) ++ ++ ++def propose_rid_base( ++ idrange: IDRange, ++ ipa_local_ranges: List[IDRange], ++ delta: int, ++ primary: bool = True, ++ previous_base_rid: int = -1 ++) -> Tuple[bool, str]: ++ """ ++ Function to propose a base RID for a range, primary or secondary. ++ We are getting the biggest base RID + size + delta and try ++ if it's a viable option, check same kind first, then the other. 
++ """ ++ proposed_base_rid = max_rid(ipa_local_ranges, primary) + delta ++ if proposed_base_rid == previous_base_rid: ++ proposed_base_rid += idrange.size + delta ++ if check_rid_base(ipa_local_ranges, proposed_base_rid, idrange.size): ++ return True, proposed_base_rid ++ ++ # if we fail, we try the same with biggest of a different kind ++ proposed_base_rid_orig = proposed_base_rid ++ proposed_base_rid = max_rid(ipa_local_ranges, not primary) + delta ++ if proposed_base_rid == previous_base_rid: ++ proposed_base_rid += idrange.size + delta ++ if check_rid_base(ipa_local_ranges, proposed_base_rid, idrange.size): ++ return True, proposed_base_rid ++ ++ # if it fails, we return both RID proposals for the range ++ return False, f"{proposed_base_rid_orig} and {proposed_base_rid}" ++ ++ ++def max_rid(id_ranges: List[IDRange], primary: bool = True) -> int: ++ """Function to get maximum RID of primary or secondary RIDs""" ++ maximum_rid = 0 ++ for id_range in id_ranges: ++ ++ # looking only for primary RIDs ++ if primary: ++ if id_range.last_base_rid is not None: ++ maximum_rid = max(maximum_rid, id_range.last_base_rid) ++ # looking only for secondary RIDs ++ else: ++ if id_range.last_secondary_rid is not None: ++ maximum_rid = max(maximum_rid, id_range.last_secondary_rid) ++ ++ return maximum_rid ++ ++ ++def check_rid_base(id_ranges: List[IDRange], base: int, size: int) -> bool: ++ """Function to check if proposed RID base is viable""" ++ end = base + size + 1 ++ ++ # Checking sanity of RID range ++ if base + size > 2147483647: ++ return False ++ if base < 1000: ++ return False ++ ++ # Checking RID range overlaps ++ for id_range in id_ranges: ++ # we are interested only in ipa-local ranges ++ if id_range.type != "ipa-local": ++ continue ++ ++ # if there is no base rid set, there is no secondary base rid set, ++ # so nothing to overlap with ++ if id_range.base_rid is None: ++ continue ++ ++ # checking for an overlap ++ if not range_overlap_check( ++ base, end, id_range.base_rid, id_range.last_base_rid ++ ): ++ logger.debug( ++ "RID check failure: proposed Primary %s + %s, \ ++intersects with %s-%s from range '%s'", ++ base, ++ size, ++ id_range.base_rid, ++ id_range.last_base_rid, ++ id_range.name, ++ ) ++ return False ++ ++ # if there is no secondary base rid set, nothing to overlap with ++ if id_range.secondary_base_rid is None: ++ continue ++ ++ # if either start of end of the range fails inside existing range, ++ # or existing range is inside proposed one, we have an overlap ++ if not range_overlap_check( ++ base, end, id_range.secondary_base_rid, id_range.last_secondary_rid ++ ): ++ logger.debug( ++ "RID check failure: proposed Secondary %s + %s, \ ++intersects with %s-%s from range '%s'", ++ base, ++ size, ++ id_range.secondary_base_rid, ++ id_range.last_secondary_rid, ++ id_range.name, ++ ) ++ return False ++ ++ return True ++ ++ ++def get_ranges_no_base(id_ranges: List[IDRange]) -> List[IDRange]: ++ """Function to get ranges without either of base RIDs set""" ++ ipa_local_ranges = get_ipa_local_ranges(id_ranges) ++ ranges_no_base = [] ++ for id_range in ipa_local_ranges: ++ if id_range.base_rid is None or id_range.secondary_base_rid is None: ++ ranges_no_base.append(id_range) ++ ++ return ranges_no_base ++# endregion ++# Working with IDentities out of range ++# region ++ ++ ++def group_identities_by_threshold( ++ identities: List[IDentity], threshold: int ++) -> List[List[IDentity]]: ++ """Function to group out of range IDs by threshold""" ++ groups: List[List[IDentity]] = [] ++ 
currentgroup: List[IDentity] = [] ++ if len(identities) == 0: ++ return groups ++ ++ for i in range(len(identities) - 1): ++ # add id to current group ++ currentgroup.append(identities[i]) ++ ++ # If the difference with the next one is greater than the threshold, ++ # start a new group ++ if identities[i + 1].number - identities[i].number > threshold: ++ groups.append(currentgroup) ++ currentgroup = [] ++ ++ # Add the last ID number to the last group ++ currentgroup.append(identities[-1]) ++ groups.append(currentgroup) ++ ++ return groups ++ ++ ++def separate_under1000( ++ identities: List[IDentity], ++) -> Tuple[List[IDentity], List[IDentity]]: ++ """Function to separate IDs under 1000, expects sorted list""" ++ for i, identity in enumerate(identities): ++ if identity.number >= 1000: ++ return identities[:i], identities[i:] ++ return identities, [] ++ ++ ++def separate_ranges_and_outliers( ++ groups: List[List[IDentity]], minrangesize=int ++) -> Tuple[List[List[IDentity]], List[List[IDentity]]]: ++ """Function to separate IDs into outliers and IDs that can get ranges""" ++ outliers = [] ++ cleangroups = [] ++ for group in groups: ++ # if group is smaller than minrangesize, add it's memebers to ourliers ++ if group[-1].number - group[0].number + 1 < minrangesize: ++ for identity in group: ++ outliers.append(identity) ++ # if the group is OK, add it to cleaned groups ++ else: ++ cleangroups.append(group) ++ ++ return outliers, cleangroups ++ ++ ++def round_idrange(start: int, end: int, under1000: bool) -> Tuple[int, int]: ++ """Function to round up range margins to look pretty""" ++ # calculating power of the size ++ sizepower = len(str(end - start + 1)) ++ # multiplier for the nearest rounded number ++ multiplier = 10 ** (sizepower - 1) ++ # getting rounded range margins ++ rounded_start = (start // multiplier) * multiplier ++ if not under1000: ++ rounded_start = max(rounded_start, 1000) ++ else: ++ rounded_start = max(rounded_start, 1) ++ rounded_end = ((end + multiplier) // multiplier) * multiplier - 1 ++ ++ return rounded_start, rounded_end ++ ++ ++def get_rangename_base(id_ranges: List[IDRange]) -> str: ++ """Function to get a base name for new range proposals""" ++ base_name = "" ++ # we want to use default range name as a base for new ranges ++ for id_range in id_ranges: ++ if id_range.base_rid == 1000: ++ base_name = id_range.name ++ ++ # if we didn't find it, propose generic name ++ if base_name == "": ++ base_name = "Auto_added_range" ++ ++ return base_name ++ ++ ++def get_rangename(id_ranges: List[IDRange], basename: str) -> str: ++ """ ++ Function to get a new range name, we add the counter as 3-digit number ++ extension and make sure it's unique ++ """ ++ counter = 1 ++ full_name = f"{basename}_{counter:03}" ++ while any(id_range.name == full_name for id_range in id_ranges): ++ counter += 1 ++ full_name = f"{basename}_{counter:03}" ++ return full_name ++ ++ ++def propose_range( ++ group: List[IDentity], ++ id_ranges: List[IDRange], ++ delta: int, ++ basename: str, ++ norounding: bool, ++ allowunder1000: bool ++) -> IDRange: ++ """Function to propose a new range for group of IDs out of ranges""" ++ startid = group[0].number ++ endid = group[-1].number ++ ++ logger.debug( ++ "Proposing a range for existing IDs out of ranges with start id %s \ ++and end id %s...", ++ startid, ++ endid, ++ ) ++ ++ # creating new range ++ newrange = IDRange() ++ newrange.type = "ipa-local" ++ newrange.name = get_rangename(id_ranges, basename) ++ newrange.suffix = id_ranges[0].suffix ++ newrange.dn = 
f"cn={newrange.name},cn=ranges,cn=etc,{newrange.suffix}" ++ ++ if norounding: ++ newrange.first_id = startid ++ newrange.last_id = endid ++ newrange.size = newrange.last_id - newrange.first_id + 1 ++ else: ++ # first trying to round up ranges to look pretty ++ newrange.first_id, newrange.last_id = round_idrange( ++ startid, ++ endid, ++ allowunder1000 ++ ) ++ newrange.size = newrange.last_id - newrange.first_id + 1 ++ ++ # if this creates an overlap, try without rounding ++ if not newrange_overlap_check(id_ranges, newrange): ++ newrange.first_id = startid ++ newrange.last_id = endid ++ newrange.size = newrange.last_id - newrange.first_id + 1 ++ # if we still failed, abandon idea ++ if not newrange_overlap_check(id_ranges, newrange): ++ logger.error( ++ "ERROR! Failed to create idrange for existing IDs out of \ ++ranges with start id %s and end id %s, it overlaps with existing range!", ++ startid, ++ endid, ++ ) ++ return None ++ ++ # creating RID bases ++ ipa_local_ranges = get_ipa_local_ranges(id_ranges) ++ ++ result, proposed_base_rid = propose_rid_base( ++ newrange, ipa_local_ranges, delta, True ++ ) ++ if result: ++ newrange.base_rid = proposed_base_rid ++ newrange.last_base_rid = proposed_base_rid + newrange.size ++ else: ++ # if this fails we print the warning ++ logger.warning( ++ "Warning! Proposed base RIDs %s for new range start id %s and \ ++end id %s both failed, please adjust manually", ++ proposed_base_rid, ++ newrange.first_id, ++ newrange.last_id, ++ ) ++ ++ result, proposed_secondary_base_rid = propose_rid_base( ++ newrange, ipa_local_ranges, delta, False, proposed_base_rid ++ ) ++ if result: ++ newrange.secondary_base_rid = proposed_secondary_base_rid ++ newrange.last_secondary_rid = ( ++ proposed_secondary_base_rid + newrange.size ++ ) ++ else: ++ # if this fails we print the warning ++ logger.warning( ++ "Warning! Proposed secondary base RIDs %s for new range start id \ ++%s and end id %s both failed, please adjust manually", ++ proposed_secondary_base_rid, ++ newrange.first_id, ++ newrange.last_id, ++ ) ++ ++ logger.debug("Proposed range: %s", newrange) ++ return newrange ++# endregion +diff --git a/ipatests/test_integration/test_ipa_idrange_fix.py b/ipatests/test_integration/test_ipa_idrange_fix.py +new file mode 100644 +index 0000000000000000000000000000000000000000..de3da9bfd221ce74f1d1bbb0dbe12e4db08b8daa +--- /dev/null ++++ b/ipatests/test_integration/test_ipa_idrange_fix.py +@@ -0,0 +1,189 @@ ++# ++# Copyright (C) 2024 FreeIPA Contributors see COPYING for license ++# ++ ++""" ++Module provides tests for ipa-idrange-fix CLI. ++""" ++ ++import logging ++import re ++ ++from ipatests.pytest_ipa.integration import tasks ++from ipatests.test_integration.base import IntegrationTest ++ ++ ++logger = logging.getLogger(__name__) ++ ++ ++class TestIpaIdrangeFix(IntegrationTest): ++ @classmethod ++ def install(cls, mh): ++ super(TestIpaIdrangeFix, cls).install(mh) ++ tasks.kinit_admin(cls.master) ++ ++ def test_no_issues(self): ++ """Test ipa-idrange-fix command with no issues.""" ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ ++ expected_under1000 = "No IDs under 1000 found" ++ expected_nochanges = "No changes proposed for existing ranges" ++ expected_newrange = "No new ranges proposed" ++ expected_noissues = "No changes proposed, nothing to do." 
++ assert expected_under1000 in result.stderr_text ++ assert expected_nochanges in result.stderr_text ++ assert expected_newrange in result.stderr_text ++ assert expected_noissues in result.stderr_text ++ ++ def test_idrange_no_rid_bases(self): ++ """Test ipa-idrange-fix command with IDrange with no RID bases.""" ++ self.master.run_command([ ++ "ipa", ++ "idrange-add", ++ "idrange_no_rid_bases", ++ "--base-id", '10000', ++ "--range-size", '20000', ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = "RID bases updated for range 'idrange_no_rid_bases'" ++ ++ # Remove IDrange with no rid bases ++ self.master.run_command(["ipa", "idrange-del", "idrange_no_rid_bases"]) ++ ++ assert expected_text in result.stderr_text ++ ++ def test_idrange_no_rid_bases_reversed(self): ++ """ ++ Test ipa-idrange-fix command with IDrange with no RID bases, but we ++ previously had a range with RID bases reversed - secondary lower than ++ primary. It is a valid configuration, so we should fix no-RID range. ++ """ ++ self.master.run_command([ ++ "ipa", ++ "idrange-add", ++ "idrange_no_rid_bases", ++ "--base-id", '10000', ++ "--range-size", '20000', ++ ]) ++ self.master.run_command([ ++ "ipa", ++ "idrange-add", ++ "idrange_reversed", ++ "--base-id", '50000', ++ "--range-size", '20000', ++ "--rid-base", '100300000' ++ "--secondary-rid-base", '301000' ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = "RID bases updated for range 'idrange_no_rid_bases'" ++ ++ # Remove test IDranges ++ self.master.run_command(["ipa", "idrange-del", "idrange_no_rid_bases"]) ++ self.master.run_command(["ipa", "idrange-del", "idrange_reversed"]) ++ ++ assert expected_text in result.stderr_text ++ ++ def test_users_outofrange(self): ++ """Test ipa-idrange-fix command with users out of range.""" ++ for i in range(1, 20): ++ self.master.run_command([ ++ "ipa", ++ "user-add", ++ "testuser{}".format(i), ++ "--first", "Test", ++ "--last", "User {}".format(i), ++ "--uid", str(100000 + i * 10), ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = r"Range '[\w\.]+_id_range_\d{3}' created successfully" ++ match = re.search(expected_text, result.stderr_text) ++ ++ # Remove users out of range and created IDrange ++ for i in range(1, 20): ++ self.master.run_command([ ++ "ipa", ++ "user-del", ++ "testuser{}".format(i) ++ ]) ++ if match is not None: ++ self.master.run_command([ ++ "ipa", ++ "idrange-del", ++ match.group(0).split(" ")[1].replace("'", "") ++ ]) ++ ++ assert match is not None ++ ++ def test_user_outlier(self): ++ """Test ipa-idrange-fix command with outlier user.""" ++ self.master.run_command([ ++ "ipa", ++ "user-add", ++ "testuser_outlier", ++ "--first", "Outlier", ++ "--last", "User", ++ "--uid", '500000', ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = "Identities that don't fit the criteria to get a new \ ++range found!" 
++ expected_user = "user 'Outlier User', uid=500000" ++ ++ # Remove outlier user ++ self.master.run_command(["ipa", "user-del", "testuser_outlier"]) ++ ++ assert expected_text in result.stderr_text ++ assert expected_user in result.stderr_text ++ ++ def test_user_under1000(self): ++ """Test ipa-idrange-fix command with user under 1000.""" ++ self.master.run_command([ ++ "ipa", ++ "user-add", ++ "testuser_under1000", ++ "--first", "Under", ++ "--last", "1000", ++ "--uid", '999', ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = "IDs under 1000:" ++ expected_user = "user 'Under 1000', uid=999" ++ ++ # Remove user under 1000 ++ self.master.run_command(["ipa", "user-del", "testuser_under1000"]) ++ ++ assert expected_text in result.stderr_text ++ assert expected_user in result.stderr_text ++ ++ def test_user_preserved(self): ++ """Test ipa-idrange-fix command with preserved user.""" ++ self.master.run_command([ ++ "ipa", ++ "user-add", ++ "testuser_preserved", ++ "--first", "Preserved", ++ "--last", "User", ++ "--uid", '9999', ++ ]) ++ self.master.run_command([ ++ "ipa", ++ "user-del", ++ "testuser_preserved", ++ "--preserve" ++ ]) ++ ++ result = self.master.run_command(["ipa-idrange-fix", "--unattended"]) ++ expected_text = "Identities that don't fit the criteria to get a new \ ++range found!" ++ expected_user = "user 'Preserved User', uid=9999" ++ ++ # Remove preserved user ++ self.master.run_command(["ipa", "user-del", "testuser_preserved"]) ++ ++ assert expected_text in result.stderr_text ++ assert expected_user in result.stderr_text +-- +2.46.0 + diff --git a/0006-ipatests-Add-missing-comma-in-test_idrange_no_rid_ba.patch b/0006-ipatests-Add-missing-comma-in-test_idrange_no_rid_ba.patch new file mode 100644 index 0000000..882df86 --- /dev/null +++ b/0006-ipatests-Add-missing-comma-in-test_idrange_no_rid_ba.patch @@ -0,0 +1,36 @@ +From 4fef80aeaaf017b286bd12ebfc30529f6a65a80e Mon Sep 17 00:00:00 2001 +From: Florence Blanc-Renaud +Date: Mon, 2 Sep 2024 18:28:27 +0200 +Subject: [PATCH] ipatests: Add missing comma in + test_idrange_no_rid_bases_reversed + +The test is calling ipa idrange-add but is missing a comma in +the arguments list. +The resulting call is using "--rid-base 100300000--secondary-rid-base". 
+Add the missing comma to build the command with +"--rid-base 100300000 --secondary-rid-base" + +Fixes: https://pagure.io/freeipa/issue/9656 + +Signed-off-by: Florence Blanc-Renaud +Reviewed-By: Alexander Bokovoy +--- + ipatests/test_integration/test_ipa_idrange_fix.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/ipatests/test_integration/test_ipa_idrange_fix.py b/ipatests/test_integration/test_ipa_idrange_fix.py +index de3da9bfd221ce74f1d1bbb0dbe12e4db08b8daa..ff8fbdac9d028d26fc55f5e357f89af879a61723 100644 +--- a/ipatests/test_integration/test_ipa_idrange_fix.py ++++ b/ipatests/test_integration/test_ipa_idrange_fix.py +@@ -72,7 +72,7 @@ class TestIpaIdrangeFix(IntegrationTest): + "idrange_reversed", + "--base-id", '50000', + "--range-size", '20000', +- "--rid-base", '100300000' ++ "--rid-base", '100300000', + "--secondary-rid-base", '301000' + ]) + +-- +2.46.0 + diff --git a/0007-ipatests-Update-ipa-adtrust-install-test.patch b/0007-ipatests-Update-ipa-adtrust-install-test.patch new file mode 100644 index 0000000..465b89a --- /dev/null +++ b/0007-ipatests-Update-ipa-adtrust-install-test.patch @@ -0,0 +1,32 @@ +From a18eb8358675b3697ccf8f8d8dc230cc62df6a4d Mon Sep 17 00:00:00 2001 +From: Erik Belko +Date: Thu, 29 Aug 2024 16:47:21 +0200 +Subject: [PATCH] ipatests: Update ipa-adtrust-install test + +update test_user_connects_smb_share_if_locked_specific_group with wait +for SSSD to be online after ipa-adtrust-install command + +Related: https://pagure.io/freeipa/issue/9655 + +Signed-off-by: Erik Belko +Reviewed-By: Alexander Bokovoy +--- + ipatests/test_integration/test_adtrust_install.py | 2 ++ + 1 file changed, 2 insertions(+) + +diff --git a/ipatests/test_integration/test_adtrust_install.py b/ipatests/test_integration/test_adtrust_install.py +index 72e8d874fb17adadc556ba55b825a88a3ac21a67..de252db1705ad940c3b5ee4df967d7c17a4203a7 100644 +--- a/ipatests/test_integration/test_adtrust_install.py ++++ b/ipatests/test_integration/test_adtrust_install.py +@@ -853,6 +853,8 @@ class TestIpaAdTrustInstall(IntegrationTest): + self.master.config.admin_password, + "-U"] + ) ++ # Wait for SSSD to become online before doing any other check ++ tasks.wait_for_sssd_domain_status_online(self.master) + self.master.run_command(["mkdir", "/freeipa4234"]) + self.master.run_command( + ["chcon", "-t", "samba_share_t", +-- +2.46.0 + diff --git a/0008-Installer-activate-ssh-service-in-sssd.conf.patch b/0008-Installer-activate-ssh-service-in-sssd.conf.patch new file mode 100644 index 0000000..9484727 --- /dev/null +++ b/0008-Installer-activate-ssh-service-in-sssd.conf.patch @@ -0,0 +1,33 @@ +From 373d41f211c1a04dc432a068bc7d2ba825ff554c Mon Sep 17 00:00:00 2001 +From: Francisco Trivino +Date: Tue, 13 Aug 2024 12:44:21 +0200 +Subject: [PATCH] Installer: activate ssh service in sssd.conf + +This commit enables SSSD's ssh service in ipa-client-install to ensure +sss_ssh_knownhosts and sss_ssh_knownhostsproxy functions properly. 
+ +Fixes: https://pagure.io/freeipa/issue/9649 +Related: https://pagure.io/freeipa/issue/9536 + +Signed-off-by: Francisco Trivino +Reviewed-By: Rob Crittenden +--- + ipaclient/install/client.py | 2 ++ + 1 file changed, 2 insertions(+) + +diff --git a/ipaclient/install/client.py b/ipaclient/install/client.py +index 802db9614b24553b2b49259f3aebb366093560ac..47a371f629f6ddfb1cd5e9fff9faad737aa01f54 100644 +--- a/ipaclient/install/client.py ++++ b/ipaclient/install/client.py +@@ -974,6 +974,8 @@ def configure_sssd_conf( + + sssd_enable_service(sssdconfig, 'nss') + sssd_enable_service(sssdconfig, 'pam') ++ if options.conf_ssh: ++ sssd_enable_service(sssdconfig, 'ssh') + + domain.set_option('ipa_domain', cli_domain) + domain.set_option('ipa_hostname', client_hostname) +-- +2.46.0 + diff --git a/0009-ipa-migrate-fix-migration-issues-with-entries-using-.patch b/0009-ipa-migrate-fix-migration-issues-with-entries-using-.patch new file mode 100644 index 0000000..c0dc80b --- /dev/null +++ b/0009-ipa-migrate-fix-migration-issues-with-entries-using-.patch @@ -0,0 +1,404 @@ +From 8d242ba741ec22b258d5e70a530cefd0940783c7 Mon Sep 17 00:00:00 2001 +From: Mark Reynolds +Date: Tue, 23 Jul 2024 17:07:06 -0400 +Subject: [PATCH] ipa-migrate - fix migration issues with entries using + ipaUniqueId in the RDN + +We need to handle these entries differently and specify what attribute +and search base to use to find the entry on the local server. Most +entries can use the "cn" attribute but for selinux usermaps we need to +search using the ipaOwner attribute which is a DN, and in turn requires +additional handling/converting in order to properly check if the usermap +exists or not. + +Also fixed an issue where an attribute should be removed from the local +entry if it does not exist on the remote entry. 
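+
+(Illustrative aside, not part of the upstream commit message: the difficulty
+with these entries is that ipaUniqueId is generated independently on every
+server, so the same logical object carries a different DN on each side, e.g.
+
+    remote: ipaUniqueID=<uuid-A>,cn=sudorules,cn=sudo,dc=example,dc=test
+    local:  ipaUniqueID=<uuid-B>,cn=sudorules,cn=sudo,dc=example,dc=test
+
+and a plain DN comparison would never match the existing local entry. Hence
+the extra lookup by "cn", or by a DN-valued attribute such as ipaOwner,
+under the entry's known search base.)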
+ +And fixed the handling od "sudoOrder" which is defined as multi-valued +in the schema, but we really need to treat it as single-valued + +Fixes: https://pagure.io/freeipa/issue/9640 + +Signed-off-by: Mark Reynolds +Reviewed-By: Rob Crittenden +Reviewed-By: Rob Crittenden +--- + ipaserver/install/ipa_migrate.py | 119 +++++++++++++++++++-- + ipaserver/install/ipa_migrate_constants.py | 82 +++++++++++++-- + 2 files changed, 187 insertions(+), 14 deletions(-) + +diff --git a/ipaserver/install/ipa_migrate.py b/ipaserver/install/ipa_migrate.py +index e21937401b3463335d8297b41a403405071d3795..78c530f24fe5d8c9f5de0f816df9904bf30c7b94 100644 +--- a/ipaserver/install/ipa_migrate.py ++++ b/ipaserver/install/ipa_migrate.py +@@ -32,7 +32,7 @@ from ipaserver.install.ipa_migrate_constants import ( + DS_CONFIG, DB_OBJECTS, DS_INDEXES, BIND_DN, LOG_FILE_NAME, + STRIP_OP_ATTRS, STRIP_ATTRS, STRIP_OC, PROD_ATTRS, + DNA_REGEN_VAL, DNA_REGEN_ATTRS, IGNORE_ATTRS, +- DB_EXCLUDE_TREES ++ DB_EXCLUDE_TREES, POLICY_OP_ATTRS + ) + + """ +@@ -529,6 +529,14 @@ class IPAMigrate(): + # + # Helper functions + # ++ def attr_is_operational(self, attr): ++ schema = self.local_conn.schema ++ attr_obj = schema.get_obj(ldap.schema.AttributeType, attr) ++ if attr_obj is not None: ++ if attr_obj.usage == 1: ++ return True ++ return False ++ + def replace_suffix(self, entry_dn): + """ + Replace the base DN in an entry DN +@@ -1122,6 +1130,18 @@ class IPAMigrate(): + stats['reset_range'] += 1 + return entry + ++ def attr_is_required(self, attr, entry): ++ """ ++ Check if an attribute is required in this entry ++ """ ++ entry_oc = entry['objectClass'] ++ for oc in entry_oc: ++ required_attrs = self.local_conn.get_allowed_attributes( ++ [oc], raise_on_unknown=False, attributes="must") ++ if attr.lower() in required_attrs: ++ return True ++ return False ++ + def clean_entry(self, entry_dn, entry_type, entry_attrs): + """ + Clean up the entry from the remote server +@@ -1311,7 +1331,17 @@ class IPAMigrate(): + f"'{old_value}' " + "new value " + f"'{local_entry[attr][0]}'") +- ++ elif 'single' == sp_attr[1]: ++ # The attribute is defined as multivalued, but ++ # we really need to treat it as single valued ++ self.log_debug("Entry is different and will " ++ f"be updated: '{local_dn}' " ++ f"attribute '{attr}' replaced " ++ "with val " ++ f"'{remote_attrs[attr][0]}' " ++ "old value: " ++ f"{local_entry[attr][0]}") ++ local_entry[attr][0] = remote_attrs[attr][0] + goto_next_attr = True + break + +@@ -1358,6 +1388,31 @@ class IPAMigrate(): + local_entry[attr] = remote_attrs[attr] + entry_updated = True + ++ # Remove attributes in the local entry that do not exist in the ++ # remote entry ++ remove_attrs = [] ++ for attr in local_entry: ++ if (self.attr_is_operational(attr) ++ and attr.lower() not in POLICY_OP_ATTRS) or \ ++ attr.lower() in IGNORE_ATTRS or \ ++ attr.lower() in STRIP_ATTRS or \ ++ attr.lower() == "usercertificate": ++ # This is an attribute that we do not want to remove ++ continue ++ ++ if attr not in remote_attrs and \ ++ not self.attr_is_required(attr, local_entry): ++ # Mark this attribute for deletion ++ remove_attrs.append(attr) ++ entry_updated = True ++ ++ # Remove attributes ++ for remove_attr in remove_attrs: ++ self.log_debug("Entry is different and will be updated: " ++ f"'{local_dn}' attribute '{remove_attr}' " ++ "is being removed") ++ del local_entry[remove_attr] ++ + if range_reset: + stats['reset_range'] += 1 + +@@ -1371,6 +1426,9 @@ class IPAMigrate(): + """ + Process chunks of remote entries from a paged 
results search + ++ entry_dn = the remote entry DN ++ entry_attrs = the remote entry's attributes stored in a dict ++ + Identify entry type + Process entry (removing/change attr/val/schema) + Compare processed remote entry with local entry, merge/overwrite? +@@ -1426,6 +1484,47 @@ class IPAMigrate(): + # Based on the entry type do additional work + # + ++ # For entries with alternate identifying needs we need to rebuild the ++ # local dn. Typically this is for entries that use ipaUniqueId as the ++ # RDN attr ++ if entry_type != "custom" and 'alt_id' in DB_OBJECTS[entry_type]: ++ attr = DB_OBJECTS[entry_type]['alt_id']['attr'] ++ base = DB_OBJECTS[entry_type]['alt_id']['base'] ++ srch_filter = f'{attr}={entry_attrs[attr][0]}' ++ if DB_OBJECTS[entry_type]['alt_id']['isDN'] is True: ++ # Convert the filter to match the local suffix ++ srch_filter = self.replace_suffix(srch_filter) ++ srch_base = base + str(self.local_suffix) ++ ++ try: ++ entries = self.local_conn.get_entries(DN(srch_base), ++ filter=srch_filter) ++ if len(entries) == 1: ++ local_dn = entries[0].dn ++ elif len(entries) == 0: ++ # Not found, no problem just proceed and we will add it ++ pass ++ else: ++ # Found too many entries - should not happen ++ self.log_error('Found too many local matching entries ' ++ f'for "{local_dn}"') ++ if self.args.force: ++ stats['ignored_errors'] += 1 ++ return ++ else: ++ sys.exit(1) ++ except errors.EmptyResult: ++ # Not found, no problem just proceed and we will add it later ++ pass ++ except (errors.NetworkError, errors.DatabaseError) as e: ++ self.log_error('Failed to find a local matching entry for ' ++ f'"{local_dn}" error: {str(e)}') ++ if self.args.force: ++ stats['ignored_errors'] += 1 ++ return ++ else: ++ sys.exit(1) ++ + # See if the entry exists on the local server + try: + local_entry = self.local_conn.get_entry(DN(local_dn), +@@ -1441,14 +1540,20 @@ class IPAMigrate(): + + if self.dryrun: + self.write_update_to_ldif(local_entry) +- DB_OBJECTS[entry_type]['count'] += 1 ++ if entry_type == "custom": ++ stats['custom'] += 1 ++ else: ++ DB_OBJECTS[entry_type]['count'] += 1 + stats['total_db_migrated'] += 1 + return + + # Update the local entry + try: + self.local_conn.update_entry(local_entry) +- DB_OBJECTS[entry_type]['count'] += 1 ++ if entry_type == "custom": ++ stats['custom'] += 1 ++ else: ++ DB_OBJECTS[entry_type]['count'] += 1 + except errors.ExecutionError as e: + self.log_error(f'Failed to update "{local_dn}" error: ' + f'{str(e)}') +@@ -1567,7 +1672,7 @@ class IPAMigrate(): + """ + Used paged search for online method to avoid large memory footprint + """ +- self.log_info("Migrating database ... (this make take a while)") ++ self.log_info("Migrating database ... (this may take a while)") + if self.args.db_ldif is not None: + self.processDBOffline() + else: +@@ -1608,7 +1713,7 @@ class IPAMigrate(): + f"{len(objectclasses)} objectClasses") + + # Loop over attributes and objectclasses and count them +- schema = self.local_conn._get_schema() ++ schema = self.local_conn.schema + local_schema = schema.ldap_entry() + for schema_type in [(attributes, "attributeTypes"), + (objectclasses, "objectClasses")]: +@@ -1967,7 +2072,7 @@ class IPAMigrate(): + + # Run ipa-server-upgrade + self.log_info("Running ipa-server-upgrade ... 
" +- "(this make take a while)") ++ "(this may take a while)") + if self.dryrun: + self.log_info("Skipping ipa-server-upgrade in dryrun mode.") + else: +diff --git a/ipaserver/install/ipa_migrate_constants.py b/ipaserver/install/ipa_migrate_constants.py +index 0e26c75497b216f09ed450aa25a09c2102582326..250f1b5b01bf066d316a98489ab6153b89615173 100644 +--- a/ipaserver/install/ipa_migrate_constants.py ++++ b/ipaserver/install/ipa_migrate_constants.py +@@ -19,6 +19,28 @@ STRIP_OP_ATTRS = [ + 'nsuniqueid', + 'dsentrydn', + 'entryuuid', ++ 'entrydn', ++ 'entryid', ++ 'entryusn', ++ 'numsubordinates', ++ 'parentid', ++ 'tombstonenumsubordinates' ++] ++ ++# Operational attributes that we would want to remove from the local entry if ++# they don't exist in the remote entry ++POLICY_OP_ATTRS = [ ++ 'nsaccountlock', ++ 'passwordexpiratontime', ++ 'passwordgraceusertime', ++ 'pwdpolicysubentry', ++ 'passwordexpwarned', ++ 'passwordretrycount', ++ 'retrycountresettime', ++ 'accountunlocktime', ++ 'passwordhistory', ++ 'passwordallowchangetime', ++ 'pwdreset' + ] + + # Atributes to strip from users/groups +@@ -110,7 +132,7 @@ STRIP_OC = [ + # + # The DS_CONFIG mapping breaks each config entry (or type of entry) into its + # own catagory. Each catagory, or type, as DN list "dn", the attributes# we +-# are intrested in. These attributes are broken into singel valued "attrs", ++# are intrested in. These attributes are broken into single valued "attrs", + # or multi-valued attributes "multivalued". If the attributes is single + # valued then the value is replaced, if it's multivalued then it is "appended" + # +@@ -565,6 +587,12 @@ DS_INDEXES = { + # identify the entry. + # The "label" and "count" attributes are used for the Summary Report + # ++# Some entries use ipaUniqueId as the RDN attribute, this makes comparing ++# entries between the remote and local servers problematic. So we need special ++# identifying information to find the local entry. In this case we use the ++# "alt_id" key which is a dict of an attribute 'attr' and partial base DN ++# 'base' - which is expected to end in a comma. ++# + DB_OBJECTS = { + # Plugins + 'automember_def': { +@@ -640,8 +668,8 @@ DB_OBJECTS = { + 'oc': ['ipaconfigobject', 'ipaguiconfig'], + 'subtree': 'cn=ipaconfig,cn=etc,$SUFFIX', + 'special_attrs': [ +- # needs special handling, but +- # ipa-server-upgrade rewrites this attribute anyway! ++ # needs special handling, but ipa-server-upgrade rewrites this ++ # attribute anyway! + ('ipausersearchfields', 'list'), + ], + 'label': 'IPA Config', +@@ -772,11 +800,16 @@ DB_OBJECTS = { + 'mode': 'all', + 'count': 0, + }, +- 'subids': { # unknown what these entries look like TODO ++ 'subids': { + 'oc': [], + 'subtree': ',cn=subids,cn=accounts,$SUFFIX', + 'label': 'Sub IDs', +- 'mode': 'all', # TODO Maybe production only? 
++ 'mode': 'production', ++ 'alt_id': { ++ 'attr': 'ipaOwner', ++ 'isDN': True, ++ 'base': 'cn=subids,cn=accounts,', ++ }, + 'count': 0, + }, + +@@ -884,6 +917,11 @@ DB_OBJECTS = { + 'oc': ['ipahbacrule'], + 'subtree': ',cn=hbac,$SUFFIX', + 'label': 'HBAC Rules', ++ 'alt_id': { ++ 'attr': 'cn', ++ 'base': 'cn=hbac,', ++ 'isDN': False, ++ }, + 'mode': 'all', + 'count': 0, + }, +@@ -892,6 +930,11 @@ DB_OBJECTS = { + 'selinux_usermap': { # Not sure if this is needed, entry is empty TODO + 'oc': [], + 'subtree': ',cn=usermap,cn=selinux,$SUFFIX', ++ 'alt_id': { ++ 'attr': 'cn', ++ 'base': 'cn=usermap,cn=selinux,', ++ 'isDN': False, ++ }, + 'label': 'Selinux Usermaps', + 'mode': 'all', + 'count': 0, +@@ -902,12 +945,27 @@ DB_OBJECTS = { + 'oc': ['ipasudorule'], + 'subtree': ',cn=sudorules,cn=sudo,$SUFFIX', + 'label': 'Sudo Rules', ++ 'alt_id': { ++ 'attr': 'cn', ++ 'base': 'cn=sudorules,cn=sudo,', ++ 'isDN': False, ++ }, ++ 'special_attrs': [ ++ # schema defines sudoOrder as mutlivalued, but we need to treat ++ # it as single valued ++ ('sudoorder', 'single'), ++ ], + 'mode': 'all', + 'count': 0, + }, + 'sudo_cmds': { + 'oc': ['ipasudocmd'], + 'subtree': ',cn=sudocmds,cn=sudo,$SUFFIX', ++ 'alt_id': { ++ 'attr': 'sudoCmd', ++ 'base': 'cn=sudocmds,cn=sudo,', ++ 'isDN': False, ++ }, + 'label': 'Sudo Commands', + 'mode': 'all', + 'count': 0, +@@ -991,6 +1049,11 @@ DB_OBJECTS = { + 'oc': ['ipanisnetgroup'], + 'not_oc': ['mepmanagedentry'], + 'subtree': ',cn=ng,cn=alt,$SUFFIX', ++ 'alt_id': { ++ 'attr': 'cn', ++ 'base': 'cn=ng,cn=alt,', ++ 'isDN': False, ++ }, + 'label': 'Network Groups', + 'mode': 'all', + 'count': 0, +@@ -1006,9 +1069,14 @@ DB_OBJECTS = { + 'count': 0, + }, + 'caacls': { +- 'oc': ['top'], ++ 'oc': ['ipacaacl'], + 'subtree': ',cn=caacls,cn=ca,$SUFFIX', +- 'label': 'CA Certificates', ++ 'alt_id': { ++ 'attr': 'cn', ++ 'base': 'cn=caacls,cn=ca,', ++ 'isDN': False, ++ }, ++ 'label': 'CA Certificate ACLs', + 'mode': 'all', + 'count': 0, + }, +-- +2.46.0 + diff --git a/0010-ipa-migrate-fix-alternate-entry-search-filter.patch b/0010-ipa-migrate-fix-alternate-entry-search-filter.patch new file mode 100644 index 0000000..512384a --- /dev/null +++ b/0010-ipa-migrate-fix-alternate-entry-search-filter.patch @@ -0,0 +1,68 @@ +From 3b5a980f5b65b03b9fd7ad0cfbb6c87874d3ff24 Mon Sep 17 00:00:00 2001 +From: Mark Reynolds +Date: Tue, 3 Sep 2024 13:42:05 -0400 +Subject: [PATCH] ipa-migrate - fix alternate entry search filter + +Processing a filter like a DN can cause normalization issues that result +in an invalid filter. 
Make sure the filter is encapsulated with +parenthesis and we call replace_suffix_value() instead of +replace_suffix() + +Fixes: https://pagure.io/freeipa/issue/9658 + +Signed-off-by: Mark Reynolds + +Fix typo in test + +Reviewed-By: Florence Blanc-Renaud +--- + ipaserver/install/ipa_migrate.py | 4 ++-- + ipatests/test_integration/test_ipa_ipa_migration.py | 6 +++--- + 2 files changed, 5 insertions(+), 5 deletions(-) + +diff --git a/ipaserver/install/ipa_migrate.py b/ipaserver/install/ipa_migrate.py +index 78c530f24fe5d8c9f5de0f816df9904bf30c7b94..38356aa23ea435e2a616f48356feaea7b50dd1e4 100644 +--- a/ipaserver/install/ipa_migrate.py ++++ b/ipaserver/install/ipa_migrate.py +@@ -1490,10 +1490,10 @@ class IPAMigrate(): + if entry_type != "custom" and 'alt_id' in DB_OBJECTS[entry_type]: + attr = DB_OBJECTS[entry_type]['alt_id']['attr'] + base = DB_OBJECTS[entry_type]['alt_id']['base'] +- srch_filter = f'{attr}={entry_attrs[attr][0]}' ++ srch_filter = f'({attr}={entry_attrs[attr][0]})' + if DB_OBJECTS[entry_type]['alt_id']['isDN'] is True: + # Convert the filter to match the local suffix +- srch_filter = self.replace_suffix(srch_filter) ++ srch_filter = self.replace_suffix_value(srch_filter) + srch_base = base + str(self.local_suffix) + + try: +diff --git a/ipatests/test_integration/test_ipa_ipa_migration.py b/ipatests/test_integration/test_ipa_ipa_migration.py +index f697bbfbfc6169309274db689501c99fe148cc70..288165e8a83a96e6f6bd4e52866f98617f497c56 100644 +--- a/ipatests/test_integration/test_ipa_ipa_migration.py ++++ b/ipatests/test_integration/test_ipa_ipa_migration.py +@@ -610,7 +610,7 @@ class TestIPAMigrateScenario1(IntegrationTest): + MIGRATION_SCHEMA_LOG_MSG = "Migrating schema ...\n" + MIGRATION_CONFIG_LOG_MSG = "Migrating configuration ...\n" + IPA_UPGRADE_LOG_MSG = ( +- "Running ipa-server-upgrade ... (this make take a while)\n" ++ "Running ipa-server-upgrade ... (this may take a while)\n" + ) + SIDGEN_TASK_LOG_MSG = "Running SIDGEN task ...\n" + MIGRATION_COMPLETE_LOG_MSG = "Migration complete!\n" +@@ -641,10 +641,10 @@ class TestIPAMigrateScenario1(IntegrationTest): + tasks.kinit_admin(self.replicas[0]) + MIGRATION_SCHEMA_LOG_MSG = "Migrating schema ...\n" + MIGRATION_DATABASE_LOG_MSG = ( +- "Migrating database ... (this make take a while)\n" ++ "Migrating database ... (this may take a while)\n" + ) + IPA_UPGRADE_LOG_MSG = ( +- "Running ipa-server-upgrade ... (this make take a while)\n" ++ "Running ipa-server-upgrade ... (this may take a while)\n" + ) + SIDGEN_TASK_LOG_MSG = "Running SIDGEN task ...\n" + result = run_migrate( +-- +2.46.0 + diff --git a/0011-Custodia-in-fips-mode-add-nomac-or-nomacver-to-opens.patch b/0011-Custodia-in-fips-mode-add-nomac-or-nomacver-to-opens.patch new file mode 100644 index 0000000..1db95cb --- /dev/null +++ b/0011-Custodia-in-fips-mode-add-nomac-or-nomacver-to-opens.patch @@ -0,0 +1,82 @@ +From c96d172d7d2e87513d9bd51a98591858e1f88def Mon Sep 17 00:00:00 2001 +From: Florence Blanc-Renaud +Date: Thu, 5 Sep 2024 14:52:26 +0200 +Subject: [PATCH] Custodia: in fips mode add -nomac or -nomacver to openssl + pkcs12 + +In FIPS mode the command openssl pkcs12 fails unless the +export is called with -nomac and import with -nomacver + +The command is used by custodia to export private keys from the +master and import them in the replica. 
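+
+(Illustrative aside, not part of the upstream commit message: with the change
+the export command assembled on a FIPS-enabled host ends up roughly as
+
+    openssl pkcs12 -export -in <certfile> -out <pk12file> ... \
+        -keypbe AES-256-CBC -certpbe AES-256-CBC -macalg sha384 -nomac
+
+while the two import invocations get -nomacver appended, so the PKCS#12 MAC
+is neither generated on export nor verified on import when FIPS mode is
+detected via ipaplatform's tasks.is_fips_enabled().)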
+ +Fixes: https://pagure.io/freeipa/issue/9577 + +Signed-off-by: Florence Blanc-Renaud +Reviewed-By: Alexander Bokovoy +--- + ipaserver/secrets/handlers/pemfile.py | 20 ++++++++++++++++++-- + 1 file changed, 18 insertions(+), 2 deletions(-) + +diff --git a/ipaserver/secrets/handlers/pemfile.py b/ipaserver/secrets/handlers/pemfile.py +index ad36bd02008ff068fa7e237dd9653e31f7ac7d85..006d351699f3086653c2e461fdcb8afb53eea281 100644 +--- a/ipaserver/secrets/handlers/pemfile.py ++++ b/ipaserver/secrets/handlers/pemfile.py +@@ -8,6 +8,7 @@ import json + import os + + from ipaplatform.paths import paths ++from ipaplatform.tasks import tasks + from ipapython import ipautil + from . import common + +@@ -25,7 +26,7 @@ def export_key(args, tmpdir): + f.write(password) + + # OpenSSL does not support pkcs12 export of a cert without key +- ipautil.run([ ++ cmd = [ + paths.OPENSSL, 'pkcs12', '-export', + '-in', args.certfile, + '-out', pk12file, +@@ -34,7 +35,13 @@ def export_key(args, tmpdir): + '-keypbe', 'AES-256-CBC', + '-certpbe', 'AES-256-CBC', + '-macalg', 'sha384', +- ]) ++ ] ++ ++ fips_enabled = tasks.is_fips_enabled() ++ if fips_enabled: ++ cmd.append('-nomac') ++ ++ ipautil.run(cmd) + + with open(pk12file, 'rb') as f: + p12data = f.read() +@@ -69,6 +76,11 @@ def import_key(args, tmpdir): + '-out', args.certfile, + '-password', 'file:{pk12pwfile}'.format(pk12pwfile=pk12pwfile), + ] ++ ++ fips_enabled = tasks.is_fips_enabled() ++ if fips_enabled: ++ cmd.append('-nomacver') ++ + ipautil.run(cmd, umask=0o027) + + # get the private key from the file +@@ -79,6 +91,10 @@ def import_key(args, tmpdir): + '-out', args.keyfile, + '-password', 'file:{pk12pwfile}'.format(pk12pwfile=pk12pwfile), + ] ++ ++ if fips_enabled: ++ cmd.append('-nomacver') ++ + ipautil.run(cmd, umask=0o027) + + +-- +2.46.0 + diff --git a/0012-ipatests-make-TestDuplicates-teardowns-order-agnosti.patch b/0012-ipatests-make-TestDuplicates-teardowns-order-agnosti.patch new file mode 100644 index 0000000..3c8b719 --- /dev/null +++ b/0012-ipatests-make-TestDuplicates-teardowns-order-agnosti.patch @@ -0,0 +1,139 @@ +From d102773ce24481c6797f71557b75e77921164285 Mon Sep 17 00:00:00 2001 +From: Stanislav Levin +Date: Thu, 12 Sep 2024 12:38:52 +0300 +Subject: [PATCH] ipatests: make TestDuplicates teardowns order agnostic + +Fixtures 'user4' and 'user5' track the same actual user 'tuser'. +If used together their teardowns can fail depending on the +order of execution. + +With this change fixtures of TestDuplicates are simplified and +method-scoped. 
+ +Related: https://pagure.io/freeipa/issue/9571 +Signed-off-by: Stanislav Levin +Reviewed-By: Florence Blanc-Renaud +--- + ipatests/test_xmlrpc/test_stageuser_plugin.py | 76 +++++++------------ + 1 file changed, 28 insertions(+), 48 deletions(-) + +diff --git a/ipatests/test_xmlrpc/test_stageuser_plugin.py b/ipatests/test_xmlrpc/test_stageuser_plugin.py +index 9ae5561dfa4e0d54fe1231501bfea3c0ba261849..6ed593fbf24dd2e8ce087625b9cb4c21c9a3c145 100644 +--- a/ipatests/test_xmlrpc/test_stageuser_plugin.py ++++ b/ipatests/test_xmlrpc/test_stageuser_plugin.py +@@ -120,12 +120,6 @@ def stageduser3(request, xmlrpc_setup): + return tracker.make_fixture_activate(request) + + +-@pytest.fixture(scope='class') +-def stageduser4(request, xmlrpc_setup): +- tracker = StageUserTracker(u'tuser', u'test', u'user') +- return tracker.make_fixture(request) +- +- + @pytest.fixture(scope='class') + def stageduser_notposix(request, xmlrpc_setup): + tracker = StageUserTracker(u'notposix', u'notposix', u'notposix') +@@ -161,18 +155,6 @@ def user3(request, xmlrpc_setup): + return tracker.make_fixture(request) + + +-@pytest.fixture(scope='class') +-def user4(request, xmlrpc_setup): +- tracker = UserTracker(u'tuser', u'test', u'user') +- return tracker.make_fixture(request) +- +- +-@pytest.fixture(scope='class') +-def user5(request, xmlrpc_setup): +- tracker = UserTracker(u'tuser', u'test', u'user') +- return tracker.make_fixture(request) +- +- + @pytest.fixture(scope='class') + def user6(request, xmlrpc_setup): + tracker = UserTracker(u'suser2', u'staged', u'user') +@@ -724,52 +706,50 @@ class TestManagers(XMLRPC_test): + + @pytest.mark.tier1 + class TestDuplicates(XMLRPC_test): +- def test_active_same_as_preserved(self, user4, user5): +- user4.ensure_missing() +- user5.make_preserved_user() +- command = user4.make_create_command() ++ @pytest.fixture ++ def user(self, request, xmlrpc_setup): ++ tracker = UserTracker("tuser", "test", "user") ++ return tracker.make_fixture(request) ++ ++ @pytest.fixture ++ def stageduser(self, request, xmlrpc_setup): ++ tracker = StageUserTracker("tuser", "test", "user") ++ return tracker.make_fixture(request) ++ ++ def test_active_same_as_preserved(self, user): ++ user.make_preserved_user() ++ command = user.make_create_command() + with raises_exact(errors.DuplicateEntry( +- message=u'user with name "%s" already exists' % user4.uid)): ++ message=u'user with name "%s" already exists' % user.uid)): + command() +- user5.delete() + +- def test_staged_same_as_active(self, user4, stageduser4): +- user4.ensure_exists() +- stageduser4.create() # can be created ++ def test_staged_same_as_active(self, user, stageduser): ++ user.create() ++ stageduser.create() # can be created + +- command = stageduser4.make_activate_command() ++ command = stageduser.make_activate_command() + with raises_exact(errors.DuplicateEntry( + message=u'active user with name "%s" already exists' % +- user4.uid)): ++ user.uid)): + command() # cannot be activated + +- user4.delete() +- stageduser4.delete() +- +- def test_staged_same_as_preserved(self, user5, stageduser4): +- user5.make_preserved_user() +- stageduser4.create() # can be created ++ def test_staged_same_as_preserved(self, user, stageduser): ++ user.make_preserved_user() ++ stageduser.create() # can be created + +- command = stageduser4.make_activate_command() ++ command = stageduser.make_activate_command() + with raises_exact(errors.DuplicateEntry( + message=u'This entry already exists')): + command() # cannot be activated + +- user5.delete() +- 
stageduser4.delete() ++ def test_active_same_as_staged(self, user, stageduser): ++ stageduser.create() ++ user.create() # can be created + +- def test_active_same_as_staged(self, user4, stageduser4): +- user4.ensure_missing() +- stageduser4.ensure_exists() +- command = user4.make_create_command() +- result = command() +- user4.track_create() +- user4.check_create(result) # can be created +- +- command = stageduser4.make_activate_command() ++ command = stageduser.make_activate_command() + with raises_exact(errors.DuplicateEntry( + message=u'active user with name "%s" already exists' % +- user4.uid)): ++ user.uid)): + command() # cannot be activated + + +-- +2.46.0 + diff --git a/freeipa.spec b/freeipa.spec index b5aa22b..d2c73bd 100644 --- a/freeipa.spec +++ b/freeipa.spec @@ -205,7 +205,7 @@ Name: %{package_name} Version: %{IPA_VERSION} -Release: 1%{?rc_version:.%rc_version}%{?dist} +Release: 2%{?rc_version:.%rc_version}%{?dist} Summary: The Identity, Policy and Audit system License: GPL-3.0-or-later @@ -239,6 +239,16 @@ Patch1002: 1002-Revert-freeipa.spec-depend-on-bind-dnssec-utils.patch %if 0%{?rhel} >= 9 Patch0001: 0001-Revert-Replace-netifaces-with-ifaddr.patch Patch0002: 0002-freeipa-disable-nis.patch +Patch0003: 0003-ipatests-Check-Default-PAC-type-is-added-to-config.patch +Patch0004: 0004-selinux-add-all-IPA-log-files-to-ipa_log_t-file-cont.patch +Patch0005: 0005-Add-ipa-idrange-fix.patch +Patch0006: 0006-ipatests-Add-missing-comma-in-test_idrange_no_rid_ba.patch +Patch0007: 0007-ipatests-Update-ipa-adtrust-install-test.patch +Patch0008: 0008-Installer-activate-ssh-service-in-sssd.conf.patch +Patch0009: 0009-ipa-migrate-fix-migration-issues-with-entries-using-.patch +Patch0010: 0010-ipa-migrate-fix-alternate-entry-search-filter.patch +Patch0011: 0011-Custodia-in-fips-mode-add-nomac-or-nomacver-to-opens.patch +Patch0012: 0012-ipatests-make-TestDuplicates-teardowns-order-agnosti.patch Patch1001: 1001-Change-branding-to-IPA-and-Identity-Management.patch %endif %endif @@ -1493,6 +1503,7 @@ fi %{_sbindir}/ipa-pkinit-manage %{_sbindir}/ipa-crlgen-manage %{_sbindir}/ipa-cert-fix +%{_sbindir}/ipa-idrange-fix %{_sbindir}/ipa-acme-manage %{_sbindir}/ipa-migrate %if 0%{?fedora} >= 38 @@ -1571,6 +1582,7 @@ fi %{_mandir}/man1/ipa-pkinit-manage.1* %{_mandir}/man1/ipa-crlgen-manage.1* %{_mandir}/man1/ipa-cert-fix.1* +%{_mandir}/man1/ipa-idrange-fix.1* %{_mandir}/man1/ipa-acme-manage.1* %{_mandir}/man1/ipa-migrate.1* @@ -1859,6 +1871,15 @@ fi %endif %changelog +* Wed Sep 18 2024 Florence Blanc-Renaud - 4.12.2.2 +- Resolves: RHEL-47294 SID generation task is failing when SELinux is in Enforcing mode +- Resolves: RHEL-56472 Include latest fixes in python3-ipatests packages +- Resolves: RHEL-56917 RFE add a tool to quickly detect and fix issues with IPA ID ranges +- Resolves: RHEL-56965 Backport test fixes in python3-ipatests +- Resolves: RHEL-58067 ipa replication installation fails in FIPS mode on rhel10 +- Resolves: RHEL-59265 Default hbac rules are duplicated on remote server post ipa-migrate in prod-mode +- Resolves: RHEL-59266 Also enable SSSD's ssh service when enabling sss_ssh_knownhosts + * Thu Aug 22 2024 Florence Blanc-Renaud - 4.12.2.1 - Resolves: RHEL-54545 Covscan issues: Resource Leak - Resolves: RHEL-54304 support for python cryptography 43.0.0