import pki-core-10.12.0-2.module+el8.6.0+14115+8b467244

This commit is contained in:
parent f4fe8795bb
commit 6dca348454

.gitignore vendored (2 lines changed)
@@ -1 +1 @@
-SOURCES/pki-10.11.2.tar.gz
+SOURCES/pki-10.12.0.tar.gz
@@ -1 +1 @@
-864e86742b5462527a677c060d5b3b1d0f11b299 SOURCES/pki-10.11.2.tar.gz
+14942c7bda42ccd0f57ea5b2e538eb13a559572f SOURCES/pki-10.12.0.tar.gz
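
The two hunks above swap the release tarball: .gitignore now ignores the new tarball name, and the lookaside-cache metadata pairs a 40-hex-digit SHA-1 checksum with the SOURCES/ path. A minimal sketch for verifying a downloaded tarball against that checksum, assuming the tarball sits in SOURCES/:

import hashlib

EXPECTED = '14942c7bda42ccd0f57ea5b2e538eb13a559572f'   # checksum recorded above
TARBALL = 'SOURCES/pki-10.12.0.tar.gz'

def sha1sum(path, chunk_size=1 << 20):
    # hash the file in chunks so large tarballs don't need to fit in memory
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

if __name__ == '__main__':
    actual = sha1sum(TARBALL)
    print('OK' if actual == EXPECTED else 'MISMATCH: ' + actual)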

SOURCES/0001-Fix-AJP-connector-migration.patch (deleted file, 217 lines)
@@ -1,217 +0,0 @@
From 8a8fc41a10ffb20e9e4902a9e9f74b2f05948b7a Mon Sep 17 00:00:00 2001
From: "Endi S. Dewata" <edewata@redhat.com>
Date: Wed, 3 Nov 2021 20:46:46 -0500
Subject: [PATCH] Fix AJP connector migration

In commit e70373ab131aba810f318c1d917896392b49ff4b the AJP
connector migration code for Tomcat 9.0.31 in the pki-server
migrate CLI was converted into an upgrade script that would
run regardless of the Tomcat version, and this was causing
a problem on platforms that only have older Tomcat versions.

To fix the problem, the upgrade script has been converted back
into pki-server migrate, and it will check the Tomcat version
before performing the migration. The server.xml has also been
reverted to have the old AJP connectors by default.

Whenever the server is restarted, pki-server migrate will
run so it can migrate the AJP connectors automatically in
case Tomcat is upgraded to a newer version.

https://bugzilla.redhat.com/show_bug.cgi?id=2029023
---
 base/server/python/pki/server/cli/migrate.py  | 61 +++++++++++++++++
 .../upgrade/10.11.0/04-UpdateAJPConnectors.py | 67 -------------------
 ...lowLinking.py => 04-UpdateAllowLinking.py} |  0
 ...UpdateJavaHome.py => 05-UpdateJavaHome.py} |  0
 base/tomcat-9.0/conf/server.xml               |  4 +-
 5 files changed, 63 insertions(+), 69 deletions(-)
 delete mode 100644 base/server/upgrade/10.11.0/04-UpdateAJPConnectors.py
 rename base/server/upgrade/10.11.0/{05-UpdateAllowLinking.py => 04-UpdateAllowLinking.py} (100%)
 rename base/server/upgrade/10.11.0/{06-UpdateJavaHome.py => 05-UpdateJavaHome.py} (100%)

diff --git a/base/server/python/pki/server/cli/migrate.py b/base/server/python/pki/server/cli/migrate.py
index 256b83c845..2005004c4e 100644
--- a/base/server/python/pki/server/cli/migrate.py
+++ b/base/server/python/pki/server/cli/migrate.py
@@ -23,6 +23,7 @@ from __future__ import print_function

 import getopt
 import logging
+import re
 import sys

 from lxml import etree
@@ -96,9 +97,69 @@ class MigrateCLI(pki.cli.CLI):

             instance.load()
             instance.init()
+            instances = [instance]

         else:
             instances = pki.server.instance.PKIInstance.instances()

             for instance in instances:
                 instance.init()
+
+        # update AJP connectors for Tomcat 9.0.31 or later
+
+        tomcat_version = pki.server.Tomcat.get_version()
+        if tomcat_version >= pki.util.Version('9.0.31'):
+
+            for instance in instances:
+                self.update_ajp_connectors(instance)
+
+    def update_ajp_connectors(self, instance):
+
+        logger.info('Updating AJP connectors in %s', instance.server_xml)
+
+        document = etree.parse(instance.server_xml, self.parser)
+        server = document.getroot()
+
+        # replace 'requiredSecret' with 'secret' in comments
+
+        services = server.findall('Service')
+        for service in services:
+
+            children = list(service)
+            for child in children:
+
+                if not isinstance(child, etree._Comment):  # pylint: disable=protected-access
+                    # not a comment -> skip
+                    continue
+
+                if 'protocol="AJP/1.3"' not in child.text:
+                    # not an AJP connector -> skip
+                    continue
+
+                child.text = re.sub(r'requiredSecret=',
+                                    r'secret=',
+                                    child.text,
+                                    flags=re.MULTILINE)
+
+        # replace 'requiredSecret' with 'secret' in Connectors
+
+        connectors = server.findall('Service/Connector')
+        for connector in connectors:
+
+            if connector.get('protocol') != 'AJP/1.3':
+                # not an AJP connector -> skip
+                continue
+
+            if connector.get('secret'):
+                # already has a 'secret' -> skip
+                continue
+
+            if connector.get('requiredSecret') is None:
+                # does not have a 'requiredSecret' -> skip
+                continue
+
+            value = connector.attrib.pop('requiredSecret')
+            connector.set('secret', value)
+
+        with open(instance.server_xml, 'wb') as f:
+            document.write(f, pretty_print=True, encoding='utf-8')
diff --git a/base/server/upgrade/10.11.0/04-UpdateAJPConnectors.py b/base/server/upgrade/10.11.0/04-UpdateAJPConnectors.py
deleted file mode 100644
index 6e7bbdae24..0000000000
--- a/base/server/upgrade/10.11.0/04-UpdateAJPConnectors.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-# Copyright Red Hat, Inc.
-#
-# SPDX-License-Identifier: GPL-2.0-or-later
-#
-from __future__ import absolute_import
-import logging
-from lxml import etree
-import re
-
-import pki
-
-logger = logging.getLogger(__name__)
-
-
-class UpdateAJPConnectors(pki.server.upgrade.PKIServerUpgradeScriptlet):
-
-    def __init__(self):
-        super(UpdateAJPConnectors, self).__init__()
-        self.message = 'Update AJP connectors in server.xml'
-
-        self.parser = etree.XMLParser(remove_blank_text=True)
-
-    def upgrade_instance(self, instance):
-
-        logger.info('Updating %s', instance.server_xml)
-        self.backup(instance.server_xml)
-
-        document = etree.parse(instance.server_xml, self.parser)
-        server = document.getroot()
-
-        logger.info('Renaming requiredSecret to secret')
-
-        services = server.findall('Service')
-        for service in services:
-
-            children = list(service)
-            for child in children:
-
-                if isinstance(child, etree._Comment):  # pylint: disable=protected-access
-                    if 'protocol="AJP/1.3"' in child.text:
-                        child.text = re.sub(r'requiredSecret=',
-                                            r'secret=',
-                                            child.text,
-                                            flags=re.MULTILINE)
-
-        connectors = server.findall('Service/Connector')
-        for connector in connectors:
-
-            if connector.get('protocol') != 'AJP/1.3':
-                # Only modify AJP connectors.
-                continue
-
-            if connector.get('secret'):
-                # Nothing to migrate because the secret attribute already
-                # exists.
-                continue
-
-            if connector.get('requiredSecret') is None:
-                # No requiredSecret field either; nothing to do.
-                continue
-
-            connector.set('secret', connector.get('requiredSecret'))
-            connector.attrib.pop('requiredSecret', None)
-
-        with open(instance.server_xml, 'wb') as f:
-            document.write(f, pretty_print=True, encoding='utf-8')
diff --git a/base/server/upgrade/10.11.0/05-UpdateAllowLinking.py b/base/server/upgrade/10.11.0/04-UpdateAllowLinking.py
similarity index 100%
rename from base/server/upgrade/10.11.0/05-UpdateAllowLinking.py
rename to base/server/upgrade/10.11.0/04-UpdateAllowLinking.py
diff --git a/base/server/upgrade/10.11.0/06-UpdateJavaHome.py b/base/server/upgrade/10.11.0/05-UpdateJavaHome.py
similarity index 100%
rename from base/server/upgrade/10.11.0/06-UpdateJavaHome.py
rename to base/server/upgrade/10.11.0/05-UpdateJavaHome.py
diff --git a/base/tomcat-9.0/conf/server.xml b/base/tomcat-9.0/conf/server.xml
index 528300fd27..d6f3bb7ff0 100644
--- a/base/tomcat-9.0/conf/server.xml
+++ b/base/tomcat-9.0/conf/server.xml
@@ -190,12 +190,12 @@ Tomcat Port = [TOMCAT_SERVER_PORT] (for shutdown)
                protocol="AJP/1.3"
                redirectPort="[PKI_AJP_REDIRECT_PORT]"
                address="[PKI_AJP_HOST_IPv4]"
-               secret="[PKI_AJP_SECRET]" />
+               requiredSecret="[PKI_AJP_SECRET]" />
     <Connector port="[PKI_AJP_PORT]"
                protocol="AJP/1.3"
                redirectPort="[PKI_AJP_REDIRECT_PORT]"
                address="[PKI_AJP_HOST_IPv6]"
-               secret="[PKI_AJP_SECRET]" />
+               requiredSecret="[PKI_AJP_SECRET]" />
 [PKI_CLOSE_AJP_PORT_COMMENT]

--
2.33.1
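
The rename performed by update_ajp_connectors() in the patch above can be exercised standalone. A minimal sketch using lxml against an inline server.xml sample; the XML below is illustrative, not the actual PKI template:

from lxml import etree

SERVER_XML = b'''<Server>
  <Service name="Catalina">
    <!-- <Connector port="8009" protocol="AJP/1.3" requiredSecret="changeme" /> -->
    <Connector port="8009" protocol="AJP/1.3" requiredSecret="changeme" />
    <Connector port="8443" protocol="HTTP/1.1" />
  </Service>
</Server>'''

parser = etree.XMLParser(remove_blank_text=True)
server = etree.fromstring(SERVER_XML, parser)

# rename inside comments, mirroring the patch
for service in server.findall('Service'):
    for child in service:
        if isinstance(child, etree._Comment) and 'protocol="AJP/1.3"' in child.text:
            child.text = child.text.replace('requiredSecret=', 'secret=')

# rename on real AJP connectors, skipping ones already migrated
for connector in server.findall('Service/Connector'):
    if connector.get('protocol') != 'AJP/1.3':
        continue
    if connector.get('secret') or connector.get('requiredSecret') is None:
        continue
    connector.set('secret', connector.attrib.pop('requiredSecret'))

print(etree.tostring(server, pretty_print=True).decode())

Connectors that already carry secret, and non-AJP connectors, pass through untouched, which is what makes the migration safe to run on every restart.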

SOURCES/0001-Fix-Bug-2001576-pki-instance-creation-fails-for-IPA-.patch (deleted file, 26 lines)
@@ -1,26 +0,0 @@
From 607300e57ea05a1475656f1493745f7c7a28b747 Mon Sep 17 00:00:00 2001
From: Jack Magne <jmagne@redhat.com>
Date: Thu, 23 Sep 2021 13:50:41 -0400
Subject: [PATCH] Fix Bug 2001576 - pki instance creation fails for IPA server
 in FIPS mode (RHEL-8.5). Additional fix to this issue to account for our
 standalone java tools.

---
 base/tools/templates/pki_java_command_wrapper.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/base/tools/templates/pki_java_command_wrapper.in b/base/tools/templates/pki_java_command_wrapper.in
index 05650630d..d68ed93a3 100644
--- a/base/tools/templates/pki_java_command_wrapper.in
+++ b/base/tools/templates/pki_java_command_wrapper.in
@@ -90,6 +90,7 @@ JAVA_OPTIONS=""

 ${JAVA} ${JAVA_OPTIONS} \
     -cp "${PKI_LIB}/*" \
+    -Dcom.redhat.fips=false \
    -Djava.util.logging.config.file=${PKI_LOGGING_CONFIG} \
     com.netscape.cmstools.${COMMAND} "$@"

--
2.31.1
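
The wrapper change above boils down to passing -Dcom.redhat.fips=false to the JVM so that OpenJDK's FIPS mode does not interfere with the tools' NSS-based crypto. A hedged sketch of an equivalent launch from Python; the classpath, logging config path, and tool class below are illustrative placeholders, not the wrapper's actual expanded values:

import subprocess

cmd = [
    '/usr/bin/java',
    '-cp', '/usr/share/pki/lib/*',       # illustrative classpath
    '-Dcom.redhat.fips=false',           # the flag the patch adds
    '-Djava.util.logging.config.file=/usr/share/pki/etc/logging.properties',
    'com.netscape.cmstools.CMCRequest',  # placeholder tool class
    '--help',
]
subprocess.run(cmd, check=False)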

SOURCES/0001-Fix-pki-healthcheck-for-clones.patch (new file, 332 lines)
@@ -0,0 +1,332 @@
From 7d62105c676fc79e0c32766c41cd034655a524ff Mon Sep 17 00:00:00 2001
From: "Endi S. Dewata" <edewata@redhat.com>
Date: Tue, 25 Jan 2022 16:29:53 -0600
Subject: [PATCH] Fix pki-healthcheck for clones

Previously the ClonesConnectivyAndDataCheck.check_kra_clones()
was trying to check KRA clone status by retrieving a key using
the subsystem cert. This operation did not work since the user
associated with the cert did not have access to the keys. The
code has been changed to get the status from the GetStatus
service instead. The original code might be moved into IPA later
so it could run with IPA's RA agent credentials which would
allow access to the keys.

Previously the ClonesPlugin.contact_subsystem_using_sslget()
used sslget to call the GetStatus service and returned the
entire output which was then incorrectly processed in XML
format. The method has been renamed to get_status() and changed
to use PKIConnection and process the response in either JSON or
XML format, then only return the subsystem status. All callers
have been updated accordingly.

The ClonesPlugin.contact_subsystem_using_pki() is no longer
used so it has been removed.
---
 .../clones/connectivity_and_data.py           | 130 ++++++++----------
 .../pki/server/healthcheck/clones/plugin.py   |  75 ++++------
 base/server/python/pki/server/__init__.py     |   8 +-
 3 files changed, 91 insertions(+), 122 deletions(-)

diff --git a/base/server/healthcheck/pki/server/healthcheck/clones/connectivity_and_data.py b/base/server/healthcheck/pki/server/healthcheck/clones/connectivity_and_data.py
index ca5d6dae48..d9bb480f7f 100644
--- a/base/server/healthcheck/pki/server/healthcheck/clones/connectivity_and_data.py
+++ b/base/server/healthcheck/pki/server/healthcheck/clones/connectivity_and_data.py
@@ -46,93 +46,83 @@ class ClonesConnectivyAndDataCheck(ClonesPlugin):

     def check_kra_clones(self):
         for host in self.clone_kras:
-            cur_clone_msg = ' Host: ' + host.Hostname + ' Port: ' + host.SecurePort
-            # Reach out and get some keys or requests , to serve as a data and connectivity check
+
+            url = 'https://' + host.Hostname + ':' + host.SecurePort
+
             try:
-                client_nick = self.security_domain.config.get('ca.connector.KRA.nickName')
-
-                output = self.contact_subsystem_using_pki(
-                    host.SecurePort, host.Hostname, client_nick,
-                    self.passwd, self.db_dir, 'kra-key-show', ['0x01'])
-
-                # check to see if we either got a key or a key not found exception
-                # of which either will imply a successful connection
-                if output is not None:
-                    key_found = output.find('Key ID:')
-                    key_not_found = output.find('KeyNotFoundException:')
-                    if key_found >= 0:
-                        logger.info('Key material found from kra clone.')
-
-                    if key_not_found >= 0:
-                        logger.info('key not found, possibly empty kra')
-
-                    if key_not_found == -1 and key_found == -1:
-                        logger.info('Failure to get key material from kra')
-                        raise BaseException('KRA clone problem detected ' + cur_clone_msg)
-                else:
-                    raise BaseException('No data obtained from KRA clone.' + cur_clone_msg)
+                status = self.get_status(
+                    host.Hostname,
+                    host.SecurePort,
+                    '/kra/admin/kra/getStatus')

-            except BaseException as e:
-                logger.error("Internal error testing KRA clone. %s", e)
-                raise BaseException('Internal error testing KRA clone.' + cur_clone_msg)
+                logger.info('KRA at %s is %s', url, status)

-            return
+                if status != 'running':
+                    raise Exception('KRA at %s is %s' % (url, status))
+
+            except Exception as e:
+                logger.error('Unable to reach KRA at %s: %s', url, e)
+                raise Exception('Unable to reach KRA at %s: %s' % (url, e))

     def check_ocsp_clones(self):
         for host in self.clone_ocsps:
-            cur_clone_msg = ' Host: ' + host.Hostname + ' Port: ' + host.SecurePort
-            # Reach out to the ocsp clones
+
+            url = 'https://' + host.Hostname + ':' + host.SecurePort
+
             try:
-                output = self.contact_subsystem_using_sslget(
-                    host.SecurePort, host.Hostname, None,
-                    self.passwd, self.db_dir, None, '/ocsp/admin/ocsp/getStatus')
-
-                good_status = output.find('<State>1</State>')
-                if good_status == -1:
-                    raise BaseException('OCSP clone problem detected.' + cur_clone_msg)
-                logger.info('good_status %s ', good_status)
-            except BaseException as e:
-                logger.error("Internal error testing OCSP clone. %s", e)
-                raise BaseException('Internal error testing OCSP clone.' + cur_clone_msg)
+                status = self.get_status(
+                    host.Hostname,
+                    host.SecurePort,
+                    '/ocsp/admin/ocsp/getStatus')

-            return
+                logger.info('OCSP at %s is %s', url, status)
+
+                if status != 'running':
+                    raise Exception('OCSP at %s is %s' % (url, status))
+
+            except Exception as e:
+                logger.error('Unable to reach OCSP at %s: %s', url, e)
+                raise Exception('Unable to reach OCSP at %s: %s' % (url, e))

     def check_tks_clones(self):
         for host in self.clone_tkss:
-            cur_clone_msg = ' Host: ' + host.Hostname + ' Port: ' + host.SecurePort
-            # Reach out to the tks clones
+
+            url = 'https://' + host.Hostname + ':' + host.SecurePort
+
             try:
-                output = self.contact_subsystem_using_sslget(
-                    host.SecurePort, host.Hostname, None,
-                    self.passwd, self.db_dir, None, '/tks/admin/tks/getStatus')
-
-                good_status = output.find('<State>1</State>')
-                if good_status == -1:
-                    raise BaseException('TKS clone problem detected.' + cur_clone_msg)
-                logger.info('good_status %s ', good_status)
-            except BaseException as e:
-                logger.error("Internal error testing TKS clone. %s", e)
-                raise BaseException('Internal error testing TKS clone.' + cur_clone_msg)
+                status = self.get_status(
+                    host.Hostname,
+                    host.SecurePort,
+                    '/tks/admin/tks/getStatus')

-            return
+                logger.info('TKS at %s is %s', url, status)
+
+                if status != 'running':
+                    raise Exception('TKS at %s is %s' % (url, status))
+
+            except Exception as e:
+                logger.error('Unable to reach TKS at %s: %s', url, e)
+                raise Exception('Unable to reach TKS at %s: %s' % (url, e))

     def check_tps_clones(self):
         for host in self.clone_tpss:
-            cur_clone_msg = ' Host: ' + host.Hostname + ' Port: ' + host.SecurePort
-            # Reach out to the tps clones
+
+            url = 'https://' + host.Hostname + ':' + host.SecurePort
+
             try:
-                output = self.contact_subsystem_using_sslget(
-                    host.SecurePort, host.Hostname, None,
-                    self.passwd, self.db_dir, None, '/tps/admin/tps/getStatus')
-
-                good_status = output.find('<State>1</State>')
-                if good_status == -1:
-                    raise BaseException('TPS clone problem detected.' + cur_clone_msg)
-                logger.info('good_status %s ', good_status)
-            except BaseException as e:
-                logger.error("Internal error testing TPS clone. %s", e)
-                raise BaseException('Internal error testing TPS clone.' + cur_clone_msg)
-            return
+                status = self.get_status(
+                    host.Hostname,
+                    host.SecurePort,
+                    '/tps/admin/tps/getStatus')
+
+                logger.info('TPS at %s is %s', url, status)
+
+                if status != 'running':
+                    raise Exception('TPS at %s is %s' % (url, status))
+
+            except Exception as e:
+                logger.error('Unable to reach TPS at %s: %s', url, e)
+                raise Exception('Unable to reach TPS at %s: %s' % (url, e))

     @duration
     def check(self):
diff --git a/base/server/healthcheck/pki/server/healthcheck/clones/plugin.py b/base/server/healthcheck/pki/server/healthcheck/clones/plugin.py
index 2472f35b5b..824c36a1a9 100644
--- a/base/server/healthcheck/pki/server/healthcheck/clones/plugin.py
+++ b/base/server/healthcheck/pki/server/healthcheck/clones/plugin.py
@@ -6,6 +6,10 @@
 # SPDX-License-Identifier: GPL-2.0-or-later
 #

+import json
+import logging
+import xml.etree.ElementTree as ET
+
 from ipahealthcheck.core.plugin import Plugin, Registry
 from pki.server.instance import PKIInstance
 from pki.client import PKIConnection
@@ -13,9 +17,6 @@ from pki.system import SecurityDomainClient

 from pki.server.healthcheck.core.main import merge_dogtag_config

-import logging
-import subprocess
-
 logger = logging.getLogger(__name__)

 # Temporary workaround to skip VERBOSE data. Fix already pushed to upstream
@@ -46,60 +47,36 @@ class ClonesPlugin(Plugin):

         self.instance = PKIInstance(self.config.instance_name)

-    def contact_subsystem_using_pki(
-            self, subport, subhost, subsystemnick,
-            token_pwd, db_path, cmd, exts=None):
-        command = ["/usr/bin/pki",
-                   "-p", str(subport),
-                   "-h", subhost,
-                   "-n", subsystemnick,
-                   "-P", "https",
-                   "-d", db_path,
-                   "-c", token_pwd,
-                   cmd]
-
-        if exts is not None:
-            command.extend(exts)
-
-        output = None
-        try:
-            output = subprocess.check_output(command, stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            output = e.output.decode('utf-8')
-            return output
+    def get_status(self, host, port, path):

-        output = output.decode('utf-8')
+        self.instance.export_ca_cert()

-        return output
+        connection = PKIConnection(
+            protocol='https',
+            hostname=host,
+            port=port,
+            cert_paths=self.instance.ca_cert)

-    def contact_subsystem_using_sslget(
-            self, port, host, subsystemnick,
-            token_pwd, db_path, params, url):
+        response = connection.get(path)

-        command = ["/usr/bin/sslget"]
+        content_type = response.headers['Content-Type']
+        content = response.text
+        logger.info('Content:\n%s', content)

-        if subsystemnick is not None:
-            command.extend(["-n", subsystemnick])
+        # https://github.com/dogtagpki/pki/wiki/GetStatus-Service
+        if content_type == 'application/json':
+            json_response = json.loads(content)
+            status = json_response['Response']['Status']

-        command.extend(["-p", token_pwd, "-d", db_path])
-
-        if params is not None:
-            command.extend(["-e", params])
-
-        command.extend([
-            "-r", url, host + ":" + port])
-
-        logger.info(' command : %s ', command)
-        output = None
-        try:
-            output = subprocess.check_output(command, stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            output = e.output.decode('utf-8')
-            return output
+        elif content_type == 'application/xml':
+            root = ET.fromstring(content)
+            status = root.findtext('Status')

-        output = output.decode('utf-8')
+        else:
+            raise Exception('Unsupported content-type: %s' % content_type)

-        return output
+        logger.info('Status: %s', status)
+        return status

     def get_security_domain_data(self, host, port):
         domain_data = None
diff --git a/base/server/python/pki/server/__init__.py b/base/server/python/pki/server/__init__.py
index 4fbb74684b..0515bbb197 100644
--- a/base/server/python/pki/server/__init__.py
+++ b/base/server/python/pki/server/__init__.py
@@ -241,6 +241,10 @@ class PKIServer(object):
     def jss_conf(self):
         return os.path.join(self.conf_dir, 'jss.conf')

+    @property
+    def ca_cert(self):
+        return os.path.join(self.nssdb_dir, 'ca.crt')
+
     def is_valid(self):
         return self.exists()

@@ -259,8 +263,6 @@ class PKIServer(object):

     def export_ca_cert(self):

-        ca_path = os.path.join(self.nssdb_dir, 'ca.crt')
-
         token = pki.nssdb.INTERNAL_TOKEN_NAME
         nickname = self.get_sslserver_cert_nickname()

@@ -272,7 +274,7 @@ class PKIServer(object):
         nssdb = self.open_nssdb(token=token)

         try:
-            nssdb.extract_ca_cert(ca_path, nickname)
+            nssdb.extract_ca_cert(self.ca_cert, nickname)
         finally:
             nssdb.close()

--
2.33.1
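
The response handling that get_status() introduces above reduces to a small standalone helper: the GetStatus service may answer in JSON or XML, and only the Status field matters. A minimal sketch; the sample payloads are illustrative:

import json
import xml.etree.ElementTree as ET

def parse_status(content_type, content):
    # mirror the two content types handled by get_status()
    if content_type == 'application/json':
        return json.loads(content)['Response']['Status']
    if content_type == 'application/xml':
        return ET.fromstring(content).findtext('Status')
    raise ValueError('Unsupported content-type: %s' % content_type)

print(parse_status('application/json',
                   '{"Response": {"Status": "running"}}'))                  # running
print(parse_status('application/xml',
                   '<XMLResponse><Status>running</Status></XMLResponse>'))  # running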

SOURCES/0001-Fix-pki-server-migrate-CLI.patch (deleted file, 225 lines)
@@ -1,225 +0,0 @@
From bbdb82268026821cd6a00edae09cc30079effd30 Mon Sep 17 00:00:00 2001
From: "Endi S. Dewata" <edewata@redhat.com>
Date: Tue, 8 Mar 2022 15:19:09 -0600
Subject: [PATCH] Fix pki-server migrate CLI

The pki-server migrate CLI has been modified to configure the
AJP connectors with either secret or requiredSecret parameter
(mutually exclusive) depending on the Tomcat version.

https://bugzilla.redhat.com/show_bug.cgi?id=2061458
---
 base/server/python/pki/server/cli/migrate.py |  60 ----------
 base/server/python/pki/server/instance.py    | 118 +++++++++++++++++++
 2 files changed, 118 insertions(+), 60 deletions(-)

diff --git a/base/server/python/pki/server/cli/migrate.py b/base/server/python/pki/server/cli/migrate.py
index 2005004c4e..6e0ed6c2a7 100644
--- a/base/server/python/pki/server/cli/migrate.py
+++ b/base/server/python/pki/server/cli/migrate.py
@@ -23,7 +23,6 @@ from __future__ import print_function

 import getopt
 import logging
-import re
 import sys

 from lxml import etree
@@ -104,62 +103,3 @@ class MigrateCLI(pki.cli.CLI):

         for instance in instances:
             instance.init()
-
-        # update AJP connectors for Tomcat 9.0.31 or later
-
-        tomcat_version = pki.server.Tomcat.get_version()
-        if tomcat_version >= pki.util.Version('9.0.31'):
-
-            for instance in instances:
-                self.update_ajp_connectors(instance)
-
-    def update_ajp_connectors(self, instance):
-
-        logger.info('Updating AJP connectors in %s', instance.server_xml)
-
-        document = etree.parse(instance.server_xml, self.parser)
-        server = document.getroot()
-
-        # replace 'requiredSecret' with 'secret' in comments
-
-        services = server.findall('Service')
-        for service in services:
-
-            children = list(service)
-            for child in children:
-
-                if not isinstance(child, etree._Comment):  # pylint: disable=protected-access
-                    # not a comment -> skip
-                    continue
-
-                if 'protocol="AJP/1.3"' not in child.text:
-                    # not an AJP connector -> skip
-                    continue
-
-                child.text = re.sub(r'requiredSecret=',
-                                    r'secret=',
-                                    child.text,
-                                    flags=re.MULTILINE)
-
-        # replace 'requiredSecret' with 'secret' in Connectors
-
-        connectors = server.findall('Service/Connector')
-        for connector in connectors:
-
-            if connector.get('protocol') != 'AJP/1.3':
-                # not an AJP connector -> skip
-                continue
-
-            if connector.get('secret'):
-                # already has a 'secret' -> skip
-                continue
-
-            if connector.get('requiredSecret') is None:
-                # does not have a 'requiredSecret' -> skip
-                continue
-
-            value = connector.attrib.pop('requiredSecret')
-            connector.set('secret', value)
-
-        with open(instance.server_xml, 'wb') as f:
-            document.write(f, pretty_print=True, encoding='utf-8')
diff --git a/base/server/python/pki/server/instance.py b/base/server/python/pki/server/instance.py
index ad938b841d..ff43dae8ec 100644
--- a/base/server/python/pki/server/instance.py
+++ b/base/server/python/pki/server/instance.py
@@ -836,9 +836,127 @@ class PKIInstance(pki.server.PKIServer):
             nssdb.close()
         shutil.rmtree(tmpdir)

+    def configure_ajp_connectors_secret(self):
+
+        logger.info('Configuring AJP connectors secret')
+
+        document = etree.parse(self.server_xml, parser)
+        server = document.getroot()
+
+        # replace 'requiredSecret' with 'secret' in comments
+
+        services = server.findall('Service')
+        for service in services:
+
+            children = list(service)
+            for child in children:
+
+                if not isinstance(child, etree._Comment):  # pylint: disable=protected-access
+                    # not a comment -> skip
+                    continue
+
+                if 'protocol="AJP/1.3"' not in child.text:
+                    # not an AJP connector -> skip
+                    continue
+
+                child.text = re.sub(r'requiredSecret=',
+                                    r'secret=',
+                                    child.text,
+                                    flags=re.MULTILINE)
+
+        # replace 'requiredSecret' with 'secret' in Connectors
+
+        connectors = server.findall('Service/Connector')
+        for connector in connectors:
+
+            if connector.get('protocol') != 'AJP/1.3':
+                # not an AJP connector -> skip
+                continue
+
+            # remove existing 'requiredSecret' if any
+            value = connector.attrib.pop('requiredSecret', None)
+            print('AJP connector requiredSecret: %s' % value)
+
+            if connector.get('secret'):
+                # already has a 'secret' -> skip
+                continue
+
+            if not value:
+                raise Exception('Missing AJP connector secret in %s' % self.server_xml)
+
+            # store 'secret'
+            connector.set('secret', value)
+
+        with open(self.server_xml, 'wb') as f:
+            document.write(f, pretty_print=True, encoding='utf-8')
+
+    def configure_ajp_connectors_required_secret(self):
+
+        logger.info('Configuring AJP connectors requiredSecret')
+
+        document = etree.parse(self.server_xml, parser)
+        server = document.getroot()
+
+        # replace 'secret' with 'requiredSecret' in comments
+
+        services = server.findall('Service')
+        for service in services:
+
+            children = list(service)
+            for child in children:
+
+                if not isinstance(child, etree._Comment):  # pylint: disable=protected-access
+                    # not a comment -> skip
+                    continue
+
+                if 'protocol="AJP/1.3"' not in child.text:
+                    # not an AJP connector -> skip
+                    continue
+
+                child.text = re.sub(r'secret=',
+                                    r'requiredSecret=',
+                                    child.text,
+                                    flags=re.MULTILINE)
+
+        # replace 'secret' with 'requiredSecret' in Connectors
+
+        connectors = server.findall('Service/Connector')
+        for connector in connectors:
+
+            if connector.get('protocol') != 'AJP/1.3':
+                # not an AJP connector -> skip
+                continue
+
+            # remove existing 'secret' if any
+            value = connector.attrib.pop('secret', None)
+            print('AJP connector secret: %s' % value)
+
+            if connector.get('requiredSecret'):
+                # already has a 'requiredSecret' -> skip
+                continue
+
+            if not value:
+                raise Exception('Missing AJP connector requiredSecret in %s' % self.server_xml)
+
+            # store 'requiredSecret'
+            connector.set('requiredSecret', value)
+
+        with open(self.server_xml, 'wb') as f:
+            document.write(f, pretty_print=True, encoding='utf-8')
+
+    def configure_ajp_connectors(self):
+
+        tomcat_version = pki.server.Tomcat.get_version()
+
+        if tomcat_version >= pki.util.Version('9.0.31'):
+            self.configure_ajp_connectors_secret()
+        else:
+            self.configure_ajp_connectors_required_secret()
+
     def init(self):
         super(PKIInstance, self).init()
         self.validate_banner()
+        self.configure_ajp_connectors()

     @classmethod
     def instances(cls):
--
2.33.1
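
The core of the patch above is a version gate: Tomcat 9.0.31 renamed the AJP attribute requiredSecret to secret, so the connector must be written with whichever attribute the installed Tomcat understands. A minimal sketch of that decision, using a plain tuple comparison in place of pki.util.Version:

def ajp_secret_attribute(tomcat_version):
    """Tomcat 9.0.31+ uses 'secret'; older releases use 'requiredSecret'."""
    parsed = tuple(int(part) for part in tomcat_version.split('.'))
    return 'secret' if parsed >= (9, 0, 31) else 'requiredSecret'

assert ajp_secret_attribute('9.0.31') == 'secret'
assert ajp_secret_attribute('9.0.30') == 'requiredSecret'
assert ajp_secret_attribute('8.5.65') == 'requiredSecret'

Because the two attributes are mutually exclusive, configure_ajp_connectors() rewrites server.xml in whichever direction the gate selects, which keeps an instance working across Tomcat upgrades and downgrades alike.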

SOURCES/0001-Fix-replica-reinstallation.patch (deleted file, 289 lines)
@@ -1,289 +0,0 @@
From 5d377f31292da71f6ec4a29b13a66a9bea967102 Mon Sep 17 00:00:00 2001
From: "Endi S. Dewata" <edewata@redhat.com>
Date: Tue, 2 Nov 2021 14:46:02 -0500
Subject: [PATCH] Fix replica reinstallation

The pkispawn and pkidestroy have been modified to ignore
failures caused by adding an entry or attribute that
already exists, and to check whether a file exists before
removing it during replica removal and reinstallation.

One of the CA clone tests has been modified to test
removing and reinstalling a replica.

Resolves: https://github.com/dogtagpki/pki/issues/3544
---
 .github/workflows/ca-tests.yml                |  11 ++
 .../python/pki/server/deployment/__init__.py  |  39 +++++--
 .../scriptlets/webapp_deployment.py           |  19 +--
 .../cms/servlet/csadmin/LDAPConfigurator.java | 110 +++++++++++-------
 4 files changed, 116 insertions(+), 63 deletions(-)

diff --git a/.github/workflows/ca-tests.yml b/.github/workflows/ca-tests.yml
index 4832e73c65..fffcb9c3e4 100644
--- a/.github/workflows/ca-tests.yml
+++ b/.github/workflows/ca-tests.yml
@@ -1137,6 +1137,17 @@ jobs:
               --pkcs12-password-file ${PKIDIR}/pkcs12_password.conf
           docker exec secondary pki -n caadmin ca-user-show caadmin

+      - name: Remove CA from secondary PKI container
+        run: |
+          docker exec secondary pkidestroy -i pki-tomcat -s CA -v
+
+      - name: Re-install CA in secondary PKI container
+        run: |
+          docker exec secondary pkispawn \
+              -f /usr/share/pki/server/examples/installation/ca-secure-ds-secondary.cfg \
+              -s CA \
+              -v
+
       - name: Gather artifacts from primary container
         if: always()
         run: |
diff --git a/base/server/python/pki/server/deployment/__init__.py b/base/server/python/pki/server/deployment/__init__.py
index 6eb5b0a78a..d179718dd6 100644
--- a/base/server/python/pki/server/deployment/__init__.py
+++ b/base/server/python/pki/server/deployment/__init__.py
@@ -1074,26 +1074,41 @@ class PKIDeployer:
         secure_port = server_config.get_secure_port()

         uid = 'CA-%s-%s' % (self.mdict['pki_hostname'], secure_port)
-
         logger.info('Adding %s', uid)
-        subsystem.add_user(
-            uid,
-            full_name=uid,
-            user_type='agentType',
-            state='1')

-        logger.info('Adding subsystem certificate into %s', uid)
+        try:
+            subsystem.add_user(
+                uid,
+                full_name=uid,
+                user_type='agentType',
+                state='1')
+        except Exception:    # pylint: disable=W0703
+            logger.warning('Unable to add %s', uid)
+            # TODO: ignore error only if user already exists
+
         cert_data = pki.nssdb.convert_cert(
             cert['data'],
             'base64',
             'pem')
-        subsystem.add_user_cert(
-            uid,
-            cert_data=cert_data.encode(),
-            cert_format='PEM')
+
+        logger.info('Adding certificate for %s', uid)
+
+        try:
+            subsystem.add_user_cert(
+                uid,
+                cert_data=cert_data.encode(),
+                cert_format='PEM')
+        except Exception:    # pylint: disable=W0703
+            logger.warning('Unable to add certificate for %s', uid)
+            # TODO: ignore error only if user cert already exists

         logger.info('Adding %s into Subsystem Group', uid)
-        subsystem.add_group_member('Subsystem Group', uid)
+
+        try:
+            subsystem.add_group_member('Subsystem Group', uid)
+        except Exception:    # pylint: disable=W0703
+            logger.warning('Unable to add %s into Subsystem Group', uid)
+            # TODO: ignore error only if user already exists in the group

     def backup_keys(self, instance, subsystem):

diff --git a/base/server/python/pki/server/deployment/scriptlets/webapp_deployment.py b/base/server/python/pki/server/deployment/scriptlets/webapp_deployment.py
index 342477028a..f9e73fd069 100644
--- a/base/server/python/pki/server/deployment/scriptlets/webapp_deployment.py
+++ b/base/server/python/pki/server/deployment/scriptlets/webapp_deployment.py
@@ -60,12 +60,13 @@ class PkiScriptlet(pkiscriptlet.AbstractBasePkiScriptlet):

         logger.info('Undeploying /%s web application', deployer.mdict['pki_subsystem'].lower())

-        # Delete <instance>/Catalina/localhost/<subsystem>.xml
-        pki.util.remove(
-            path=os.path.join(
-                deployer.mdict['pki_instance_configuration_path'],
-                "Catalina",
-                "localhost",
-                deployer.mdict['pki_subsystem'].lower() + ".xml"),
-            force=deployer.mdict['pki_force_destroy']
-        )
+        # Delete <instance>/Catalina/localhost/<subsystem>.xml if exists
+
+        context_xml = os.path.join(
+            deployer.mdict['pki_instance_configuration_path'],
+            'Catalina',
+            'localhost',
+            deployer.mdict['pki_subsystem'].lower() + '.xml')
+
+        if os.path.exists(context_xml):
+            pki.util.remove(context_xml)
diff --git a/base/server/src/main/java/com/netscape/cms/servlet/csadmin/LDAPConfigurator.java b/base/server/src/main/java/com/netscape/cms/servlet/csadmin/LDAPConfigurator.java
index 651d166321..1e0364cfea 100644
--- a/base/server/src/main/java/com/netscape/cms/servlet/csadmin/LDAPConfigurator.java
+++ b/base/server/src/main/java/com/netscape/cms/servlet/csadmin/LDAPConfigurator.java
@@ -661,26 +661,35 @@ public class LDAPConfigurator {

         try {
             connection.add(entry);
+            // replication manager added -> done
+            return;

         } catch (LDAPException e) {
-            if (e.getLDAPResultCode() == LDAPException.ENTRY_ALREADY_EXISTS) {
-                logger.warn("Entry already exists: " + dn);
+            if (e.getLDAPResultCode() != LDAPException.ENTRY_ALREADY_EXISTS) {
+                logger.error("Unable to add " + dn + ": " + e.getMessage(), e);
+                throw e;
+            }
+            logger.warn("Replication manager already exists: " + dn);
+        }

-                try {
-                    logger.info("Deleting " + dn);
-                    connection.delete(dn);
+        logger.warn("Deleting existing replication manager: " + dn);

-                    logger.info("Re-adding " + dn);
-                    connection.add(entry);
+        try {
+            connection.delete(dn);

-                } catch (LDAPException ee) {
-                    logger.warn("Unable to recreate " + dn + ": " + ee.getMessage());
-                }
+        } catch (LDAPException e) {
+            logger.error("Unable to delete " + dn + ": " + e.getMessage());
+            throw e;
+        }

-            } else {
-                logger.error("Unable to add " + dn + ": " + e.getMessage(), e);
-                throw e;
-            }
+        logger.warn("Adding new replication manager: " + dn);
+
+        try {
+            connection.add(entry);
+
+        } catch (LDAPException e) {
+            logger.error("Unable to add " + dn + ": " + e.getMessage());
+            throw e;
         }
     }

@@ -799,28 +808,41 @@ public class LDAPConfigurator {

         try {
             connection.add(entry);
+            // replica object added -> done
+            return true;

         } catch (LDAPException e) {
-
             if (e.getLDAPResultCode() != LDAPException.ENTRY_ALREADY_EXISTS) {
+                logger.error("Unable to add " + replicaDN + ": " + e.getMessage(), e);
                 throw e;
             }
+            logger.warn("Replica object already exists: " + replicaDN);
+        }
+
+        logger.info("Adding replica bind DN");

-            // BZ 470918: We can't just add the new dn.
-            // We need to do a replace until the bug is fixed.
-            logger.warn("Entry already exists, adding bind DN");
+        // BZ 470918: We can't just add the new dn.
+        // We need to do a replace until the bug is fixed.

-            entry = connection.read(replicaDN);
-            LDAPAttribute attr = entry.getAttribute("nsDS5ReplicaBindDN");
-            attr.addValue(bindDN);
+        entry = connection.read(replicaDN);
+        LDAPAttribute attr = entry.getAttribute("nsDS5ReplicaBindDN");
+        attr.addValue(bindDN);

-            LDAPModification mod = new LDAPModification(LDAPModification.REPLACE, attr);
+        LDAPModification mod = new LDAPModification(LDAPModification.REPLACE, attr);
+
+        try {
             connection.modify(replicaDN, mod);
+            // replica bind DN added -> done

-            return false;
+        } catch (LDAPException e) {
+            if (e.getLDAPResultCode() != LDAPException.ATTRIBUTE_OR_VALUE_EXISTS) {
+                logger.error("Unable to add " + bindDN + ": " + e.getMessage(), e);
+                throw e;
+            }
+            logger.warn("Replica bind DN already exists: " + bindDN);
         }

-        return true;
+        return false;
     }

     public void createReplicationAgreement(
@@ -864,29 +886,33 @@ public class LDAPConfigurator {

         try {
             connection.add(entry);
+            // replication agreement added -> done
+            return;

         } catch (LDAPException e) {
-            if (e.getLDAPResultCode() == LDAPException.ENTRY_ALREADY_EXISTS) {
-                logger.warn("Entry already exists: " + dn);
-
-                try {
-                    connection.delete(dn);
-                } catch (LDAPException ee) {
-                    logger.error("Unable to delete " + dn + ": " + ee.getMessage(), ee);
-                    throw ee;
-                }
-
-                try {
-                    connection.add(entry);
-                } catch (LDAPException ee) {
-                    logger.error("Unable to add " + dn + ": " + ee.getMessage(), ee);
-                    throw ee;
-                }
-
-            } else {
+            if (e.getLDAPResultCode() != LDAPException.ENTRY_ALREADY_EXISTS) {
                 logger.error("Unable to add " + dn + ": " + e.getMessage(), e);
                 throw e;
             }
+            logger.warn("Replication agreement already exists: " + dn);
+        }
+
+        logger.warn("Removing existing replication agreement: " + dn);
+
+        try {
+            connection.delete(dn);
+        } catch (LDAPException e) {
+            logger.error("Unable to delete " + dn + ": " + e.getMessage(), e);
+            throw e;
+        }
+
+        logger.warn("Adding new replication agreement: " + dn);
+
+        try {
+            connection.add(entry);
+        } catch (LDAPException e) {
+            logger.error("Unable to add " + dn + ": " + e.getMessage(), e);
+            throw e;
+        }
     }

--
2.31.1
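
The LDAPConfigurator changes above follow one pattern: try the add, treat ENTRY_ALREADY_EXISTS as a stale entry from the previous install, then delete and re-add so the replica gets a fresh copy. A hedged sketch of the same pattern in python-ldap; the server URL, bind credentials, DN, and attributes below are hypothetical:

import ldap
import ldap.modlist

def recreate_entry(conn, dn, attrs):
    modlist = ldap.modlist.addModlist(attrs)
    try:
        conn.add_s(dn, modlist)
        return                    # entry added -> done
    except ldap.ALREADY_EXISTS:
        pass                      # stale entry -> delete and re-add below
    conn.delete_s(dn)
    conn.add_s(dn, modlist)

conn = ldap.initialize('ldap://localhost:389')            # hypothetical server
conn.simple_bind_s('cn=Directory Manager', 'Secret.123')  # hypothetical creds
recreate_entry(conn, 'cn=replica manager,cn=config',      # hypothetical DN
               {'objectClass': [b'top', b'person'],
                'cn': [b'replica manager'],
                'sn': [b'manager']})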

SPECS/pki-core.spec
@@ -12,10 +12,11 @@ License: GPLv2 and LGPLv2

 # For development (i.e. unsupported) releases, use x.y.z-0.n.<phase>.
 # For official (i.e. supported) releases, use x.y.z-r where r >=1.
-Version: 10.11.2
-Release: 5%{?_timestamp}%{?_commit_id}%{?dist}
+Version: 10.12.0
+Release: 2%{?_timestamp}%{?_commit_id}%{?dist}
+#global _phase -alpha1

 # To create a tarball from a version tag:
 # $ git archive \
 #     --format=tar.gz \
@@ -30,10 +31,8 @@ Source: https://github.com/dogtagpki/pki/archive/v%{version}%{?_phase}/pki-%{ver
 #     <version tag> \
 #     > pki-VERSION-RELEASE.patch
 # Patch: pki-VERSION-RELEASE.patch
-Patch1: 0001-Fix-Bug-2001576-pki-instance-creation-fails-for-IPA-.patch
-Patch2: 0001-Fix-replica-reinstallation.patch
-Patch3: 0001-Fix-AJP-connector-migration.patch
-Patch4: 0001-Fix-pki-server-migrate-CLI.patch
+
+Patch: 0001-Fix-pki-healthcheck-for-clones.patch

 # md2man isn't available on i686. Additionally, we aren't generally multi-lib
 # compatible (https://fedoraproject.org/wiki/Packaging:Java)
@@ -359,6 +358,9 @@
 Summary: PKI Base Package
 BuildArch: noarch

+Obsoletes: pki-base < %{version}-%{release}
+Provides: pki-base = %{version}-%{release}
+
 Requires: nss >= 3.36.1

 Requires: python3-pki = %{version}-%{release}
@@ -383,6 +385,7 @@

 Obsoletes: pki-base-python3 < %{version}
 Provides: pki-base-python3 = %{version}-%{release}

 %if 0%{?fedora} || 0%{?rhel} > 8
 %{?python_provide:%python_provide python3-pki}
 %endif
@@ -408,6 +411,9 @@
 Summary: PKI Base Java Package
 BuildArch: noarch

+Obsoletes: pki-base-java < %{version}-%{release}
+Provides: pki-base-java = %{version}-%{release}
+
 Requires: %{java_headless}
 Requires: apache-commons-cli
 Requires: apache-commons-codec
@@ -476,6 +482,9 @@
 Summary: PKI Server Package
 BuildArch: noarch

+Obsoletes: pki-server < %{version}-%{release}
+Provides: pki-server = %{version}-%{release}
+
 Requires: hostname

 Requires: policycoreutils
@@ -729,6 +738,9 @@
 Summary: PKI Javadoc Package
 BuildArch: noarch

+Obsoletes: pki-javadoc < %{version}-%{release}
+Provides: pki-javadoc = %{version}-%{release}
+
 # Ensure we end up with a useful installation
 Conflicts: pki-base < %{version}
 Conflicts: pki-symkey < %{version}
@@ -749,6 +761,9 @@
 Summary: PKI Console Package
 BuildArch: noarch

+Obsoletes: pki-console < %{version}-%{release}
+Provides: pki-console = %{version}-%{release}
+
 BuildRequires: idm-console-framework >= 1.2.0

 Requires: idm-console-framework >= 1.2.0
@@ -769,6 +784,7 @@
 Summary: %{brand} PKI Server Theme Package
 BuildArch: noarch

 Obsoletes: pki-server-theme < %{version}-%{release}
 Provides: pki-server-theme = %{version}-%{release}

 # Ensure we end up with a useful installation
@@ -788,6 +804,7 @@
 Summary: %{brand} PKI Console Theme Package
 BuildArch: noarch

 Obsoletes: pki-console-theme < %{version}-%{release}
 Provides: pki-console-theme = %{version}-%{release}

 # Ensure we end up with a useful installation
@@ -1366,20 +1383,16 @@ fi

 ################################################################################
 %changelog
-* Wed Mar 09 2022 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.2-5
-- Bug 2061458 - Additional fix for AJP connector migration
+* Thu Feb 03 2022 Red Hat PKI Team <rhcs-maint@redhat.com> 10.12.0-2
+- Bug 2027470 - pki-healthcheck ClonesConnectivyAndDataCheck fails

-* Tue Jan 04 2022 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.2-4
-- Bug 2029023 - Fix AJP connector migration
-
-* Tue Dec 14 2021 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.2-3
-- Bug 2024676 - Unable to reinstall PKI clone
-
-* Fri Sep 24 2021 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.2-2
-- Bug 2001576 - pki instance creation fails for IPA in FIPS mode
-
-* Fri Sep 17 2021 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.2-1
-- Rebase to PKI 10.11.2
+* Tue Nov 09 2021 Red Hat PKI Team <rhcs-maint@redhat.com> 10.12.0-0.1
+- Rebase to PKI 10.12.0
+- Bug 1904112 - pki fails to start if empty dir /var/lib/pki/pki-tomcat/kra exists
+- Bug 1984455 - [RFE] Date Format on the TPS Agent Page
+- Bug 1980378 - 'keyctl_search: Required key not available' message when running 'ipa-healthcheck'
+- Bug 2004084 - Reinstall of the same ipa-replica fails with 'RuntimeError: CA configuration failed.'
+- Bug 2006070 - Upgrades incorrectly add secret attribute to connectors

 * Thu Aug 12 2021 Red Hat PKI Team <rhcs-maint@redhat.com> 10.11.0-2
 - Bug 1992337 - Double issuance of non-CA subsystem certs at installation