Merge branch 'c8-beta' into a8-beta

commit e576580e84
Authored by eabdullin, 2023-03-29 06:57:27 +00:00; committed by Stepan Oksanichenko
15 changed files with 3235 additions and 2773 deletions

.gitignore

@@ -3,23 +3,23 @@ SOURCES/backports-3.23.0.gem
SOURCES/dacite-1.6.0.tar.gz
SOURCES/daemons-1.4.1.gem
SOURCES/dataclasses-0.8.tar.gz
SOURCES/ethon-0.15.0.gem
SOURCES/ethon-0.16.0.gem
SOURCES/eventmachine-1.2.7.gem
SOURCES/ffi-1.15.5.gem
SOURCES/json-2.6.2.gem
SOURCES/mustermann-1.1.1.gem
SOURCES/json-2.6.3.gem
SOURCES/mustermann-2.0.2.gem
SOURCES/open4-1.3.4-1.gem
SOURCES/pcs-0.10.14.tar.gz
SOURCES/pcs-0.10.15.tar.gz
SOURCES/pcs-web-ui-0.1.13.tar.gz
SOURCES/pcs-web-ui-node-modules-0.1.13.tar.xz
SOURCES/pyagentx-0.4.pcs.2.tar.gz
SOURCES/python-dateutil-2.8.1.tar.gz
SOURCES/rack-2.2.3.1.gem
SOURCES/rack-protection-2.2.0.gem
SOURCES/rack-test-1.1.0.gem
SOURCES/python-dateutil-2.8.2.tar.gz
SOURCES/rack-2.2.5.gem
SOURCES/rack-protection-2.2.4.gem
SOURCES/rack-test-2.0.2.gem
SOURCES/rexml-3.2.5.gem
SOURCES/ruby2_keywords-0.0.5.gem
SOURCES/sinatra-2.2.0.gem
SOURCES/sinatra-2.2.4.gem
SOURCES/thin-1.8.1.gem
SOURCES/tilt-2.0.10.gem
SOURCES/tilt-2.0.11.gem
SOURCES/tornado-6.1.0.tar.gz


@@ -3,23 +3,23 @@ a1826c59be8be0b9321d6604cca0c26ddd9b81fd SOURCES/HAM-logo.png
31546c37fbdc6270d5097687619e9c0db6f1c05c SOURCES/dacite-1.6.0.tar.gz
4795a8962cc1608bfec0d91fa4d438c7cfe90c62 SOURCES/daemons-1.4.1.gem
8b7598273d2ae6dad2b88466aefac55071a41926 SOURCES/dataclasses-0.8.tar.gz
29697a34b8cd9df4a77c650e2a38d0a36cdeee8b SOURCES/ethon-0.15.0.gem
5b56a68268708c474bef04550639ded3add5e946 SOURCES/ethon-0.16.0.gem
7a5b2896e210fac9759c786ee4510f265f75b481 SOURCES/eventmachine-1.2.7.gem
97632b7975067266c0b39596de0a4c86d9330658 SOURCES/ffi-1.15.5.gem
86c10824191e8f351da3fe0a0b6db94a813ada3a SOURCES/json-2.6.2.gem
50a4e37904485810cb05e27d75c9783e5a8f3402 SOURCES/mustermann-1.1.1.gem
6d78f730b7f3b25fb3f93684fe1364acf58bce6b SOURCES/json-2.6.3.gem
f5f804366823c1126791dfefd98dd0539563785c SOURCES/mustermann-2.0.2.gem
41a7fe9f8e3e02da5ae76c821b89c5b376a97746 SOURCES/open4-1.3.4-1.gem
825eab03553c98465e1de265c151ece149ddba04 SOURCES/pcs-0.10.14.tar.gz
00e234824e85afca99df9043dd6eb47490b220c4 SOURCES/pcs-0.10.15.tar.gz
f7455776936492ce7b241f9801d6bbc946b0461a SOURCES/pcs-web-ui-0.1.13.tar.gz
bd18d97d611233914828719c97b4d98d079913d2 SOURCES/pcs-web-ui-node-modules-0.1.13.tar.xz
3176b2f2b332c2b6bf79fe882e83feecf3d3f011 SOURCES/pyagentx-0.4.pcs.2.tar.gz
bd26127e57f83a10f656b62c46524c15aeb844dd SOURCES/python-dateutil-2.8.1.tar.gz
be609467c819d263c138c417548431b81f8da216 SOURCES/rack-2.2.3.1.gem
21cfac2453436c6856da31e741bbfa59da4973e1 SOURCES/rack-protection-2.2.0.gem
b80bc5ca38a885e747271675ba91dd3d02136bf1 SOURCES/rack-test-1.1.0.gem
c2ba10c775b7a52a4b57cac4d4110a0c0f812a82 SOURCES/python-dateutil-2.8.2.tar.gz
3ad7b27b68d5dd893ce91f216bb2685ae6c9846a SOURCES/rack-2.2.5.gem
5347315a7283f0b04443e924ed4eaa17807432c8 SOURCES/rack-protection-2.2.4.gem
3c669527ecbcb9f915a83983ec89320c356e1fe3 SOURCES/rack-test-2.0.2.gem
e7f48fa5fb2d92e6cb21d6b1638fe41a5a7c4287 SOURCES/rexml-3.2.5.gem
d017b9e4d1978e0b3ccc3e2a31493809e4693cd3 SOURCES/ruby2_keywords-0.0.5.gem
5f0d7e63f9d8683f39ad23afe7e00b99602b87cc SOURCES/sinatra-2.2.0.gem
fa6a6c98f885e93f54c23dd0454cae906e82c31b SOURCES/sinatra-2.2.4.gem
1ac6292a98e17247b7bb847a35ff868605256f7b SOURCES/thin-1.8.1.gem
d265c822a6b228392d899e9eb5114613d65e6967 SOURCES/tilt-2.0.10.gem
360d77c80d2851a538fb13d43751093115c34712 SOURCES/tilt-2.0.11.gem
c23c617c7a0205e465bebad5b8cdf289ae8402a2 SOURCES/tornado-6.1.0.tar.gz
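
The entries above follow the usual dist-git convention: each line maps a SHA-1 checksum to a source archive kept in a lookaside cache rather than in git. A minimal sketch of how such a checksum would be verified after download (illustrative only; the path and expected value are taken from the list above):

import hashlib

def sha1_of(path: str) -> str:
    # Hash the file in chunks so large tarballs do not need to fit in memory.
    digest = hashlib.sha1()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()

# e.g. sha1_of("SOURCES/pcs-0.10.15.tar.gz") should equal
# "00e234824e85afca99df9043dd6eb47490b220c4"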

File diff suppressed because it is too large.


@@ -1,126 +0,0 @@
From d6258ba9643b4d7528ceff65d433024104942a4c Mon Sep 17 00:00:00 2001
From: Tomas Jelinek <tojeline@redhat.com>
Date: Thu, 14 Jul 2022 16:46:05 +0200
Subject: [PATCH 2/4] make booth ticket mode value case insensitive
---
pcs/lib/booth/config_validators.py | 10 ++++++++
pcs/lib/commands/booth.py | 14 +++++++++---
pcs_test/tier0/lib/commands/test_booth.py | 28 ++++++++++++++++-------
3 files changed, 41 insertions(+), 11 deletions(-)
diff --git a/pcs/lib/booth/config_validators.py b/pcs/lib/booth/config_validators.py
index 99badc46..6c4a4ddc 100644
--- a/pcs/lib/booth/config_validators.py
+++ b/pcs/lib/booth/config_validators.py
@@ -100,6 +100,16 @@ def remove_ticket(conf_facade, ticket_name):
return []
+def ticket_options_normalization() -> validate.TypeNormalizeFunc:
+ return validate.option_value_normalization(
+ {
+ "mode": (
+ lambda value: value.lower() if isinstance(value, str) else value
+ )
+ }
+ )
+
+
def validate_ticket_name(ticket_name: str) -> reports.ReportItemList:
if not __TICKET_NAME_RE.search(ticket_name):
return [
diff --git a/pcs/lib/commands/booth.py b/pcs/lib/commands/booth.py
index e7891fbe..fc1454ce 100644
--- a/pcs/lib/commands/booth.py
+++ b/pcs/lib/commands/booth.py
@@ -23,7 +23,10 @@ from pcs.common.reports.item import (
)
from pcs.common.services.errors import ManageServiceError
from pcs.common.str_tools import join_multilines
-from pcs.lib import tools
+from pcs.lib import (
+ tools,
+ validate,
+)
from pcs.lib.booth import (
config_files,
config_validators,
@@ -329,17 +332,22 @@ def config_ticket_add(
booth_env = env.get_booth_env(instance_name)
try:
booth_conf = booth_env.config.read_to_facade()
+ options_pairs = validate.values_to_pairs(
+ options, config_validators.ticket_options_normalization()
+ )
report_processor.report_list(
config_validators.add_ticket(
booth_conf,
ticket_name,
- options,
+ options_pairs,
allow_unknown_options=allow_unknown_options,
)
)
if report_processor.has_errors:
raise LibraryError()
- booth_conf.add_ticket(ticket_name, options)
+ booth_conf.add_ticket(
+ ticket_name, validate.pairs_to_values(options_pairs)
+ )
booth_env.config.write_facade(booth_conf, can_overwrite=True)
except RawFileError as e:
report_processor.report(raw_file_error_report(e))
diff --git a/pcs_test/tier0/lib/commands/test_booth.py b/pcs_test/tier0/lib/commands/test_booth.py
index 2b20a199..12b169c2 100644
--- a/pcs_test/tier0/lib/commands/test_booth.py
+++ b/pcs_test/tier0/lib/commands/test_booth.py
@@ -1194,7 +1194,7 @@ class ConfigTicketAdd(TestCase, FixtureMixin):
},
)
- def test_success_ticket_options(self):
+ def assert_success_ticket_options(self, options_command, options_config):
self.config.raw_file.read(
file_type_codes.BOOTH_CONFIG,
self.fixture_cfg_path(),
@@ -1203,17 +1203,29 @@ class ConfigTicketAdd(TestCase, FixtureMixin):
self.config.raw_file.write(
file_type_codes.BOOTH_CONFIG,
self.fixture_cfg_path(),
- self.fixture_cfg_content(
- ticket_list=[
- ["ticketA", [("retries", "10"), ("timeout", "20")]]
- ]
- ),
+ self.fixture_cfg_content(ticket_list=[["ticketA", options_config]]),
can_overwrite=True,
)
commands.config_ticket_add(
- self.env_assist.get_env(),
- "ticketA",
+ self.env_assist.get_env(), "ticketA", options_command
+ )
+
+ def test_success_ticket_options(self):
+ self.assert_success_ticket_options(
{"timeout": "20", "retries": "10"},
+ [("retries", "10"), ("timeout", "20")],
+ )
+
+ def test_success_ticket_options_mode(self):
+ self.assert_success_ticket_options(
+ {"timeout": "20", "retries": "10", "mode": "manual"},
+ [("mode", "manual"), ("retries", "10"), ("timeout", "20")],
+ )
+
+ def test_success_ticket_options_mode_case_insensitive(self):
+ self.assert_success_ticket_options(
+ {"timeout": "20", "retries": "10", "mode": "MaNuAl"},
+ [("mode", "manual"), ("retries", "10"), ("timeout", "20")],
)
def test_ticket_already_exists(self):
--
2.35.3
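
The patch above routes ticket options through a normalization step before validation and before writing the config, so the mode value is compared case-insensitively. A simplified sketch of that idea (not the pcs API itself; validate.values_to_pairs and pairs_to_values are the real mechanism):

def normalize_ticket_options(options):
    # Only "mode" is normalized; other option values pass through unchanged.
    normalizers = {
        "mode": lambda value: value.lower() if isinstance(value, str) else value,
    }
    return {
        name: normalizers.get(name, lambda v: v)(value)
        for name, value in options.items()
    }

print(normalize_ticket_options({"timeout": "20", "mode": "MaNuAl"}))
# {'timeout': '20', 'mode': 'manual'}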


@@ -1,46 +0,0 @@
From 7e44b3cd51a3a5079d0d42d91a3445f3b8ae9d17 Mon Sep 17 00:00:00 2001
From: Tomas Jelinek <tojeline@redhat.com>
Date: Fri, 15 Jul 2022 15:55:57 +0200
Subject: [PATCH 3/4] booth sync: check whether /etc/booth exists
---
pcsd/pcsd_file.rb | 6 +-----
pcsd/remote.rb | 4 ++++
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/pcsd/pcsd_file.rb b/pcsd/pcsd_file.rb
index d82b55d2..394db59a 100644
--- a/pcsd/pcsd_file.rb
+++ b/pcsd/pcsd_file.rb
@@ -112,12 +112,8 @@ module PcsdFile
end
end
- def dir()
- return BOOTH_CONFIG_DIR
- end
-
def full_file_name()
- @full_file_name ||= File.join(self.dir, @file[:name])
+ @full_file_name ||= File.join(BOOTH_CONFIG_DIR, @file[:name])
end
end
diff --git a/pcsd/remote.rb b/pcsd/remote.rb
index 9bf96db9..b7bee7e6 100644
--- a/pcsd/remote.rb
+++ b/pcsd/remote.rb
@@ -2622,6 +2622,10 @@ def booth_set_config(params, request, auth_user)
check_permissions(auth_user, Permissions::WRITE)
data = check_request_data_for_json(params, auth_user)
+ if not File.directory?(BOOTH_CONFIG_DIR)
+ raise "Configuration directory for booth '/etc/booth' is missing. Is booth installed?"
+ end
+
PcsdExchangeFormat::validate_item_map_is_Hash('files', data)
PcsdExchangeFormat::validate_item_is_Hash('file', :config, data[:config])
if data[:authfile]
--
2.35.3
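
The pcsd change above is in Ruby; the same guard, sketched in Python for consistency with the other examples here, is simply a directory check before any booth config file is written:

import os

BOOTH_CONFIG_DIR = "/etc/booth"

def ensure_booth_config_dir() -> None:
    # Fail early with a clear message instead of erroring out mid-write.
    if not os.path.isdir(BOOTH_CONFIG_DIR):
        raise RuntimeError(
            f"Configuration directory for booth '{BOOTH_CONFIG_DIR}' is missing. "
            "Is booth installed?"
        )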


@@ -1,25 +0,0 @@
From c00b5851a01361bd809a803ab88ff90d584d60e7 Mon Sep 17 00:00:00 2001
From: Tomas Jelinek <tojeline@redhat.com>
Date: Thu, 4 Aug 2022 15:05:41 +0200
Subject: [PATCH 1/2] fix pcs quorum device remove
---
pcsd/remote.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pcsd/remote.rb b/pcsd/remote.rb
index b7bee7e6..74151190 100644
--- a/pcsd/remote.rb
+++ b/pcsd/remote.rb
@@ -2519,7 +2519,7 @@ def qdevice_net_client_destroy(param, request, auth_user)
end
stdout, stderr, retval = run_cmd(
auth_user,
- PCS, '--' 'qdevice', 'net-client', 'destroy'
+ PCS, '--', 'qdevice', 'net-client', 'destroy'
)
if retval != 0
return [400, stderr.join('')]
--
2.37.1
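
The one-character fix above is easy to miss: in Ruby, as in Python, adjacent string literals are concatenated, so the missing comma silently merged '--' and 'qdevice' into a single '--qdevice' argument. Demonstrated in Python:

args_buggy = ['pcs', '--' 'qdevice', 'net-client', 'destroy']   # missing comma
args_fixed = ['pcs', '--', 'qdevice', 'net-client', 'destroy']

print(args_buggy)  # ['pcs', '--qdevice', 'net-client', 'destroy']
print(args_fixed)  # ['pcs', '--', 'qdevice', 'net-client', 'destroy']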


@@ -1,25 +0,0 @@
From ef08dbdc4f1fbf86cee3842eec5de32da5468609 Mon Sep 17 00:00:00 2001
From: Ivan Devat <idevat@redhat.com>
Date: Thu, 11 Aug 2022 16:06:29 +0200
Subject: [PATCH 1/2] fix enable sbd from webui
---
pcsd/remote.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pcsd/remote.rb b/pcsd/remote.rb
index 74151190..a8aff853 100644
--- a/pcsd/remote.rb
+++ b/pcsd/remote.rb
@@ -2412,7 +2412,7 @@ def remote_enable_sbd(params, request, auth_user)
end
_, stderr, retcode = run_cmd(
- auth_user, PCS, *flags, '--' 'stonith', 'sbd', 'enable', *arg_list
+ auth_user, PCS, *flags, '--', 'stonith', 'sbd', 'enable', *arg_list
)
if retcode != 0
--
2.37.1


@@ -0,0 +1,128 @@
From 0da95a7f05ae7600eebe30df78a3d4622cd6b4f8 Mon Sep 17 00:00:00 2001
From: Ondrej Mular <omular@redhat.com>
Date: Wed, 7 Dec 2022 15:53:25 +0100
Subject: [PATCH 2/5] fix displaying bool and integer values in `pcs resource
config` command
---
pcs/cli/resource/output.py | 18 +++++++++---------
pcs_test/resources/cib-resources.xml | 2 +-
pcs_test/tier1/legacy/test_resource.py | 3 ++-
pcs_test/tools/resources_dto.py | 4 ++--
4 files changed, 14 insertions(+), 13 deletions(-)
diff --git a/pcs/cli/resource/output.py b/pcs/cli/resource/output.py
index 6d1fad16..0705d27b 100644
--- a/pcs/cli/resource/output.py
+++ b/pcs/cli/resource/output.py
@@ -69,9 +69,9 @@ def _resource_operation_to_pairs(
pairs.append(("interval-origin", operation_dto.interval_origin))
if operation_dto.timeout:
pairs.append(("timeout", operation_dto.timeout))
- if operation_dto.enabled:
+ if operation_dto.enabled is not None:
pairs.append(("enabled", _bool_to_cli_value(operation_dto.enabled)))
- if operation_dto.record_pending:
+ if operation_dto.record_pending is not None:
pairs.append(
("record-pending", _bool_to_cli_value(operation_dto.record_pending))
)
@@ -477,13 +477,13 @@ def _resource_bundle_container_options_to_pairs(
options: CibResourceBundleContainerRuntimeOptionsDto,
) -> List[Tuple[str, str]]:
option_list = [("image", options.image)]
- if options.replicas:
+ if options.replicas is not None:
option_list.append(("replicas", str(options.replicas)))
- if options.replicas_per_host:
+ if options.replicas_per_host is not None:
option_list.append(
("replicas-per-host", str(options.replicas_per_host))
)
- if options.promoted_max:
+ if options.promoted_max is not None:
option_list.append(("promoted-max", str(options.promoted_max)))
if options.run_command:
option_list.append(("run-command", options.run_command))
@@ -508,7 +508,7 @@ def _resource_bundle_network_options_to_pairs(
network_options.append(
("ip-range-start", bundle_network_dto.ip_range_start)
)
- if bundle_network_dto.control_port:
+ if bundle_network_dto.control_port is not None:
network_options.append(
("control-port", str(bundle_network_dto.control_port))
)
@@ -516,7 +516,7 @@ def _resource_bundle_network_options_to_pairs(
network_options.append(
("host-interface", bundle_network_dto.host_interface)
)
- if bundle_network_dto.host_netmask:
+ if bundle_network_dto.host_netmask is not None:
network_options.append(
("host-netmask", str(bundle_network_dto.host_netmask))
)
@@ -531,9 +531,9 @@ def _resource_bundle_port_mapping_to_pairs(
bundle_net_port_mapping_dto: CibResourceBundlePortMappingDto,
) -> List[Tuple[str, str]]:
mapping = []
- if bundle_net_port_mapping_dto.port:
+ if bundle_net_port_mapping_dto.port is not None:
mapping.append(("port", str(bundle_net_port_mapping_dto.port)))
- if bundle_net_port_mapping_dto.internal_port:
+ if bundle_net_port_mapping_dto.internal_port is not None:
mapping.append(
("internal-port", str(bundle_net_port_mapping_dto.internal_port))
)
diff --git a/pcs_test/resources/cib-resources.xml b/pcs_test/resources/cib-resources.xml
index 67cf5178..524b8fbb 100644
--- a/pcs_test/resources/cib-resources.xml
+++ b/pcs_test/resources/cib-resources.xml
@@ -53,7 +53,7 @@
</instance_attributes>
</op>
<op name="migrate_from" timeout="20s" interval="0s" id="R7-migrate_from-interval-0s"/>
- <op name="migrate_to" timeout="20s" interval="0s" id="R7-migrate_to-interval-0s"/>
+ <op name="migrate_to" timeout="20s" interval="0s" id="R7-migrate_to-interval-0s" enabled="false" record-pending="false"/>
<op name="monitor" timeout="20s" interval="10s" id="R7-monitor-interval-10s"/>
<op name="reload" timeout="20s" interval="0s" id="R7-reload-interval-0s"/>
<op name="reload-agent" timeout="20s" interval="0s" id="R7-reload-agent-interval-0s"/>
diff --git a/pcs_test/tier1/legacy/test_resource.py b/pcs_test/tier1/legacy/test_resource.py
index 2ea5c423..65ad1090 100644
--- a/pcs_test/tier1/legacy/test_resource.py
+++ b/pcs_test/tier1/legacy/test_resource.py
@@ -753,7 +753,7 @@ Error: moni=tor does not appear to be a valid operation action
o, r = pcs(
self.temp_cib.name,
- "resource create --no-default-ops OPTest ocf:heartbeat:Dummy op monitor interval=30s OCF_CHECK_LEVEL=1 op monitor interval=25s OCF_CHECK_LEVEL=1".split(),
+ "resource create --no-default-ops OPTest ocf:heartbeat:Dummy op monitor interval=30s OCF_CHECK_LEVEL=1 op monitor interval=25s OCF_CHECK_LEVEL=1 enabled=0".split(),
)
ac(o, "")
assert r == 0
@@ -770,6 +770,7 @@ Error: moni=tor does not appear to be a valid operation action
OCF_CHECK_LEVEL=1
monitor: OPTest-monitor-interval-25s
interval=25s
+ enabled=0
OCF_CHECK_LEVEL=1
"""
),
diff --git a/pcs_test/tools/resources_dto.py b/pcs_test/tools/resources_dto.py
index 8f46f6dd..a980ec80 100644
--- a/pcs_test/tools/resources_dto.py
+++ b/pcs_test/tools/resources_dto.py
@@ -233,8 +233,8 @@ PRIMITIVE_R7 = CibResourcePrimitiveDto(
start_delay=None,
interval_origin=None,
timeout="20s",
- enabled=None,
- record_pending=None,
+ enabled=False,
+ record_pending=False,
role=None,
on_fail=None,
meta_attributes=[],
--
2.39.0
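
The root cause addressed above is the usual truthiness pitfall: "if value:" is false not only for None but also for False, 0 and empty strings, so explicitly configured values such as enabled=false or port=0 were omitted from the pcs resource config output. A minimal illustration (not pcs code):

def pairs_truthy(enabled):
    # old behaviour: drops the pair whenever the value is falsy
    return [("enabled", str(enabled).lower())] if enabled else []

def pairs_explicit(enabled):
    # fixed behaviour: only a missing (None) value is skipped
    return [("enabled", str(enabled).lower())] if enabled is not None else []

print(pairs_truthy(False))    # []  -- "enabled=false" silently disappears
print(pairs_explicit(False))  # [('enabled', 'false')]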


@@ -0,0 +1,732 @@
From 58589e47f2913276ea1c2164a3ce8ee694fb2b78 Mon Sep 17 00:00:00 2001
From: Ondrej Mular <omular@redhat.com>
Date: Wed, 7 Dec 2022 11:33:25 +0100
Subject: [PATCH 1/5] add warning when updating a misconfigured resource
---
pcs/common/reports/codes.py | 3 +
pcs/common/reports/messages.py | 19 +++++
pcs/lib/cib/resource/primitive.py | 84 ++++++++++++++-----
pcs/lib/pacemaker/live.py | 38 ++-------
.../tier0/common/reports/test_messages.py | 16 ++++
.../cib/resource/test_primitive_validate.py | 56 +++++++------
pcs_test/tier0/lib/pacemaker/test_live.py | 78 +++++------------
pcs_test/tier1/legacy/test_stonith.py | 5 +-
8 files changed, 161 insertions(+), 138 deletions(-)
diff --git a/pcs/common/reports/codes.py b/pcs/common/reports/codes.py
index deecc626..48048af7 100644
--- a/pcs/common/reports/codes.py
+++ b/pcs/common/reports/codes.py
@@ -40,6 +40,9 @@ AGENT_NAME_GUESS_FOUND_MORE_THAN_ONE = M("AGENT_NAME_GUESS_FOUND_MORE_THAN_ONE")
AGENT_NAME_GUESS_FOUND_NONE = M("AGENT_NAME_GUESS_FOUND_NONE")
AGENT_NAME_GUESSED = M("AGENT_NAME_GUESSED")
AGENT_SELF_VALIDATION_INVALID_DATA = M("AGENT_SELF_VALIDATION_INVALID_DATA")
+AGENT_SELF_VALIDATION_SKIPPED_UPDATED_RESOURCE_MISCONFIGURED = M(
+ "AGENT_SELF_VALIDATION_SKIPPED_UPDATED_RESOURCE_MISCONFIGURED"
+)
AGENT_SELF_VALIDATION_RESULT = M("AGENT_SELF_VALIDATION_RESULT")
BAD_CLUSTER_STATE_FORMAT = M("BAD_CLUSTER_STATE_FORMAT")
BOOTH_ADDRESS_DUPLICATION = M("BOOTH_ADDRESS_DUPLICATION")
diff --git a/pcs/common/reports/messages.py b/pcs/common/reports/messages.py
index d27c1dee..24bb222f 100644
--- a/pcs/common/reports/messages.py
+++ b/pcs/common/reports/messages.py
@@ -7584,6 +7584,25 @@ class AgentSelfValidationInvalidData(ReportItemMessage):
return f"Invalid validation data from agent: {self.reason}"
+@dataclass(frozen=True)
+class AgentSelfValidationSkippedUpdatedResourceMisconfigured(ReportItemMessage):
+ """
+ Agent self validation is skipped when updating a resource as it is
+ misconfigured in its current state.
+ """
+
+ result: str
+ _code = codes.AGENT_SELF_VALIDATION_SKIPPED_UPDATED_RESOURCE_MISCONFIGURED
+
+ @property
+ def message(self) -> str:
+ return (
+ "The resource was misconfigured before the update, therefore agent "
+ "self-validation will not be run for the updated configuration. "
+ "Validation output of the original configuration:\n{result}"
+ ).format(result="\n".join(indent(self.result.splitlines())))
+
+
@dataclass(frozen=True)
class BoothAuthfileNotUsed(ReportItemMessage):
"""
diff --git a/pcs/lib/cib/resource/primitive.py b/pcs/lib/cib/resource/primitive.py
index 3ebd01c6..c5df8e58 100644
--- a/pcs/lib/cib/resource/primitive.py
+++ b/pcs/lib/cib/resource/primitive.py
@@ -357,6 +357,31 @@ def _is_ocf_or_stonith_agent(resource_agent_name: ResourceAgentName) -> bool:
return resource_agent_name.standard in ("stonith", "ocf")
+def _get_report_from_agent_self_validation(
+ is_valid: Optional[bool],
+ reason: str,
+ report_severity: reports.ReportItemSeverity,
+) -> reports.ReportItemList:
+ report_items = []
+ if is_valid is None:
+ report_items.append(
+ reports.ReportItem(
+ report_severity,
+ reports.messages.AgentSelfValidationInvalidData(reason),
+ )
+ )
+ elif not is_valid or reason:
+ if is_valid:
+ report_severity = reports.ReportItemSeverity.warning()
+ report_items.append(
+ reports.ReportItem(
+ report_severity,
+ reports.messages.AgentSelfValidationResult(reason),
+ )
+ )
+ return report_items
+
+
def validate_resource_instance_attributes_create(
cmd_runner: CommandRunner,
resource_agent: ResourceAgentFacade,
@@ -402,16 +427,16 @@ def validate_resource_instance_attributes_create(
for report_item in report_items
)
):
- (
- dummy_is_valid,
- agent_validation_reports,
- ) = validate_resource_instance_attributes_via_pcmk(
- cmd_runner,
- agent_name,
- instance_attributes,
- reports.get_severity(reports.codes.FORCE, force),
+ report_items.extend(
+ _get_report_from_agent_self_validation(
+ *validate_resource_instance_attributes_via_pcmk(
+ cmd_runner,
+ agent_name,
+ instance_attributes,
+ ),
+ reports.get_severity(reports.codes.FORCE, force),
+ )
)
- report_items.extend(agent_validation_reports)
return report_items
@@ -505,25 +530,40 @@ def validate_resource_instance_attributes_update(
)
):
(
- is_valid,
- dummy_reports,
+ original_is_valid,
+ original_reason,
) = validate_resource_instance_attributes_via_pcmk(
cmd_runner,
agent_name,
current_instance_attrs,
- reports.ReportItemSeverity.error(),
)
- if is_valid:
- (
- dummy_is_valid,
- agent_validation_reports,
- ) = validate_resource_instance_attributes_via_pcmk(
- cmd_runner,
- resource_agent.metadata.name,
- final_attrs,
- reports.get_severity(reports.codes.FORCE, force),
+ if original_is_valid:
+ report_items.extend(
+ _get_report_from_agent_self_validation(
+ *validate_resource_instance_attributes_via_pcmk(
+ cmd_runner,
+ resource_agent.metadata.name,
+ final_attrs,
+ ),
+ reports.get_severity(reports.codes.FORCE, force),
+ )
+ )
+ elif original_is_valid is None:
+ report_items.append(
+ reports.ReportItem.warning(
+ reports.messages.AgentSelfValidationInvalidData(
+ original_reason
+ )
+ )
+ )
+ else:
+ report_items.append(
+ reports.ReportItem.warning(
+ reports.messages.AgentSelfValidationSkippedUpdatedResourceMisconfigured(
+ original_reason
+ )
+ )
)
- report_items.extend(agent_validation_reports)
return report_items
diff --git a/pcs/lib/pacemaker/live.py b/pcs/lib/pacemaker/live.py
index fd26dabb..726f6b67 100644
--- a/pcs/lib/pacemaker/live.py
+++ b/pcs/lib/pacemaker/live.py
@@ -902,8 +902,7 @@ def _validate_stonith_instance_attributes_via_pcmk(
cmd_runner: CommandRunner,
agent_name: ResourceAgentName,
instance_attributes: Mapping[str, str],
- not_valid_severity: reports.ReportItemSeverity,
-) -> Tuple[Optional[bool], reports.ReportItemList]:
+) -> Tuple[Optional[bool], str]:
cmd = [
settings.stonith_admin,
"--validate",
@@ -917,7 +916,6 @@ def _validate_stonith_instance_attributes_via_pcmk(
cmd,
"./validate/command/output",
instance_attributes,
- not_valid_severity,
)
@@ -925,8 +923,7 @@ def _validate_resource_instance_attributes_via_pcmk(
cmd_runner: CommandRunner,
agent_name: ResourceAgentName,
instance_attributes: Mapping[str, str],
- not_valid_severity: reports.ReportItemSeverity,
-) -> Tuple[Optional[bool], reports.ReportItemList]:
+) -> Tuple[Optional[bool], str]:
cmd = [
settings.crm_resource_binary,
"--validate",
@@ -944,7 +941,6 @@ def _validate_resource_instance_attributes_via_pcmk(
cmd,
"./resource-agent-action/command/output",
instance_attributes,
- not_valid_severity,
)
@@ -953,8 +949,7 @@ def _handle_instance_attributes_validation_via_pcmk(
cmd: StringSequence,
data_xpath: str,
instance_attributes: Mapping[str, str],
- not_valid_severity: reports.ReportItemSeverity,
-) -> Tuple[Optional[bool], reports.ReportItemList]:
+) -> Tuple[Optional[bool], str]:
full_cmd = list(cmd)
for key, value in sorted(instance_attributes.items()):
full_cmd.extend(["--option", f"{key}={value}"])
@@ -963,12 +958,7 @@ def _handle_instance_attributes_validation_via_pcmk(
# dom = _get_api_result_dom(stdout)
dom = xml_fromstring(stdout)
except (etree.XMLSyntaxError, etree.DocumentInvalid) as e:
- return None, [
- reports.ReportItem(
- not_valid_severity,
- reports.messages.AgentSelfValidationInvalidData(str(e)),
- )
- ]
+ return None, str(e)
result = "\n".join(
"\n".join(
line.strip() for line in item.text.split("\n") if line.strip()
@@ -976,38 +966,22 @@ def _handle_instance_attributes_validation_via_pcmk(
for item in dom.iterfind(data_xpath)
if item.get("source") == "stderr" and item.text
).strip()
- if return_value == 0:
- if result:
- return True, [
- reports.ReportItem.warning(
- reports.messages.AgentSelfValidationResult(result)
- )
- ]
- return True, []
- return False, [
- reports.ReportItem(
- not_valid_severity,
- reports.messages.AgentSelfValidationResult(result),
- )
- ]
+ return return_value == 0, result
def validate_resource_instance_attributes_via_pcmk(
cmd_runner: CommandRunner,
resource_agent_name: ResourceAgentName,
instance_attributes: Mapping[str, str],
- not_valid_severity: reports.ReportItemSeverity,
-) -> Tuple[Optional[bool], reports.ReportItemList]:
+) -> Tuple[Optional[bool], str]:
if resource_agent_name.is_stonith:
return _validate_stonith_instance_attributes_via_pcmk(
cmd_runner,
resource_agent_name,
instance_attributes,
- not_valid_severity,
)
return _validate_resource_instance_attributes_via_pcmk(
cmd_runner,
resource_agent_name,
instance_attributes,
- not_valid_severity,
)
diff --git a/pcs_test/tier0/common/reports/test_messages.py b/pcs_test/tier0/common/reports/test_messages.py
index 17627b80..5fcc62fc 100644
--- a/pcs_test/tier0/common/reports/test_messages.py
+++ b/pcs_test/tier0/common/reports/test_messages.py
@@ -5562,6 +5562,22 @@ class AgentSelfValidationInvalidData(NameBuildTest):
)
+class AgentSelfValidationSkippedUpdatedResourceMisconfigured(NameBuildTest):
+ def test_message(self):
+ lines = list(f"line #{i}" for i in range(3))
+ self.assert_message_from_report(
+ (
+ "The resource was misconfigured before the update, therefore "
+ "agent self-validation will not be run for the updated "
+ "configuration. Validation output of the original "
+ "configuration:\n {}"
+ ).format("\n ".join(lines)),
+ reports.AgentSelfValidationSkippedUpdatedResourceMisconfigured(
+ "\n".join(lines)
+ ),
+ )
+
+
class BoothAuthfileNotUsed(NameBuildTest):
def test_message(self):
self.assert_message_from_report(
diff --git a/pcs_test/tier0/lib/cib/resource/test_primitive_validate.py b/pcs_test/tier0/lib/cib/resource/test_primitive_validate.py
index 2cba7086..1bc3a5a6 100644
--- a/pcs_test/tier0/lib/cib/resource/test_primitive_validate.py
+++ b/pcs_test/tier0/lib/cib/resource/test_primitive_validate.py
@@ -609,7 +609,6 @@ class ValidateResourceInstanceAttributesCreateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
)
def test_force(self):
@@ -629,15 +628,14 @@ class ValidateResourceInstanceAttributesCreateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
attributes,
- reports.ReportItemSeverity.warning(),
)
def test_failure(self):
attributes = {"required": "value"}
facade = _fixture_ocf_agent()
- failure_reports = ["report1", "report2"]
- self.agent_self_validation_mock.return_value = False, failure_reports
- self.assertEqual(
+ failure_reason = "failure reason"
+ self.agent_self_validation_mock.return_value = False, failure_reason
+ assert_report_item_list_equal(
primitive.validate_resource_instance_attributes_create(
self.cmd_runner,
facade,
@@ -645,13 +643,18 @@ class ValidateResourceInstanceAttributesCreateSelfValidation(TestCase):
etree.Element("resources"),
force=False,
),
- failure_reports,
+ [
+ fixture.error(
+ reports.codes.AGENT_SELF_VALIDATION_RESULT,
+ result=failure_reason,
+ force_code=reports.codes.FORCE,
+ )
+ ],
)
self.agent_self_validation_mock.assert_called_once_with(
self.cmd_runner,
facade.metadata.name,
attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
)
def test_stonith_check(self):
@@ -671,7 +674,6 @@ class ValidateResourceInstanceAttributesCreateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
)
def test_nonexisting_agent(self):
@@ -1295,13 +1297,11 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
old_attributes,
- reports.ReportItemSeverity.error(),
),
mock.call(
self.cmd_runner,
facade.metadata.name,
new_attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
),
],
)
@@ -1328,13 +1328,11 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
old_attributes,
- reports.ReportItemSeverity.error(),
),
mock.call(
self.cmd_runner,
facade.metadata.name,
new_attributes,
- reports.ReportItemSeverity.warning(),
),
],
)
@@ -1342,13 +1340,13 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
def test_failure(self):
old_attributes = {"required": "old_value"}
new_attributes = {"required": "new_value"}
- failure_reports = ["report1", "report2"]
+ failure_reason = "failure reason"
facade = _fixture_ocf_agent()
self.agent_self_validation_mock.side_effect = (
- (True, []),
- (False, failure_reports),
+ (True, ""),
+ (False, failure_reason),
)
- self.assertEqual(
+ assert_report_item_list_equal(
primitive.validate_resource_instance_attributes_update(
self.cmd_runner,
facade,
@@ -1357,7 +1355,13 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self._fixture_resources(old_attributes),
force=False,
),
- failure_reports,
+ [
+ fixture.error(
+ reports.codes.AGENT_SELF_VALIDATION_RESULT,
+ result=failure_reason,
+ force_code=reports.codes.FORCE,
+ )
+ ],
)
self.assertEqual(
self.agent_self_validation_mock.mock_calls,
@@ -1366,13 +1370,11 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
old_attributes,
- reports.ReportItemSeverity.error(),
),
mock.call(
self.cmd_runner,
facade.metadata.name,
new_attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
),
],
)
@@ -1399,13 +1401,11 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
old_attributes,
- reports.ReportItemSeverity.error(),
),
mock.call(
self.cmd_runner,
facade.metadata.name,
new_attributes,
- reports.ReportItemSeverity.error(reports.codes.FORCE),
),
],
)
@@ -1471,10 +1471,10 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
def test_current_attributes_failure(self):
old_attributes = {"required": "old_value"}
new_attributes = {"required": "new_value"}
- failure_reports = ["report1", "report2"]
+ failure_reason = "failure reason"
facade = _fixture_ocf_agent()
- self.agent_self_validation_mock.return_value = False, failure_reports
- self.assertEqual(
+ self.agent_self_validation_mock.return_value = False, failure_reason
+ assert_report_item_list_equal(
primitive.validate_resource_instance_attributes_update(
self.cmd_runner,
facade,
@@ -1483,7 +1483,12 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self._fixture_resources(old_attributes),
force=False,
),
- [],
+ [
+ fixture.warn(
+ reports.codes.AGENT_SELF_VALIDATION_SKIPPED_UPDATED_RESOURCE_MISCONFIGURED,
+ result=failure_reason,
+ )
+ ],
)
self.assertEqual(
self.agent_self_validation_mock.mock_calls,
@@ -1492,7 +1497,6 @@ class ValidateResourceInstanceAttributesUpdateSelfValidation(TestCase):
self.cmd_runner,
facade.metadata.name,
old_attributes,
- reports.ReportItemSeverity.error(),
),
],
)
diff --git a/pcs_test/tier0/lib/pacemaker/test_live.py b/pcs_test/tier0/lib/pacemaker/test_live.py
index 5c8000cd..239a72b1 100644
--- a/pcs_test/tier0/lib/pacemaker/test_live.py
+++ b/pcs_test/tier0/lib/pacemaker/test_live.py
@@ -1752,16 +1752,15 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertTrue(is_valid)
- self.assertEqual(report_list, [])
+ self.assertEqual(reason, "")
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
)
@@ -1771,23 +1770,17 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertIsNone(is_valid)
- assert_report_item_list_equal(
- report_list,
- [
- fixture.info(
- report_codes.AGENT_SELF_VALIDATION_INVALID_DATA,
- reason="Start tag expected, '<' not found, line 1, column 1 (<string>, line 1)",
- )
- ],
+ self.assertEqual(
+ reason,
+ "Start tag expected, '<' not found, line 1, column 1 (<string>, line 1)",
)
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
@@ -1806,19 +1799,15 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertTrue(is_valid)
- assert_report_item_list_equal(
- report_list,
- [],
- )
+ self.assertEqual(reason, "")
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
)
@@ -1837,23 +1826,15 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertFalse(is_valid)
- assert_report_item_list_equal(
- report_list,
- [
- fixture.info(
- report_codes.AGENT_SELF_VALIDATION_RESULT, result=""
- )
- ],
- )
+ self.assertEqual(reason, "")
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
)
@@ -1881,23 +1862,17 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertFalse(is_valid)
- assert_report_item_list_equal(
- report_list,
- [
- fixture.info(
- report_codes.AGENT_SELF_VALIDATION_RESULT,
- result="first line\nImportant output\nand another line",
- )
- ],
+ self.assertEqual(
+ reason,
+ "first line\nImportant output\nand another line",
)
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
@@ -1925,23 +1900,17 @@ class HandleInstanceAttributesValidateViaPcmkTest(TestCase):
base_cmd = ["some", "command"]
(
is_valid,
- report_list,
+ reason,
) = lib._handle_instance_attributes_validation_via_pcmk(
runner,
base_cmd,
"result/output",
{"attr1": "val1", "attr2": "val2"},
- not_valid_severity=Severity.info(),
)
self.assertTrue(is_valid)
- assert_report_item_list_equal(
- report_list,
- [
- fixture.warn(
- report_codes.AGENT_SELF_VALIDATION_RESULT,
- result="first line\nImportant output\nand another line",
- )
- ],
+ self.assertEqual(
+ reason,
+ "first line\nImportant output\nand another line",
)
runner.run.assert_called_once_with(
base_cmd + ["--option", "attr1=val1", "--option", "attr2=val2"]
@@ -1953,7 +1922,6 @@ class ValidateResourceInstanceAttributesViaPcmkTest(TestCase):
def setUp(self):
self.runner = mock.Mock()
self.attrs = dict(attra="val1", attrb="val2")
- self.severity = Severity.info()
patcher = mock.patch(
"pcs.lib.pacemaker.live._handle_instance_attributes_validation_via_pcmk"
)
@@ -1967,7 +1935,7 @@ class ValidateResourceInstanceAttributesViaPcmkTest(TestCase):
)
self.assertEqual(
lib._validate_resource_instance_attributes_via_pcmk(
- self.runner, agent, self.attrs, self.severity
+ self.runner, agent, self.attrs
),
self.ret_val,
)
@@ -1987,7 +1955,6 @@ class ValidateResourceInstanceAttributesViaPcmkTest(TestCase):
],
"./resource-agent-action/command/output",
self.attrs,
- self.severity,
)
def test_without_provider(self):
@@ -1996,7 +1963,7 @@ class ValidateResourceInstanceAttributesViaPcmkTest(TestCase):
)
self.assertEqual(
lib._validate_resource_instance_attributes_via_pcmk(
- self.runner, agent, self.attrs, self.severity
+ self.runner, agent, self.attrs
),
self.ret_val,
)
@@ -2014,7 +1981,6 @@ class ValidateResourceInstanceAttributesViaPcmkTest(TestCase):
],
"./resource-agent-action/command/output",
self.attrs,
- self.severity,
)
@@ -2024,7 +1990,6 @@ class ValidateStonithInstanceAttributesViaPcmkTest(TestCase):
def setUp(self):
self.runner = mock.Mock()
self.attrs = dict(attra="val1", attrb="val2")
- self.severity = Severity.info()
patcher = mock.patch(
"pcs.lib.pacemaker.live._handle_instance_attributes_validation_via_pcmk"
)
@@ -2038,7 +2003,7 @@ class ValidateStonithInstanceAttributesViaPcmkTest(TestCase):
)
self.assertEqual(
lib._validate_stonith_instance_attributes_via_pcmk(
- self.runner, agent, self.attrs, self.severity
+ self.runner, agent, self.attrs
),
self.ret_val,
)
@@ -2054,5 +2019,4 @@ class ValidateStonithInstanceAttributesViaPcmkTest(TestCase):
],
"./validate/command/output",
self.attrs,
- self.severity,
)
diff --git a/pcs_test/tier1/legacy/test_stonith.py b/pcs_test/tier1/legacy/test_stonith.py
index 9911d604..cf430d75 100644
--- a/pcs_test/tier1/legacy/test_stonith.py
+++ b/pcs_test/tier1/legacy/test_stonith.py
@@ -1294,7 +1294,10 @@ class StonithTest(TestCase, AssertPcsMixin):
),
)
- self.assert_pcs_success("stonith update test3 username=testA".split())
+ self.assert_pcs_success(
+ "stonith update test3 username=testA".split(),
+ stdout_start="Warning: ",
+ )
self.assert_pcs_success(
"stonith config test2".split(),
--
2.39.0
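
The refactor above changes the low-level pcmk validators to return a plain (is_valid, reason) tuple instead of ready-made report items and moves report construction to the caller, which can then emit the new "skipped, resource already misconfigured" warning. A simplified sketch of the caller-side mapping (assumed shape, not the actual pcs interfaces):

from typing import List, Optional, Tuple

def reports_from_self_validation(
    is_valid: Optional[bool], reason: str, severity: str
) -> List[Tuple[str, str]]:
    # is_valid is None when the agent returned data that could not be parsed.
    if is_valid is None:
        return [(severity, f"Invalid validation data from agent: {reason}")]
    if not is_valid:
        return [(severity, reason)]
    if reason:
        # valid result, but the agent still produced output worth surfacing
        return [("warning", reason)]
    return []

print(reports_from_self_validation(False, "missing required option", "error"))
# [('error', 'missing required option')]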


@@ -0,0 +1,485 @@
From 5bed788246ac19c866a60ab3773d94fa4ca28c37 Mon Sep 17 00:00:00 2001
From: Miroslav Lisik <mlisik@redhat.com>
Date: Thu, 5 Jan 2023 16:21:44 +0100
Subject: [PATCH 5/5] Fix stonith-watchdog-timeout validation
---
pcs/lib/cluster_property.py | 25 ++++-
pcs/lib/sbd.py | 15 ++-
.../lib/commands/test_cluster_property.py | 50 ++++++++--
pcs_test/tier0/lib/test_cluster_property.py | 98 ++++++++++++++-----
pcs_test/tier1/test_cluster_property.py | 14 ++-
5 files changed, 157 insertions(+), 45 deletions(-)
diff --git a/pcs/lib/cluster_property.py b/pcs/lib/cluster_property.py
index 9ccacd74..b622bdaf 100644
--- a/pcs/lib/cluster_property.py
+++ b/pcs/lib/cluster_property.py
@@ -8,6 +8,7 @@ from lxml.etree import _Element
from pcs.common import reports
from pcs.common.services.interfaces import ServiceManagerInterface
+from pcs.common.tools import timeout_to_seconds
from pcs.common.types import StringSequence
from pcs.lib import (
sbd,
@@ -38,8 +39,21 @@ def _validate_stonith_watchdog_timeout_property(
force: bool = False,
) -> reports.ReportItemList:
report_list: reports.ReportItemList = []
+ original_value = value
+ # if value is not empty, try to convert time interval string
+ if value:
+ seconds = timeout_to_seconds(value)
+ if seconds is None:
+ # returns empty list because this should be reported by
+ # ValueTimeInterval validator
+ return report_list
+ value = str(seconds)
if sbd.is_sbd_enabled(service_manager):
- report_list.extend(sbd.validate_stonith_watchdog_timeout(value, force))
+ report_list.extend(
+ sbd.validate_stonith_watchdog_timeout(
+ validate.ValuePair(original_value, value), force
+ )
+ )
else:
if value not in ["", "0"]:
report_list.append(
@@ -124,9 +138,6 @@ def validate_set_cluster_properties(
# unknow properties are reported by NamesIn validator
continue
property_metadata = possible_properties_dict[property_name]
- if property_metadata.name == "stonith-watchdog-timeout":
- # needs extra validation
- continue
if property_metadata.type == "boolean":
validators.append(
validate.ValuePcmkBoolean(
@@ -154,9 +165,13 @@ def validate_set_cluster_properties(
)
)
elif property_metadata.type == "time":
+ # make stonith-watchdog-timeout value not forcable
validators.append(
validate.ValueTimeInterval(
- property_metadata.name, severity=severity
+ property_metadata.name,
+ severity=severity
+ if property_metadata.name != "stonith-watchdog-timeout"
+ else reports.ReportItemSeverity.error(),
)
)
report_list.extend(
diff --git a/pcs/lib/sbd.py b/pcs/lib/sbd.py
index 1e3cfb37..38cd8767 100644
--- a/pcs/lib/sbd.py
+++ b/pcs/lib/sbd.py
@@ -1,6 +1,9 @@
import re
from os import path
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from pcs import settings
from pcs.common import reports
@@ -392,7 +395,10 @@ def _get_local_sbd_watchdog_timeout() -> int:
def validate_stonith_watchdog_timeout(
- stonith_watchdog_timeout: str, force: bool = False
+ stonith_watchdog_timeout: Union[
+ validate.TypeOptionValue, validate.ValuePair
+ ],
+ force: bool = False,
) -> reports.ReportItemList:
"""
Check sbd status and config when user is setting stonith-watchdog-timeout
@@ -401,6 +407,7 @@ def validate_stonith_watchdog_timeout(
stonith_watchdog_timeout -- value to be validated
"""
+ stonith_watchdog_timeout = validate.ValuePair.get(stonith_watchdog_timeout)
severity = reports.get_severity(reports.codes.FORCE, force)
if _is_device_set_local():
return (
@@ -412,11 +419,11 @@ def validate_stonith_watchdog_timeout(
),
)
]
- if stonith_watchdog_timeout not in ["", "0"]
+ if stonith_watchdog_timeout.normalized not in ["", "0"]
else []
)
- if stonith_watchdog_timeout in ["", "0"]:
+ if stonith_watchdog_timeout.normalized in ["", "0"]:
return [
reports.ReportItem(
severity,
diff --git a/pcs_test/tier0/lib/commands/test_cluster_property.py b/pcs_test/tier0/lib/commands/test_cluster_property.py
index 319d1df6..fd124843 100644
--- a/pcs_test/tier0/lib/commands/test_cluster_property.py
+++ b/pcs_test/tier0/lib/commands/test_cluster_property.py
@@ -120,6 +120,34 @@ class StonithWatchdogTimeoutMixin(LoadMetadataMixin):
)
self.env_assist.assert_reports([])
+ def _set_invalid_value(self, forced=False):
+ self.config.remove("services.is_enabled")
+ self.env_assist.assert_raise_library_error(
+ lambda: cluster_property.set_properties(
+ self.env_assist.get_env(),
+ {"stonith-watchdog-timeout": "15x"},
+ [] if not forced else [reports.codes.FORCE],
+ )
+ )
+ self.env_assist.assert_reports(
+ [
+ fixture.error(
+ reports.codes.INVALID_OPTION_VALUE,
+ option_name="stonith-watchdog-timeout",
+ option_value="15x",
+ allowed_values="time interval (e.g. 1, 2s, 3m, 4h, ...)",
+ cannot_be_empty=False,
+ forbidden_characters=None,
+ ),
+ ]
+ )
+
+ def test_set_invalid_value(self):
+ self._set_invalid_value(forced=False)
+
+ def test_set_invalid_value_forced(self):
+ self._set_invalid_value(forced=True)
+
class TestSetStonithWatchdogTimeoutSBDIsDisabled(
StonithWatchdogTimeoutMixin, TestCase
@@ -132,6 +160,9 @@ class TestSetStonithWatchdogTimeoutSBDIsDisabled(
def test_set_zero(self):
self._set_success({"stonith-watchdog-timeout": "0"})
+ def test_set_zero_time_suffix(self):
+ self._set_success({"stonith-watchdog-timeout": "0s"})
+
def test_set_not_zero_or_empty(self):
self.env_assist.assert_raise_library_error(
lambda: cluster_property.set_properties(
@@ -231,12 +262,12 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledWatchdogOnly(
def test_set_zero_forced(self):
self.config.env.push_cib(
crm_config=fixture_crm_config_properties(
- [("cib-bootstrap-options", {"stonith-watchdog-timeout": "0"})]
+ [("cib-bootstrap-options", {"stonith-watchdog-timeout": "0s"})]
)
)
cluster_property.set_properties(
self.env_assist.get_env(),
- {"stonith-watchdog-timeout": "0"},
+ {"stonith-watchdog-timeout": "0s"},
[reports.codes.FORCE],
)
self.env_assist.assert_reports(
@@ -271,7 +302,7 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledWatchdogOnly(
self.env_assist.assert_raise_library_error(
lambda: cluster_property.set_properties(
self.env_assist.get_env(),
- {"stonith-watchdog-timeout": "9"},
+ {"stonith-watchdog-timeout": "9s"},
[],
)
)
@@ -281,7 +312,7 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledWatchdogOnly(
reports.codes.STONITH_WATCHDOG_TIMEOUT_TOO_SMALL,
force_code=reports.codes.FORCE,
cluster_sbd_watchdog_timeout=10,
- entered_watchdog_timeout="9",
+ entered_watchdog_timeout="9s",
)
]
)
@@ -289,12 +320,12 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledWatchdogOnly(
def test_too_small_forced(self):
self.config.env.push_cib(
crm_config=fixture_crm_config_properties(
- [("cib-bootstrap-options", {"stonith-watchdog-timeout": "9"})]
+ [("cib-bootstrap-options", {"stonith-watchdog-timeout": "9s"})]
)
)
cluster_property.set_properties(
self.env_assist.get_env(),
- {"stonith-watchdog-timeout": "9"},
+ {"stonith-watchdog-timeout": "9s"},
[reports.codes.FORCE],
)
self.env_assist.assert_reports(
@@ -302,13 +333,13 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledWatchdogOnly(
fixture.warn(
reports.codes.STONITH_WATCHDOG_TIMEOUT_TOO_SMALL,
cluster_sbd_watchdog_timeout=10,
- entered_watchdog_timeout="9",
+ entered_watchdog_timeout="9s",
)
]
)
def test_more_than_timeout(self):
- self._set_success({"stonith-watchdog-timeout": "11"})
+ self._set_success({"stonith-watchdog-timeout": "11s"})
@mock.patch("pcs.lib.sbd.get_local_sbd_device_list", lambda: ["dev1", "dev2"])
@@ -323,6 +354,9 @@ class TestSetStonithWatchdogTimeoutSBDIsEnabledSharedDevices(
def test_set_to_zero(self):
self._set_success({"stonith-watchdog-timeout": "0"})
+ def test_set_to_zero_time_suffix(self):
+ self._set_success({"stonith-watchdog-timeout": "0min"})
+
def test_set_not_zero_or_empty(self):
self.env_assist.assert_raise_library_error(
lambda: cluster_property.set_properties(
diff --git a/pcs_test/tier0/lib/test_cluster_property.py b/pcs_test/tier0/lib/test_cluster_property.py
index 2feb728d..8d6f90b1 100644
--- a/pcs_test/tier0/lib/test_cluster_property.py
+++ b/pcs_test/tier0/lib/test_cluster_property.py
@@ -83,6 +83,7 @@ FIXTURE_VALID_OPTIONS_DICT = {
"integer_param": "10",
"percentage_param": "20%",
"select_param": "s3",
+ "stonith-watchdog-timeout": "0",
"time_param": "5min",
}
@@ -96,6 +97,8 @@ FIXTURE_INVALID_OPTIONS_DICT = {
"have-watchdog": "100",
}
+STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES = ["", "0", "0s"]
+
def _fixture_parameter(name, param_type, default, enum_values):
return ResourceAgentParameter(
@@ -239,6 +242,7 @@ class TestValidateSetClusterProperties(TestCase):
sbd_enabled=False,
sbd_devices=False,
force=False,
+ valid_value=True,
):
self.mock_is_sbd_enabled.return_value = sbd_enabled
self.mock_sbd_devices.return_value = ["devices"] if sbd_devices else []
@@ -254,9 +258,13 @@ class TestValidateSetClusterProperties(TestCase):
),
expected_report_list,
)
- if "stonith-watchdog-timeout" in new_properties and (
- new_properties["stonith-watchdog-timeout"]
- or "stonith-watchdog-timeout" in configured_properties
+ if (
+ "stonith-watchdog-timeout" in new_properties
+ and (
+ new_properties["stonith-watchdog-timeout"]
+ or "stonith-watchdog-timeout" in configured_properties
+ )
+ and valid_value
):
self.mock_is_sbd_enabled.assert_called_once_with(
self.mock_service_manager
@@ -266,7 +274,10 @@ class TestValidateSetClusterProperties(TestCase):
if sbd_devices:
self.mock_sbd_timeout.assert_not_called()
else:
- if new_properties["stonith-watchdog-timeout"] in ["", "0"]:
+ if (
+ new_properties["stonith-watchdog-timeout"]
+ in STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES
+ ):
self.mock_sbd_timeout.assert_not_called()
else:
self.mock_sbd_timeout.assert_called_once_with()
@@ -280,6 +291,8 @@ class TestValidateSetClusterProperties(TestCase):
self.mock_sbd_timeout.assert_not_called()
self.mock_is_sbd_enabled.reset_mock()
+ self.mock_sbd_devices.reset_mock()
+ self.mock_sbd_timeout.reset_mock()
def test_no_properties_to_set_or_unset(self):
self.assert_validate_set(
@@ -328,7 +341,7 @@ class TestValidateSetClusterProperties(TestCase):
)
def test_unset_stonith_watchdog_timeout_sbd_disabled(self):
- for value in ["0", ""]:
+ for value in STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES:
with self.subTest(value=value):
self.assert_validate_set(
["stonith-watchdog-timeout"],
@@ -349,22 +362,27 @@ class TestValidateSetClusterProperties(TestCase):
)
def test_set_ok_stonith_watchdog_timeout_sbd_enabled_without_devices(self):
- self.assert_validate_set(
- [], {"stonith-watchdog-timeout": "15"}, [], sbd_enabled=True
- )
+ for value in ["15", "15s"]:
+ with self.subTest(value=value):
+ self.assert_validate_set(
+ [],
+ {"stonith-watchdog-timeout": value},
+ [],
+ sbd_enabled=True,
+ )
def test_set_small_stonith_watchdog_timeout_sbd_enabled_without_devices(
self,
):
self.assert_validate_set(
[],
- {"stonith-watchdog-timeout": "9"},
+ {"stonith-watchdog-timeout": "9s"},
[
fixture.error(
reports.codes.STONITH_WATCHDOG_TIMEOUT_TOO_SMALL,
force_code=reports.codes.FORCE,
cluster_sbd_watchdog_timeout=10,
- entered_watchdog_timeout="9",
+ entered_watchdog_timeout="9s",
)
],
sbd_enabled=True,
@@ -387,28 +405,54 @@ class TestValidateSetClusterProperties(TestCase):
force=True,
)
- def test_set_not_a_number_stonith_watchdog_timeout_sbd_enabled_without_devices(
+ def _set_invalid_value_stonith_watchdog_timeout(
+ self, sbd_enabled=False, sbd_devices=False
+ ):
+ for value in ["invalid", "10x"]:
+ with self.subTest(value=value):
+ self.assert_validate_set(
+ [],
+ {"stonith-watchdog-timeout": value},
+ [
+ fixture.error(
+ reports.codes.INVALID_OPTION_VALUE,
+ option_name="stonith-watchdog-timeout",
+ option_value=value,
+ allowed_values="time interval (e.g. 1, 2s, 3m, 4h, ...)",
+ cannot_be_empty=False,
+ forbidden_characters=None,
+ )
+ ],
+ sbd_enabled=sbd_enabled,
+ sbd_devices=sbd_devices,
+ valid_value=False,
+ )
+
+ def test_set_invalid_value_stonith_watchdog_timeout_sbd_enabled_without_devices(
self,
):
+ self._set_invalid_value_stonith_watchdog_timeout(
+ sbd_enabled=True, sbd_devices=False
+ )
- self.assert_validate_set(
- [],
- {"stonith-watchdog-timeout": "invalid"},
- [
- fixture.error(
- reports.codes.STONITH_WATCHDOG_TIMEOUT_TOO_SMALL,
- force_code=reports.codes.FORCE,
- cluster_sbd_watchdog_timeout=10,
- entered_watchdog_timeout="invalid",
- )
- ],
- sbd_enabled=True,
+ def test_set_invalid_value_stonith_watchdog_timeout_sbd_enabled_with_devices(
+ self,
+ ):
+ self._set_invalid_value_stonith_watchdog_timeout(
+ sbd_enabled=True, sbd_devices=True
+ )
+
+ def test_set_invalid_value_stonith_watchdog_timeout_sbd_disabled(
+ self,
+ ):
+ self._set_invalid_value_stonith_watchdog_timeout(
+ sbd_enabled=False, sbd_devices=False
)
def test_unset_stonith_watchdog_timeout_sbd_enabled_without_devices(
self,
):
- for value in ["0", ""]:
+ for value in STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES:
with self.subTest(value=value):
self.assert_validate_set(
["stonith-watchdog-timeout"],
@@ -426,7 +470,7 @@ class TestValidateSetClusterProperties(TestCase):
def test_unset_stonith_watchdog_timeout_sbd_enabled_without_devices_forced(
self,
):
- for value in ["0", ""]:
+ for value in STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES:
with self.subTest(value=value):
self.assert_validate_set(
["stonith-watchdog-timeout"],
@@ -459,7 +503,7 @@ class TestValidateSetClusterProperties(TestCase):
def test_set_stonith_watchdog_timeout_sbd_enabled_with_devices_forced(self):
self.assert_validate_set(
[],
- {"stonith-watchdog-timeout": 15},
+ {"stonith-watchdog-timeout": "15s"},
[
fixture.warn(
reports.codes.STONITH_WATCHDOG_TIMEOUT_CANNOT_BE_SET,
@@ -472,7 +516,7 @@ class TestValidateSetClusterProperties(TestCase):
)
def test_unset_stonith_watchdog_timeout_sbd_enabled_with_devices(self):
- for value in ["0", ""]:
+ for value in STONITH_WATCHDOG_TIMEOUT_UNSET_VALUES:
with self.subTest(value=value):
self.assert_validate_set(
["stonith-watchdog-timeout"],
diff --git a/pcs_test/tier1/test_cluster_property.py b/pcs_test/tier1/test_cluster_property.py
index ff1f9cfb..51e25efc 100644
--- a/pcs_test/tier1/test_cluster_property.py
+++ b/pcs_test/tier1/test_cluster_property.py
@@ -169,7 +169,7 @@ class TestPropertySet(PropertyMixin, TestCase):
def test_set_stonith_watchdog_timeout(self):
self.assert_pcs_fail(
- "property set stonith-watchdog-timeout=5".split(),
+ "property set stonith-watchdog-timeout=5s".split(),
stdout_full=(
"Error: stonith-watchdog-timeout can only be unset or set to 0 "
"while SBD is disabled\n"
@@ -179,6 +179,18 @@ class TestPropertySet(PropertyMixin, TestCase):
)
self.assert_resources_xml_in_cib(UNCHANGED_CRM_CONFIG)
+ def test_set_stonith_watchdog_timeout_invalid_value(self):
+ self.assert_pcs_fail(
+ "property set stonith-watchdog-timeout=5x".split(),
+ stdout_full=(
+ "Error: '5x' is not a valid stonith-watchdog-timeout value, use"
+ " time interval (e.g. 1, 2s, 3m, 4h, ...)\n"
+ "Error: Errors have occurred, therefore pcs is unable to "
+ "continue\n"
+ ),
+ )
+ self.assert_resources_xml_in_cib(UNCHANGED_CRM_CONFIG)
+
class TestPropertyUnset(PropertyMixin, TestCase):
def test_success(self):
--
2.39.0
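
The validation fix above hinges on normalizing the time-interval string before the SBD checks, so that values such as 0s or 5min are compared as seconds and malformed values like 15x are rejected outright. An illustrative normalizer with assumed semantics (the real conversion is pcs.common.tools.timeout_to_seconds):

import re

_UNITS = {"": 1, "s": 1, "sec": 1, "min": 60, "m": 60, "h": 3600, "hr": 3600}

def to_seconds(value: str):
    # Return whole seconds for a valid time interval, None otherwise.
    match = re.fullmatch(r"(\d+)\s*([a-z]*)", value.strip().lower())
    if not match or match.group(2) not in _UNITS:
        return None
    return int(match.group(1)) * _UNITS[match.group(2)]

print(to_seconds("0s"))    # 0   -> treated like "0", i.e. unsetting the timeout
print(to_seconds("5min"))  # 300
print(to_seconds("15x"))   # None -> reported as an invalid option value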

File diff suppressed because it is too large.


@@ -0,0 +1,311 @@
From 3cd35ed8e5b190c2e8203acd68a0100b84ed3bb4 Mon Sep 17 00:00:00 2001
From: Ondrej Mular <omular@redhat.com>
Date: Tue, 31 Jan 2023 17:44:16 +0100
Subject: [PATCH] fix update of stonith-watchdog-timeout when cluster is not
running
---
pcs/lib/communication/sbd.py | 4 +-
.../lib/commands/sbd/test_disable_sbd.py | 10 ++--
.../tier0/lib/commands/sbd/test_enable_sbd.py | 49 ++++++++++---------
pcsd/pcs.rb | 17 +++++--
4 files changed, 48 insertions(+), 32 deletions(-)
diff --git a/pcs/lib/communication/sbd.py b/pcs/lib/communication/sbd.py
index 4762245c..633312a4 100644
--- a/pcs/lib/communication/sbd.py
+++ b/pcs/lib/communication/sbd.py
@@ -98,8 +98,8 @@ class StonithWatchdogTimeoutAction(
)
if report_item is None:
self._on_success()
- return []
- self._report(report_item)
+ else:
+ self._report(report_item)
return self._get_next_list()
diff --git a/pcs_test/tier0/lib/commands/sbd/test_disable_sbd.py b/pcs_test/tier0/lib/commands/sbd/test_disable_sbd.py
index 13135fb2..f8f165bf 100644
--- a/pcs_test/tier0/lib/commands/sbd/test_disable_sbd.py
+++ b/pcs_test/tier0/lib/commands/sbd/test_disable_sbd.py
@@ -19,7 +19,7 @@ class DisableSbd(TestCase):
self.config.corosync_conf.load(filename=self.corosync_conf_name)
self.config.http.host.check_auth(node_labels=self.node_list)
self.config.http.pcmk.set_stonith_watchdog_timeout_to_zero(
- node_labels=self.node_list[:1]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.disable_sbd(node_labels=self.node_list)
disable_sbd(self.env_assist.get_env())
@@ -56,7 +56,7 @@ class DisableSbd(TestCase):
self.config.corosync_conf.load(filename=self.corosync_conf_name)
self.config.http.host.check_auth(node_labels=self.node_list)
self.config.http.pcmk.set_stonith_watchdog_timeout_to_zero(
- node_labels=self.node_list[:1]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.disable_sbd(node_labels=self.node_list)
@@ -158,7 +158,9 @@ class DisableSbd(TestCase):
]
)
self.config.http.pcmk.set_stonith_watchdog_timeout_to_zero(
- node_labels=online_nodes_list[:1]
+ communication_list=[
+ [dict(label=node)] for node in self.node_list[1:]
+ ],
)
self.config.http.sbd.disable_sbd(node_labels=online_nodes_list)
disable_sbd(self.env_assist.get_env(), ignore_offline_nodes=True)
@@ -291,7 +293,7 @@ class DisableSbd(TestCase):
self.config.corosync_conf.load(filename=self.corosync_conf_name)
self.config.http.host.check_auth(node_labels=self.node_list)
self.config.http.pcmk.set_stonith_watchdog_timeout_to_zero(
- node_labels=self.node_list[:1]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.disable_sbd(
communication_list=[
diff --git a/pcs_test/tier0/lib/commands/sbd/test_enable_sbd.py b/pcs_test/tier0/lib/commands/sbd/test_enable_sbd.py
index 57e680e0..f192f429 100644
--- a/pcs_test/tier0/lib/commands/sbd/test_enable_sbd.py
+++ b/pcs_test/tier0/lib/commands/sbd/test_enable_sbd.py
@@ -130,7 +130,7 @@ class OddNumOfNodesSuccess(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -164,7 +164,7 @@ class OddNumOfNodesSuccess(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -218,7 +218,7 @@ class OddNumOfNodesDefaultsSuccess(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -248,7 +248,7 @@ class OddNumOfNodesDefaultsSuccess(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -351,7 +351,7 @@ class WatchdogValidations(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -407,7 +407,7 @@ class EvenNumOfNodes(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -443,7 +443,7 @@ class EvenNumOfNodes(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -480,7 +480,7 @@ class EvenNumOfNodes(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -513,7 +513,7 @@ class EvenNumOfNodes(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
self.config.http.sbd.enable_sbd(node_labels=self.node_list)
enable_sbd(
@@ -604,7 +604,9 @@ class OfflineNodes(TestCase):
node_labels=self.online_node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.online_node_list[0]]
+ communication_list=[
+ [dict(label=node)] for node in self.online_node_list
+ ],
)
self.config.http.sbd.enable_sbd(node_labels=self.online_node_list)
enable_sbd(
@@ -644,7 +646,9 @@ class OfflineNodes(TestCase):
node_labels=self.online_node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.online_node_list[0]]
+ communication_list=[
+ [dict(label=node)] for node in self.online_node_list
+ ],
)
self.config.http.sbd.enable_sbd(node_labels=self.online_node_list)
enable_sbd(
@@ -1226,7 +1230,7 @@ class FailureHandling(TestCase):
node_labels=self.node_list,
)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
- node_labels=[self.node_list[0]]
+ communication_list=[[dict(label=node)] for node in self.node_list],
)
def _remove_calls(self, count):
@@ -1302,7 +1306,8 @@ class FailureHandling(TestCase):
)
def test_removing_stonith_wd_timeout_failure(self):
- self._remove_calls(2)
+ self._remove_calls(len(self.node_list) + 1)
+
self.config.http.pcmk.remove_stonith_watchdog_timeout(
communication_list=[
self.communication_list_failure[:1],
@@ -1331,7 +1336,7 @@ class FailureHandling(TestCase):
)
def test_removing_stonith_wd_timeout_not_connected(self):
- self._remove_calls(2)
+ self._remove_calls(len(self.node_list) + 1)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
communication_list=[
self.communication_list_not_connected[:1],
@@ -1360,7 +1365,7 @@ class FailureHandling(TestCase):
)
def test_removing_stonith_wd_timeout_complete_failure(self):
- self._remove_calls(2)
+ self._remove_calls(len(self.node_list) + 1)
self.config.http.pcmk.remove_stonith_watchdog_timeout(
communication_list=[
self.communication_list_not_connected[:1],
@@ -1406,7 +1411,7 @@ class FailureHandling(TestCase):
)
def test_set_sbd_config_failure(self):
- self._remove_calls(4)
+ self._remove_calls(len(self.node_list) + 1 + 2)
self.config.http.sbd.set_sbd_config(
communication_list=[
dict(
@@ -1453,7 +1458,7 @@ class FailureHandling(TestCase):
)
def test_set_corosync_conf_failed(self):
- self._remove_calls(5)
+ self._remove_calls(len(self.node_list) + 1 + 3)
self.config.env.push_corosync_conf(
corosync_conf_text=_get_corosync_conf_text_with_atb(
self.corosync_conf_name
@@ -1477,7 +1482,7 @@ class FailureHandling(TestCase):
)
def test_check_sbd_invalid_data_format(self):
- self._remove_calls(7)
+ self._remove_calls(len(self.node_list) + 1 + 5)
self.config.http.sbd.check_sbd(
communication_list=[
dict(
@@ -1516,7 +1521,7 @@ class FailureHandling(TestCase):
)
def test_check_sbd_failure(self):
- self._remove_calls(7)
+ self._remove_calls(len(self.node_list) + 1 + 5)
self.config.http.sbd.check_sbd(
communication_list=[
dict(
@@ -1558,7 +1563,7 @@ class FailureHandling(TestCase):
)
def test_check_sbd_not_connected(self):
- self._remove_calls(7)
+ self._remove_calls(len(self.node_list) + 1 + 5)
self.config.http.sbd.check_sbd(
communication_list=[
dict(
@@ -1601,7 +1606,7 @@ class FailureHandling(TestCase):
)
def test_get_online_targets_failed(self):
- self._remove_calls(9)
+ self._remove_calls(len(self.node_list) + 1 + 7)
self.config.http.host.check_auth(
communication_list=self.communication_list_failure
)
@@ -1626,7 +1631,7 @@ class FailureHandling(TestCase):
)
def test_get_online_targets_not_connected(self):
- self._remove_calls(9)
+ self._remove_calls(len(self.node_list) + 1 + 7)
self.config.http.host.check_auth(
communication_list=self.communication_list_not_connected
)
diff --git a/pcsd/pcs.rb b/pcsd/pcs.rb
index 452de97f..e3397c25 100644
--- a/pcsd/pcs.rb
+++ b/pcsd/pcs.rb
@@ -1838,13 +1838,22 @@ end
def set_cluster_prop_force(auth_user, prop, val)
cmd = ['property', 'set', "#{prop}=#{val}"]
flags = ['--force']
+ sig_file = "#{CIB_PATH}.sig"
+ retcode = 0
+
if pacemaker_running?
- user = auth_user
+ _, _, retcode = run_cmd(auth_user, PCS, *flags, "--", *cmd)
else
- user = PCSAuth.getSuperuserAuth()
- flags += ['-f', CIB_PATH]
+ if File.exist?(CIB_PATH)
+ flags += ['-f', CIB_PATH]
+ _, _, retcode = run_cmd(PCSAuth.getSuperuserAuth(), PCS, *flags, "--", *cmd)
+ begin
+ File.delete(sig_file)
+ rescue => e
+ $logger.debug("Cannot delete file '#{sig_file}': #{e.message}")
+ end
+ end
end
- _, _, retcode = run_cmd(user, PCS, *flags, "--", *cmd)
return (retcode == 0)
end
--
2.39.0
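
The pcsd/pcs.rb hunk above changes set_cluster_prop_force so that, with pacemaker stopped, pcs edits the CIB file directly via '-f' and the CIB signature file is removed, presumably so the stale signature no longer refers to the pre-edit CIB. Below is a minimal, self-contained Ruby sketch of that control flow for experimenting outside pcsd; pacemaker_running?, run_cmd and the constants here are simplified stand-ins for the daemon's own helpers, not its real implementations.

require 'logger'

PCS = 'pcs'.freeze
CIB_PATH = '/var/lib/pacemaker/cib/cib.xml'.freeze
$logger = Logger.new($stderr)

# stand-in: pcsd has its own check for a running pacemaker
def pacemaker_running?
  system('systemctl', '--quiet', 'is-active', 'pacemaker.service')
end

# stand-in for pcsd's run_cmd; the auth argument is ignored here
def run_cmd(_auth_user, *cmd)
  output = `#{cmd.join(' ')} 2>&1`
  [output, '', $?.exitstatus]
end

def set_cluster_prop_force(auth_user, prop, val)
  cmd = ['property', 'set', "#{prop}=#{val}"]
  flags = ['--force']
  sig_file = "#{CIB_PATH}.sig"
  retcode = 0

  if pacemaker_running?
    # cluster is up: let pcs modify the live CIB through pacemaker
    _, _, retcode = run_cmd(auth_user, PCS, *flags, '--', *cmd)
  elsif File.exist?(CIB_PATH)
    # cluster is down: edit the CIB file in place and drop the stale
    # signature file left over from the previous CIB contents
    flags += ['-f', CIB_PATH]
    _, _, retcode = run_cmd(:superuser, PCS, *flags, '--', *cmd) # stand-in for PCSAuth.getSuperuserAuth()
    begin
      File.delete(sig_file)
    rescue StandardError => e
      $logger.debug("Cannot delete file '#{sig_file}': #{e.message}")
    end
  end
  retcode == 0
end

# example call (requires pcs to be installed):
# set_cluster_prop_force(:local_user, 'maintenance-mode', 'false')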

View File

@ -1,7 +1,7 @@
From d332704bb952b2b990688e469a4b209bf051be46 Mon Sep 17 00:00:00 2001
From 4470259655fa10cb5908fee00653483e7056f1a7 Mon Sep 17 00:00:00 2001
From: Ivan Devat <idevat@redhat.com>
Date: Tue, 20 Nov 2018 15:03:56 +0100
Subject: [PATCH 2/2] do not support cluster setup with udp(u) transport
Subject: [PATCH] do not support cluster setup with udp(u) transport
---
pcs/pcs.8.in | 2 ++
@ -10,7 +10,7 @@ Subject: [PATCH 2/2] do not support cluster setup with udp(u) transport
3 files changed, 6 insertions(+)
diff --git a/pcs/pcs.8.in b/pcs/pcs.8.in
index 6a00db89..97394e96 100644
index d1a6dcf2..cd00f8ac 100644
--- a/pcs/pcs.8.in
+++ b/pcs/pcs.8.in
@@ -436,6 +436,8 @@ By default, encryption is enabled with cipher=aes256 and hash=sha256. To disable
@ -23,10 +23,10 @@ index 6a00db89..97394e96 100644
.br
Transport options are: ip_version, netmtu
diff --git a/pcs/usage.py b/pcs/usage.py
index e76bf8e5..2e721dbd 100644
index c3174d82..0a6ffcb6 100644
--- a/pcs/usage.py
+++ b/pcs/usage.py
@@ -1002,6 +1002,7 @@ Commands:
@@ -1004,6 +1004,7 @@ Commands:
hash=sha256. To disable encryption, set cipher=none and hash=none.
Transports udp and udpu:
@ -49,5 +49,5 @@ index 2f26e831..a7702ac4 100644
#csetup-transport-options.knet .without-knet
{
--
2.37.1
2.38.1

View File

@ -0,0 +1,40 @@
From 91d13a82a0803f2a4653a2ec9379a27f4555dcb5 Mon Sep 17 00:00:00 2001
From: Mamoru TASAKA <mtasaka@fedoraproject.org>
Date: Thu, 8 Dec 2022 22:47:59 +0900
Subject: [PATCH 3/5] pcsd ruby: adjust to json 2.6.3 error message change
json 2.6.3 now removes the line number information from the parser
error message.
Adjust the regex pattern in the pcs Ruby test code to accept this
error format as well (a short check of the relaxed pattern follows
this patch).
Fixes #606.
---
pcsd/test/test_config.rb | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pcsd/test/test_config.rb b/pcsd/test/test_config.rb
index 7aaf4349..a580b24f 100644
--- a/pcsd/test/test_config.rb
+++ b/pcsd/test/test_config.rb
@@ -126,7 +126,7 @@ class TestConfig < Test::Unit::TestCase
assert_equal('error', $logger.log[0][0])
assert_match(
# the number is based on JSON gem version
- /Unable to parse pcs_settings file: \d+: unexpected token/,
+ /Unable to parse pcs_settings file: (\d+: )?unexpected token/,
$logger.log[0][1]
)
assert_equal(fixture_empty_config, cfg.text)
@@ -723,7 +723,7 @@ class TestCfgKnownHosts < Test::Unit::TestCase
assert_equal('error', $logger.log[0][0])
assert_match(
# the number is based on JSON gem version
- /Unable to parse known-hosts file: \d+: unexpected token/,
+ /Unable to parse known-hosts file: (\d+: )?unexpected token/,
$logger.log[0][1]
)
assert_empty_data(cfg)
--
2.39.0
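
For a quick sanity check of the relaxed pattern introduced by the patch above, the Ruby snippet below runs it against both message styles: the pre-2.6.3 one that still carries a line number and the 2.6.3 one that does not. The sample strings are illustrative approximations of the json gem output, not captured verbatim.

# illustrative messages, not verbatim gem output
old_style = "Unable to parse pcs_settings file: 859: unexpected token at ''"  # json <= 2.6.2
new_style = "Unable to parse pcs_settings file: unexpected token at ''"       # json >= 2.6.3

relaxed = /Unable to parse pcs_settings file: (\d+: )?unexpected token/

[old_style, new_style].each do |message|
  puts format('%-72s => %s', message, message.match?(relaxed) ? 'matches' : 'no match')
end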

View File

@ -1,18 +1,18 @@
Name: pcs
Version: 0.10.14
Version: 0.10.15
Release: 4%{?dist}.alma
# https://docs.fedoraproject.org/en-US/packaging-guidelines/LicensingGuidelines/
# https://fedoraproject.org/wiki/Licensing:Main?rd=Licensing#Good_Licenses
# GPLv2: pcs
# ASL 2.0: dataclasses, tornado
# ASL 2.0 or BSD: dateutil
# GPL-2.0-only: pcs
# Apache-2.0: dataclasses, tornado
# Apache-2.0 or BSD-3-Clause: dateutil
# MIT: backports, dacite, daemons, ember, ethon, handlebars, jquery, jquery-ui,
# mustermann, rack, rack-protection, rack-test, sinatra, tilt
# GPLv2 or Ruby: eventmachne, json
# (GPLv2 or Ruby) and BSD: thin
# BSD or Ruby: open4, ruby2_keywords
# BSD and MIT: ffi
License: GPLv2 and ASL 2.0 and MIT and BSD and (GPLv2 or Ruby) and (BSD or Ruby) and (ASL 2.0 or BSD)
# GPL-2.0-only or Ruby: eventmachine, json
# (GPL-2.0-only or Ruby) and BSD-2-Clause: thin
# BSD-2-Clause or Ruby: open4, ruby2_keywords
# BSD-3-Clause and MIT: ffi
License: GPL-2.0-only AND Apache-2.0 AND MIT AND BSD-3-Clause AND (GPL-2.0-only OR Ruby) AND (BSD-2-Clause OR Ruby) AND BSD-2-Clause AND (Apache-2.0 OR BSD-3-Clause)
URL: https://github.com/ClusterLabs/pcs
Group: System Environment/Base
Summary: Pacemaker Configuration System
@ -20,7 +20,7 @@ Summary: Pacemaker Configuration System
ExclusiveArch: i686 x86_64 s390x ppc64le aarch64
%global version_or_commit %{version}
# %%global version_or_commit %%{version}.48-15d27
# %%global version_or_commit %%{version}.27-cb2fb
%global pcs_source_name %{name}-%{version_or_commit}
@ -32,26 +32,25 @@ ExclusiveArch: i686 x86_64 s390x ppc64le aarch64
%global pcs_snmp_pkg_name pcs-snmp
%global pyagentx_version 0.4.pcs.2
%global tornado_version 6.1.0
%global dataclasses_version 0.8
%global dacite_version 1.6.0
%global dateutil_version 2.8.1
%global dateutil_version 2.8.2
%global version_rubygem_backports 3.23.0
%global version_rubygem_daemons 1.4.1
%global version_rubygem_ethon 0.15.0
%global version_rubygem_ethon 0.16.0
%global version_rubygem_eventmachine 1.2.7
%global version_rubygem_ffi 1.15.5
%global version_rubygem_json 2.6.2
%global version_rubygem_mustermann 1.1.1
%global version_rubygem_json 2.6.3
%global version_rubygem_mustermann 2.0.2
%global version_rubygem_open4 1.3.4
%global version_rubygem_rack 2.2.3.1
%global version_rubygem_rack_protection 2.2.0
%global version_rubygem_rack_test 1.1.0
%global version_rubygem_rack 2.2.5
%global version_rubygem_rack_protection 2.2.4
%global version_rubygem_rack_test 2.0.2
%global version_rubygem_rexml 3.2.5
%global version_rubygem_ruby2_keywords 0.0.5
%global version_rubygem_sinatra 2.2.0
%global version_rubygem_sinatra 2.2.4
%global version_rubygem_thin 1.8.1
%global version_rubygem_tilt 2.0.10
%global version_rubygem_tilt 2.0.11
# javascript bundled libraries for old web-ui
%global ember_version 1.4.0
@ -59,6 +58,10 @@ ExclusiveArch: i686 x86_64 s390x ppc64le aarch64
%global jquery_ui_version 1.12.1
%global jquery_version 3.6.0
# DO NOT UPDATE
# Tornado 6.2 requires Python 3.7+
%global tornado_version 6.1.0
%global pcs_bundled_dir pcs_bundled
%global pcsd_public_dir pcsd/public
%global rubygem_bundle_dir pcsd/vendor/bundle
@ -86,7 +89,7 @@ Source41: https://github.com/ondrejmular/pyagentx/archive/v%{pyagentx_version}/p
Source42: https://github.com/tornadoweb/tornado/archive/v%{tornado_version}/tornado-%{tornado_version}.tar.gz
Source43: https://github.com/ericvsmith/dataclasses/archive/%{dataclasses_version}/dataclasses-%{dataclasses_version}.tar.gz
Source44: https://github.com/konradhalas/dacite/archive/v%{dacite_version}/dacite-%{dacite_version}.tar.gz
Source45: https://github.com/dateutil/dateutil/archive/%{dateutil_version}/python-dateutil-%{dateutil_version}.tar.gz
Source45: https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-%{dateutil_version}.tar.gz
Source81: https://rubygems.org/downloads/backports-%{version_rubygem_backports}.gem
Source82: https://rubygems.org/downloads/ethon-%{version_rubygem_ethon}.gem
@ -118,15 +121,17 @@ Source101: https://github.com/ClusterLabs/pcs-web-ui/releases/download/%{ui_modu
# pcs patches: <= 200
# Patch1: bzNUMBER-01-name.patch
Patch1: bz1786964-01-code-formatting.patch
Patch2: bz1786964-02-make-booth-ticket-mode-value-case-insensitive.patch
Patch3: bz1791670-01-booth-sync-check-whether-etc-booth-exists.patch
Patch4: bz2115326-01-fix-pcs-quorum-device-remove.patch
Patch5: bz2117650-01-fix-enable-sbd-from-webui.patch
Patch1: do-not-support-cluster-setup-with-udp-u-transport.patch
Patch2: bz2151511-01-add-warning-when-updating-a-misconfigured-resource.patch
Patch3: bz2151166-01-fix-displaying-bool-and-integer-values.patch
Patch4: pcsd-rubygem-json-error-message-change.patch
Patch5: bz2159455-01-add-agent-validation-option.patch
Patch6: bz2158804-01-fix-stonith-watchdog-timeout-validation.patch
Patch7: bz2166243-01-fix-stonith-watchdog-timeout-offline-update.patch
# Downstream patches do not come from upstream. They adapt pcs for specific
# RHEL needs.
Patch101: do-not-support-cluster-setup-with-udp-u-transport.patch
# Patch101: do-not-support-cluster-setup-with-udp-u-transport.patch
# ui patches: >200
@ -161,6 +166,10 @@ BuildRequires: rubygem-test-unit
BuildRequires: diffstat
# for post, preun and postun macros
BuildRequires: systemd
# pam is used for authentication inside daemon (python ctypes)
# needed for tier0 tests during build
BuildRequires: pam
# pcsd fonts and font management tools for creating symlinks to fonts
BuildRequires: fontconfig
BuildRequires: liberation-sans-fonts
@ -252,9 +261,9 @@ easily view, modify and create pacemaker based clusters.
Group: System Environment/Base
Summary: Pacemaker cluster SNMP agent
# https://fedoraproject.org/wiki/Licensing:Main?rd=Licensing#Good_Licenses
# GPLv2: pcs
# GPL-2.0-only: pcs
# BSD-2-Clause: pyagentx
License: GPLv2 and BSD-2-Clause
License: GPL-2.0-only and BSD-2-Clause
URL: https://github.com/ClusterLabs/pcs
# tar for unpacking pyagentx source tarball
@ -326,7 +335,10 @@ update_times_patch %{PATCH2}
update_times_patch %{PATCH3}
update_times_patch %{PATCH4}
update_times_patch %{PATCH5}
update_times_patch %{PATCH101}
update_times_patch %{PATCH6}
update_times_patch %{PATCH7}
# update_times_patch %{PATCH101}
cp -f %SOURCE1 %{pcsd_public_dir}/images
@ -367,7 +379,7 @@ cp -f %SOURCE45 rpm/
%define debug_package %{nil}
./autogen.sh
%{configure} --enable-local-build --enable-use-local-cache-only --enable-individual-bundling PYTHON=%{__python3} ruby_CFLAGS="%{optflags}" ruby_LIBS="%{build_ldflags}"
%{configure} --enable-local-build --enable-use-local-cache-only --enable-individual-bundling --enable-booth-enable-authfile-set --enable-booth-enable-authfile-unset PYTHON=%{__python3} ruby_CFLAGS="%{optflags}" ruby_LIBS="%{build_ldflags}"
make all
# build pcs-web-ui
@ -383,7 +395,7 @@ pwd
cp -r %{_builddir}/%{ui_src_name}/build ${RPM_BUILD_ROOT}%{_libdir}/%{pcsd_public_dir}/ui
# prepare license files
# some rubygems do not have a license file (ruby2_keywords, thin)
# some rubygems do not have a license file (thin)
mv %{rubygem_bundle_dir}/gems/backports-%{version_rubygem_backports}/LICENSE.txt backports_LICENSE.txt
mv %{rubygem_bundle_dir}/gems/daemons-%{version_rubygem_daemons}/LICENSE daemons_LICENSE
mv %{rubygem_bundle_dir}/gems/ethon-%{version_rubygem_ethon}/LICENSE ethon_LICENSE
@ -398,6 +410,7 @@ mv %{rubygem_bundle_dir}/gems/open4-%{version_rubygem_open4}/LICENSE open4_LICEN
mv %{rubygem_bundle_dir}/gems/rack-%{version_rubygem_rack}/MIT-LICENSE rack_MIT-LICENSE
mv %{rubygem_bundle_dir}/gems/rack-protection-%{version_rubygem_rack_protection}/License rack-protection_License
mv %{rubygem_bundle_dir}/gems/rack-test-%{version_rubygem_rack_test}/MIT-LICENSE.txt rack-test_MIT-LICENSE.txt
mv %{rubygem_bundle_dir}/gems/ruby2_keywords-%{version_rubygem_ruby2_keywords}/LICENSE ruby2_keywords_LICENSE
mv %{rubygem_bundle_dir}/gems/sinatra-%{version_rubygem_sinatra}/LICENSE sinatra_LICENSE
mv %{rubygem_bundle_dir}/gems/tilt-%{version_rubygem_tilt}/COPYING tilt_COPYING
@ -478,7 +491,7 @@ run_all_tests
remove_all_tests
%posttrans
# Make sure the new version of the daemon is runnning.
# Make sure the new version of the daemon is running.
# Also, make sure to start pcsd-ruby if it hasn't been started or even
# installed before. This is done by restarting pcsd.service.
%{_bindir}/systemctl daemon-reload
@ -533,6 +546,7 @@ remove_all_tests
%license rack_MIT-LICENSE
%license rack-protection_License
%license rack-test_MIT-LICENSE.txt
%license ruby2_keywords_LICENSE
%license sinatra_LICENSE
%license tilt_COPYING
%{python3_sitelib}/*
@ -575,9 +589,36 @@ remove_all_tests
%license pyagentx_LICENSE.txt
%changelog
* Fri Oct 07 2022 Andrew Lukoshko <alukoshko@almalinux.org> - 0.10.14-4.alma
* Wed Mar 29 2023 Andrew Lukoshko <alukoshko@almalinux.org> - 0.10.15-4.alma
- Debrand logo
* Thu Feb 9 2023 Michal Pospisil <mpospisi@redhat.com> - 0.10.15-4
- Fixed enabling/disabling sbd when cluster is not running
- Added BuildRequires: pam - needed for tier0 tests during build
- Resolves: rhbz#2166243
* Mon Jan 16 2023 Michal Pospisil <mpospisi@redhat.com> - 0.10.15-3
- Allow time values in the stonith-watchdog-timeout property
- Resource/stonith agent self-validation of instance attributes is now disabled by default, as many agents do not work with it properly
- Updated bundled rubygems: rack, rack-protection, sinatra
- Added license for ruby2_keywords
- Resolves: rhbz#2158804 rhbz#2159455
* Fri Dec 16 2022 Michal Pospisil <mpospisi@redhat.com> - 0.10.15-2
- Added warning when omitting validation of misconfigured resource
- Fixed displaying of bool and integer values in `pcs resource config` command
- Updated bundled rubygems: ethon, json, rack-protection, sinatra
- Resolves: rhbz#2151166 rhbz#2151511
* Wed Nov 23 2022 Michal Pospisil <mpospisi@redhat.com> - 0.10.15-1
- Rebased to latest upstream sources (see CHANGELOG.md)
- Updated Python bundled dependency dateutil
- Resolves: rhbz#2112002 rhbz#2112263 rhbz#2112291 rhbz#2132582
* Tue Oct 25 2022 Miroslav Lisik <mlisik@redhat.com> - 0.10.14-6
- Rebased to latest upstream sources (see CHANGELOG.md)
- Updated rubygem bundled packages: mustermann, rack, rack-protection, rack-test, sinatra, tilt
- Resolves: rhbz#1816852 rhbz#1918527 rhbz#2112267 rhbz#2112291
* Wed Aug 17 2022 Miroslav Lisik <mlisik@redhat.com> - 0.10.14-4
- Fixed enable sbd from webui
- Resolves: rhbz#2117650