From d88962d655257940a678724cc8d7bc1008ed3a46 Mon Sep 17 00:00:00 2001
From: Tomas Jelinek
Date: Tue, 5 May 2020 11:02:36 +0200
Subject: [PATCH 1/3] fix running 'pcs status' on remote nodes

---
 pcs/lib/commands/status.py                 |  24 +++-
 pcs_test/tier0/lib/commands/test_status.py | 122 +++++++++++++++++++++
 2 files changed, 141 insertions(+), 5 deletions(-)

diff --git a/pcs/lib/commands/status.py b/pcs/lib/commands/status.py
index 26332a65..84e3e046 100644
--- a/pcs/lib/commands/status.py
+++ b/pcs/lib/commands/status.py
@@ -1,3 +1,4 @@
+import os.path
 from typing import (
     Iterable,
     List,
@@ -6,6 +7,7 @@ from typing import (
 )
 from xml.etree.ElementTree import Element
 
+from pcs import settings
 from pcs.common import file_type_codes
 from pcs.common.node_communicator import Communicator
 from pcs.common.reports import (
@@ -17,7 +19,7 @@ from pcs.common.tools import (
     indent,
 )
 from pcs.lib import reports
-from pcs.lib.cib import stonith
+from pcs.lib.cib import nvpair, stonith
 from pcs.lib.cib.tools import get_crm_config, get_resources
 from pcs.lib.communication.nodes import CheckReachability
 from pcs.lib.communication.tools import run as run_communication
@@ -57,6 +59,7 @@ def full_cluster_status_plaintext(
     """
     # pylint: disable=too-many-branches
     # pylint: disable=too-many-locals
+    # pylint: disable=too-many-statements
 
     # validation
     if not env.is_cib_live and env.is_corosync_conf_live:
@@ -84,7 +87,11 @@ def full_cluster_status_plaintext(
     status_text, warning_list = get_cluster_status_text(
         runner, hide_inactive_resources, verbose
     )
-    corosync_conf = env.get_corosync_conf()
+    corosync_conf = None
+    # If we are live on a remote node, we have no corosync.conf.
+    # TODO Use the new file framework so the path is not exposed.
+    if not live or os.path.exists(settings.corosync_conf_file):
+        corosync_conf = env.get_corosync_conf()
     cib = env.get_cib()
     if verbose:
         ticket_status_text, ticket_status_stderr, ticket_status_retval = (
@@ -97,7 +104,7 @@ def full_cluster_status_plaintext(
         except LibraryError:
             pass
     local_services_status = _get_local_services_status(runner)
-    if verbose:
+    if verbose and corosync_conf:
         node_name_list, node_names_report_list = get_existing_nodes_names(
             corosync_conf
         )
@@ -117,8 +124,15 @@ def full_cluster_status_plaintext(
     if report_processor.has_errors:
         raise LibraryError()
 
+    cluster_name = (
+        corosync_conf.get_cluster_name()
+        if corosync_conf
+        else nvpair.get_value(
+            "cluster_property_set", get_crm_config(cib), "cluster-name", ""
+        )
+    )
     parts = []
-    parts.append(f"Cluster name: {corosync_conf.get_cluster_name()}")
+    parts.append(f"Cluster name: {cluster_name}")
     if warning_list:
         parts.extend(["", "WARNINGS:"] + warning_list + [""])
     parts.append(status_text)
@@ -136,7 +150,7 @@ def full_cluster_status_plaintext(
         else:
             parts.extend(indent(ticket_status_text.splitlines()))
     if live:
-        if verbose:
+        if verbose and corosync_conf:
             parts.extend(["", "PCSD Status:"])
             parts.extend(indent(
                 _format_node_reachability(node_name_list, node_reachability)
diff --git a/pcs_test/tier0/lib/commands/test_status.py b/pcs_test/tier0/lib/commands/test_status.py
index 06878668..7d54d579 100644
--- a/pcs_test/tier0/lib/commands/test_status.py
+++ b/pcs_test/tier0/lib/commands/test_status.py
@@ -1,6 +1,7 @@
 from textwrap import dedent
 from unittest import TestCase
 
+from pcs import settings
 from pcs.common import file_type_codes, report_codes
 from pcs.lib.commands import status
 from pcs_test.tools import fixture
@@ -9,16 +10,33 @@ from pcs_test.tools.misc import read_test_resource as rc_read
 
 
 class FullClusterStatusPlaintext(TestCase):
+    # pylint: disable=too-many-public-methods
     def setUp(self):
         self.env_assist, self.config = get_env_tools(self)
         self.node_name_list = ["node1", "node2", "node3"]
         self.maxDiff = None
 
+    @staticmethod
+    def _fixture_xml_clustername(name):
+        return """
+
+
+
+
+
+
+
+
+        """.format(
+            name=name
+        )
+
     def _fixture_config_live_minimal(self):
         (self.config
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load()
             .runner.cib.load(resources="""
 
@@ -30,6 +48,25 @@ class FullClusterStatusPlaintext(TestCase):
             )
         )
 
+    def _fixture_config_live_remote_minimal(self):
+        (
+            self.config.runner.pcmk.load_state_plaintext(
+                stdout="crm_mon cluster status",
+            )
+            .fs.exists(settings.corosync_conf_file, return_value=False)
+            .runner.cib.load(
+                optional_in_conf=self._fixture_xml_clustername("test-cib"),
+                resources="""
+
+
+
+                """,
+            )
+            .runner.systemctl.is_active(
+                "sbd", is_active=False, name="runner.systemctl.is_active.sbd"
+            )
+        )
+
     def _fixture_config_local_daemons(
         self, corosync_enabled=True, corosync_active=True,
@@ -150,6 +187,7 @@ class FullClusterStatusPlaintext(TestCase):
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load_content("invalid corosync conf")
         )
         self.env_assist.assert_raise_library_error(
@@ -170,6 +208,7 @@ class FullClusterStatusPlaintext(TestCase):
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load()
             .runner.cib.load_content(
                 "some stdout", stderr="cib load error", returncode=1
@@ -214,6 +253,7 @@ class FullClusterStatusPlaintext(TestCase):
                 verbose=True,
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load(node_name_list=self.node_name_list)
             .runner.cib.load(resources="""
 
@@ -254,6 +294,82 @@ class FullClusterStatusPlaintext(TestCase):
             )
         )
 
+    def test_success_live_remote_node(self):
+        self._fixture_config_live_remote_minimal()
+        self._fixture_config_local_daemons(
+            corosync_enabled=False,
+            corosync_active=False,
+            pacemaker_enabled=False,
+            pacemaker_active=False,
+            pacemaker_remote_enabled=True,
+            pacemaker_remote_active=True,
+        )
+        self.assertEqual(
+            status.full_cluster_status_plaintext(self.env_assist.get_env()),
+            dedent(
+                """\
+                Cluster name: test-cib
+                crm_mon cluster status
+
+                Daemon Status:
+                  corosync: inactive/disabled
+                  pacemaker: inactive/disabled
+                  pacemaker_remote: active/enabled
+                  pcsd: active/enabled"""
+            ),
+        )
+
+    def test_success_live_remote_node_verbose(self):
+        (
+            self.config.runner.pcmk.can_fence_history_status(
+                stderr="not supported"
+            )
+            .runner.pcmk.load_state_plaintext(
+                verbose=True, stdout="crm_mon cluster status",
+            )
+            .fs.exists(settings.corosync_conf_file, return_value=False)
+            .runner.cib.load(
+                optional_in_conf=self._fixture_xml_clustername("test-cib"),
+                resources="""
+
+
+
+                """,
+            )
+            .runner.pcmk.load_ticket_state_plaintext(stdout="ticket status")
+            .runner.systemctl.is_active(
+                "sbd", is_active=False, name="runner.systemctl.is_active.sbd"
+            )
+        )
+        self._fixture_config_local_daemons(
+            corosync_enabled=False,
+            corosync_active=False,
+            pacemaker_enabled=False,
+            pacemaker_active=False,
+            pacemaker_remote_enabled=True,
+            pacemaker_remote_active=True,
+        )
+
+        self.assertEqual(
+            status.full_cluster_status_plaintext(
+                self.env_assist.get_env(), verbose=True
+            ),
+            dedent(
+                """\
+                Cluster name: test-cib
+                crm_mon cluster status
+
+                Tickets:
+                  ticket status
+
+                Daemon Status:
+                  corosync: inactive/disabled
+                  pacemaker: inactive/disabled
+                  pacemaker_remote: active/enabled
+                  pcsd: active/enabled"""
+            ),
+        )
+
     def test_succes_mocked(self):
         (self.config
             .env.set_corosync_conf_data(rc_read("corosync.conf"))
@@ -316,6 +432,7 @@ class FullClusterStatusPlaintext(TestCase):
                 fence_history=True,
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load(node_name_list=self.node_name_list)
             .runner.cib.load(resources="""
 
@@ -365,6 +482,7 @@ class FullClusterStatusPlaintext(TestCase):
                 verbose=True,
                 stdout="crm_mon cluster status",
            )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load(node_name_list=self.node_name_list)
             .runner.cib.load(resources="""
 
@@ -421,6 +539,7 @@ class FullClusterStatusPlaintext(TestCase):
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load()
             .runner.cib.load()
             .runner.systemctl.is_active(
@@ -453,6 +572,7 @@ class FullClusterStatusPlaintext(TestCase):
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load()
             .runner.cib.load()
             .runner.systemctl.is_active(
@@ -481,6 +601,7 @@ class FullClusterStatusPlaintext(TestCase):
             .runner.pcmk.load_state_plaintext(
                 stdout="crm_mon cluster status",
            )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load()
             .runner.cib.load(resources="""
 
@@ -539,6 +660,7 @@ class FullClusterStatusPlaintext(TestCase):
                 verbose=True,
                 stdout="crm_mon cluster status",
             )
+            .fs.exists(settings.corosync_conf_file, return_value=True)
             .corosync_conf.load(node_name_list=self.node_name_list)
             .runner.cib.load(resources="""
-- 
2.25.4