From 4562939050ac84c3dd7f9d2278676b13fcc8c1c9 Mon Sep 17 00:00:00 2001 From: Tomas Jelinek Date: Fri, 4 Oct 2024 12:07:08 +0200 Subject: [PATCH] refactor: ha_cluster_info: split to several modules Implementing changes proposed in code review --- .sanity-ansible-ignore-2.13.txt | 4 + .sanity-ansible-ignore-2.14.txt | 8 + .sanity-ansible-ignore-2.15.txt | 8 + .sanity-ansible-ignore-2.16.txt | 4 + library/ha_cluster_info.py | 311 +-------- module_utils/ha_cluster_lsr/info/__init__.py | 0 module_utils/ha_cluster_lsr/info/exporter.py | 128 ++++ module_utils/ha_cluster_lsr/info/loader.py | 182 ++++++ tests/unit/test_ha_cluster_info.py | 636 +------------------ tests/unit/test_info_exporter.py | 384 +++++++++++ tests/unit/test_info_loader.py | 273 ++++++++ 11 files changed, 1009 insertions(+), 929 deletions(-) create mode 100644 module_utils/ha_cluster_lsr/info/__init__.py create mode 100644 module_utils/ha_cluster_lsr/info/exporter.py create mode 100644 module_utils/ha_cluster_lsr/info/loader.py create mode 100644 tests/unit/test_info_exporter.py create mode 100644 tests/unit/test_info_loader.py diff --git a/.sanity-ansible-ignore-2.13.txt b/.sanity-ansible-ignore-2.13.txt index 845566e6..850df1fc 100644 --- a/.sanity-ansible-ignore-2.13.txt +++ b/.sanity-ansible-ignore-2.13.txt @@ -23,4 +23,8 @@ plugins/modules/pcs_qdevice_certs.py import-3.8!skip plugins/modules/ha_cluster_info.py compile-2.7!skip plugins/modules/ha_cluster_info.py import-2.7!skip plugins/modules/ha_cluster_info.py validate-modules:missing-gplv3-license +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-2.7!skip tests/ha_cluster/unit/test_ha_cluster_info.py shebang!skip diff --git a/.sanity-ansible-ignore-2.14.txt b/.sanity-ansible-ignore-2.14.txt index 43cb08c8..fc80944e 100644 --- 
a/.sanity-ansible-ignore-2.14.txt +++ b/.sanity-ansible-ignore-2.14.txt @@ -31,4 +31,12 @@ plugins/modules/ha_cluster_info.py compile-3.5!skip plugins/modules/ha_cluster_info.py import-2.7!skip plugins/modules/ha_cluster_info.py import-3.5!skip plugins/modules/ha_cluster_info.py validate-modules:missing-gplv3-license +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-3.5!skip tests/ha_cluster/unit/test_ha_cluster_info.py shebang!skip diff --git a/.sanity-ansible-ignore-2.15.txt b/.sanity-ansible-ignore-2.15.txt index 43cb08c8..fc80944e 100644 --- a/.sanity-ansible-ignore-2.15.txt +++ b/.sanity-ansible-ignore-2.15.txt @@ -31,4 +31,12 @@ plugins/modules/ha_cluster_info.py compile-3.5!skip plugins/modules/ha_cluster_info.py import-2.7!skip plugins/modules/ha_cluster_info.py import-3.5!skip plugins/modules/ha_cluster_info.py validate-modules:missing-gplv3-license +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-3.5!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-3.5!skip tests/ha_cluster/unit/test_ha_cluster_info.py shebang!skip diff --git 
a/.sanity-ansible-ignore-2.16.txt b/.sanity-ansible-ignore-2.16.txt index 845566e6..850df1fc 100644 --- a/.sanity-ansible-ignore-2.16.txt +++ b/.sanity-ansible-ignore-2.16.txt @@ -23,4 +23,8 @@ plugins/modules/pcs_qdevice_certs.py import-3.8!skip plugins/modules/ha_cluster_info.py compile-2.7!skip plugins/modules/ha_cluster_info.py import-2.7!skip plugins/modules/ha_cluster_info.py validate-modules:missing-gplv3-license +plugins/module_utils/ha_cluster_lsr/info/exporter.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/exporter.py import-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py compile-2.7!skip +plugins/module_utils/ha_cluster_lsr/info/loader.py import-2.7!skip tests/ha_cluster/unit/test_ha_cluster_info.py shebang!skip diff --git a/library/ha_cluster_info.py b/library/ha_cluster_info.py index a3aeba62..f6c3e453 100644 --- a/library/ha_cluster_info.py +++ b/library/ha_cluster_info.py @@ -11,7 +11,6 @@ # make ansible-test happy, even though the module requires Python 3 # pylint: disable=invalid-name __metaclass__ = type -# pylint: enable=invalid-name DOCUMENTATION = r""" --- @@ -72,297 +71,15 @@ - HORIZONTALLINE """ -import json -import os.path -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple from ansible.module_utils.basic import AnsibleModule -COROSYNC_CONF_PATH = "/etc/corosync/corosync.conf" -KNOWN_HOSTS_PATH = "/var/lib/pcsd/known-hosts" +# pylint: disable=no-name-in-module +from ansible.module_utils.ha_cluster_lsr.info import exporter, loader -CommandRunner = Callable[ - # parameters: args, environ_update - # environ_update should be a keyword argument, but they are not possible in - # Callable. 
typing.Protocol would have to be used to type that, but that is - # not available in Python 3.6 - [List[str], Optional[Dict[str, str]]], - # return value: rc, stdout, stderr - Tuple[int, str, str], -] - - -class CliCommandError(Exception): - """ - Running pcs has failed - """ - - def __init__( - self, pcs_command: List[str], rc: int, stdout: str, stderr: str - ): - self.pcs_command = pcs_command - self.rc = rc - self.stdout = stdout - self.stderr = stderr - - @property - def kwargs(self) -> Dict[str, Any]: - """ - Arguments given to the constructor - """ - return dict( - pcs_command=self.pcs_command, - rc=self.rc, - stdout=self.stdout, - stderr=self.stderr, - ) - - -class JsonParseError(Exception): - """ - Unable to parse JSON data - """ - - def __init__( - self, - error: str, - data: str, - data_desc: str, - additional_info: Optional[str] = None, - ): - self.error = error - self.data = data - self.data_desc = data_desc - self.additional_info = additional_info - - @property - def kwargs(self) -> Dict[str, Any]: - """ - Arguments given to the constructor - """ - return dict( - error=self.error, - data=self.data, - data_desc=self.data_desc, - additional_info=self.additional_info, - ) - - -class JsonMissingKey(Exception): - """ - A key is not present in pcs JSON output - """ - - def __init__(self, key: str, data: Dict[str, Any], data_desc: str): - self.key = key - self.data = data - self.data_desc = data_desc - - @property - def kwargs(self) -> Dict[str, Any]: - """ - Arguments given to the constructor - """ - return dict(key=self.key, data=self.data, data_desc=self.data_desc) - - -# functions loading data from cluster - - -def is_service_enabled(run_command: CommandRunner, service: str) -> bool: - """ - Check whether a specified service is enabled in the OS - - service -- name of the service to check without the ".service" suffix - """ - env = { - # make sure to get output of external processes in English and ASCII - "LC_ALL": "C", - } - # wokeignore:rule=dummy - rc, 
dummy_stdout, dummy_stderr = run_command( - ["systemctl", "is-enabled", f"{service}.service"], env - ) - return rc == 0 - - -def load_start_on_boot(run_command: CommandRunner) -> bool: - """ - Detect wheter a cluster is configured to start on boot - """ - return is_service_enabled(run_command, "corosync") or is_service_enabled( - run_command, "pacemaker" - ) - - -def call_pcs_cli( - run_command: CommandRunner, command: List[str] -) -> Dict[str, Any]: - """ - Run pcs CLI with the specified command, transform resulting JSON into a dict - - command -- pcs command to run without the "pcs" prefix - """ - env = { - # make sure to get output of external processes in English and ASCII - "LC_ALL": "C", - } - full_command = ["pcs"] + command - rc, stdout, stderr = run_command(full_command, env) - if rc != 0: - raise CliCommandError(full_command, rc, stdout, stderr) - try: - return json.loads(stdout) - except json.JSONDecodeError as e: - raise JsonParseError( - str(e), stdout, " ".join(full_command), stderr - ) from e - - -def load_corosync_conf(run_command: CommandRunner) -> Dict[str, Any]: - """ - Get corosync configuration from pcs - """ - return call_pcs_cli( - run_command, ["cluster", "config", "--output-format=json"] - ) - - -def load_pcsd_known_hosts() -> Dict[str, str]: - """ - Load pcsd known hosts and return dict node_name: node_address - """ - result: Dict[str, str] = dict() - if not os.path.exists(KNOWN_HOSTS_PATH): - return result - try: - with open(KNOWN_HOSTS_PATH, "r", encoding="utf-8") as known_hosts_file: - known_hosts = json.load(known_hosts_file) - for host_name, host_data in known_hosts.get("known_hosts", {}).items(): - if not host_data.get("dest_list"): - continue - # currently no more than one address is supported by both the role - # and pcs - addr = host_data.get("dest_list")[0].get("addr") - port = host_data.get("dest_list")[0].get("port") - if not addr: - continue - host_addr = addr - if port: - host_addr = ( - f"[{addr}]:{port}" if ":" in addr else 
f"{addr}:{port}" - ) - result[host_name] = host_addr - return result - except json.JSONDecodeError as e: - # cannot show actual data as they contain sensitive information - tokens - raise JsonParseError(str(e), "not logging data", "known hosts") from e - - -# functions transforming data from pcs format to role format - - -def dict_to_nv_list(input_dict: Dict[str, Any]) -> List[Dict[str, Any]]: - """ - Convert a dict to a list of dicts with keys 'name' and 'value' - """ - return [dict(name=name, value=value) for name, value in input_dict.items()] - - -def export_corosync_options( - corosync_conf_dict: Dict[str, Any] -) -> Dict[str, Any]: - """ - Transform corosync config from pcs format to role format excluding nodes - - corosync_conf_dict -- corosync config structure provided by pcs - """ - result: Dict[str, Any] = dict() - try: - result["ha_cluster_cluster_name"] = corosync_conf_dict["cluster_name"] - - transport = dict(type=corosync_conf_dict["transport"].lower()) - if corosync_conf_dict["transport_options"]: - transport["options"] = dict_to_nv_list( - corosync_conf_dict["transport_options"] - ) - if corosync_conf_dict["links_options"]: - link_list = [] - for link_dict in corosync_conf_dict["links_options"].values(): - # linknumber is an index in links_options, but it is present in - # link_dict as well - link_list.append(dict_to_nv_list(link_dict)) - transport["links"] = link_list - if corosync_conf_dict["compression_options"]: - transport["compression"] = dict_to_nv_list( - corosync_conf_dict["compression_options"] - ) - if corosync_conf_dict["crypto_options"]: - transport["crypto"] = dict_to_nv_list( - corosync_conf_dict["crypto_options"] - ) - result["ha_cluster_transport"] = transport - - if corosync_conf_dict["totem_options"]: - result["ha_cluster_totem"] = dict( - options=dict_to_nv_list(corosync_conf_dict["totem_options"]) - ) - - if corosync_conf_dict["quorum_options"]: - result["ha_cluster_quorum"] = dict( - 
options=dict_to_nv_list(corosync_conf_dict["quorum_options"]) - ) - except KeyError as e: - raise JsonMissingKey( - e.args[0], corosync_conf_dict, "corosync configuration" - ) from e - return result - - -def export_cluster_nodes( - corosync_conf_nodes: List[Dict[str, Any]], pcs_node_addr: Dict[str, str] -) -> List[Dict[str, Any]]: - """ - Transform node configuration from pcs format to role format - - corosync_conf_dict -- corosync config structure provided by pcs - pcs_node_addr -- dict holding pcs address for cluster nodes - """ - node_list: List[Dict[str, Any]] = [] - if not corosync_conf_nodes: - return node_list - for index, node_dict in enumerate(corosync_conf_nodes): - # corosync node configuration - try: - one_node = dict( - node_name=node_dict["name"], - corosync_addresses=[ - addr_dict["addr"] - for addr_dict in sorted( - node_dict["addrs"], - key=lambda item: item["link"], - ) - ], - ) - except KeyError as e: - raise JsonMissingKey( - e.args[0], - dict(nodes=corosync_conf_nodes), - f"corosync configuration for node on index {index}", - ) from e - # pcs node configuration - if one_node["node_name"] in pcs_node_addr: - one_node["pcs_address"] = pcs_node_addr[one_node["node_name"]] - # finish one node export - node_list.append(one_node) - return node_list - - -# ansible module tools and top layer functions - - -def get_cmd_runner(module: AnsibleModule) -> CommandRunner: +def get_cmd_runner(module: AnsibleModule) -> loader.CommandRunner: """ Provide a function responsible for running external processes """ @@ -387,21 +104,21 @@ def export_cluster_configuration(module: AnsibleModule) -> Dict[str, Any]: result: dict[str, Any] = dict() cmd_runner = get_cmd_runner(module) - result["ha_cluster_start_on_boot"] = load_start_on_boot(cmd_runner) + result["ha_cluster_start_on_boot"] = loader.get_start_on_boot(cmd_runner) # Corosync config is availabe via CLI since pcs-0.10.8, via API v2 since # pcs-0.12.0 and pcs-0.11.9. 
For old pcs versions, CLI must be used, and # there is no benefit in implementing access via API on top of that. # No need to check pcs capabilities. If this is not supported by pcs, # exporting anything else is pointless (and not supported by pcs anyway). - corosync_conf_pcs = load_corosync_conf(cmd_runner) + corosync_conf_pcs = loader.get_corosync_conf(cmd_runner) # known-hosts file is available since pcs-0.10, but is not exported by pcs # in any version. # No need to check pcs capabilities. - known_hosts_pcs = load_pcsd_known_hosts() + known_hosts_pcs = loader.get_pcsd_known_hosts() # Convert corosync config to role format - corosync_conf_role = export_corosync_options(corosync_conf_pcs) + corosync_conf_role = exporter.export_corosync_options(corosync_conf_pcs) for key in ( "ha_cluster_cluster_name", "ha_cluster_transport", @@ -413,11 +130,11 @@ def export_cluster_configuration(module: AnsibleModule) -> Dict[str, Any]: # Convert cluster definition to role format try: - result["ha_cluster_node_options"] = export_cluster_nodes( + result["ha_cluster_node_options"] = exporter.export_cluster_nodes( corosync_conf_pcs["nodes"], known_hosts_pcs ) except KeyError as e: - raise JsonMissingKey( + raise exporter.JsonMissingKey( e.args[0], corosync_conf_pcs, "corosync configuration" ) from e @@ -436,22 +153,22 @@ def main() -> None: module_result["ha_cluster"] = ha_cluster_result try: - if os.path.exists(COROSYNC_CONF_PATH): + if loader.has_corosync_conf(): ha_cluster_result.update(**export_cluster_configuration(module)) ha_cluster_result["ha_cluster_cluster_present"] = True else: ha_cluster_result["ha_cluster_cluster_present"] = False module.exit_json(**module_result) - except JsonMissingKey as e: + except exporter.JsonMissingKey as e: module.fail_json( msg=f"Missing key {e.key} in pcs {e.data_desc} JSON output", error_details=e.kwargs, ) - except JsonParseError as e: + except loader.JsonParseError as e: module.fail_json( msg="Error while parsing pcs JSON output", 
error_details=e.kwargs ) - except CliCommandError as e: + except loader.CliCommandError as e: module.fail_json(msg="Error while running pcs", error_details=e.kwargs) diff --git a/module_utils/ha_cluster_lsr/info/__init__.py b/module_utils/ha_cluster_lsr/info/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/module_utils/ha_cluster_lsr/info/exporter.py b/module_utils/ha_cluster_lsr/info/exporter.py new file mode 100644 index 00000000..78dcfdbf --- /dev/null +++ b/module_utils/ha_cluster_lsr/info/exporter.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2024 Red Hat, Inc. +# Author: Tomas Jelinek +# SPDX-License-Identifier: MIT + +# make ansible-test happy, even though the module requires Python 3 +from __future__ import absolute_import, division, print_function + +# make ansible-test happy, even though the module requires Python 3 +# pylint: disable=invalid-name +__metaclass__ = type + +from typing import Any, Dict, List + + +class JsonMissingKey(Exception): + """ + A key is not present in pcs JSON output + """ + + def __init__(self, key: str, data: Dict[str, Any], data_desc: str): + self.key = key + self.data = data + self.data_desc = data_desc + + @property + def kwargs(self) -> Dict[str, Any]: + """ + Arguments given to the constructor + """ + return dict(key=self.key, data=self.data, data_desc=self.data_desc) + + +def _dict_to_nv_list(input_dict: Dict[str, Any]) -> List[Dict[str, Any]]: + """ + Convert a dict to a list of dicts with keys 'name' and 'value' + """ + return [dict(name=name, value=value) for name, value in input_dict.items()] + + +def export_corosync_options( + corosync_conf_dict: Dict[str, Any] +) -> Dict[str, Any]: + """ + Transform corosync config from pcs format to role format excluding nodes + + corosync_conf_dict -- corosync config structure provided by pcs + """ + result: Dict[str, Any] = dict() + try: + result["ha_cluster_cluster_name"] = corosync_conf_dict["cluster_name"] + + transport = 
dict(type=corosync_conf_dict["transport"].lower()) + if corosync_conf_dict["transport_options"]: + transport["options"] = _dict_to_nv_list( + corosync_conf_dict["transport_options"] + ) + if corosync_conf_dict["links_options"]: + link_list = [] + for link_dict in corosync_conf_dict["links_options"].values(): + # linknumber is an index in links_options, but it is present in + # link_dict as well + link_list.append(_dict_to_nv_list(link_dict)) + transport["links"] = link_list + if corosync_conf_dict["compression_options"]: + transport["compression"] = _dict_to_nv_list( + corosync_conf_dict["compression_options"] + ) + if corosync_conf_dict["crypto_options"]: + transport["crypto"] = _dict_to_nv_list( + corosync_conf_dict["crypto_options"] + ) + result["ha_cluster_transport"] = transport + + if corosync_conf_dict["totem_options"]: + result["ha_cluster_totem"] = dict( + options=_dict_to_nv_list(corosync_conf_dict["totem_options"]) + ) + + if corosync_conf_dict["quorum_options"]: + result["ha_cluster_quorum"] = dict( + options=_dict_to_nv_list(corosync_conf_dict["quorum_options"]) + ) + except KeyError as e: + raise JsonMissingKey( + e.args[0], corosync_conf_dict, "corosync configuration" + ) from e + return result + + +def export_cluster_nodes( + corosync_conf_nodes: List[Dict[str, Any]], pcs_node_addr: Dict[str, str] +) -> List[Dict[str, Any]]: + """ + Transform node configuration from pcs format to role format + + corosync_conf_nodes -- corosync config structure provided by pcs + pcs_node_addr -- dict holding pcs address for cluster nodes + """ + node_list: List[Dict[str, Any]] = [] + if not corosync_conf_nodes: + return node_list + for index, node_dict in enumerate(corosync_conf_nodes): + # corosync node configuration + try: + one_node = dict( + node_name=node_dict["name"], + corosync_addresses=[ + addr_dict["addr"] + for addr_dict in sorted( + node_dict["addrs"], + key=lambda item: item["link"], + ) + ], + ) + except KeyError as e: + raise JsonMissingKey( 
e.args[0], + dict(nodes=corosync_conf_nodes), + f"corosync configuration for node on index {index}", + ) from e + # pcs node configuration + if one_node["node_name"] in pcs_node_addr: + one_node["pcs_address"] = pcs_node_addr[one_node["node_name"]] + # finish one node export + node_list.append(one_node) + return node_list diff --git a/module_utils/ha_cluster_lsr/info/loader.py b/module_utils/ha_cluster_lsr/info/loader.py new file mode 100644 index 00000000..f755826c --- /dev/null +++ b/module_utils/ha_cluster_lsr/info/loader.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2024 Red Hat, Inc. +# Author: Tomas Jelinek +# SPDX-License-Identifier: MIT + +# make ansible-test happy, even though the module requires Python 3 +from __future__ import absolute_import, division, print_function + +# make ansible-test happy, even though the module requires Python 3 +# pylint: disable=invalid-name +__metaclass__ = type + +import json +import os.path +from typing import Any, Callable, Dict, List, Optional, Tuple + +COROSYNC_CONF_PATH = "/etc/corosync/corosync.conf" +KNOWN_HOSTS_PATH = "/var/lib/pcsd/known-hosts" + +CommandRunner = Callable[ + # parameters: args, environ_update + # environ_update should be a keyword argument, but they are not possible in + # Callable. 
typing.Protocol would have to be used to type that, but that is + not available in Python 3.6 + [List[str], Optional[Dict[str, str]]], + # return value: rc, stdout, stderr + Tuple[int, str, str], +] + + +class CliCommandError(Exception): + """ + Running pcs has failed + """ + + def __init__( + self, pcs_command: List[str], rc: int, stdout: str, stderr: str + ): + self.pcs_command = pcs_command + self.rc = rc + self.stdout = stdout + self.stderr = stderr + + @property + def kwargs(self) -> Dict[str, Any]: + """ + Arguments given to the constructor + """ + return dict( + pcs_command=self.pcs_command, + rc=self.rc, + stdout=self.stdout, + stderr=self.stderr, + ) + + +class JsonParseError(Exception): + """ + Unable to parse JSON data + """ + + def __init__( + self, + error: str, + data: str, + data_desc: str, + additional_info: Optional[str] = None, + ): + self.error = error + self.data = data + self.data_desc = data_desc + self.additional_info = additional_info + + @property + def kwargs(self) -> Dict[str, Any]: + """ + Arguments given to the constructor + """ + return dict( + error=self.error, + data=self.data, + data_desc=self.data_desc, + additional_info=self.additional_info, + ) + + +def _is_service_enabled(run_command: CommandRunner, service: str) -> bool: + """ + Check whether a specified service is enabled in the OS + + service -- name of the service to check without the ".service" suffix + """ + env = { + # make sure to get output of external processes in English and ASCII + "LC_ALL": "C", + } + # wokeignore:rule=dummy + rc, dummy_stdout, dummy_stderr = run_command( + ["systemctl", "is-enabled", f"{service}.service"], env + ) + return rc == 0 + + +def get_start_on_boot(run_command: CommandRunner) -> bool: + """ + Detect whether a cluster is configured to start on boot + """ + return _is_service_enabled(run_command, "corosync") or _is_service_enabled( + run_command, "pacemaker" + ) + + +def _call_pcs_cli( + run_command: CommandRunner, command: List[str] +) -> 
Dict[str, Any]: + """ + Run pcs CLI with the specified command, transform resulting JSON into a dict + + command -- pcs command to run without the "pcs" prefix + """ + env = { + # make sure to get output of external processes in English and ASCII + "LC_ALL": "C", + } + full_command = ["pcs"] + command + rc, stdout, stderr = run_command(full_command, env) + if rc != 0: + raise CliCommandError(full_command, rc, stdout, stderr) + try: + return json.loads(stdout) + except json.JSONDecodeError as e: + raise JsonParseError( + str(e), stdout, " ".join(full_command), stderr + ) from e + + +def has_corosync_conf() -> bool: + """ + Check whether corosync.conf file is present + """ + return os.path.exists(COROSYNC_CONF_PATH) + + +def get_corosync_conf(run_command: CommandRunner) -> Dict[str, Any]: + """ + Get corosync configuration from pcs + """ + return _call_pcs_cli( + run_command, ["cluster", "config", "--output-format=json"] + ) + + +def get_pcsd_known_hosts() -> Dict[str, str]: + """ + Load pcsd known hosts and return dict node_name: node_address + """ + result: Dict[str, str] = dict() + if not os.path.exists(KNOWN_HOSTS_PATH): + return result + try: + with open(KNOWN_HOSTS_PATH, "r", encoding="utf-8") as known_hosts_file: + known_hosts = json.load(known_hosts_file) + for host_name, host_data in known_hosts.get("known_hosts", {}).items(): + if not host_data.get("dest_list"): + continue + # currently no more than one address is supported by both the role + # and pcs + addr = host_data.get("dest_list")[0].get("addr") + port = host_data.get("dest_list")[0].get("port") + if not addr: + continue + host_addr = addr + if port: + host_addr = ( + f"[{addr}]:{port}" if ":" in addr else f"{addr}:{port}" + ) + result[host_name] = host_addr + return result + except json.JSONDecodeError as e: + # cannot show actual data as they contain sensitive information - tokens + raise JsonParseError(str(e), "not logging data", "known hosts") from e diff --git 
a/tests/unit/test_ha_cluster_info.py b/tests/unit/test_ha_cluster_info.py index 3ef13db6..52443dc7 100644 --- a/tests/unit/test_ha_cluster_info.py +++ b/tests/unit/test_ha_cluster_info.py @@ -11,7 +11,6 @@ import json import sys from importlib import import_module -from typing import Any, Dict, List from unittest import TestCase, mock sys.modules["ansible.module_utils.ha_cluster_lsr"] = import_module( @@ -20,638 +19,11 @@ import ha_cluster_info -# functions loading data from cluster - - -class IsServiceEnabled(TestCase): - def setUp(self) -> None: - self.runner_mock = mock.Mock() - - def test_is_enabled(self) -> None: - self.runner_mock.return_value = (0, "enabled", "") - self.assertTrue( - ha_cluster_info.is_service_enabled(self.runner_mock, "corosync") - ) - self.runner_mock.assert_called_once_with( - ["systemctl", "is-enabled", "corosync.service"], - {"LC_ALL": "C"}, - ) - - def test_is_disabled(self) -> None: - self.runner_mock.return_value = (1, "disabled", "") - self.assertFalse( - ha_cluster_info.is_service_enabled(self.runner_mock, "pacemaker") - ) - self.runner_mock.assert_called_once_with( - ["systemctl", "is-enabled", "pacemaker.service"], - {"LC_ALL": "C"}, - ) - - def test_unexpected_output(self) -> None: - self.runner_mock.return_value = (4, "not-found", "") - self.assertFalse( - ha_cluster_info.is_service_enabled(self.runner_mock, "pcmk") - ) - self.runner_mock.assert_called_once_with( - ["systemctl", "is-enabled", "pcmk.service"], - {"LC_ALL": "C"}, - ) - - -class LoadStartOnBoot(TestCase): - @mock.patch("ha_cluster_info.is_service_enabled") - def test_main(self, mock_is_enabled: mock.Mock) -> None: - runner_mock = mock.Mock() - mock_is_enabled.side_effect = [False, False] - self.assertFalse(ha_cluster_info.load_start_on_boot(runner_mock)) - - mock_is_enabled.side_effect = [True, False] - self.assertTrue(ha_cluster_info.load_start_on_boot(runner_mock)) - - mock_is_enabled.side_effect = [False, True] - 
self.assertTrue(ha_cluster_info.load_start_on_boot(runner_mock)) - - mock_is_enabled.side_effect = [True, True] - self.assertTrue(ha_cluster_info.load_start_on_boot(runner_mock)) - - -class CallPcsCli(TestCase): - def test_success(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = ( - 0, - """{"json": "test data", "foo": "bar"}""", - "", - ) - self.assertEqual( - ha_cluster_info.call_pcs_cli(runner_mock, ["cluster", "config"]), - dict(json="test data", foo="bar"), - ) - runner_mock.assert_called_once_with( - ["pcs", "cluster", "config"], - {"LC_ALL": "C"}, - ) - - def test_pcs_error(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = ( - 1, - "some stdout message", - "some stderr message", - ) - with self.assertRaises(ha_cluster_info.CliCommandError) as cm: - ha_cluster_info.call_pcs_cli(runner_mock, ["cluster", "config"]) - self.assertEqual( - cm.exception.kwargs, - dict( - pcs_command=["pcs", "cluster", "config"], - stdout="some stdout message", - stderr="some stderr message", - rc=1, - ), - ) - runner_mock.assert_called_once_with( - ["pcs", "cluster", "config"], - {"LC_ALL": "C"}, - ) - - def test_json_error(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = ( - 0, - "not a json", - "", - ) - with self.assertRaises(ha_cluster_info.JsonParseError) as cm: - ha_cluster_info.call_pcs_cli(runner_mock, ["cluster", "config"]) - self.assertEqual( - cm.exception.kwargs, - dict( - data="not a json", - data_desc="pcs cluster config", - error="Expecting value: line 1 column 1 (char 0)", - additional_info="", - ), - ) - runner_mock.assert_called_once_with( - ["pcs", "cluster", "config"], - {"LC_ALL": "C"}, - ) - - -class LoadCorosyncConf(TestCase): - pcs_command = ["pcs", "cluster", "config", "--output-format=json"] - env = {"LC_ALL": "C"} - - def test_success(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = (0, """{"some": "json"}""", "") - self.assertEqual( - 
ha_cluster_info.load_corosync_conf(runner_mock), dict(some="json") - ) - runner_mock.assert_called_once_with(self.pcs_command, self.env) - - def test_pcs_error(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = (1, "stdout message", "stderr message") - with self.assertRaises(ha_cluster_info.CliCommandError) as cm: - ha_cluster_info.load_corosync_conf(runner_mock) - self.assertEqual( - cm.exception.kwargs, - dict( - pcs_command=self.pcs_command, - stdout="stdout message", - stderr="stderr message", - rc=1, - ), - ) - runner_mock.assert_called_once_with(self.pcs_command, self.env) - - def test_json_error(self) -> None: - runner_mock = mock.Mock() - runner_mock.return_value = (0, "not a json", "") - with self.assertRaises(ha_cluster_info.JsonParseError) as cm: - ha_cluster_info.load_corosync_conf(runner_mock) - self.assertEqual( - cm.exception.kwargs, - dict( - data="not a json", - data_desc=" ".join(self.pcs_command), - error="Expecting value: line 1 column 1 (char 0)", - additional_info="", - ), - ) - runner_mock.assert_called_once_with(self.pcs_command, self.env) - - -class LoadPcsdKnownHosts(TestCase): - file_path = "/var/lib/pcsd/known-hosts" - - @mock.patch("ha_cluster_info.os.path.exists") - def test_file_not_present(self, mock_exists: mock.Mock) -> None: - mock_exists.return_value = False - self.assertEqual(ha_cluster_info.load_pcsd_known_hosts(), dict()) - mock_exists.assert_called_once_with(self.file_path) - - @mock.patch("ha_cluster_info.os.path.exists") - def test_json_error(self, mock_exists: mock.Mock) -> None: - mock_exists.return_value = True - mock_data = "not a json" - with mock.patch( - "ha_cluster_info.open", mock.mock_open(read_data=mock_data) - ) as mock_open: - with self.assertRaises(ha_cluster_info.JsonParseError) as cm: - ha_cluster_info.load_pcsd_known_hosts() - self.assertEqual( - cm.exception.kwargs, - dict( - data="not logging data", - data_desc="known hosts", - error="Expecting value: line 1 column 1 (char 0)", - 
additional_info=None, - ), - ) - mock_open.assert_called_once_with( - self.file_path, "r", encoding="utf-8" - ) - mock_exists.assert_called_once_with(self.file_path) - - @mock.patch("ha_cluster_info.os.path.exists") - def test_json_empty(self, mock_exists: mock.Mock) -> None: - mock_exists.return_value = True - mock_data = "{}" - with mock.patch( - "ha_cluster_info.open", mock.mock_open(read_data=mock_data) - ) as mock_open: - self.assertEqual( - ha_cluster_info.load_pcsd_known_hosts(), - dict(), - ) - mock_open.assert_called_once_with( - self.file_path, "r", encoding="utf-8" - ) - mock_exists.assert_called_once_with(self.file_path) - - @mock.patch("ha_cluster_info.os.path.exists") - def test_extract(self, mock_exists: mock.Mock) -> None: - mock_exists.return_value = True - mock_data = json.dumps( - dict( - known_hosts=dict( - node1=dict(), - node2=dict(dest_list=[]), - node3=dict(dest_list=[dict()]), - node4=dict(dest_list=[dict(addr="node4A")]), - node5=dict(dest_list=[dict(port="10005")]), - node6=dict(dest_list=[dict(addr="node6A", port="10006")]), - node7=dict( - dest_list=[dict(addr="2001:db8::7", port="10007")] - ), - node8=dict( - dest_list=[ - dict(addr="192.0.2.8", port="10008"), - dict(addr="node8B"), - ] - ), - ) - ) - ) - with mock.patch( - "ha_cluster_info.open", mock.mock_open(read_data=mock_data) - ) as mock_open: - self.assertEqual( - ha_cluster_info.load_pcsd_known_hosts(), - dict( - node4="node4A", - node6="node6A:10006", - node7="[2001:db8::7]:10007", - node8="192.0.2.8:10008", - ), - ) - mock_open.assert_called_once_with( - self.file_path, "r", encoding="utf-8" - ) - mock_exists.assert_called_once_with(self.file_path) - - -# functions transforming data from pcs format to role format - - -class DictToNvList(TestCase): - def test_no_item(self) -> None: - self.assertEqual( - ha_cluster_info.dict_to_nv_list(dict()), - [], - ) - - def test_one_item(self) -> None: - self.assertEqual( - ha_cluster_info.dict_to_nv_list(dict(one="1")), - 
[dict(name="one", value="1")], - ) - - def test_two_items(self) -> None: - self.assertEqual( - ha_cluster_info.dict_to_nv_list(dict(one="1", two="2")), - [dict(name="one", value="1"), dict(name="two", value="2")], - ) - - -class ExportCorosyncConf(TestCase): - maxDiff = None - - def assert_missing_key(self, data: Dict[str, Any], key: str) -> None: - with self.assertRaises(ha_cluster_info.JsonMissingKey) as cm: - ha_cluster_info.export_corosync_options(data) - self.assertEqual( - cm.exception.kwargs, - dict(data=data, key=key, data_desc="corosync configuration"), - ) - - def test_missing_keys(self) -> None: - self.assert_missing_key(dict(), "cluster_name") - self.assert_missing_key(dict(cluster_name="x"), "transport") - self.assert_missing_key( - dict(cluster_name="x", transport="x"), "transport_options" - ) - self.assert_missing_key( - dict(cluster_name="x", transport="x", transport_options=dict()), - "links_options", - ) - self.assert_missing_key( - dict( - cluster_name="x", - transport="x", - transport_options=dict(), - links_options=dict(), - ), - "compression_options", - ) - self.assert_missing_key( - dict( - cluster_name="x", - transport="x", - transport_options=dict(), - links_options=dict(), - compression_options=dict(), - ), - "crypto_options", - ) - self.assert_missing_key( - dict( - cluster_name="x", - transport="x", - transport_options=dict(), - links_options=dict(), - compression_options=dict(), - crypto_options=dict(), - ), - "totem_options", - ) - self.assert_missing_key( - dict( - cluster_name="x", - transport="x", - transport_options=dict(), - links_options=dict(), - compression_options=dict(), - crypto_options=dict(), - totem_options=dict(), - ), - "quorum_options", - ) - - def test_minimal(self) -> None: - pcs_data = dict( - cluster_name="my-cluster", - transport="KNET", - transport_options=dict(), - links_options=dict(), - compression_options=dict(), - crypto_options=dict(), - totem_options=dict(), - quorum_options=dict(), - ) - role_data = 
ha_cluster_info.export_corosync_options(pcs_data) - self.assertEqual( - role_data, - dict( - ha_cluster_cluster_name="my-cluster", - ha_cluster_transport=dict(type="knet"), - ), - ) - - def test_simple_options_mirroring(self) -> None: - pcs_data = dict( - cluster_name="my-cluster", - transport="KNET", - totem_options=dict(totem1="a", totem2="b"), - transport_options=dict(transport1="c", transport2="d"), - compression_options=dict(compression1="e", compression2="f"), - crypto_options=dict(crypto1="g", crypto2="h"), - quorum_options=dict(quorum1="i", quorum2="j"), - links_options=dict(), - ) - role_data = ha_cluster_info.export_corosync_options(pcs_data) - self.assertEqual( - role_data, - dict( - ha_cluster_cluster_name="my-cluster", - ha_cluster_transport=dict( - type="knet", - options=[ - dict(name="transport1", value="c"), - dict(name="transport2", value="d"), - ], - compression=[ - dict(name="compression1", value="e"), - dict(name="compression2", value="f"), - ], - crypto=[ - dict(name="crypto1", value="g"), - dict(name="crypto2", value="h"), - ], - ), - ha_cluster_totem=dict( - options=[ - dict(name="totem1", value="a"), - dict(name="totem2", value="b"), - ], - ), - ha_cluster_quorum=dict( - options=[ - dict(name="quorum1", value="i"), - dict(name="quorum2", value="j"), - ], - ), - ), - ) - - def test_one_link(self) -> None: - pcs_data = dict( - cluster_name="my-cluster", - transport="KNET", - transport_options=dict(), - links_options={"0": dict(name1="value1", name2="value2")}, - compression_options=dict(), - crypto_options=dict(), - totem_options=dict(), - quorum_options=dict(), - ) - role_data = ha_cluster_info.export_corosync_options(pcs_data) - self.assertEqual( - role_data, - dict( - ha_cluster_cluster_name="my-cluster", - ha_cluster_transport=dict( - type="knet", - links=[ - [ - dict(name="name1", value="value1"), - dict(name="name2", value="value2"), - ] - ], - ), - ), - ) - - def test_more_links(self) -> None: - pcs_data = dict( - 
cluster_name="my-cluster", - transport="KNET", - transport_options=dict(), - links_options={ - "0": dict(linknumber="0", name0="value0"), - "7": dict(linknumber="7", name7="value7"), - "3": dict(linknumber="3", name3="value3"), - }, - compression_options=dict(), - crypto_options=dict(), - totem_options=dict(), - quorum_options=dict(), - ) - role_data = ha_cluster_info.export_corosync_options(pcs_data) - self.assertEqual( - role_data, - dict( - ha_cluster_cluster_name="my-cluster", - ha_cluster_transport=dict( - type="knet", - links=[ - [ - dict(name="linknumber", value="0"), - dict(name="name0", value="value0"), - ], - [ - dict(name="linknumber", value="7"), - dict(name="name7", value="value7"), - ], - [ - dict(name="linknumber", value="3"), - dict(name="name3", value="value3"), - ], - ], - ), - ), - ) - - -class ExportClusterNodes(TestCase): - maxDiff = None - - def assert_missing_key( - self, data: List[Dict[str, Any]], key: str, index: str = "0" - ) -> None: - with self.assertRaises(ha_cluster_info.JsonMissingKey) as cm: - ha_cluster_info.export_cluster_nodes(data, {}) - self.assertEqual( - cm.exception.kwargs, - dict( - data=dict(nodes=data), - key=key, - data_desc=f"corosync configuration for node on index {index}", - ), - ) - - def test_no_nodes(self) -> None: - self.assertEqual(ha_cluster_info.export_cluster_nodes([], {}), []) - - def test_corosync_nodes_missing_keys(self) -> None: - corosync_data: List[Dict[str, Any]] = [dict()] - self.assert_missing_key(corosync_data, "name") - - corosync_data = [dict(name="nodename")] - self.assert_missing_key(corosync_data, "addrs") - - corosync_data = [dict(name="nodename", addrs=[dict()])] - self.assert_missing_key(corosync_data, "link") - - corosync_data = [dict(name="nodename", addrs=[dict(link="0")])] - self.assert_missing_key(corosync_data, "addr") - - def test_corosync_nodes_one_link(self) -> None: - corosync_data = [ - dict( - name="node1", - nodeid=1, - addrs=[dict(addr="node1addr", link="0", type="IPv4")], - ), 
- dict( - name="node2", - nodeid=2, - addrs=[dict(addr="node2addr", link="0", type="FQDN")], - ), - ] - role_data = ha_cluster_info.export_cluster_nodes(corosync_data, {}) - self.assertEqual( - role_data, - [ - dict(node_name="node1", corosync_addresses=["node1addr"]), - dict(node_name="node2", corosync_addresses=["node2addr"]), - ], - ) - - def test_corosync_nodes_multiple_links(self) -> None: - corosync_data = [ - dict( - name="node1", - nodeid=1, - addrs=[ - dict(addr="node1addr1", link="0", type="IPv4"), - dict(addr="node1addr2", link="1", type="IPv6"), - ], - ), - dict( - name="node2", - nodeid=2, - addrs=[ - dict(addr="node2addr1", link="0", type="IPv4"), - dict(addr="node2addr2", link="1", type="IPv6"), - ], - ), - ] - role_data = ha_cluster_info.export_cluster_nodes(corosync_data, {}) - self.assertEqual( - role_data, - [ - dict( - node_name="node1", - corosync_addresses=["node1addr1", "node1addr2"], - ), - dict( - node_name="node2", - corosync_addresses=["node2addr1", "node2addr2"], - ), - ], - ) - - def test_corosync_nodes_no_address(self) -> None: - corosync_data = [ - dict( - name="node1", - nodeid=1, - addrs=[], - ), - ] - role_data = ha_cluster_info.export_cluster_nodes(corosync_data, {}) - self.assertEqual( - role_data, - [ - dict(node_name="node1", corosync_addresses=[]), - ], - ) - - def test_pcs_nodes_no_cluster_nodes(self) -> None: - corosync_data: List[Dict[str, Any]] = [] - pcs_data = dict(node1="node1A") - role_data = ha_cluster_info.export_cluster_nodes( - corosync_data, pcs_data - ) - self.assertEqual( - role_data, - [], - ) - - def test_pcs_nodes(self) -> None: - corosync_data = [ - dict( - name="node1", - nodeid=1, - addrs=[dict(addr="node1addr", link="0", type="FQDN")], - ), - dict( - name="node2", - nodeid=2, - addrs=[dict(addr="node2addr", link="0", type="FQDN")], - ), - ] - pcs_data = dict(node1="node1A", node3="node3A") - role_data = ha_cluster_info.export_cluster_nodes( - corosync_data, pcs_data - ) - self.assertEqual( - role_data, - 
[ - dict( - node_name="node1", - corosync_addresses=["node1addr"], - pcs_address="node1A", - ), - dict( - node_name="node2", - corosync_addresses=["node2addr"], - ), - ], - ) - - -# ansible module tools and top layer functions - class ExportClusterConfiguration(TestCase): maxDiff = None - @mock.patch("ha_cluster_info.load_pcsd_known_hosts") + @mock.patch("ha_cluster_info.loader.get_pcsd_known_hosts") def test_export_minimal( self, mock_load_pcsd_known_hosts: mock.Mock, @@ -714,7 +86,7 @@ def test_export_minimal( mock_load_pcsd_known_hosts.assert_called_once_with() - @mock.patch("ha_cluster_info.load_pcsd_known_hosts") + @mock.patch("ha_cluster_info.loader.get_pcsd_known_hosts") def test_export( self, mock_load_pcsd_known_hosts: mock.Mock, @@ -800,7 +172,7 @@ def test_export( mock_load_pcsd_known_hosts.assert_called_once_with() - @mock.patch("ha_cluster_info.load_pcsd_known_hosts") + @mock.patch("ha_cluster_info.loader.get_pcsd_known_hosts") def test_missing_corosync_nodes_key( self, mock_load_pcsd_known_hosts: mock.Mock, @@ -829,7 +201,7 @@ def test_missing_corosync_nodes_key( node2="node2pcs", ) - with self.assertRaises(ha_cluster_info.JsonMissingKey) as cm: + with self.assertRaises(ha_cluster_info.exporter.JsonMissingKey) as cm: ha_cluster_info.export_cluster_configuration(module_mock) self.assertEqual( cm.exception.kwargs, diff --git a/tests/unit/test_info_exporter.py b/tests/unit/test_info_exporter.py new file mode 100644 index 00000000..4680e0dc --- /dev/null +++ b/tests/unit/test_info_exporter.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2024 Red Hat, Inc. 
+# Author: Tomas Jelinek +# SPDX-License-Identifier: MIT + +# pylint: disable=missing-class-docstring +# pylint: disable=missing-function-docstring + +import sys +from importlib import import_module +from typing import Any, Dict, List +from unittest import TestCase + +sys.modules["ansible.module_utils.ha_cluster_lsr"] = import_module( + "ha_cluster_lsr" +) + +from ha_cluster_lsr.info import exporter + + +class DictToNvList(TestCase): + # pylint: disable=protected-access + def test_no_item(self) -> None: + self.assertEqual( + exporter._dict_to_nv_list(dict()), + [], + ) + + def test_one_item(self) -> None: + self.assertEqual( + exporter._dict_to_nv_list(dict(one="1")), + [dict(name="one", value="1")], + ) + + def test_two_items(self) -> None: + self.assertEqual( + exporter._dict_to_nv_list(dict(one="1", two="2")), + [dict(name="one", value="1"), dict(name="two", value="2")], + ) + + +class ExportCorosyncConf(TestCase): + maxDiff = None + + def assert_missing_key(self, data: Dict[str, Any], key: str) -> None: + with self.assertRaises(exporter.JsonMissingKey) as cm: + exporter.export_corosync_options(data) + self.assertEqual( + cm.exception.kwargs, + dict(data=data, key=key, data_desc="corosync configuration"), + ) + + def test_missing_keys(self) -> None: + self.assert_missing_key(dict(), "cluster_name") + self.assert_missing_key(dict(cluster_name="x"), "transport") + self.assert_missing_key( + dict(cluster_name="x", transport="x"), "transport_options" + ) + self.assert_missing_key( + dict(cluster_name="x", transport="x", transport_options=dict()), + "links_options", + ) + self.assert_missing_key( + dict( + cluster_name="x", + transport="x", + transport_options=dict(), + links_options=dict(), + ), + "compression_options", + ) + self.assert_missing_key( + dict( + cluster_name="x", + transport="x", + transport_options=dict(), + links_options=dict(), + compression_options=dict(), + ), + "crypto_options", + ) + self.assert_missing_key( + dict( + cluster_name="x", + 
transport="x", + transport_options=dict(), + links_options=dict(), + compression_options=dict(), + crypto_options=dict(), + ), + "totem_options", + ) + self.assert_missing_key( + dict( + cluster_name="x", + transport="x", + transport_options=dict(), + links_options=dict(), + compression_options=dict(), + crypto_options=dict(), + totem_options=dict(), + ), + "quorum_options", + ) + + def test_minimal(self) -> None: + pcs_data = dict( + cluster_name="my-cluster", + transport="KNET", + transport_options=dict(), + links_options=dict(), + compression_options=dict(), + crypto_options=dict(), + totem_options=dict(), + quorum_options=dict(), + ) + role_data = exporter.export_corosync_options(pcs_data) + self.assertEqual( + role_data, + dict( + ha_cluster_cluster_name="my-cluster", + ha_cluster_transport=dict(type="knet"), + ), + ) + + def test_simple_options_mirroring(self) -> None: + pcs_data = dict( + cluster_name="my-cluster", + transport="KNET", + totem_options=dict(totem1="a", totem2="b"), + transport_options=dict(transport1="c", transport2="d"), + compression_options=dict(compression1="e", compression2="f"), + crypto_options=dict(crypto1="g", crypto2="h"), + quorum_options=dict(quorum1="i", quorum2="j"), + links_options=dict(), + ) + role_data = exporter.export_corosync_options(pcs_data) + self.assertEqual( + role_data, + dict( + ha_cluster_cluster_name="my-cluster", + ha_cluster_transport=dict( + type="knet", + options=[ + dict(name="transport1", value="c"), + dict(name="transport2", value="d"), + ], + compression=[ + dict(name="compression1", value="e"), + dict(name="compression2", value="f"), + ], + crypto=[ + dict(name="crypto1", value="g"), + dict(name="crypto2", value="h"), + ], + ), + ha_cluster_totem=dict( + options=[ + dict(name="totem1", value="a"), + dict(name="totem2", value="b"), + ], + ), + ha_cluster_quorum=dict( + options=[ + dict(name="quorum1", value="i"), + dict(name="quorum2", value="j"), + ], + ), + ), + ) + + def test_one_link(self) -> None: + 
pcs_data = dict( + cluster_name="my-cluster", + transport="KNET", + transport_options=dict(), + links_options={"0": dict(name1="value1", name2="value2")}, + compression_options=dict(), + crypto_options=dict(), + totem_options=dict(), + quorum_options=dict(), + ) + role_data = exporter.export_corosync_options(pcs_data) + self.assertEqual( + role_data, + dict( + ha_cluster_cluster_name="my-cluster", + ha_cluster_transport=dict( + type="knet", + links=[ + [ + dict(name="name1", value="value1"), + dict(name="name2", value="value2"), + ] + ], + ), + ), + ) + + def test_more_links(self) -> None: + pcs_data = dict( + cluster_name="my-cluster", + transport="KNET", + transport_options=dict(), + links_options={ + "0": dict(linknumber="0", name0="value0"), + "7": dict(linknumber="7", name7="value7"), + "3": dict(linknumber="3", name3="value3"), + }, + compression_options=dict(), + crypto_options=dict(), + totem_options=dict(), + quorum_options=dict(), + ) + role_data = exporter.export_corosync_options(pcs_data) + self.assertEqual( + role_data, + dict( + ha_cluster_cluster_name="my-cluster", + ha_cluster_transport=dict( + type="knet", + links=[ + [ + dict(name="linknumber", value="0"), + dict(name="name0", value="value0"), + ], + [ + dict(name="linknumber", value="7"), + dict(name="name7", value="value7"), + ], + [ + dict(name="linknumber", value="3"), + dict(name="name3", value="value3"), + ], + ], + ), + ), + ) + + +class ExportClusterNodes(TestCase): + maxDiff = None + + def assert_missing_key( + self, data: List[Dict[str, Any]], key: str, index: str = "0" + ) -> None: + with self.assertRaises(exporter.JsonMissingKey) as cm: + exporter.export_cluster_nodes(data, {}) + self.assertEqual( + cm.exception.kwargs, + dict( + data=dict(nodes=data), + key=key, + data_desc=f"corosync configuration for node on index {index}", + ), + ) + + def test_no_nodes(self) -> None: + self.assertEqual(exporter.export_cluster_nodes([], {}), []) + + def test_corosync_nodes_missing_keys(self) -> 
None: + corosync_data: List[Dict[str, Any]] = [dict()] + self.assert_missing_key(corosync_data, "name") + + corosync_data = [dict(name="nodename")] + self.assert_missing_key(corosync_data, "addrs") + + corosync_data = [dict(name="nodename", addrs=[dict()])] + self.assert_missing_key(corosync_data, "link") + + corosync_data = [dict(name="nodename", addrs=[dict(link="0")])] + self.assert_missing_key(corosync_data, "addr") + + def test_corosync_nodes_one_link(self) -> None: + corosync_data = [ + dict( + name="node1", + nodeid=1, + addrs=[dict(addr="node1addr", link="0", type="IPv4")], + ), + dict( + name="node2", + nodeid=2, + addrs=[dict(addr="node2addr", link="0", type="FQDN")], + ), + ] + role_data = exporter.export_cluster_nodes(corosync_data, {}) + self.assertEqual( + role_data, + [ + dict(node_name="node1", corosync_addresses=["node1addr"]), + dict(node_name="node2", corosync_addresses=["node2addr"]), + ], + ) + + def test_corosync_nodes_multiple_links(self) -> None: + corosync_data = [ + dict( + name="node1", + nodeid=1, + addrs=[ + dict(addr="node1addr1", link="0", type="IPv4"), + dict(addr="node1addr2", link="1", type="IPv6"), + ], + ), + dict( + name="node2", + nodeid=2, + addrs=[ + dict(addr="node2addr1", link="0", type="IPv4"), + dict(addr="node2addr2", link="1", type="IPv6"), + ], + ), + ] + role_data = exporter.export_cluster_nodes(corosync_data, {}) + self.assertEqual( + role_data, + [ + dict( + node_name="node1", + corosync_addresses=["node1addr1", "node1addr2"], + ), + dict( + node_name="node2", + corosync_addresses=["node2addr1", "node2addr2"], + ), + ], + ) + + def test_corosync_nodes_no_address(self) -> None: + corosync_data = [ + dict( + name="node1", + nodeid=1, + addrs=[], + ), + ] + role_data = exporter.export_cluster_nodes(corosync_data, {}) + self.assertEqual( + role_data, + [ + dict(node_name="node1", corosync_addresses=[]), + ], + ) + + def test_pcs_nodes_no_cluster_nodes(self) -> None: + corosync_data: List[Dict[str, Any]] = [] + pcs_data 
= dict(node1="node1A") + role_data = exporter.export_cluster_nodes(corosync_data, pcs_data) + self.assertEqual( + role_data, + [], + ) + + def test_pcs_nodes(self) -> None: + corosync_data = [ + dict( + name="node1", + nodeid=1, + addrs=[dict(addr="node1addr", link="0", type="FQDN")], + ), + dict( + name="node2", + nodeid=2, + addrs=[dict(addr="node2addr", link="0", type="FQDN")], + ), + ] + pcs_data = dict(node1="node1A", node3="node3A") + role_data = exporter.export_cluster_nodes(corosync_data, pcs_data) + self.assertEqual( + role_data, + [ + dict( + node_name="node1", + corosync_addresses=["node1addr"], + pcs_address="node1A", + ), + dict( + node_name="node2", + corosync_addresses=["node2addr"], + ), + ], + ) diff --git a/tests/unit/test_info_loader.py b/tests/unit/test_info_loader.py new file mode 100644 index 00000000..05268dfd --- /dev/null +++ b/tests/unit/test_info_loader.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2024 Red Hat, Inc. +# Author: Tomas Jelinek +# SPDX-License-Identifier: MIT + +# pylint: disable=missing-class-docstring +# pylint: disable=missing-function-docstring + +import json +import sys +from importlib import import_module +from unittest import TestCase, mock + +sys.modules["ansible.module_utils.ha_cluster_lsr"] = import_module( + "ha_cluster_lsr" +) + +from ha_cluster_lsr.info import loader + + +class IsServiceEnabled(TestCase): + # pylint: disable=protected-access + def setUp(self) -> None: + self.runner_mock = mock.Mock() + + def test_is_enabled(self) -> None: + self.runner_mock.return_value = (0, "enabled", "") + self.assertTrue( + loader._is_service_enabled(self.runner_mock, "corosync") + ) + self.runner_mock.assert_called_once_with( + ["systemctl", "is-enabled", "corosync.service"], + {"LC_ALL": "C"}, + ) + + def test_is_disabled(self) -> None: + self.runner_mock.return_value = (1, "disabled", "") + self.assertFalse( + loader._is_service_enabled(self.runner_mock, "pacemaker") + ) + 
self.runner_mock.assert_called_once_with( + ["systemctl", "is-enabled", "pacemaker.service"], + {"LC_ALL": "C"}, + ) + + def test_unexpected_output(self) -> None: + self.runner_mock.return_value = (4, "not-found", "") + self.assertFalse(loader._is_service_enabled(self.runner_mock, "pcmk")) + self.runner_mock.assert_called_once_with( + ["systemctl", "is-enabled", "pcmk.service"], + {"LC_ALL": "C"}, + ) + + +class GetStartOnBoot(TestCase): + @mock.patch("ha_cluster_lsr.info.loader._is_service_enabled") + def test_main(self, mock_is_enabled: mock.Mock) -> None: + runner_mock = mock.Mock() + mock_is_enabled.side_effect = [False, False] + self.assertFalse(loader.get_start_on_boot(runner_mock)) + + mock_is_enabled.side_effect = [True, False] + self.assertTrue(loader.get_start_on_boot(runner_mock)) + + mock_is_enabled.side_effect = [False, True] + self.assertTrue(loader.get_start_on_boot(runner_mock)) + + mock_is_enabled.side_effect = [True, True] + self.assertTrue(loader.get_start_on_boot(runner_mock)) + + +class CallPcsCli(TestCase): + # pylint: disable=protected-access + def test_success(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = ( + 0, + """{"json": "test data", "foo": "bar"}""", + "", + ) + self.assertEqual( + loader._call_pcs_cli(runner_mock, ["cluster", "config"]), + dict(json="test data", foo="bar"), + ) + runner_mock.assert_called_once_with( + ["pcs", "cluster", "config"], + {"LC_ALL": "C"}, + ) + + def test_pcs_error(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = ( + 1, + "some stdout message", + "some stderr message", + ) + with self.assertRaises(loader.CliCommandError) as cm: + loader._call_pcs_cli(runner_mock, ["cluster", "config"]) + self.assertEqual( + cm.exception.kwargs, + dict( + pcs_command=["pcs", "cluster", "config"], + stdout="some stdout message", + stderr="some stderr message", + rc=1, + ), + ) + runner_mock.assert_called_once_with( + ["pcs", "cluster", "config"], + {"LC_ALL": "C"}, + ) + + def 
test_json_error(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = ( + 0, + "not a json", + "", + ) + with self.assertRaises(loader.JsonParseError) as cm: + loader._call_pcs_cli(runner_mock, ["cluster", "config"]) + self.assertEqual( + cm.exception.kwargs, + dict( + data="not a json", + data_desc="pcs cluster config", + error="Expecting value: line 1 column 1 (char 0)", + additional_info="", + ), + ) + runner_mock.assert_called_once_with( + ["pcs", "cluster", "config"], + {"LC_ALL": "C"}, + ) + + +class GetCorosyncConf(TestCase): + pcs_command = ["pcs", "cluster", "config", "--output-format=json"] + env = {"LC_ALL": "C"} + + def test_success(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = (0, """{"some": "json"}""", "") + self.assertEqual( + loader.get_corosync_conf(runner_mock), dict(some="json") + ) + runner_mock.assert_called_once_with(self.pcs_command, self.env) + + def test_pcs_error(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = (1, "stdout message", "stderr message") + with self.assertRaises(loader.CliCommandError) as cm: + loader.get_corosync_conf(runner_mock) + self.assertEqual( + cm.exception.kwargs, + dict( + pcs_command=self.pcs_command, + stdout="stdout message", + stderr="stderr message", + rc=1, + ), + ) + runner_mock.assert_called_once_with(self.pcs_command, self.env) + + def test_json_error(self) -> None: + runner_mock = mock.Mock() + runner_mock.return_value = (0, "not a json", "") + with self.assertRaises(loader.JsonParseError) as cm: + loader.get_corosync_conf(runner_mock) + self.assertEqual( + cm.exception.kwargs, + dict( + data="not a json", + data_desc=" ".join(self.pcs_command), + error="Expecting value: line 1 column 1 (char 0)", + additional_info="", + ), + ) + runner_mock.assert_called_once_with(self.pcs_command, self.env) + + +class GetPcsdKnownHosts(TestCase): + file_path = "/var/lib/pcsd/known-hosts" + + @mock.patch("ha_cluster_lsr.info.loader.os.path.exists") + def 
test_file_not_present(self, mock_exists: mock.Mock) -> None: + mock_exists.return_value = False + self.assertEqual(loader.get_pcsd_known_hosts(), dict()) + mock_exists.assert_called_once_with(self.file_path) + + @mock.patch("ha_cluster_lsr.info.loader.os.path.exists") + def test_json_error(self, mock_exists: mock.Mock) -> None: + mock_exists.return_value = True + mock_data = "not a json" + with mock.patch( + "ha_cluster_lsr.info.loader.open", + mock.mock_open(read_data=mock_data), + ) as mock_open: + with self.assertRaises(loader.JsonParseError) as cm: + loader.get_pcsd_known_hosts() + self.assertEqual( + cm.exception.kwargs, + dict( + data="not logging data", + data_desc="known hosts", + error="Expecting value: line 1 column 1 (char 0)", + additional_info=None, + ), + ) + mock_open.assert_called_once_with( + self.file_path, "r", encoding="utf-8" + ) + mock_exists.assert_called_once_with(self.file_path) + + @mock.patch("ha_cluster_lsr.info.loader.os.path.exists") + def test_json_empty(self, mock_exists: mock.Mock) -> None: + mock_exists.return_value = True + mock_data = "{}" + with mock.patch( + "ha_cluster_lsr.info.loader.open", + mock.mock_open(read_data=mock_data), + ) as mock_open: + self.assertEqual( + loader.get_pcsd_known_hosts(), + dict(), + ) + mock_open.assert_called_once_with( + self.file_path, "r", encoding="utf-8" + ) + mock_exists.assert_called_once_with(self.file_path) + + @mock.patch("ha_cluster_lsr.info.loader.os.path.exists") + def test_extract(self, mock_exists: mock.Mock) -> None: + mock_exists.return_value = True + mock_data = json.dumps( + dict( + known_hosts=dict( + node1=dict(), + node2=dict(dest_list=[]), + node3=dict(dest_list=[dict()]), + node4=dict(dest_list=[dict(addr="node4A")]), + node5=dict(dest_list=[dict(port="10005")]), + node6=dict(dest_list=[dict(addr="node6A", port="10006")]), + node7=dict( + dest_list=[dict(addr="2001:db8::7", port="10007")] + ), + node8=dict( + dest_list=[ + dict(addr="192.0.2.8", port="10008"), + 
dict(addr="node8B"), + ] + ), + ) + ) + ) + with mock.patch( + "ha_cluster_lsr.info.loader.open", + mock.mock_open(read_data=mock_data), + ) as mock_open: + self.assertEqual( + loader.get_pcsd_known_hosts(), + dict( + node4="node4A", + node6="node6A:10006", + node7="[2001:db8::7]:10007", + node8="192.0.2.8:10008", + ), + ) + mock_open.assert_called_once_with( + self.file_path, "r", encoding="utf-8" + ) + mock_exists.assert_called_once_with(self.file_path)