From 2c8d74653e3217ba1458d65854e3a448fcedfc5d Mon Sep 17 00:00:00 2001 From: Ivan Devat Date: Tue, 28 Jun 2016 15:36:30 +0200 Subject: [PATCH] add booth support --- pcs/alert.py | 22 +- pcs/app.py | 7 + pcs/booth.py | 76 ++ pcs/cli/booth/__init__.py | 0 pcs/cli/booth/command.py | 177 ++++ pcs/cli/booth/env.py | 121 +++ pcs/cli/booth/test/__init__.py | 0 pcs/cli/booth/test/test_command.py | 44 + pcs/cli/booth/test/test_env.py | 118 +++ pcs/cli/common/console_report.py | 13 +- pcs/cli/common/env.py | 2 + pcs/cli/common/lib_wrapper.py | 78 +- pcs/cli/common/middleware.py | 9 +- pcs/cli/common/parse_args.py | 27 + pcs/cli/common/test/test_lib_wrapper.py | 28 +- pcs/cli/common/test/test_middleware.py | 6 +- pcs/cli/common/test/test_parse_args.py | 84 +- pcs/cluster.py | 10 + pcs/common/env_file_role_codes.py | 9 + pcs/common/report_codes.py | 40 + pcs/common/test/__init__.py | 0 pcs/common/tools.py | 5 + pcs/lib/booth/__init__.py | 0 pcs/lib/booth/config_exchange.py | 43 + pcs/lib/booth/config_files.py | 97 +++ pcs/lib/booth/config_parser.py | 90 ++ pcs/lib/booth/config_structure.py | 111 +++ pcs/lib/booth/env.py | 149 ++++ pcs/lib/booth/reports.py | 409 +++++++++ pcs/lib/booth/resource.py | 116 +++ pcs/lib/booth/status.py | 41 + pcs/lib/booth/sync.py | 208 +++++ pcs/lib/booth/test/__init__.py | 0 pcs/lib/booth/test/test_config_exchange.py | 70 ++ pcs/lib/booth/test/test_config_files.py | 272 ++++++ pcs/lib/booth/test/test_config_parser.py | 169 ++++ pcs/lib/booth/test/test_config_structure.py | 224 +++++ pcs/lib/booth/test/test_env.py | 228 +++++ pcs/lib/booth/test/test_resource.py | 203 +++++ pcs/lib/booth/test/test_status.py | 137 +++ pcs/lib/booth/test/test_sync.py | 1215 +++++++++++++++++++++++++++ pcs/lib/cib/tools.py | 7 + pcs/lib/commands/booth.py | 349 ++++++++ pcs/lib/commands/test/test_booth.py | 614 ++++++++++++++ pcs/lib/commands/test/test_ticket.py | 15 +- pcs/lib/corosync/live.py | 3 + pcs/lib/env.py | 44 +- pcs/lib/env_file.py | 122 +++ pcs/lib/errors.py 
| 14 + pcs/lib/external.py | 66 +- pcs/lib/reports.py | 215 ++++- pcs/lib/test/misc.py | 20 + pcs/lib/test/test_env_file.py | 187 +++++ pcs/lib/test/test_errors.py | 20 + pcs/pcs.8 | 52 ++ pcs/resource.py | 19 +- pcs/settings_default.py | 2 + pcs/stonith.py | 3 +- pcs/test/resources/.gitignore | 1 + pcs/test/resources/tmp_keyfile | 1 + pcs/test/suite.py | 16 +- pcs/test/test_alert.py | 8 +- pcs/test/test_booth.py | 342 ++++++++ pcs/test/test_lib_cib_tools.py | 21 + pcs/test/test_lib_external.py | 86 ++ pcs/test/tools/color_text_runner.py | 9 +- pcs/test/tools/pcs_unittest.py | 7 + pcs/usage.py | 72 ++ pcs/utils.py | 68 +- pcsd/pcs.rb | 76 +- pcsd/remote.rb | 144 ++++ pcsd/settings.rb | 1 + 72 files changed, 7093 insertions(+), 169 deletions(-) create mode 100644 pcs/booth.py create mode 100644 pcs/cli/booth/__init__.py create mode 100644 pcs/cli/booth/command.py create mode 100644 pcs/cli/booth/env.py create mode 100644 pcs/cli/booth/test/__init__.py create mode 100644 pcs/cli/booth/test/test_command.py create mode 100644 pcs/cli/booth/test/test_env.py create mode 100644 pcs/common/env_file_role_codes.py create mode 100644 pcs/common/test/__init__.py create mode 100644 pcs/lib/booth/__init__.py create mode 100644 pcs/lib/booth/config_exchange.py create mode 100644 pcs/lib/booth/config_files.py create mode 100644 pcs/lib/booth/config_parser.py create mode 100644 pcs/lib/booth/config_structure.py create mode 100644 pcs/lib/booth/env.py create mode 100644 pcs/lib/booth/reports.py create mode 100644 pcs/lib/booth/resource.py create mode 100644 pcs/lib/booth/status.py create mode 100644 pcs/lib/booth/sync.py create mode 100644 pcs/lib/booth/test/__init__.py create mode 100644 pcs/lib/booth/test/test_config_exchange.py create mode 100644 pcs/lib/booth/test/test_config_files.py create mode 100644 pcs/lib/booth/test/test_config_parser.py create mode 100644 pcs/lib/booth/test/test_config_structure.py create mode 100644 pcs/lib/booth/test/test_env.py create mode 100644 
pcs/lib/booth/test/test_resource.py create mode 100644 pcs/lib/booth/test/test_status.py create mode 100644 pcs/lib/booth/test/test_sync.py create mode 100644 pcs/lib/commands/booth.py create mode 100644 pcs/lib/commands/test/test_booth.py create mode 100644 pcs/lib/env_file.py create mode 100644 pcs/lib/test/misc.py create mode 100644 pcs/lib/test/test_env_file.py create mode 100644 pcs/lib/test/test_errors.py create mode 100644 pcs/test/resources/tmp_keyfile create mode 100644 pcs/test/test_booth.py create mode 100644 pcs/test/tools/pcs_unittest.py diff --git a/pcs/alert.py b/pcs/alert.py index 4786f57..693bb8d 100644 --- a/pcs/alert.py +++ b/pcs/alert.py @@ -6,16 +6,18 @@ from __future__ import ( ) import sys +from functools import partial from pcs import ( usage, utils, ) from pcs.cli.common.errors import CmdLineInputError -from pcs.cli.common.parse_args import prepare_options +from pcs.cli.common.parse_args import prepare_options, group_by_keywords from pcs.cli.common.console_report import indent from pcs.lib.errors import LibraryError +parse_cmd_sections = partial(group_by_keywords, implicit_first_keyword="main") def alert_cmd(*args): argv = args[1] @@ -67,16 +69,6 @@ def recipient_cmd(*args): ) -def parse_cmd_sections(arg_list, section_list): - output = dict([(section, []) for section in section_list + ["main"]]) - cur_section = "main" - for arg in arg_list: - if arg in section_list: - cur_section = arg - continue - output[cur_section].append(arg) - - return output def ensure_only_allowed_options(parameter_dict, allowed_list): @@ -91,7 +83,7 @@ def alert_add(lib, argv, modifiers): if not argv: raise CmdLineInputError() - sections = parse_cmd_sections(argv, ["options", "meta"]) + sections = parse_cmd_sections(argv, set(["options", "meta"])) main_args = prepare_options(sections["main"]) ensure_only_allowed_options(main_args, ["id", "description", "path"]) @@ -110,7 +102,7 @@ def alert_update(lib, argv, modifiers): alert_id = argv[0] - sections = 
parse_cmd_sections(argv[1:], ["options", "meta"]) + sections = parse_cmd_sections(argv[1:], set(["options", "meta"])) main_args = prepare_options(sections["main"]) ensure_only_allowed_options(main_args, ["description", "path"]) @@ -137,7 +129,7 @@ def recipient_add(lib, argv, modifiers): alert_id = argv[0] recipient_value = argv[1] - sections = parse_cmd_sections(argv[2:], ["options", "meta"]) + sections = parse_cmd_sections(argv[2:], set(["options", "meta"])) main_args = prepare_options(sections["main"]) ensure_only_allowed_options(main_args, ["description", "id"]) @@ -158,7 +150,7 @@ def recipient_update(lib, argv, modifiers): recipient_id = argv[0] - sections = parse_cmd_sections(argv[1:], ["options", "meta"]) + sections = parse_cmd_sections(argv[1:], set(["options", "meta"])) main_args = prepare_options(sections["main"]) ensure_only_allowed_options(main_args, ["description", "value"]) diff --git a/pcs/app.py b/pcs/app.py index 3758ee4..ab9e970 100644 --- a/pcs/app.py +++ b/pcs/app.py @@ -13,6 +13,7 @@ logging.basicConfig() from pcs import ( acl, + booth, cluster, config, constraint, @@ -97,6 +98,7 @@ def main(argv=None): "token=", "token_coefficient=", "consensus=", "join=", "miss_count_const=", "fail_recv_const=", "corosync_conf=", "cluster_conf=", + "booth-conf=", "booth-key=", "remote", "watchdog=", #in pcs status - do not display resorce status on inactive node "hide-inactive", @@ -199,6 +201,11 @@ def main(argv=None): args, utils.get_modificators() ), + "booth": lambda argv: booth.booth_cmd( + utils.get_library_wrapper(), + argv, + utils.get_modificators() + ), } if command not in cmd_map: usage.main() diff --git a/pcs/booth.py b/pcs/booth.py new file mode 100644 index 0000000..764dcd8 --- /dev/null +++ b/pcs/booth.py @@ -0,0 +1,76 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import sys + +from pcs import usage +from pcs import utils +from pcs.cli.booth import command +from pcs.cli.common.errors 
import CmdLineInputError +from pcs.lib.errors import LibraryError +from pcs.resource import resource_create, resource_remove + + +def booth_cmd(lib, argv, modifiers): + """ + routes booth command + """ + if len(argv) < 1: + usage.booth() + sys.exit(1) + + sub_cmd, argv_next = argv[0], argv[1:] + try: + if sub_cmd == "help": + usage.booth(argv) + elif sub_cmd == "config": + command.config_show(lib, argv_next, modifiers) + elif sub_cmd == "setup": + command.config_setup(lib, argv_next, modifiers) + elif sub_cmd == "destroy": + command.config_destroy(lib, argv_next, modifiers) + elif sub_cmd == "ticket": + if len(argv_next) < 1: + raise CmdLineInputError() + if argv_next[0] == "add": + command.config_ticket_add(lib, argv_next[1:], modifiers) + elif argv_next[0] == "remove": + command.config_ticket_remove(lib, argv_next[1:], modifiers) + elif argv_next[0] == "grant": + command.ticket_grant(lib, argv_next[1:], modifiers) + elif argv_next[0] == "revoke": + command.ticket_revoke(lib, argv_next[1:], modifiers) + else: + raise CmdLineInputError() + elif sub_cmd == "create": + command.get_create_in_cluster(resource_create)( + lib, argv_next, modifiers + ) + elif sub_cmd == "remove": + command.get_remove_from_cluster(resource_remove)( + lib, argv_next, modifiers + ) + elif sub_cmd == "sync": + command.sync(lib, argv_next, modifiers) + elif sub_cmd == "pull": + command.pull(lib, argv_next, modifiers) + elif sub_cmd == "enable": + command.enable(lib, argv_next, modifiers) + elif sub_cmd == "disable": + command.disable(lib, argv_next, modifiers) + elif sub_cmd == "start": + command.start(lib, argv_next, modifiers) + elif sub_cmd == "stop": + command.stop(lib, argv_next, modifiers) + elif sub_cmd == "status": + command.status(lib, argv_next, modifiers) + else: + raise CmdLineInputError() + except LibraryError as e: + utils.process_library_reports(e.args) + except CmdLineInputError as e: + utils.exit_on_cmdline_input_errror(e, "booth", sub_cmd) diff --git 
a/pcs/cli/booth/__init__.py b/pcs/cli/booth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pcs/cli/booth/command.py b/pcs/cli/booth/command.py new file mode 100644 index 0000000..bea6582 --- /dev/null +++ b/pcs/cli/booth/command.py @@ -0,0 +1,177 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from pcs.cli.common.errors import CmdLineInputError +from pcs.cli.common.parse_args import group_by_keywords + + +DEFAULT_BOOTH_NAME = "booth" + +def __get_name(modifiers): + return modifiers["name"] if modifiers["name"] else DEFAULT_BOOTH_NAME + +def config_setup(lib, arg_list, modifiers): + """ + create booth config + """ + booth_configuration = group_by_keywords( + arg_list, + set(["sites", "arbitrators"]), + keyword_repeat_allowed=False + ) + if "sites" not in booth_configuration or not booth_configuration["sites"]: + raise CmdLineInputError() + + lib.booth.config_setup(booth_configuration, modifiers["force"]) + +def config_destroy(lib, arg_list, modifiers): + """ + destroy booth config + """ + if arg_list: + raise CmdLineInputError() + lib.booth.config_destroy(ignore_config_load_problems=modifiers["force"]) + + +def config_show(lib, arg_list, modifiers): + """ + print booth config + """ + booth_configuration = lib.booth.config_show() + authfile_lines = [] + if booth_configuration["authfile"]: + authfile_lines.append( + "authfile = {0}".format(booth_configuration["authfile"]) + ) + + line_list = ( + ["site = {0}".format(site) for site in booth_configuration["sites"]] + + + [ + "arbitrator = {0}".format(arbitrator) + for arbitrator in booth_configuration["arbitrators"] + ] + + authfile_lines + + [ + 'ticket = "{0}"'.format(ticket) + for ticket in booth_configuration["tickets"] + ] + ) + for line in line_list: + print(line) + +def config_ticket_add(lib, arg_list, modifiers): + """ + add ticket to current configuration + """ + if len(arg_list) != 1: + raise CmdLineInputError + 
lib.booth.config_ticket_add(arg_list[0]) + +def config_ticket_remove(lib, arg_list, modifiers): + """ + add ticket to current configuration + """ + if len(arg_list) != 1: + raise CmdLineInputError + lib.booth.config_ticket_remove(arg_list[0]) + +def ticket_operation(lib_call, arg_list, modifiers): + site_ip = None + if len(arg_list) == 2: + site_ip = arg_list[1] + elif len(arg_list) != 1: + raise CmdLineInputError() + + ticket = arg_list[0] + lib_call(__get_name(modifiers), ticket, site_ip) + +def ticket_revoke(lib, arg_list, modifiers): + ticket_operation(lib.booth.ticket_revoke, arg_list, modifiers) + +def ticket_grant(lib, arg_list, modifiers): + ticket_operation(lib.booth.ticket_grant, arg_list, modifiers) + +def get_create_in_cluster(resource_create): + #TODO resource_remove is provisional hack until resources are not moved to + #lib + def create_in_cluster(lib, arg_list, modifiers): + if len(arg_list) != 2 or arg_list[0] != "ip": + raise CmdLineInputError() + ip = arg_list[1] + + lib.booth.create_in_cluster( + __get_name(modifiers), + ip, + resource_create, + ) + return create_in_cluster + +def get_remove_from_cluster(resource_remove): + #TODO resource_remove is provisional hack until resources are not moved to + #lib + def remove_from_cluster(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + + lib.booth.remove_from_cluster(__get_name(modifiers), resource_remove) + + return remove_from_cluster + + +def sync(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + lib.booth.config_sync( + DEFAULT_BOOTH_NAME, + skip_offline_nodes=modifiers["skip_offline_nodes"] + ) + + +def enable(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + lib.booth.enable(DEFAULT_BOOTH_NAME) + + +def disable(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + lib.booth.disable(DEFAULT_BOOTH_NAME) + + +def start(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + 
lib.booth.start(DEFAULT_BOOTH_NAME) + + +def stop(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + lib.booth.stop(DEFAULT_BOOTH_NAME) + + +def pull(lib, arg_list, modifiers): + if len(arg_list) != 1: + raise CmdLineInputError() + lib.booth.pull(arg_list[0], DEFAULT_BOOTH_NAME) + + +def status(lib, arg_list, modifiers): + if arg_list: + raise CmdLineInputError() + booth_status = lib.booth.status(DEFAULT_BOOTH_NAME) + if booth_status.get("ticket"): + print("TICKETS:") + print(booth_status["ticket"]) + if booth_status.get("peers"): + print("PEERS:") + print(booth_status["peers"]) + if booth_status.get("status"): + print("DAEMON STATUS:") + print(booth_status["status"]) diff --git a/pcs/cli/booth/env.py b/pcs/cli/booth/env.py new file mode 100644 index 0000000..918e487 --- /dev/null +++ b/pcs/cli/booth/env.py @@ -0,0 +1,121 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import os.path + +from pcs.cli.common import console_report +from pcs.common import report_codes, env_file_role_codes as file_role_codes +from pcs.lib.errors import LibraryEnvError + + +def read_env_file(path): + try: + return { + "content": open(path).read() if os.path.isfile(path) else None + } + except EnvironmentError as e: + raise console_report.error( + "Unable to read {0}: {1}".format(path, e.strerror) + ) + +def write_env_file(env_file, file_path): + try: + f = open(file_path, "wb" if env_file.get("is_binary", False) else "w") + f.write(env_file["content"]) + f.close() + except EnvironmentError as e: + raise console_report.error( + "Unable to write {0}: {1}".format(file_path, e.strerror) + ) + +def process_no_existing_file_expectation(file_role, env_file, file_path): + if( + env_file["no_existing_file_expected"] + and + os.path.exists(file_path) + ): + msg = "{0} {1} already exists".format(file_role, file_path) + if not env_file["can_overwrite_existing_file"]: + raise console_report.error( + "{0}, use --force to 
override".format(msg) + ) + console_report.warn(msg) + +def is_missing_file_report(report, file_role_code): + return ( + report.code == report_codes.FILE_DOES_NOT_EXIST + and + report.info["file_role"] == file_role_code + ) + +def report_missing_file(file_role, file_path): + console_report.error( + "{0} '{1}' does not exist".format(file_role, file_path) + ) + +def middleware_config(name, config_path, key_path): + if config_path and not key_path: + raise console_report.error( + "With --booth-conf must be specified --booth-key as well" + ) + + if key_path and not config_path: + raise console_report.error( + "With --booth-key must be specified --booth-conf as well" + ) + + is_mocked_environment = config_path and key_path + + def create_booth_env(): + if not is_mocked_environment: + return {"name": name} + return { + "name": name, + "config_file": read_env_file(config_path), + "key_file": read_env_file(key_path), + "key_path": key_path, + } + + def flush(modified_env): + if not is_mocked_environment: + return + if not modified_env: + #TODO now this would not happen + #for more information see comment in + #pcs.cli.common.lib_wrapper.lib_env_to_cli_env + raise console_report.error("Error during library communication") + + process_no_existing_file_expectation( + "booth config file", + modified_env["config_file"], + config_path + ) + process_no_existing_file_expectation( + "booth key file", + modified_env["key_file"], + key_path + ) + write_env_file(modified_env["key_file"], key_path) + write_env_file(modified_env["config_file"], config_path) + + def apply(next_in_line, env, *args, **kwargs): + env.booth = create_booth_env() + try: + result_of_next = next_in_line(env, *args, **kwargs) + except LibraryEnvError as e: + for report in e.args: + if is_missing_file_report(report, file_role_codes.BOOTH_CONFIG): + report_missing_file("Booth config file", config_path) + e.sign_processed(report) + if is_missing_file_report(report, file_role_codes.BOOTH_KEY): + 
report_missing_file("Booth key file", key_path) + e.sign_processed(report) + raise e + flush(env.booth["modified_env"]) + return result_of_next + + return apply diff --git a/pcs/cli/booth/test/__init__.py b/pcs/cli/booth/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pcs/cli/booth/test/test_command.py b/pcs/cli/booth/test/test_command.py new file mode 100644 index 0000000..00216f2 --- /dev/null +++ b/pcs/cli/booth/test/test_command.py @@ -0,0 +1,44 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from pcs.cli.booth import command +from pcs.test.tools.pcs_mock import mock + + +class ConfigSetupTest(TestCase): + def test_call_lib_with_correct_args(self): + lib = mock.MagicMock() + lib.booth = mock.MagicMock() + lib.booth.config_setup = mock.MagicMock() + + command.config_setup( + lib, + arg_list=[ + "sites", "1.1.1.1", "2.2.2.2", "4.4.4.4", + "arbitrators", "3.3.3.3" + ], + modifiers={ + "force": False, + } + ) + lib.booth.config_setup.assert_called_once_with( + { + "sites": ["1.1.1.1", "2.2.2.2", "4.4.4.4"], + "arbitrators": ["3.3.3.3"], + }, + False + ) + +class ConfigTicketAddTest(TestCase): + def test_call_lib_with_ticket_name(self): + lib = mock.MagicMock() + lib.booth = mock.MagicMock() + lib.booth.config_ticket_add = mock.MagicMock() + command.config_ticket_add(lib, arg_list=["TICKET_A"], modifiers={}) + lib.booth.config_ticket_add.assert_called_once_with("TICKET_A") diff --git a/pcs/cli/booth/test/test_env.py b/pcs/cli/booth/test/test_env.py new file mode 100644 index 0000000..1ead6f2 --- /dev/null +++ b/pcs/cli/booth/test/test_env.py @@ -0,0 +1,118 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from pcs.cli.booth.env import middleware_config +from pcs.common import report_codes, env_file_role_codes +from pcs.lib.errors import LibraryEnvError, ReportItem 
+from pcs.test.tools.pcs_mock import mock + + +class BoothConfTest(TestCase): + @mock.patch("pcs.cli.booth.env.os.path.isfile") + def test_sucessfully_care_about_local_file(self, mock_is_file): + #setup, fixtures + def next_in_line(env): + env.booth["modified_env"] = { + "config_file": { + "content": "file content", + "no_existing_file_expected": False, + }, + "key_file": { + "content": "key file content", + "no_existing_file_expected": False, + } + } + return "call result" + mock_is_file.return_value = True + mock_env = mock.MagicMock() + + mock_open = mock.mock_open() + with mock.patch( + "pcs.cli.booth.env.open", + mock_open, + create=True + ): + #run tested code + booth_conf_middleware = middleware_config( + "booth-name", + "/local/file/path.conf", + "/local/file/path.key", + ) + + self.assertEqual( + "call result", + booth_conf_middleware(next_in_line, mock_env) + ) + + #assertions + self.assertEqual(mock_is_file.mock_calls,[ + mock.call("/local/file/path.conf"), + mock.call("/local/file/path.key"), + ]) + + self.assertEqual(mock_env.booth["name"], "booth-name") + self.assertEqual(mock_env.booth["config_file"], {"content": ""}) + self.assertEqual(mock_env.booth["key_file"], {"content": ""}) + + self.assertEqual(mock_open.mock_calls, [ + mock.call(u'/local/file/path.conf'), + mock.call().read(), + mock.call(u'/local/file/path.key'), + mock.call().read(), + mock.call(u'/local/file/path.key', u'w'), + mock.call().write(u'key file content'), + mock.call().close(), + mock.call(u'/local/file/path.conf', u'w'), + mock.call().write(u'file content'), + mock.call().close(), + ]) + + @mock.patch("pcs.cli.booth.env.console_report") + @mock.patch("pcs.cli.booth.env.os.path.isfile") + def test_catch_exactly_his_exception( + self, mock_is_file, mock_console_report + ): + next_in_line = mock.Mock(side_effect=LibraryEnvError( + ReportItem.error(report_codes.FILE_DOES_NOT_EXIST, "", info={ + "file_role": env_file_role_codes.BOOTH_CONFIG, + }), + 
ReportItem.error(report_codes.FILE_DOES_NOT_EXIST, "", info={ + "file_role": env_file_role_codes.BOOTH_KEY, + }), + ReportItem.error("OTHER ERROR", "", info={}), + )) + mock_is_file.return_value = False + mock_env = mock.MagicMock() + + #run tested code + booth_conf_middleware = middleware_config( + "booth-name", + "/local/file/path.conf", + "/local/file/path.key", + ) + raised_exception = [] + def run_middleware(): + try: + booth_conf_middleware(next_in_line, mock_env) + except Exception as e: + raised_exception.append(e) + raise e + + self.assertRaises(LibraryEnvError, run_middleware) + self.assertEqual(1, len(raised_exception[0].unprocessed)) + self.assertEqual("OTHER ERROR", raised_exception[0].unprocessed[0].code) + + self.assertEqual(mock_console_report.error.mock_calls, [ + mock.call( + "Booth config file '/local/file/path.conf' does not exist" + ), + mock.call( + "Booth key file '/local/file/path.key' does not exist" + ), + ]) diff --git a/pcs/cli/common/console_report.py b/pcs/cli/common/console_report.py index 3d42798..e600168 100644 --- a/pcs/cli/common/console_report.py +++ b/pcs/cli/common/console_report.py @@ -8,10 +8,15 @@ from __future__ import ( import sys -def error(message, exit=True): - sys.stderr.write("Error: {0}\n".format(message)) - if exit: - sys.exit(1) +def warn(message): + sys.stdout.write(format_message(message, "Warning: ")) + +def format_message(message, prefix): + return "{0}{1}\n".format(prefix, message) + +def error(message): + sys.stderr.write(format_message(message, "Error: ")) + return SystemExit(1) def indent(line_list, indent_step=2): """ diff --git a/pcs/cli/common/env.py b/pcs/cli/common/env.py index 2ba4f70..b1d951d 100644 --- a/pcs/cli/common/env.py +++ b/pcs/cli/common/env.py @@ -6,11 +6,13 @@ from __future__ import ( ) class Env(object): + #pylint: disable=too-many-instance-attributes def __init__(self): self.cib_data = None self.cib_upgraded = False self.user = None self.groups = None self.corosync_conf_data = None + 
self.booth = None self.auth_tokens_getter = None self.debug = False diff --git a/pcs/cli/common/lib_wrapper.py b/pcs/cli/common/lib_wrapper.py index c4b8342..c836575 100644 --- a/pcs/cli/common/lib_wrapper.py +++ b/pcs/cli/common/lib_wrapper.py @@ -5,27 +5,30 @@ from __future__ import ( unicode_literals, ) -from collections import namedtuple -from functools import partial import logging +import sys +from collections import namedtuple from pcs.cli.common import middleware - -#from pcs.lib import commands does not work: "commands" is package -from pcs.lib.commands.constraint import colocation as constraint_colocation -from pcs.lib.commands.constraint import order as constraint_order -from pcs.lib.commands.constraint import ticket as constraint_ticket +from pcs.cli.common.reports import ( + LibraryReportProcessorToConsole, + process_library_reports +) from pcs.lib.commands import ( + booth, quorum, qdevice, sbd, alert, ) -from pcs.cli.common.reports import ( - LibraryReportProcessorToConsole as LibraryReportProcessorToConsole, +from pcs.lib.commands.constraint import ( + colocation as constraint_colocation, + order as constraint_order, + ticket as constraint_ticket ) - from pcs.lib.env import LibraryEnvironment +from pcs.lib.errors import LibraryEnvError + _CACHE = {} @@ -40,7 +43,8 @@ def cli_env_to_lib_env(cli_env): cli_env.groups, cli_env.cib_data, cli_env.corosync_conf_data, - cli_env.auth_tokens_getter, + booth=cli_env.booth, + auth_tokens_getter=cli_env.auth_tokens_getter, ) def lib_env_to_cli_env(lib_env, cli_env): @@ -49,6 +53,19 @@ def lib_env_to_cli_env(lib_env, cli_env): cli_env.cib_upgraded = lib_env.cib_upgraded if not lib_env.is_corosync_conf_live: cli_env.corosync_conf_data = lib_env.get_corosync_conf_data() + + #TODO + #now we know: if is in cli_env booth is in lib_env as well + #when we communicate with the library over the network we will need extra + #sanitization here + #this applies generally, not only for booth + #corosync_conf and cib suffers 
with this problem as well but in this cases + #it is dangerously hidden: when inconsistency between cli and lib + #environment inconsitency occurs, original content is put to file (which is + #wrong) + if cli_env.booth: + cli_env.booth["modified_env"] = lib_env.booth.export() + return cli_env def bind(cli_env, run_with_middleware, run_library_command): @@ -62,7 +79,17 @@ def bind(cli_env, run_with_middleware, run_library_command): lib_env_to_cli_env(lib_env, cli_env) return lib_call_result - return partial(run_with_middleware, run, cli_env) + + def decorated_run(*args, **kwargs): + try: + return run_with_middleware(run, cli_env, *args, **kwargs) + except LibraryEnvError as e: + process_library_reports(e.unprocessed) + #TODO we use explicit exit here - process_library_reports stil has + #possibility to not exit - it will need deeper rethinking + sys.exit(1) + + return decorated_run def bind_all(env, run_with_middleware, dictionary): return wrapper(dict( @@ -172,6 +199,33 @@ def load_module(env, middleware_factory, name): } ) + if name == "booth": + return bind_all( + env, + middleware.build( + middleware_factory.booth_conf, + middleware_factory.cib + ), + { + "config_setup": booth.config_setup, + "config_destroy": booth.config_destroy, + "config_show": booth.config_show, + "config_ticket_add": booth.config_ticket_add, + "config_ticket_remove": booth.config_ticket_remove, + "create_in_cluster": booth.create_in_cluster, + "remove_from_cluster": booth.remove_from_cluster, + "config_sync": booth.config_sync, + "enable": booth.enable_booth, + "disable": booth.disable_booth, + "start": booth.start_booth, + "stop": booth.stop_booth, + "pull": booth.pull_config, + "status": booth.get_status, + "ticket_grant": booth.ticket_grant, + "ticket_revoke": booth.ticket_revoke, + } + ) + raise Exception("No library part '{0}'".format(name)) class Library(object): diff --git a/pcs/cli/common/middleware.py b/pcs/cli/common/middleware.py index e53e138..9254a12 100644 --- 
a/pcs/cli/common/middleware.py +++ b/pcs/cli/common/middleware.py @@ -29,11 +29,12 @@ def cib(use_local_cib, load_cib_content, write_cib): """ def apply(next_in_line, env, *args, **kwargs): if use_local_cib: - env.cib_data = load_cib_content() + original_content = load_cib_content() + env.cib_data = original_content result_of_next = next_in_line(env, *args, **kwargs) - if use_local_cib: + if use_local_cib and env.cib_data != original_content: write_cib(env.cib_data, env.cib_upgraded) return result_of_next @@ -45,7 +46,7 @@ def corosync_conf_existing(local_file_path): try: env.corosync_conf_data = open(local_file_path).read() except EnvironmentError as e: - console_report.error("Unable to read {0}: {1}".format( + raise console_report.error("Unable to read {0}: {1}".format( local_file_path, e.strerror )) @@ -58,7 +59,7 @@ def corosync_conf_existing(local_file_path): f.write(env.corosync_conf_data) f.close() except EnvironmentError as e: - console_report.error("Unable to write {0}: {1}".format( + raise console_report.error("Unable to write {0}: {1}".format( local_file_path, e.strerror )) diff --git a/pcs/cli/common/parse_args.py b/pcs/cli/common/parse_args.py index 3b01775..d17c5da 100644 --- a/pcs/cli/common/parse_args.py +++ b/pcs/cli/common/parse_args.py @@ -25,3 +25,30 @@ def prepare_options(cmdline_args): name, value = arg.split("=", 1) options[name] = value return options + +def group_by_keywords( + arg_list, keyword_set, + implicit_first_keyword=None, keyword_repeat_allowed=True, +): + groups = dict([(keyword, []) for keyword in keyword_set]) + if implicit_first_keyword: + groups[implicit_first_keyword] = [] + + if not arg_list: + return groups + + used_keywords = [] + if implicit_first_keyword: + used_keywords.append(implicit_first_keyword) + elif arg_list[0] not in keyword_set: + raise CmdLineInputError() + + for arg in arg_list: + if arg in list(groups.keys()): + if arg in used_keywords and not keyword_repeat_allowed: + raise CmdLineInputError() + 
used_keywords.append(arg) + else: + groups[used_keywords[-1]].append(arg) + + return groups diff --git a/pcs/cli/common/test/test_lib_wrapper.py b/pcs/cli/common/test/test_lib_wrapper.py index f34d2d0..c10bb62 100644 --- a/pcs/cli/common/test/test_lib_wrapper.py +++ b/pcs/cli/common/test/test_lib_wrapper.py @@ -6,8 +6,10 @@ from __future__ import ( ) from unittest import TestCase -from pcs.cli.common.lib_wrapper import Library +from pcs.cli.common.lib_wrapper import Library, bind from pcs.test.tools.pcs_mock import mock +from pcs.lib.errors import ReportItem +from pcs.lib.errors import LibraryEnvError class LibraryWrapperTest(TestCase): def test_raises_for_bad_path(self): @@ -30,6 +32,28 @@ class LibraryWrapperTest(TestCase): mock_middleware_factory = mock.MagicMock() mock_middleware_factory.cib = dummy_middleware mock_middleware_factory.corosync_conf_existing = dummy_middleware - Library('env', mock_middleware_factory).constraint_order.set('first', second="third") + mock_env = mock.MagicMock() + Library(mock_env, mock_middleware_factory).constraint_order.set( + 'first', second="third" + ) mock_order_set.assert_called_once_with(lib_env, "first", second="third") + +class BindTest(TestCase): + @mock.patch("pcs.cli.common.lib_wrapper.process_library_reports") + def test_report_unprocessed_library_env_errors(self, mock_process_report): + report1 = ReportItem.error("OTHER ERROR", "", info={}) + report2 = ReportItem.error("OTHER ERROR", "", info={}) + report3 = ReportItem.error("OTHER ERROR", "", info={}) + e = LibraryEnvError(report1, report2, report3) + e.sign_processed(report2) + mock_middleware = mock.Mock(side_effect=e) + + binded = bind( + cli_env=None, + run_with_middleware=mock_middleware, + run_library_command=None + ) + + self.assertRaises(SystemExit, lambda: binded(cli_env=None)) + mock_process_report.assert_called_once_with([report1, report3]) diff --git a/pcs/cli/common/test/test_middleware.py b/pcs/cli/common/test/test_middleware.py index 6179882..c030cd9 
100644 --- a/pcs/cli/common/test/test_middleware.py +++ b/pcs/cli/common/test/test_middleware.py @@ -6,7 +6,8 @@ from __future__ import ( ) from unittest import TestCase -import pcs.cli.common.middleware + +from pcs.cli.common import middleware class MiddlewareBuildTest(TestCase): @@ -29,7 +30,7 @@ class MiddlewareBuildTest(TestCase): next(lib, argv, modificators) log.append('m2 done') - run_with_middleware = pcs.cli.common.middleware.build(m1, m2) + run_with_middleware = middleware.build(m1, m2) run_with_middleware(command, "1", "2", "3") self.assertEqual(log, [ 'm1 start: 1, 2, 3', @@ -38,3 +39,4 @@ class MiddlewareBuildTest(TestCase): 'm2 done', 'm1 done', ]) + diff --git a/pcs/cli/common/test/test_parse_args.py b/pcs/cli/common/test/test_parse_args.py index 1d6c4b0..eb358a5 100644 --- a/pcs/cli/common/test/test_parse_args.py +++ b/pcs/cli/common/test/test_parse_args.py @@ -6,7 +6,11 @@ from __future__ import ( ) from unittest import TestCase -from pcs.cli.common.parse_args import split_list, prepare_options +from pcs.cli.common.parse_args import( + split_list, + prepare_options, + group_by_keywords, +) from pcs.cli.common.errors import CmdLineInputError @@ -42,3 +46,81 @@ class SplitListTest(TestCase): [[], ['a', 'b'], ['c', 'd'], []], split_list(['|','a', 'b', '|', 'c', 'd', "|"], '|') ) + +class SplitByKeywords(TestCase): + def test_split_with_implicit_first_keyword(self): + self.assertEqual( + group_by_keywords( + [0, "first", 1, 2, "second", 3], + set(["first", "second"]), + implicit_first_keyword="zero" + ), + { + "zero": [0], + "first": [1, 2], + "second": [3], + } + ) + + def test_splict_without_implict_keyword(self): + self.assertEqual( + group_by_keywords( + ["first", 1, 2, "second", 3], + set(["first", "second"]), + ), + { + "first": [1, 2], + "second": [3], + } + ) + + def test_raises_when_args_do_not_start_with_keyword_nor_implicit(self): + self.assertRaises(CmdLineInputError, lambda: group_by_keywords( + [0, "first", 1, 2, "second", 3], + 
set(["first", "second"]), + )) + + def test_returns_dict_with_empty_lists_for_no_args(self): + self.assertEqual( + group_by_keywords( + [], + set(["first", "second"]) + ), + { + "first": [], + "second": [], + } + ) + + def test_returns_dict_with_empty_lists_for_no_args_implicit_case(self): + self.assertEqual( + group_by_keywords( + [], + set(["first", "second"]), + implicit_first_keyword="zero", + ), + { + "zero": [], + "first": [], + "second": [], + } + ) + + def test_allow_keywords_repeating(self): + self.assertEqual( + group_by_keywords( + ["first", 1, 2, "second", 3, "first", 4], + set(["first", "second"]), + ), + { + "first": [1, 2, 4], + "second": [3], + } + ) + + def test_can_disallow_keywords_repeating(self): + self.assertRaises(CmdLineInputError, lambda: group_by_keywords( + ["first", 1, 2, "second", 3, "first"], + set(["first", "second"]), + keyword_repeat_allowed=False, + )) diff --git a/pcs/cluster.py b/pcs/cluster.py index 3f41d96..90fec63 100644 --- a/pcs/cluster.py +++ b/pcs/cluster.py @@ -42,6 +42,7 @@ from pcs.lib import ( sbd as lib_sbd, reports as lib_reports, ) +from pcs.lib.booth import sync as booth_sync from pcs.lib.commands.quorum import _add_device_model_net from pcs.lib.corosync import ( config_parser as corosync_conf_utils, @@ -1388,6 +1389,7 @@ def cluster_node(argv): report_processor = lib_env.report_processor node_communicator = lib_env.node_communicator() node_addr = NodeAddresses(node0, node1) + modifiers = utils.get_modificators() try: if lib_sbd.is_sbd_enabled(utils.cmd_runner()): if "--watchdog" not in utils.pcs_options: @@ -1421,6 +1423,14 @@ def cluster_node(argv): lib_sbd.disable_sbd_service_on_node( report_processor, node_communicator, node_addr ) + + booth_sync.send_all_config_to_node( + node_communicator, + report_processor, + node_addr, + rewrite_existing=modifiers["force"], + skip_wrong_config=modifiers["force"] + ) except LibraryError as e: process_library_reports(e.args) except NodeCommunicationException as e: diff --git 
a/pcs/common/env_file_role_codes.py b/pcs/common/env_file_role_codes.py new file mode 100644 index 0000000..1f47387 --- /dev/null +++ b/pcs/common/env_file_role_codes.py @@ -0,0 +1,9 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +BOOTH_CONFIG = "BOOTH_CONFIG" +BOOTH_KEY = "BOOTH_KEY" diff --git a/pcs/common/report_codes.py b/pcs/common/report_codes.py index 53f2ccb..e71d418 100644 --- a/pcs/common/report_codes.py +++ b/pcs/common/report_codes.py @@ -8,6 +8,9 @@ from __future__ import ( # force cathegories FORCE_ACTIVE_RRP = "ACTIVE_RRP" FORCE_ALERT_RECIPIENT_VALUE_NOT_UNIQUE = "FORCE_ALERT_RECIPIENT_VALUE_NOT_UNIQUE" +FORCE_BOOTH_REMOVE_FROM_CIB = "FORCE_BOOTH_REMOVE_FROM_CIB" +FORCE_BOOTH_DESTROY = "FORCE_BOOTH_DESTROY" +FORCE_FILE_OVERWRITE = "FORCE_FILE_OVERWRITE" FORCE_CONSTRAINT_DUPLICATE = "CONSTRAINT_DUPLICATE" FORCE_CONSTRAINT_MULTIINSTANCE_RESOURCE = "CONSTRAINT_MULTIINSTANCE_RESOURCE" FORCE_LOAD_THRESHOLD = "LOAD_THRESHOLD" @@ -17,10 +20,40 @@ FORCE_UNKNOWN_AGENT = "UNKNOWN_AGENT" FORCE_UNSUPPORTED_AGENT = "UNSUPPORTED_AGENT" FORCE_METADATA_ISSUE = "METADATA_ISSUE" SKIP_OFFLINE_NODES = "SKIP_OFFLINE_NODES" +SKIP_UNREADABLE_CONFIG = "SKIP_UNREADABLE_CONFIG" AGENT_GENERAL_ERROR = "AGENT_GENERAL_ERROR" AGENT_NOT_FOUND = "AGENT_NOT_FOUND" BAD_CLUSTER_STATE_FORMAT = 'BAD_CLUSTER_STATE_FORMAT' +BOOTH_ADDRESS_DUPLICATION = "BOOTH_ADDRESS_DUPLICATION" +BOOTH_ALREADY_IN_CIB = "BOOTH_ALREADY_IN_CIB" +BOOTH_CANNOT_DETERMINE_LOCAL_SITE_IP = "BOOTH_CANNOT_DETERMINE_LOCAL_SITE_IP" +BOOTH_CANNOT_IDENTIFY_KEYFILE = "BOOTH_CANNOT_IDENTIFY_KEYFILE" +BOOTH_CONFIG_FILE_ALREADY_EXISTS = "BOOTH_CONFIG_FILE_ALREADY_EXISTS" +BOOTH_CONFIG_IO_ERROR = "BOOTH_CONFIG_IO_ERROR" +BOOTH_CONFIG_IS_USED = "BOOTH_CONFIG_IS_USED" +BOOTH_CONFIG_READ_ERROR = "BOOTH_CONFIG_READ_ERROR" +BOOTH_CONFIG_WRITE_ERROR = "BOOTH_CONFIG_WRITE_ERROR" +BOOTH_CONFIG_UNEXPECTED_LINES = "BOOTH_CONFIG_UNEXPECTED_LINES" +BOOTH_CONFIGS_SAVED_ON_NODE = 
"BOOTH_CONFIGS_SAVED_ON_NODE" +BOOTH_CONFIGS_SAVING_ON_NODE = "BOOTH_CONFIGS_SAVING_ON_NODE" +BOOTH_DAEMON_STATUS_ERROR = "BOOTH_DAEMON_STATUS_ERROR" +BOOTH_DISTRIBUTING_CONFIG = "BOOTH_DISTRIBUTING_CONFIG" +BOOTH_EVEN_PEERS_NUM = "BOOTH_EVEN_PEERS_NUM" +BOOTH_FETCHING_CONFIG_FROM_NODE = "BOOTH_FETCHING_CONFIG_FROM_NODE" +BOOTH_INVALID_CONFIG_NAME = "BOOTH_INVALID_CONFIG_NAME" +BOOTH_INVALID_NAME = "BOOTH_INVALID_NAME" +BOOTH_LACK_OF_SITES = "BOOTH_LACK_OF_SITES" +BOOTH_MULTIPLE_TIMES_IN_CIB = "BOOTH_MULTIPLE_TIMES_IN_CIB" +BOOTH_NOT_EXISTS_IN_CIB = "BOOTH_NOT_EXISTS_IN_CIB" +BOOTH_PEERS_STATUS_ERROR = "BOOTH_PEERS_STATUS_ERROR" +BOOTH_SKIPPING_CONFIG = "BOOTH_SKIPPING_CONFIG" +BOOTH_TICKET_DOES_NOT_EXIST = "BOOTH_TICKET_DOES_NOT_EXIST" +BOOTH_TICKET_DUPLICATE = "BOOTH_TICKET_DUPLICATE" +BOOTH_TICKET_OPERATION_FAILED = "BOOTH_TICKET_OPERATION_FAILED" +BOOTH_TICKET_NAME_INVALID = "BOOTH_TICKET_NAME_INVALID" +BOOTH_TICKET_STATUS_ERROR = "BOOTH_TICKET_STATUS_ERROR" +BOOTH_UNSUPORTED_FILE_LOCATION = "BOOTH_UNSUPORTED_FILE_LOCATION" CIB_ALERT_NOT_FOUND = "CIB_ALERT_NOT_FOUND" CIB_ALERT_RECIPIENT_ALREADY_EXISTS = "CIB_ALERT_RECIPIENT_ALREADY_EXISTS" CIB_ALERT_RECIPIENT_VALUE_INVALID = "CIB_ALERT_RECIPIENT_VALUE_INVALID" @@ -38,6 +71,7 @@ CMAN_UDPU_RESTART_REQUIRED = 'CMAN_UDPU_RESTART_REQUIRED' CMAN_UNSUPPORTED_COMMAND = "CMAN_UNSUPPORTED_COMMAND" COMMON_ERROR = 'COMMON_ERROR' COMMON_INFO = 'COMMON_INFO' +LIVE_ENVIRONMENT_REQUIRED = "LIVE_ENVIRONMENT_REQUIRED" COROSYNC_CONFIG_ACCEPTED_BY_NODE = "COROSYNC_CONFIG_ACCEPTED_BY_NODE" COROSYNC_CONFIG_DISTRIBUTION_STARTED = "COROSYNC_CONFIG_DISTRIBUTION_STARTED" COROSYNC_CONFIG_DISTRIBUTION_NODE_ERROR = "COROSYNC_CONFIG_DISTRIBUTION_NODE_ERROR" @@ -53,6 +87,9 @@ COROSYNC_RUNNING_ON_NODE = "COROSYNC_RUNNING_ON_NODE" CRM_MON_ERROR = "CRM_MON_ERROR" DUPLICATE_CONSTRAINTS_EXIST = "DUPLICATE_CONSTRAINTS_EXIST" EMPTY_RESOURCE_SET_LIST = "EMPTY_RESOURCE_SET_LIST" +FILE_ALREADY_EXISTS = "FILE_ALREADY_EXISTS" +FILE_DOES_NOT_EXIST = 
"FILE_DOES_NOT_EXIST" +FILE_IO_ERROR = "FILE_IO_ERROR" ID_ALREADY_EXISTS = 'ID_ALREADY_EXISTS' ID_NOT_FOUND = 'ID_NOT_FOUND' IGNORED_CMAN_UNSUPPORTED_OPTION = 'IGNORED_CMAN_UNSUPPORTED_OPTION' @@ -134,10 +171,13 @@ SERVICE_START_SUCCESS = "SERVICE_START_SUCCESS" SERVICE_STOP_ERROR = "SERVICE_STOP_ERROR" SERVICE_STOP_STARTED = "SERVICE_STOP_STARTED" SERVICE_STOP_SUCCESS = "SERVICE_STOP_SUCCESS" +UNABLE_TO_DETERMINE_USER_UID = "UNABLE_TO_DETERMINE_USER_UID" +UNABLE_TO_DETERMINE_GROUP_GID = "UNABLE_TO_DETERMINE_GROUP_GID" UNABLE_TO_GET_AGENT_METADATA = 'UNABLE_TO_GET_AGENT_METADATA' UNABLE_TO_READ_COROSYNC_CONFIG = "UNABLE_TO_READ_COROSYNC_CONFIG" UNABLE_TO_GET_SBD_CONFIG = "UNABLE_TO_GET_SBD_CONFIG" UNABLE_TO_GET_SBD_STATUS = "UNABLE_TO_GET_SBD_STATUS" UNKNOWN_COMMAND = 'UNKNOWN_COMMAND' UNSUPPORTED_AGENT = 'UNSUPPORTED_AGENT' +UNSUPPORTED_OPERATION_ON_NON_SYSTEMD_SYSTEMS = "UNSUPPORTED_OPERATION_ON_NON_SYSTEMD_SYSTEMS" WATCHDOG_NOT_FOUND = "WATCHDOG_NOT_FOUND" diff --git a/pcs/common/test/__init__.py b/pcs/common/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pcs/common/tools.py b/pcs/common/tools.py index f4f6c4b..275f6b9 100644 --- a/pcs/common/tools.py +++ b/pcs/common/tools.py @@ -33,3 +33,8 @@ def run_parallel(worker, data_list): for thread in thread_list: thread.join() + +def format_environment_error(e): + if e.filename: + return "{0}: '{1}'".format(e.strerror, e.filename) + return e.strerror diff --git a/pcs/lib/booth/__init__.py b/pcs/lib/booth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pcs/lib/booth/config_exchange.py b/pcs/lib/booth/config_exchange.py new file mode 100644 index 0000000..e0569ba --- /dev/null +++ b/pcs/lib/booth/config_exchange.py @@ -0,0 +1,43 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +from pcs.lib.booth.config_structure import ConfigItem + +EXCHANGE_PRIMITIVES = ["authfile"] +EXCHANGE_LISTS = [ + ("site", "sites"), + 
("arbitrator", "arbitrators"), + ("ticket", "tickets"), +] + + +def to_exchange_format(booth_configuration): + exchange_lists = dict(EXCHANGE_LISTS) + exchange = dict( + (exchange_key, []) for exchange_key in exchange_lists.values() + ) + + for key, value, _ in booth_configuration: + if key in exchange_lists: + exchange[exchange_lists[key]].append(value) + if key in EXCHANGE_PRIMITIVES: + exchange[key] = value + + return exchange + + +def from_exchange_format(exchange_format): + booth_config = [] + for key in EXCHANGE_PRIMITIVES: + if key in exchange_format: + booth_config.append(ConfigItem(key, exchange_format[key])) + + for key, exchange_key in EXCHANGE_LISTS: + booth_config.extend([ + ConfigItem(key, value) + for value in exchange_format.get(exchange_key, []) + ]) + return booth_config diff --git a/pcs/lib/booth/config_files.py b/pcs/lib/booth/config_files.py new file mode 100644 index 0000000..aaad951 --- /dev/null +++ b/pcs/lib/booth/config_files.py @@ -0,0 +1,97 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import os +import binascii + +from pcs.common import report_codes, env_file_role_codes as file_roles +from pcs.common.tools import format_environment_error +from pcs.lib import reports as lib_reports +from pcs.lib.booth import reports +from pcs.lib.errors import ReportItemSeverity +from pcs.settings import booth_config_dir as BOOTH_CONFIG_DIR + + +def generate_key(): + return binascii.hexlify(os.urandom(32)) + +def get_all_configs_file_names(): + """ + Returns list of all file names ending with '.conf' in booth configuration + directory. + """ + return [ + file_name for file_name in os.listdir(BOOTH_CONFIG_DIR) + if os.path.isfile(file_name) and file_name.endswith(".conf") and + len(file_name) > len(".conf") + ] + + +def _read_config(file_name): + """ + Read specified booth config from default booth config directory. 
def read_configs(reporter, skip_wrong_config=False):
    """
    Read every booth config present on the local system.

    Returns a dict mapping config file name to its content.  Unreadable
    configs are reported: as warnings when skip_wrong_config is True,
    otherwise as errors forceable by SKIP_UNREADABLE_CONFIG.

    reporter -- report processor
    skip_wrong_config -- if True, skip local configs that are unreadable
    """
    # decide once how a read failure is reported
    if skip_wrong_config:
        severity = ReportItemSeverity.WARNING
        forceable = None
    else:
        severity = ReportItemSeverity.ERROR
        forceable = report_codes.SKIP_UNREADABLE_CONFIG

    output = {}
    problem_reports = []
    for file_name in get_all_configs_file_names():
        try:
            output[file_name] = _read_config(file_name)
        except EnvironmentError:
            problem_reports.append(
                reports.booth_config_unable_to_read(
                    file_name, severity, forceable
                )
            )
    reporter.process_list(problem_reports)
    return output
def build(config_line_list):
    """Serialize parsed booth config items back into file content."""
    return "\n".join(build_to_lines(config_line_list))

def build_to_lines(config_line_list, deep=0):
    """
    Render config items as a list of "key = value" text lines.

    Ticket values are double-quoted; nested details are rendered below
    their item, indented one extra space per nesting level (deep).
    """
    rendered = []
    indent = " " * deep
    for key, value, details in config_line_list:
        if key == "ticket":
            shown_value = '"{0}"'.format(value)
        else:
            shown_value = value
        rendered.append("{0}{1} = {2}".format(indent, key, shown_value))
        if details:
            rendered.extend(build_to_lines(details, deep + 1))
    return rendered
def search_with_multiple_re(re_object_list, string):
    """
    Return the MatchObject of the first regular expression object in
    re_object_list (products of re.compile) matching string, or None when
    none of them match.
    """
    for compiled in re_object_list:
        found = compiled.search(string)
        if found is not None:
            return found
    return None

def parse_to_raw_lines(config_content):
    """
    Split booth config text into a list of (key, value) pairs.

    Accepted line forms: key = value, key = 'value' and key = "value"
    (quoted forms may carry a trailing "#" comment).  Blank lines and
    whole-line "#" comments are ignored; anything else is collected and
    raised via InvalidLines.
    """
    key_prefix = r"^(?P<key>[a-zA-Z0-9_-]+)\s*=\s*"
    matchers = [
        re.compile(template.format(key_prefix))
        for template in (
            r"""{0}(?P<value>[^'"]+)$""",
            r"""{0}'(?P<value>[^']*)'\s*(#.*)?$""",
            r"""{0}"(?P<value>[^"]*)"\s*(#.*)?$""",
        )
    ]

    pairs = []
    rejected = []
    for raw_line in config_content.splitlines():
        stripped = raw_line.strip()
        match = search_with_multiple_re(matchers, stripped)
        if match:
            pairs.append((match.group("key"), match.group("value")))
        elif stripped and not stripped.startswith("#"):
            rejected.append(stripped)

    if rejected:
        raise InvalidLines(rejected)

    return pairs
"maxtimeskew", + "site", + "arbitrator", + "site-user", + "site-group", + "arbitrator-user", + "arbitrator-group", + "debug", + "ticket", +) +TICKET_KEYS = ( + "acquire-after", + "attr-prereq", + "before-acquire-handler", + "expire", + "renewal-freq", + "retries", + "timeout", + "weights", +) + +class ConfigItem(namedtuple("ConfigItem", "key value details")): + def __new__(cls, key, value, details=None): + details = details if details else [] + return super(ConfigItem, cls).__new__(cls, key, value, details) + +def validate_peers(site_list, arbitrator_list): + report = [] + + if len(site_list) < 2: + report.append(reports.booth_lack_of_sites(site_list)) + + peer_list = site_list + arbitrator_list + + if len(peer_list) % 2 == 0: + report.append(reports.booth_even_peers_num(len(peer_list))) + + address_set = set() + duplicate_addresses = set() + for address in peer_list: + if address in address_set: + duplicate_addresses.add(address) + else: + address_set.add(address) + if duplicate_addresses: + report.append(reports.booth_address_duplication(duplicate_addresses)) + + if report: + raise LibraryError(*report) + +def remove_ticket(booth_configuration, ticket_name): + validate_ticket_exists(booth_configuration, ticket_name) + return [ + config_item for config_item in booth_configuration + if config_item.key != "ticket" or config_item.value != ticket_name + ] + +def add_ticket(booth_configuration, ticket_name): + validate_ticket_name(ticket_name) + validate_ticket_unique(booth_configuration, ticket_name) + return booth_configuration + [ + ConfigItem("ticket", ticket_name) + ] + +def validate_ticket_exists(booth_configuration, ticket_name): + if not ticket_exists(booth_configuration, ticket_name): + raise LibraryError(reports.booth_ticket_does_not_exist(ticket_name)) + +def validate_ticket_unique(booth_configuration, ticket_name): + if ticket_exists(booth_configuration, ticket_name): + raise LibraryError(reports.booth_ticket_duplicate(ticket_name)) + +def 
def get_authfile(booth_configuration):
    """
    Return the value of the last "authfile" item in booth_configuration,
    or None when no authfile item is present (the last occurrence takes
    precedence).
    """
    authfile_values = [
        value for key, value, _ in booth_configuration if key == "authfile"
    ]
    return authfile_values[-1] if authfile_values else None
def set_keyfile_access(file_path):
    """
    Restrict the booth key file: owned by the pacemaker user/group, mode
    0600.  Raises LibraryError when the user or group cannot be resolved
    or when chown/chmod fails.
    """
    # shutil.chown is not in python2, so resolve the ids by hand
    try:
        key_uid = pwd.getpwnam(settings.pacemaker_uname).pw_uid
    except KeyError:
        raise LibraryError(
            common_reports.unable_to_determine_user_uid(
                settings.pacemaker_uname
            )
        )
    try:
        key_gid = grp.getgrnam(settings.pacemaker_gname).gr_gid
    except KeyError:
        raise LibraryError(
            common_reports.unable_to_determine_group_gid(
                settings.pacemaker_gname
            )
        )

    for operation, action in (
        ("chown", lambda path: os.chown(path, key_uid, key_gid)),
        ("chmod", lambda path: os.chmod(path, 0o600)),
    ):
        try:
            action(file_path)
        except EnvironmentError as e:
            raise report_keyfile_io_error(file_path, operation, e)
def booth_lack_of_sites(site_list):
    """
    Report that fewer than two booth sites were specified; a booth
    configuration needs at least two sites to be meaningful.

    list site_list -- the sites that were actually entered
    """
    sites_string = ", ".join(site_list) if site_list else "missing"
    return ReportItem.error(
        report_codes.BOOTH_LACK_OF_SITES,
        "lack of sites for booth configuration (need 2 at least):"
        " sites {sites_string}",
        info={
            "sites": site_list,
            "sites_string": sites_string,
        }
    )
def booth_address_duplication(duplicate_addresses):
    """
    The address of each booth peer must be unique, but some addresses were
    entered multiple times.

    set duplicate_addresses -- addresses entered more than once
    """
    return ReportItem.error(
        report_codes.BOOTH_ADDRESS_DUPLICATION,
        "duplicate address for booth configuration: {addresses_string}",
        info={
            "addresses": duplicate_addresses,
            "addresses_string": ", ".join(duplicate_addresses),
        }
    )

def booth_config_unexpected_lines(line_list):
    """
    A booth config has a defined structure; lines not fitting that
    structure were found.

    list line_list -- the offending lines
    """
    return ReportItem.error(
        report_codes.BOOTH_CONFIG_UNEXPECTED_LINES,
        # fixed typo in the user-visible message: "appeard" -> "appeared"
        "unexpected line appeared in config: \n{lines_string}",
        info={
            "line_list": line_list,
            "lines_string": "\n".join(line_list)
        }
    )

def booth_invalid_name(name, reason):
    """
    A booth instance name violates the naming rules (for example it
    contains an illegal character such as '/').

    string name -- the entered booth instance name
    string reason -- why the name is invalid
    """
    return ReportItem.error(
        report_codes.BOOTH_INVALID_NAME,
        "booth name '{name}' is not valid ({reason})",
        info={
            "name": name,
            "reason": reason,
        }
    )
def booth_ticket_duplicate(ticket_name):
    """
    Each booth ticket name must be unique, but a duplicate name was
    entered.

    string ticket_name -- the entered booth ticket name
    """
    return ReportItem.error(
        report_codes.BOOTH_TICKET_DUPLICATE,
        "booth ticket name '{ticket_name}' already exists in configuration",
        info={"ticket_name": ticket_name}
    )

def booth_ticket_does_not_exist(ticket_name):
    """
    An operation (e.g. ticket remove) expected the ticket to be present in
    the booth configuration, but it was not found there.

    string ticket_name -- the entered booth ticket name
    """
    return ReportItem.error(
        report_codes.BOOTH_TICKET_DOES_NOT_EXIST,
        "booth ticket name '{ticket_name}' does not exist",
        info={"ticket_name": ticket_name}
    )

def booth_already_in_cib(name):
    """
    A booth instance should appear in the cib at most once; an existing
    instance was detected while creating a new one.

    string name -- booth instance name
    """
    return ReportItem.error(
        report_codes.BOOTH_ALREADY_IN_CIB,
        "booth instance '{name}' is already created as cluster resource",
        info={"name": name}
    )

def booth_not_exists_in_cib(name):
    """
    Removal of a booth instance from the cib was requested, but no such
    instance was found there.

    string name -- booth instance name
    """
    return ReportItem.error(
        report_codes.BOOTH_NOT_EXISTS_IN_CIB,
        "booth instance '{name}' not found in cib",
        info={"name": name}
    )
def booth_multiple_times_in_cib(
    name, severity=ReportItemSeverity.ERROR
):
    """
    A booth instance should appear in the cib at most once, but multiple
    occurrences were detected (e.g. while removing the instance from the
    cib).

    string name -- booth instance name
    severity -- ERROR or WARNING depending on context; only the ERROR
        variant is forceable (by FORCE_BOOTH_REMOVE_FROM_CIB)
    """
    forceable = None
    if severity == ReportItemSeverity.ERROR:
        forceable = report_codes.FORCE_BOOTH_REMOVE_FROM_CIB
    return ReportItem(
        report_codes.BOOTH_MULTIPLE_TIMES_IN_CIB,
        severity,
        "found more than one booth instance '{name}' in cib",
        info={"name": name},
        forceable=forceable
    )


def booth_distributing_config(name=None):
    """
    Report that the booth config is being sent to all cluster nodes.

    name -- booth instance name; mentioned in the message only when it is
        not the default "booth"
    """
    name_part = " ({name})" if name and name != "booth" else ""
    return ReportItem.info(
        report_codes.BOOTH_DISTRIBUTING_CONFIG,
        "Sending booth config{0} to all cluster nodes.".format(name_part),
        info={"name": name}
    )
def booth_config_unable_to_read(
    name, severity=ReportItemSeverity.ERROR, forceable=None
):
    """
    Unable to read from the specified booth instance config.

    name -- name of booth instance
    severity -- severity of report item
    forceable -- is this report item forceable? by what category?
    """
    if name and name != "booth":
        msg = "Unable to read booth config ({name})."
    else:
        msg = "Unable to read booth config."
    return ReportItem(
        report_codes.BOOTH_CONFIG_READ_ERROR,
        severity,
        msg,
        info={"name": name},
        forceable=forceable
    )


def booth_config_not_saved(node, reason, name=None):
    """
    Saving booth config failed on the specified node.

    node -- node name
    reason -- reason of failure
    name -- name of booth instance
    """
    if name and name != "booth":
        msg = "Unable to save booth config ({name}) on node '{node}': {reason}"
    else:
        msg = "Unable to save booth config on node '{node}': {reason}"
    return ReportItem.error(
        report_codes.BOOTH_CONFIG_WRITE_ERROR,
        msg,
        info={
            "node": node,
            "name": name,
            "reason": reason
        }
    )


def booth_sending_local_configs_to_node(node):
    """
    Report that all local booth configs are being sent to a node.

    node -- node name
    """
    return ReportItem.info(
        report_codes.BOOTH_CONFIGS_SAVING_ON_NODE,
        "{node}: Saving booth config(s)...",
        info={"node": node}
    )


def booth_fetching_config_from_node(node, config=None):
    """
    Report that a booth config is being fetched from a node.

    node -- node name
    config -- booth instance name; mentioned in the message only when it
        is not the default "booth"

    Bug fix: the original condition was inverted
    ("if config or config == 'booth'"), which produced the generic message
    for explicitly named configs and a message referencing an unset
    {config} for the default one.
    """
    if not config or config == "booth":
        msg = "Fetching booth config from node '{node}'..."
    else:
        msg = "Fetching booth config '{config}' from node '{node}'..."
    return ReportItem.info(
        report_codes.BOOTH_FETCHING_CONFIG_FROM_NODE,
        msg,
        info={
            "node": node,
            "config": config,
        }
    )


def booth_unsupported_file_location(file):
    """
    Warn that a file outside the supported booth location is skipped.

    file -- path of the skipped file (parameter name kept for backward
        compatibility although it shadows the builtin)
    """
    return ReportItem.warning(
        report_codes.BOOTH_UNSUPORTED_FILE_LOCATION,
        "skipping file {file}: unsupported file location",
        info={"file": file}
    )


def booth_daemon_status_error(reason):
    """Report failure to get the booth daemon status."""
    return ReportItem.error(
        report_codes.BOOTH_DAEMON_STATUS_ERROR,
        "unable to get status of booth daemon: {reason}",
        info={"reason": reason}
    )


def booth_tickets_status_error(reason=None):
    """Report failure to get the status of booth tickets."""
    return ReportItem.error(
        report_codes.BOOTH_TICKET_STATUS_ERROR,
        "unable to get status of booth tickets",
        info={"reason": reason}
    )


def booth_peers_status_error(reason=None):
    """Report failure to get the status of booth peers."""
    return ReportItem.error(
        report_codes.BOOTH_PEERS_STATUS_ERROR,
        "unable to get status of booth peers",
        info={"reason": reason}
    )


def booth_cannot_determine_local_site_ip():
    """
    Some booth operations run against a specific site and need its ip.
    When the site was omitted, pcs tried to determine the local site ip
    and failed; the user must specify the site parameter.
    """
    return ReportItem.error(
        report_codes.BOOTH_CANNOT_DETERMINE_LOCAL_SITE_IP,
        "cannot determine local site ip, please specify site parameter",
        info={}
    )
def booth_skipping_config(config_file, reason):
    """
    Warn that a booth config file is being skipped.

    config_file -- file name of the skipped config
    reason -- why it is skipped
    """
    return ReportItem.warning(
        report_codes.BOOTH_SKIPPING_CONFIG,
        "Skipping config file '{config_file}': {reason}",
        info={
            "config_file": config_file,
            "reason": reason,
        }
    )

def booth_cannot_identify_keyfile(severity=ReportItemSeverity.ERROR):
    """
    Report that no authfile could be identified in the booth
    configuration; only the ERROR variant is forceable (by
    FORCE_BOOTH_DESTROY).
    """
    forceable = None
    if severity == ReportItemSeverity.ERROR:
        forceable = report_codes.FORCE_BOOTH_DESTROY
    return ReportItem(
        report_codes.BOOTH_CANNOT_IDENTIFY_KEYFILE,
        severity,
        "cannot identify authfile in booth configuration",
        info={},
        forceable=forceable
    )
def is_ip_resource(resource_element):
    """Return True when the primitive element is an IPaddr2 resource."""
    return resource_element.attrib["type"] == "IPaddr2"


def find_grouped_ip_element_to_remove(booth_element):
    """
    Return the IPaddr2 primitive grouped together with the booth resource,
    or None when the booth resource is not inside a two-member group that
    contains one.
    """
    parent = booth_element.getparent()
    if parent.tag != "group":
        return None
    if len(parent) != 2:
        # something else lives in the group as well: do not remove its ip
        return None
    return next(
        (element for element in parent if is_ip_resource(element)),
        None,
    )


def get_remover(resource_remove):
    """Build a remover closure around the provided resource_remove callable."""
    def remove_from_cluster(
        resources_section, booth_config_file_path, remove_multiple=False
    ):
        """
        Remove booth resources bound to booth_config_file_path (and their
        grouped ip resources); return the number of removed booth elements.

        Raises BoothNotFoundInCib when no matching resource exists and
        BoothMultipleOccurenceFoundInCib when several exist and
        remove_multiple is not set.
        """
        booth_elements = find_for_config(
            resources_section,
            booth_config_file_path,
        )
        if not booth_elements:
            raise BoothNotFoundInCib()
        if len(booth_elements) > 1 and not remove_multiple:
            raise BoothMultipleOccurenceFoundInCib()

        removed_count = 0
        for booth_element in booth_elements:
            grouped_ip = find_grouped_ip_element_to_remove(booth_element)
            if grouped_ip is not None:
                resource_remove(grouped_ip.attrib["id"])
            resource_remove(booth_element.attrib["id"])
            removed_count += 1
        return removed_count

    return remove_from_cluster


def find_for_config(resources_section, booth_config_file_path):
    """Return booth-site primitives configured with the given config path."""
    return resources_section.xpath(("""
        .//primitive[
            @type="booth-site"
            and
            instance_attributes[nvpair[@name="config" and @value="{0}"]]
        ]
    """).format(booth_config_file_path))
def _run_booth_command(runner, command, name):
    """Run 'booth <command>' (with '-c <name>' if given); return (output, rc)."""
    cmd = [settings.booth_binary, command]
    if name:
        cmd += ["-c", name]
    return runner.run(cmd)


def get_daemon_status(runner, name=None):
    """
    Return the output of 'booth status' for booth instance 'name'.

    runner -- command runner providing run(cmd) -> (output, return_value)
    name -- optional booth instance name

    Raises LibraryError when the command fails.
    """
    output, return_value = _run_booth_command(runner, "status", name)
    # 7 means that there is no booth instance running; that is still a
    # valid (reportable) status, not an error
    if return_value not in [0, 7]:
        raise LibraryError(reports.booth_daemon_status_error(output))
    return output


def get_tickets_status(runner, name=None):
    """
    Return the output of 'booth list' for booth instance 'name'.

    Raises LibraryError when the command fails.
    """
    output, return_value = _run_booth_command(runner, "list", name)
    if return_value != 0:
        raise LibraryError(reports.booth_tickets_status_error(output))
    return output


def get_peers_status(runner, name=None):
    """
    Return the output of 'booth peers' for booth instance 'name'.

    Raises LibraryError when the command fails.
    """
    output, return_value = _run_booth_command(runner, "peers", name)
    if return_value != 0:
        raise LibraryError(reports.booth_peers_status_error(output))
    return output
def _set_config_on_node(
    communicator, reporter, node, name, config_data, authfile=None,
    authfile_data=None
):
    """
    Set booth config for instance 'name' on specified node.

    communicator -- NodeCommunicator
    reporter -- report processor
    node -- NodeAddresses
    name -- name of booth instance
    config_data -- booth config as string
    authfile -- path to authfile
    authfile_data -- authfile content as bytes
    """
    payload = {
        "config": {
            "name": "{0}.conf".format(name),
            "data": config_data,
        },
    }
    # the authfile entry is sent only when both its path and content are known
    if authfile is not None and authfile_data is not None:
        payload["authfile"] = {
            "name": os.path.basename(authfile),
            "data": base64.b64encode(authfile_data).decode("utf-8"),
        }
    communicator.call_node(
        node,
        "remote/booth_set_config",
        NodeCommunicator.format_data_dict([("data_json", json.dumps(payload))])
    )
    reporter.process(reports.booth_config_saved(node.label, [name]))


def send_config_to_all_nodes(
    communicator, reporter, node_list, name, config_data, authfile=None,
    authfile_data=None, skip_offline=False
):
    """
    Send config_data of specified booth instance from local node to all nodes
    in node_list.

    communicator -- NodeCommunicator
    reporter -- report processor
    node_list -- NodeAddressesList
    name -- name of booth instance
    config_data -- config_data content as string
    authfile -- path to authfile
    authfile_data -- content of authfile as bytes
    skip_offline -- if True offline nodes will be skipped
    """
    reporter.process(reports.booth_distributing_config(name))
    task_list = []
    for node in node_list:
        task_list.append((
            [
                communicator, reporter, node, name, config_data,
                authfile, authfile_data,
            ],
            {},
        ))
    parallel_nodes_communication_helper(
        _set_config_on_node,
        task_list,
        reporter,
        skip_offline,
    )
def pull_config_from_node(communicator, node, name):
    """
    Get config of specified booth instance and its authfile if there is one
    from 'node'. It returns dictionary with format:
    {
        "config": {
            "name": <file name of config>,
            "data": <content of config file>
        },
        "authfile": {
            "name": <file name of authfile, None if it does not exist>,
            "data": <base64 encoded content of authfile>
        }
    }

    communicator -- NodeCommunicator
    node -- NodeAddresses
    name -- name of booth instance

    Raises LibraryError when the node communication fails or when the
    response is not valid json.
    """
    try:
        return json.loads(communicator.call_node(
            node,
            "remote/booth_get_config",
            NodeCommunicator.format_data_dict([("name", name)])
        ))
    except NodeCommunicationException as e:
        raise LibraryError(node_communicator_exception_to_report_item(e))
    except ValueError:
        # json.loads raises ValueError on malformed payload
        raise LibraryError(lib_reports.invalid_response_format(node.label))
config_exchange.to_exchange_format([ + config_structure.ConfigItem("site", "1.1.1.1"), + config_structure.ConfigItem("site", "2.2.2.2"), + config_structure.ConfigItem("arbitrator", "3.3.3.3"), + config_structure.ConfigItem("authfile", "/path/to/auth.file"), + config_structure.ConfigItem("ticket", "TA"), + config_structure.ConfigItem("ticket", "TB", [ + config_structure.ConfigItem("timeout", "10") + ]), + ]) + ) + + def test_convert_parsed_config_to_exchange_format_without_authfile(self): + self.assertEqual( + { + "sites": ["1.1.1.1", "2.2.2.2"], + "arbitrators": ["3.3.3.3"], + "tickets": ["TA", "TB"], + }, + config_exchange.to_exchange_format([ + config_structure.ConfigItem("site", "1.1.1.1"), + config_structure.ConfigItem("site", "2.2.2.2"), + config_structure.ConfigItem("arbitrator", "3.3.3.3"), + config_structure.ConfigItem("ticket", "TA"), + config_structure.ConfigItem("ticket", "TB", [ + config_structure.ConfigItem("timeout", "10") + ]), + ]) + ) diff --git a/pcs/lib/booth/test/test_config_files.py b/pcs/lib/booth/test/test_config_files.py new file mode 100644 index 0000000..2d4c3ea --- /dev/null +++ b/pcs/lib/booth/test/test_config_files.py @@ -0,0 +1,272 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from os.path import join +from unittest import TestCase + +from pcs.common import report_codes, env_file_role_codes as file_roles +from pcs.lib.booth import config_files +from pcs.lib.errors import ReportItemSeverity as severities +from pcs.settings import booth_config_dir as BOOTH_CONFIG_DIR +from pcs.test.tools.assertions import assert_raise_library_error, assert_report_item_list_equal +from pcs.test.tools.custom_mock import MockLibraryReportProcessor +from pcs.test.tools.pcs_mock import mock + +def patch_config_files(target, *args, **kwargs): + return mock.patch( + "pcs.lib.booth.config_files.{0}".format(target), *args, **kwargs + ) + +@mock.patch("os.listdir") +@mock.patch("os.path.isfile") +class 
GetAllConfigsFileNamesTest(TestCase): + def test_success(self, mock_is_file, mock_listdir): + def mock_is_file_fn(file_name): + if file_name in ["dir.cong", "dir"]: + return False + elif file_name in [ + "name1", "name2.conf", "name.conf.conf", ".conf", "name3.conf" + ]: + return True + else: + raise AssertionError("unexpected input") + + mock_is_file.side_effect = mock_is_file_fn + mock_listdir.return_value = [ + "name1", "name2.conf", "name.conf.conf", ".conf", "name3.conf", + "dir.cong", "dir" + ] + self.assertEqual( + ["name2.conf", "name.conf.conf", "name3.conf"], + config_files.get_all_configs_file_names() + ) + mock_listdir.assert_called_once_with(BOOTH_CONFIG_DIR) + + +class ReadConfigTest(TestCase): + def test_success(self): + self.maxDiff = None + mock_open = mock.mock_open(read_data="config content") + with patch_config_files("open", mock_open, create=True): + self.assertEqual( + "config content", + config_files._read_config("my-file.conf") + ) + + self.assertEqual( + [ + mock.call(join(BOOTH_CONFIG_DIR, "my-file.conf"), "r"), + mock.call().__enter__(), + mock.call().read(), + mock.call().__exit__(None, None, None) + ], + mock_open.mock_calls + ) + + +@patch_config_files("_read_config") +@patch_config_files("get_all_configs_file_names") +class ReadConfigsTest(TestCase): + def setUp(self): + self.mock_reporter = MockLibraryReportProcessor() + + def test_success(self, mock_get_configs, mock_read): + def _mock_read_cfg(file): + if file == "name1.conf": + return "config1" + elif file == "name2.conf": + return "config2" + elif file == "name3.conf": + return "config3" + else: + raise AssertionError("unexpected input: {0}".format(file)) + mock_get_configs.return_value = [ + "name1.conf", "name2.conf", "name3.conf" + ] + mock_read.side_effect = _mock_read_cfg + + self.assertEqual( + { + "name1.conf": "config1", + "name2.conf": "config2", + "name3.conf": "config3" + }, + config_files.read_configs(self.mock_reporter) + ) + + 
mock_get_configs.assert_called_once_with() + self.assertEqual(3, mock_read.call_count) + mock_read.assert_has_calls([ + mock.call("name1.conf"), + mock.call("name2.conf"), + mock.call("name3.conf") + ]) + self.assertEqual(0, len(self.mock_reporter.report_item_list)) + + def test_skip_failed(self, mock_get_configs, mock_read): + def _mock_read_cfg(file): + if file in ["name1.conf", "name3.conf"]: + raise EnvironmentError() + elif file == "name2.conf": + return "config2" + else: + raise AssertionError("unexpected input: {0}".format(file)) + + mock_get_configs.return_value = [ + "name1.conf", "name2.conf", "name3.conf" + ] + mock_read.side_effect = _mock_read_cfg + + self.assertEqual( + {"name2.conf": "config2"}, + config_files.read_configs(self.mock_reporter, True) + ) + mock_get_configs.assert_called_once_with() + self.assertEqual(3, mock_read.call_count) + mock_read.assert_has_calls([ + mock.call("name1.conf"), + mock.call("name2.conf"), + mock.call("name3.conf") + ]) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + severities.WARNING, + report_codes.BOOTH_CONFIG_READ_ERROR, + {"name": "name1.conf"} + ), + ( + severities.WARNING, + report_codes.BOOTH_CONFIG_READ_ERROR, + {"name": "name3.conf"} + ) + ] + ) + + def test_do_not_skip_failed(self, mock_get_configs, mock_read): + def _mock_read_cfg(file): + if file in ["name1.conf", "name3.conf"]: + raise EnvironmentError() + elif file == "name2.conf": + return "config2" + else: + raise AssertionError("unexpected input: {0}".format(file)) + + mock_get_configs.return_value = [ + "name1.conf", "name2.conf", "name3.conf" + ] + mock_read.side_effect = _mock_read_cfg + + assert_raise_library_error( + lambda: config_files.read_configs(self.mock_reporter), + ( + severities.ERROR, + report_codes.BOOTH_CONFIG_READ_ERROR, + {"name": "name1.conf"}, + report_codes.SKIP_UNREADABLE_CONFIG + ), + ( + severities.ERROR, + report_codes.BOOTH_CONFIG_READ_ERROR, + {"name": "name3.conf"}, + 
report_codes.SKIP_UNREADABLE_CONFIG + ) + ) + mock_get_configs.assert_called_once_with() + self.assertEqual(3, mock_read.call_count) + mock_read.assert_has_calls([ + mock.call("name1.conf"), + mock.call("name2.conf"), + mock.call("name3.conf") + ]) + self.assertEqual(2, len(self.mock_reporter.report_item_list)) + + +class ReadAuthfileTest(TestCase): + def setUp(self): + self.mock_reporter = MockLibraryReportProcessor() + self.maxDiff = None + + def test_success(self): + path = join(BOOTH_CONFIG_DIR, "file.key") + mock_open = mock.mock_open(read_data="key") + + with patch_config_files("open", mock_open, create=True): + self.assertEqual( + "key", config_files.read_authfile(self.mock_reporter, path) + ) + + self.assertEqual( + [ + mock.call(path, "rb"), + mock.call().__enter__(), + mock.call().read(), + mock.call().__exit__(None, None, None) + ], + mock_open.mock_calls + ) + self.assertEqual(0, len(self.mock_reporter.report_item_list)) + + def test_path_none(self): + self.assertTrue( + config_files.read_authfile(self.mock_reporter, None) is None + ) + self.assertEqual(0, len(self.mock_reporter.report_item_list)) + + def test_invalid_path(self): + path = "/not/etc/booth/booth.key" + self.assertTrue( + config_files.read_authfile(self.mock_reporter, path) is None + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + severities.WARNING, + report_codes.BOOTH_UNSUPORTED_FILE_LOCATION, + {"file": path} + )] + ) + + def test_not_abs_path(self): + path = "/etc/booth/../booth.key" + self.assertTrue( + config_files.read_authfile(self.mock_reporter, path) is None + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + severities.WARNING, + report_codes.BOOTH_UNSUPORTED_FILE_LOCATION, + {"file": path} + )] + ) + + @patch_config_files("format_environment_error", return_value="reason") + def test_read_failure(self, _): + path = join(BOOTH_CONFIG_DIR, "file.key") + mock_open = mock.mock_open() + mock_open().read.side_effect = 
EnvironmentError() + + with patch_config_files("open", mock_open, create=True): + return_value = config_files.read_authfile(self.mock_reporter, path) + + self.assertTrue(return_value is None) + + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + severities.WARNING, + report_codes.FILE_IO_ERROR, + { + "file_role": file_roles.BOOTH_KEY, + "file_path": path, + "reason": "reason", + "operation": "read", + } + )] + ) diff --git a/pcs/lib/booth/test/test_config_parser.py b/pcs/lib/booth/test/test_config_parser.py new file mode 100644 index 0000000..684fc79 --- /dev/null +++ b/pcs/lib/booth/test/test_config_parser.py @@ -0,0 +1,169 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from pcs.common import report_codes +from pcs.lib.booth import config_parser +from pcs.lib.booth.config_structure import ConfigItem +from pcs.lib.errors import ReportItemSeverity as severities +from pcs.test.tools.assertions import assert_raise_library_error +from pcs.test.tools.pcs_unittest import TestCase + + +class BuildTest(TestCase): + def test_build_file_content_from_parsed_structure(self): + self.assertEqual( + "\n".join([ + "authfile = /path/to/auth.file", + "site = 1.1.1.1", + "site = 2.2.2.2", + "arbitrator = 3.3.3.3", + 'ticket = "TA"', + 'ticket = "TB"', + " timeout = 10", + ]), + config_parser.build([ + ConfigItem("authfile", "/path/to/auth.file"), + ConfigItem("site", "1.1.1.1"), + ConfigItem("site", "2.2.2.2"), + ConfigItem("arbitrator", "3.3.3.3"), + ConfigItem("ticket", "TA"), + ConfigItem("ticket", "TB", [ + ConfigItem("timeout", "10") + ]), + ]) + ) + + +class OrganizeLinesTest(TestCase): + def test_move_non_ticket_config_keys_above_tickets(self): + self.assertEqual( + [ + ConfigItem("site", "1.1.1.1"), + ConfigItem('site', '2.2.2.2'), + ConfigItem('arbitrator', '3.3.3.3'), + ConfigItem("ticket", "TA"), + ], + config_parser.organize_lines([ + ("site", "1.1.1.1"), + ("ticket", "TA"), + ('site', 
'2.2.2.2'), + ('arbitrator', '3.3.3.3'), + ]) + ) + + def test_use_ticket_key_as_ticket_detail(self): + self.maxDiff = None + self.assertEqual( + [ + ConfigItem("site", "1.1.1.1"), + ConfigItem('expire', '300'), + ConfigItem('site', '2.2.2.2'), + ConfigItem('arbitrator', '3.3.3.3'), + ConfigItem("ticket", "TA", [ + ConfigItem("timeout", "10"), + ConfigItem('--nonexistent', 'value'), + ConfigItem("expire", "300"), + ]), + ConfigItem("ticket", "TB", [ + ConfigItem("timeout", "20"), + ConfigItem("renewal-freq", "40"), + ]), + ], + config_parser.organize_lines([ + ("site", "1.1.1.1"), + ("expire", "300"), # out of ticket content is kept global + ("ticket", "TA"), + ("site", "2.2.2.2"), # move to global + ("timeout", "10"), + ("--nonexistent", "value"), # no global is kept under ticket + ("expire", "300"), + ("ticket", "TB"), + ('arbitrator', '3.3.3.3'), + ("timeout", "20"), + ("renewal-freq", "40"), + ]) + ) + + +class ParseRawLinesTest(TestCase): + def test_parse_simple_correct_lines(self): + self.assertEqual( + [ + ("site", "1.1.1.1"), + ('site', '2.2.2.2'), + ('arbitrator', '3.3.3.3'), + ('syntactically_correct', 'nonsense'), + ('line-with', 'hash#literal'), + ], + config_parser.parse_to_raw_lines("\n".join([ + "site = 1.1.1.1", + " site = 2.2.2.2 ", + "arbitrator=3.3.3.3", + "syntactically_correct = nonsense", + "line-with = hash#literal", + ])) + ) + + def test_parse_lines_with_whole_line_comment(self): + self.assertEqual( + [("site", "1.1.1.1")], + config_parser.parse_to_raw_lines("\n".join([ + " # some comment", + "site = 1.1.1.1", + ])) + ) + + def test_skip_empty_lines(self): + self.assertEqual( + [("site", "1.1.1.1")], + config_parser.parse_to_raw_lines("\n".join([ + " ", + "site = 1.1.1.1", + ])) + ) + + def test_raises_when_unexpected_lines_appear(self): + invalid_line_list = [ + "first invalid line", + "second = 'invalid line' something else #comment", + "third = 'invalid line 'something#'#", + ] + line_list = ["site = 1.1.1.1"] + invalid_line_list + with 
self.assertRaises(config_parser.InvalidLines) as context_manager: + config_parser.parse_to_raw_lines("\n".join(line_list)) + self.assertEqual(context_manager.exception.args[0], invalid_line_list) + + def test_parse_lines_finishing_with_comment(self): + self.assertEqual( + [("site", "1.1.1.1")], + config_parser.parse_to_raw_lines("\n".join([ + "site = '1.1.1.1' #comment", + ])) + ) + +class ParseTest(TestCase): + def test_raises_when_invalid_lines_appear(self): + invalid_line_list = [ + "first invalid line", + "second = 'invalid line' something else #comment" + ] + line_list = ["site = 1.1.1.1"] + invalid_line_list + assert_raise_library_error( + lambda: + config_parser.parse("\n".join(line_list)) + , + ( + severities.ERROR, + report_codes.BOOTH_CONFIG_UNEXPECTED_LINES, + { + "line_list": invalid_line_list, + }, + ), + ) + + def test_do_not_raises_when_no_invalid_liens_there(self): + config_parser.parse("site = 1.1.1.1") diff --git a/pcs/lib/booth/test/test_config_structure.py b/pcs/lib/booth/test/test_config_structure.py new file mode 100644 index 0000000..27faca5 --- /dev/null +++ b/pcs/lib/booth/test/test_config_structure.py @@ -0,0 +1,224 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from pcs.common import report_codes +from pcs.lib.booth import config_structure +from pcs.lib.errors import ReportItemSeverity as severities +from pcs.test.tools.assertions import assert_raise_library_error +from pcs.test.tools.pcs_mock import mock + + +class ValidateTicketExistsTest(TestCase): + def test_raises_on_duplicate_ticket(self): + assert_raise_library_error( + lambda: config_structure.validate_ticket_exists( + [config_structure.ConfigItem("ticket", "B")], "A" + ), + ( + severities.ERROR, + report_codes.BOOTH_TICKET_DOES_NOT_EXIST, + { + "ticket_name": "A", + }, + ), + ) + +class ValidateTicketUniqueTest(TestCase): + def test_raises_on_duplicate_ticket(self): + 
assert_raise_library_error( + lambda: config_structure.validate_ticket_unique( + [config_structure.ConfigItem("ticket", "A")], "A" + ), + ( + severities.ERROR, + report_codes.BOOTH_TICKET_DUPLICATE, + { + "ticket_name": "A", + }, + ), + ) + + def test_do_not_raises_when_no_duplicated_ticket(self): + config_structure.validate_ticket_unique([], "A") + +class TicketExistsTest(TestCase): + def test_returns_true_if_ticket_in_structure(self): + self.assertTrue(config_structure.ticket_exists( + [config_structure.ConfigItem("ticket", "A")], "A" + )) + + def test_returns_false_if_ticket_in_structure(self): + self.assertFalse(config_structure.ticket_exists( + [config_structure.ConfigItem("ticket", "A")], "B" + )) + +class ValidateTicketNameTest(TestCase): + def test_accept_valid_ticket_name(self): + config_structure.validate_ticket_name("abc") + + def test_refuse_bad_ticket_name(self): + assert_raise_library_error( + lambda: config_structure.validate_ticket_name("@ticket"), + ( + severities.ERROR, + report_codes.BOOTH_TICKET_NAME_INVALID, + { + "ticket_name": "@ticket", + }, + ), + ) + +class ValidatePeersTest(TestCase): + def test_do_no_raises_on_correct_args(self): + config_structure.validate_peers( + site_list=["1.1.1.1", "2.2.2.2"], + arbitrator_list=["3.3.3.3"] + ) + + def test_refuse_less_than_2_sites(self): + assert_raise_library_error( + lambda: config_structure.validate_peers( + site_list=["1.1.1.1"], + arbitrator_list=["3.3.3.3", "4.4.4.4"] + ), + ( + severities.ERROR, + report_codes.BOOTH_LACK_OF_SITES, + { + "sites": ["1.1.1.1"], + } + ), + ) + + def test_refuse_even_number_peers(self): + assert_raise_library_error( + lambda: config_structure.validate_peers( + site_list=["1.1.1.1", "2.2.2.2"], + arbitrator_list=[] + ), + ( + severities.ERROR, + report_codes.BOOTH_EVEN_PEERS_NUM, + { + "number": 2, + } + ), + ) + + def test_refuse_address_duplication(self): + assert_raise_library_error( + lambda: config_structure.validate_peers( + site_list=["1.1.1.1", "1.1.1.1", 
"1.1.1.1"], + arbitrator_list=["3.3.3.3", "4.4.4.4"] + ), + ( + severities.ERROR, + report_codes.BOOTH_ADDRESS_DUPLICATION, + { + "addresses": set(["1.1.1.1"]), + } + ), + ) + + def test_refuse_problem_combination(self): + assert_raise_library_error( + lambda: config_structure.validate_peers( + site_list=["1.1.1.1"], + arbitrator_list=["1.1.1.1"] + ), + ( + severities.ERROR, + report_codes.BOOTH_LACK_OF_SITES, + { + "sites": ["1.1.1.1"], + } + ), + ( + severities.ERROR, + report_codes.BOOTH_EVEN_PEERS_NUM, + { + "number": 2, + } + ), + ( + severities.ERROR, + report_codes.BOOTH_ADDRESS_DUPLICATION, + { + "addresses": set(["1.1.1.1"]), + } + ), + ) + +class RemoveTicketTest(TestCase): + @mock.patch("pcs.lib.booth.config_structure.validate_ticket_exists") + def test_successfully_remove_ticket(self, mock_validate_ticket_exists): + configuration = [ + config_structure.ConfigItem("ticket", "some-ticket"), + config_structure.ConfigItem("ticket", "deprecated-ticket"), + ] + self.assertEqual( + config_structure.remove_ticket(configuration, "deprecated-ticket"), + [ + config_structure.ConfigItem("ticket", "some-ticket"), + ] + ) + mock_validate_ticket_exists.assert_called_once_with( + configuration, + "deprecated-ticket" + ) + +class AddTicketTest(TestCase): + @mock.patch("pcs.lib.booth.config_structure.validate_ticket_unique") + @mock.patch("pcs.lib.booth.config_structure.validate_ticket_name") + def test_successfully_add_ticket( + self, mock_validate_name, mock_validate_uniq + ): + configuration = [ + config_structure.ConfigItem("ticket", "some-ticket"), + ] + self.assertEqual( + config_structure.add_ticket(configuration, "new-ticket"), + [ + config_structure.ConfigItem("ticket", "some-ticket"), + config_structure.ConfigItem("ticket", "new-ticket"), + ], + ) + + mock_validate_name.assert_called_once_with("new-ticket") + mock_validate_uniq.assert_called_once_with(configuration, "new-ticket") + +class SetAuthfileTest(TestCase): + def test_add_authfile(self): + 
self.assertEqual( + [ + config_structure.ConfigItem("authfile", "/path/to/auth.file"), + config_structure.ConfigItem("site", "1.1.1.1"), + ], + config_structure.set_authfile( + [ + config_structure.ConfigItem("site", "1.1.1.1"), + ], + "/path/to/auth.file" + ) + ) + def test_reset_authfile(self): + self.assertEqual( + [ + config_structure.ConfigItem("authfile", "/path/to/auth.file"), + config_structure.ConfigItem("site", "1.1.1.1"), + ], + config_structure.set_authfile( + [ + config_structure.ConfigItem("site", "1.1.1.1"), + config_structure.ConfigItem("authfile", "/old/path/to/auth1.file"), + config_structure.ConfigItem("authfile", "/old/path/to/auth2.file"), + ], + "/path/to/auth.file" + ) + ) diff --git a/pcs/lib/booth/test/test_env.py b/pcs/lib/booth/test/test_env.py new file mode 100644 index 0000000..77e0944 --- /dev/null +++ b/pcs/lib/booth/test/test_env.py @@ -0,0 +1,228 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import grp +import os +import pwd +from unittest import TestCase + +from pcs import settings +from pcs.common import report_codes +from pcs.lib.booth import env +from pcs.lib.errors import ReportItemSeverity as severities +from pcs.test.tools.assertions import assert_raise_library_error +from pcs.test.tools.misc import get_test_resource as rc +from pcs.test.tools.pcs_mock import mock + +def patch_env(target, *args, **kwargs): + return mock.patch( + "pcs.lib.booth.env.{0}".format(target), *args, **kwargs + ) + +class GetConfigFileNameTest(TestCase): + @patch_env("os.path.exists") + def test_refuse_when_name_starts_with_slash(self, mock_path_exists): + mock_path_exists.return_value = True + assert_raise_library_error( + lambda: env.get_config_file_name("/booth"), + ( + severities.ERROR, + report_codes.BOOTH_INVALID_NAME, + { + "name": "/booth", + "reason": "contains illegal character '/'", + } + ), + ) + +class BoothEnvTest(TestCase): + @patch_env("RealFile") + def 
test_get_content_from_file(self, mock_real_file): + mock_real_file.return_value = mock.MagicMock( + read=mock.MagicMock(return_value="content") + ) + self.assertEqual( + "content", + env.BoothEnv("report processor", env_data={"name": "booth"}) + .get_config_content() + ) + + @patch_env("set_keyfile_access") + @patch_env("RealFile") + def test_create_config(self, mock_real_file, mock_set_keyfile_access): + mock_file = mock.MagicMock( + assert_no_conflict_with_existing=mock.MagicMock(), + write=mock.MagicMock(), + ) + mock_real_file.return_value = mock_file + + + env.BoothEnv( + "report processor", + env_data={"name": "booth"} + ).create_config("a", can_overwrite_existing=True) + + self.assertEqual(mock_file.assert_no_conflict_with_existing.mock_calls,[ + mock.call('report processor', True), + ]) + self.assertEqual(mock_file.write.mock_calls, [mock.call('a')]) + + @patch_env("RealFile") + def test_push_config(self, mock_real_file): + mock_file = mock.MagicMock( + assert_no_conflict_with_existing=mock.MagicMock(), + write=mock.MagicMock(), + ) + mock_real_file.return_value = mock_file + env.BoothEnv( + "report processor", + env_data={"name": "booth"} + ).push_config("a") + mock_file.write.assert_called_once_with("a") + + + + def test_export_config_file_when_was_present_in_env_data(self): + self.assertEqual( + env.BoothEnv( + "report processor", + { + "name": "booth-name", + "config_file": { + "content": "a\nb", + }, + "key_file": { + "content": "secure", + }, + "key_path": "/path/to/file.key", + } + ).export(), + { + "config_file": { + "content": "a\nb", + "can_overwrite_existing_file": False, + "no_existing_file_expected": False, + "is_binary": False, + }, + "key_file": { + "content": "secure", + "can_overwrite_existing_file": False, + "no_existing_file_expected": False, + "is_binary": False, + }, + } + ) + + def test_do_not_export_config_file_when_no_provided(self): + self.assertEqual( + env.BoothEnv("report processor", {"name": "booth"}).export(), + {} + ) + +class 
SetKeyfileAccessTest(TestCase): + def test_set_desired_file_access(self): + #setup + file_path = rc("temp-keyfile") + if os.path.exists(file_path): + os.remove(file_path) + with open(file_path, "w") as file: + file.write("content") + + #check assumptions + stat = os.stat(file_path) + self.assertNotEqual('600', oct(stat.st_mode)[-3:]) + current_user = pwd.getpwuid(os.getuid())[0] + if current_user != settings.pacemaker_uname: + file_user = pwd.getpwuid(stat.st_uid)[0] + self.assertNotEqual(file_user, settings.pacemaker_uname) + current_group = grp.getgrgid(os.getgid())[0] + if current_group != settings.pacemaker_gname: + file_group = grp.getgrgid(stat.st_gid)[0] + self.assertNotEqual(file_group, settings.pacemaker_gname) + + #run tested method + env.set_keyfile_access(file_path) + + #check + stat = os.stat(file_path) + self.assertEqual('600', oct(stat.st_mode)[-3:]) + + file_user = pwd.getpwuid(stat.st_uid)[0] + self.assertEqual(file_user, settings.pacemaker_uname) + + file_group = grp.getgrgid(stat.st_gid)[0] + self.assertEqual(file_group, settings.pacemaker_gname) + + @patch_env("pwd.getpwnam", mock.MagicMock(side_effect=KeyError)) + @patch_env("settings.pacemaker_uname", "some-user") + def test_raises_when_cannot_get_uid(self): + assert_raise_library_error( + lambda: env.set_keyfile_access("/booth"), + ( + severities.ERROR, + report_codes.UNABLE_TO_DETERMINE_USER_UID, + { + "user": "some-user", + } + ), + ) + + @patch_env("grp.getgrnam", mock.MagicMock(side_effect=KeyError)) + @patch_env("pwd.getpwnam", mock.MagicMock()) + @patch_env("settings.pacemaker_gname", "some-group") + def test_raises_when_cannot_get_gid(self): + assert_raise_library_error( + lambda: env.set_keyfile_access("/booth"), + ( + severities.ERROR, + report_codes.UNABLE_TO_DETERMINE_GROUP_GID, + { + "group": "some-group", + } + ), + ) + + @patch_env("format_environment_error", mock.Mock(return_value="err")) + @patch_env("os.chown", mock.MagicMock(side_effect=EnvironmentError())) + 
@patch_env("grp.getgrnam", mock.MagicMock()) + @patch_env("pwd.getpwnam", mock.MagicMock()) + @patch_env("settings.pacemaker_gname", "some-group") + def test_raises_when_cannot_chown(self): + assert_raise_library_error( + lambda: env.set_keyfile_access("/booth"), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + 'reason': 'err', + 'file_role': u'BOOTH_KEY', + 'file_path': '/booth', + 'operation': u'chown', + } + ), + ) + + @patch_env("format_environment_error", mock.Mock(return_value="err")) + @patch_env("os.chmod", mock.MagicMock(side_effect=EnvironmentError())) + @patch_env("os.chown", mock.MagicMock()) + @patch_env("grp.getgrnam", mock.MagicMock()) + @patch_env("pwd.getpwnam", mock.MagicMock()) + @patch_env("settings.pacemaker_gname", "some-group") + def test_raises_when_cannot_chmod(self): + assert_raise_library_error( + lambda: env.set_keyfile_access("/booth"), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + 'reason': 'err', + 'file_role': u'BOOTH_KEY', + 'file_path': '/booth', + 'operation': u'chmod', + } + ), + ) diff --git a/pcs/lib/booth/test/test_resource.py b/pcs/lib/booth/test/test_resource.py new file mode 100644 index 0000000..440ddde --- /dev/null +++ b/pcs/lib/booth/test/test_resource.py @@ -0,0 +1,203 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from lxml import etree + +import pcs.lib.booth.resource as booth_resource +from pcs.test.tools.pcs_mock import mock + + +def fixture_resources_with_booth(booth_config_file_path): + return etree.fromstring(''' + + + + + + + + '''.format(booth_config_file_path)) + +def fixture_booth_element(id, booth_config_file_path): + return etree.fromstring(''' + + + + + + '''.format(id, booth_config_file_path)) + +def fixture_ip_element(id, ip=""): + return etree.fromstring(''' + + + + + + '''.format(id, ip)) + +class CreateResourceIdTest(TestCase): + @mock.patch("pcs.lib.booth.resource.find_unique_id") + def 
test_return_new_uinq_id(self, mock_find_unique_id): + resources_section = etree.fromstring('''''') + mock_find_unique_id.side_effect = ( + lambda resources_section, id: "{0}-n".format(id) + ) + self.assertEqual( + "booth-some-name-ip-n", + booth_resource.create_resource_id( + resources_section, "some-name", "ip" + ) + ) + +class FindBoothResourceElementsTest(TestCase): + def test_returns_empty_list_when_no_matching_booth_element(self): + self.assertEqual([], booth_resource.find_for_config( + fixture_resources_with_booth("/ANOTHER/PATH/TO/CONF"), + "/PATH/TO/CONF" + )) + + + def test_returns_all_found_resource_elements(self): + resources = etree.fromstring('') + first = fixture_booth_element("first", "/PATH/TO/CONF") + second = fixture_booth_element("second", "/ANOTHER/PATH/TO/CONF") + third = fixture_booth_element("third", "/PATH/TO/CONF") + for element in [first, second,third]: + resources.append(element) + + self.assertEqual( + [first, third], + booth_resource.find_for_config( + resources, + "/PATH/TO/CONF" + ) + ) + +class RemoveFromClusterTest(TestCase): + def call(self, resources_section, remove_multiple=False): + mock_resource_remove = mock.Mock() + num_of_removed_booth_resources = booth_resource.get_remover( + mock_resource_remove + )( + resources_section, + "/PATH/TO/CONF", + remove_multiple, + ) + return ( + mock_resource_remove, + num_of_removed_booth_resources + ) + + def fixture_resources_including_two_booths(self): + resources_section = etree.fromstring('') + first = fixture_booth_element("first", "/PATH/TO/CONF") + second = fixture_booth_element("second", "/PATH/TO/CONF") + resources_section.append(first) + resources_section.append(second) + return resources_section + + def test_raises_when_booth_resource_not_found(self): + self.assertRaises( + booth_resource.BoothNotFoundInCib, + lambda: self.call(etree.fromstring('')), + ) + + def test_raises_when_more_booth_resources_found(self): + resources_section = self.fixture_resources_including_two_booths() + 
self.assertRaises( + booth_resource.BoothMultipleOccurenceFoundInCib, + lambda: self.call(resources_section), + ) + + def test_returns_number_of_removed_elements(self): + resources_section = self.fixture_resources_including_two_booths() + mock_resource_remove, num_of_removed_booth_resources = self.call( + resources_section, + remove_multiple=True + ) + self.assertEqual(num_of_removed_booth_resources, 2) + self.assertEqual( + mock_resource_remove.mock_calls, [ + mock.call('first'), + mock.call('second'), + ] + ) + + def test_remove_ip_when_is_only_booth_sibling_in_group(self): + resources_section = etree.fromstring(''' + + + + + + + + + + + ''') + + mock_resource_remove, _ = self.call( + resources_section, + remove_multiple=True + ) + self.assertEqual( + mock_resource_remove.mock_calls, [ + mock.call('ip'), + mock.call('booth'), + ] + ) + + +class FindBindedIpTest(TestCase): + def fixture_resource_section(self, ip_element_list): + resources_section = etree.fromstring('') + group = etree.SubElement(resources_section, "group") + group.append(fixture_booth_element("booth1", "/PATH/TO/CONF")) + for ip_element in ip_element_list: + group.append(ip_element) + return resources_section + + + def test_returns_None_when_no_ip(self): + self.assertEqual( + [], + booth_resource.find_bound_ip( + self.fixture_resource_section([]), + "/PATH/TO/CONF", + ) + ) + + def test_returns_ip_when_correctly_found(self): + self.assertEqual( + ["192.168.122.31"], + booth_resource.find_bound_ip( + self.fixture_resource_section([ + fixture_ip_element("ip1", "192.168.122.31"), + ]), + "/PATH/TO/CONF", + ) + ) + + def test_returns_None_when_more_ip(self): + self.assertEqual( + ["192.168.122.31", "192.168.122.32"], + booth_resource.find_bound_ip( + self.fixture_resource_section([ + fixture_ip_element("ip1", "192.168.122.31"), + fixture_ip_element("ip2", "192.168.122.32"), + ]), + "/PATH/TO/CONF", + ) + ) diff --git a/pcs/lib/booth/test/test_status.py b/pcs/lib/booth/test/test_status.py new file mode 
100644 index 0000000..0ea837a --- /dev/null +++ b/pcs/lib/booth/test/test_status.py @@ -0,0 +1,137 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +try: + # python 2 + #pylint: disable=unused-import + from urlparse import parse_qs as url_decode +except ImportError: + # python 3 + from urllib.parse import parse_qs as url_decode + +from pcs.test.tools.pcs_mock import mock +from pcs.test.tools.assertions import assert_raise_library_error + +from pcs import settings +from pcs.common import report_codes +from pcs.lib.errors import ReportItemSeverity as Severities +from pcs.lib.external import CommandRunner +import pcs.lib.booth.status as lib + + +class GetDaemonStatusTest(TestCase): + def setUp(self): + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + + def test_no_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual("output", lib.get_daemon_status(self.mock_run)) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "status"] + ) + + def test_with_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual("output", lib.get_daemon_status(self.mock_run, "name")) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "status", "-c", "name"] + ) + + def test_daemon_not_running(self): + self.mock_run.run.return_value = ("", 7) + self.assertEqual("", lib.get_daemon_status(self.mock_run)) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "status"] + ) + + def test_failure(self): + self.mock_run.run.return_value = ("out", 1) + assert_raise_library_error( + lambda: lib.get_daemon_status(self.mock_run), + ( + Severities.ERROR, + report_codes.BOOTH_DAEMON_STATUS_ERROR, + {"reason": "out"} + ) + ) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "status"] + ) + + +class GetTicketsStatusTest(TestCase): + def setUp(self): + self.mock_run = 
mock.MagicMock(spec_set=CommandRunner) + + def test_no_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual("output", lib.get_tickets_status(self.mock_run)) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "list"] + ) + + def test_with_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual( + "output", lib.get_tickets_status(self.mock_run, "name") + ) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "list", "-c", "name"] + ) + + def test_failure(self): + self.mock_run.run.return_value = ("out", 1) + assert_raise_library_error( + lambda: lib.get_tickets_status(self.mock_run), + ( + Severities.ERROR, + report_codes.BOOTH_TICKET_STATUS_ERROR, + { + "reason": "out" + } + ) + ) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "list"] + ) + + +class GetPeersStatusTest(TestCase): + def setUp(self): + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + + def test_no_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual("output", lib.get_peers_status(self.mock_run)) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "peers"] + ) + + def test_with_name(self): + self.mock_run.run.return_value = ("output", 0) + self.assertEqual("output", lib.get_peers_status(self.mock_run, "name")) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "peers", "-c", "name"] + ) + + def test_failure(self): + self.mock_run.run.return_value = ("out", 1) + assert_raise_library_error( + lambda: lib.get_peers_status(self.mock_run), + ( + Severities.ERROR, + report_codes.BOOTH_PEERS_STATUS_ERROR, + { + "reason": "out" + } + ) + ) + self.mock_run.run.assert_called_once_with( + [settings.booth_binary, "peers"] + ) diff --git a/pcs/lib/booth/test/test_sync.py b/pcs/lib/booth/test/test_sync.py new file mode 100644 index 0000000..58500cc --- /dev/null +++ b/pcs/lib/booth/test/test_sync.py @@ -0,0 +1,1215 @@ +from 
__future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +import json +import base64 +try: + # python 2 + from urlparse import parse_qs as url_decode +except ImportError: + # python 3 + from urllib.parse import parse_qs as url_decode + +from pcs.test.tools.pcs_mock import mock +from pcs.test.tools.assertions import ( + assert_report_item_list_equal, + assert_raise_library_error, +) +from pcs.test.tools.custom_mock import MockLibraryReportProcessor + +from pcs.common import report_codes +from pcs.lib.node import NodeAddresses, NodeAddressesList +from pcs.lib.errors import LibraryError, ReportItemSeverity as Severities +from pcs.lib.external import NodeCommunicator, NodeConnectionException +import pcs.lib.booth.sync as lib + + +def to_b64(string): + return base64.b64encode(string.encode("utf-8")).decode("utf-8") + + +class SetConfigOnNodeTest(TestCase): + def setUp(self): + self.mock_com = mock.MagicMock(spec_set=NodeCommunicator) + self.mock_rep = MockLibraryReportProcessor() + self.node = NodeAddresses("node") + + def test_with_authfile(self): + lib._set_config_on_node( + self.mock_com, + self.mock_rep, + self.node, + "cfg_name", + "cfg", + authfile="/abs/path/my-key.key", + authfile_data="test key".encode("utf-8") + ) + self.assertEqual(1, self.mock_com.call_node.call_count) + self.assertEqual(self.node, self.mock_com.call_node.call_args[0][0]) + self.assertEqual( + "remote/booth_set_config", self.mock_com.call_node.call_args[0][1] + ) + data = url_decode(self.mock_com.call_node.call_args[0][2]) + self.assertTrue("data_json" in data) + self.assertEqual( + { + "config": { + "name": "cfg_name.conf", + "data": "cfg" + }, + "authfile": { + "name": "my-key.key", + "data": to_b64("test key") + } + }, + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, 
+ "name": "cfg_name", + "name_list": ["cfg_name"] + } + )] + ) + + def _assert(self): + self.assertEqual(1, self.mock_com.call_node.call_count) + self.assertEqual(self.node, self.mock_com.call_node.call_args[0][0]) + self.assertEqual( + "remote/booth_set_config", self.mock_com.call_node.call_args[0][1] + ) + data = url_decode(self.mock_com.call_node.call_args[0][2]) + self.assertTrue("data_json" in data) + self.assertEqual( + { + "config": { + "name": "cfg_name.conf", + "data": "cfg" + } + }, + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, + "name": "cfg_name", + "name_list": ["cfg_name"] + } + )] + ) + + def test_authfile_data_None(self): + lib._set_config_on_node( + self.mock_com, self.mock_rep, self.node, "cfg_name", "cfg", + authfile="key.key" + ) + self._assert() + + def test_authfile_only_data(self): + lib._set_config_on_node( + self.mock_com, self.mock_rep, self.node, "cfg_name", "cfg", + authfile_data="key".encode("utf-8") + ) + self._assert() + + def test_without_authfile(self): + lib._set_config_on_node( + self.mock_com, self.mock_rep, self.node, "cfg_name", "cfg" + ) + self._assert() + + +@mock.patch("pcs.lib.booth.sync.parallel_nodes_communication_helper") +class SyncConfigInCluster(TestCase): + def setUp(self): + self.mock_communicator = mock.MagicMock(spec_set=NodeCommunicator) + self.mock_reporter = MockLibraryReportProcessor() + self.node_list = NodeAddressesList( + [NodeAddresses("node" + str(i) for i in range(5))] + ) + + def test_without_authfile(self, mock_parallel): + lib.send_config_to_all_nodes( + self.mock_communicator, + self.mock_reporter, + self.node_list, + "cfg_name", + "config data" + ) + mock_parallel.assert_called_once_with( + lib._set_config_on_node, + [ + ( + [ + self.mock_communicator, + self.mock_reporter, + node, + "cfg_name", + "config data", + None, + None + ], + {} + ) + 
for node in self.node_list + ], + self.mock_reporter, + False + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_DISTRIBUTING_CONFIG, + {"name": "cfg_name"} + )] + ) + + def test_skip_offline(self, mock_parallel): + lib.send_config_to_all_nodes( + self.mock_communicator, + self.mock_reporter, + self.node_list, + "cfg_name", + "config data", + skip_offline=True + ) + mock_parallel.assert_called_once_with( + lib._set_config_on_node, + [ + ( + [ + self.mock_communicator, + self.mock_reporter, + node, + "cfg_name", + "config data", + None, + None + ], + {} + ) + for node in self.node_list + ], + self.mock_reporter, + True + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_DISTRIBUTING_CONFIG, + {"name": "cfg_name"} + )] + ) + + def test_with_authfile(self, mock_parallel): + lib.send_config_to_all_nodes( + self.mock_communicator, + self.mock_reporter, + self.node_list, + "cfg_name", + "config data", + authfile="/my/auth/file.key", + authfile_data="authfile data".encode("utf-8") + ) + mock_parallel.assert_called_once_with( + lib._set_config_on_node, + [ + ( + [ + self.mock_communicator, + self.mock_reporter, + node, + "cfg_name", + "config data", + "/my/auth/file.key", + "authfile data".encode("utf-8") + ], + {} + ) + for node in self.node_list + ], + self.mock_reporter, + False + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_DISTRIBUTING_CONFIG, + {"name": "cfg_name"} + )] + ) + + +@mock.patch("pcs.lib.booth.config_structure.get_authfile") +@mock.patch("pcs.lib.booth.config_parser.parse") +@mock.patch("pcs.lib.booth.config_files.read_configs") +@mock.patch("pcs.lib.booth.config_files.read_authfile") +class SendAllConfigToNodeTest(TestCase): + def setUp(self): + self.mock_communicator = mock.MagicMock(spec_set=NodeCommunicator) + self.mock_reporter = 
MockLibraryReportProcessor() + self.node = NodeAddresses("node") + + @staticmethod + def mock_parse_fn(config_content): + if config_content not in ["config1", "config2"]: + raise AssertionError( + "unexpected input {0}".format(config_content) + ) + return config_content + + @staticmethod + def mock_authfile_fn(parsed_config): + _data = { + "config1": "/path/to/file1.key", + "config2": "/path/to/file2.key" + } + if parsed_config not in _data: + raise AssertionError( + "unexpected input {0}".format(parsed_config) + ) + return _data[parsed_config] + + @staticmethod + def mock_read_authfile_fn(_, authfile_path): + _data = { + "/path/to/file1.key": "some key".encode("utf-8"), + "/path/to/file2.key": "another key".encode("utf-8"), + } + if authfile_path not in _data: + raise AssertionError( + "unexpected input {0}".format(authfile_path) + ) + return _data[authfile_path] + + def test_success( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": [], + "failed": {}, + "saved": ["name1.conf", "file1.key", "name2.conf", "file2.key"] + } + """ + lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + 
mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, + "name": "name1.conf, file1.key, name2.conf, file2.key", + "name_list": [ + "name1.conf", "file1.key", "name2.conf", "file2.key" + ] + } + ) + ] + ) + + def test_do_not_rewrite_existing( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": ["name1.conf", "file1.key"], + "failed": {}, + "saved": ["name2.conf", "file2.key"] + } + """ + assert_raise_library_error( + lambda: lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ), + ( + Severities.ERROR, + 
report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "name1.conf", + "node": self.node.label + }, + report_codes.FORCE_FILE_OVERWRITE + ), + ( + Severities.ERROR, + report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "file1.key", + "node": self.node.label + }, + report_codes.FORCE_FILE_OVERWRITE + ) + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.ERROR, + report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "name1.conf", + "node": 
self.node.label + }, + report_codes.FORCE_FILE_OVERWRITE + ), + ( + Severities.ERROR, + report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "file1.key", + "node": self.node.label + }, + report_codes.FORCE_FILE_OVERWRITE + ) + ] + ) + + def test_rewrite_existing( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": ["name1.conf", "file1.key"], + "failed": {}, + "saved": ["name2.conf", "file2.key"] + } + """ + lib.send_all_config_to_node( + self.mock_communicator, + self.mock_reporter, + self.node, + rewrite_existing=True + ) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertTrue("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": 
to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.WARNING, + report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "name1.conf", + "node": self.node.label + } + ), + ( + Severities.WARNING, + report_codes.FILE_ALREADY_EXISTS, + { + "file_role": None, + "file_path": "file1.key", + "node": self.node.label + } + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, + "name": "name2.conf, file2.key", + "name_list": ["name2.conf", "file2.key"] + } + ) + ] + ) + + def test_write_failure( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": [], + "failed": { + "name1.conf": "Error message", + "file1.key": "Another error message" + }, + "saved": ["name2.conf", "file2.key"] + } + """ + assert_raise_library_error( + lambda: lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_WRITE_ERROR, + { + "node": self.node.label, + "name": "name1.conf", + "reason": "Error message" + } + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_WRITE_ERROR, + { + "node": self.node.label, + "name": "file1.key", + "reason": "Another error message" + } + ) + ) + self.assertEqual(2, mock_parse.call_count) 
+ mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_WRITE_ERROR, + { + "node": self.node.label, + "name": "name1.conf", + "reason": "Error message" + } + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_WRITE_ERROR, + { + "node": self.node.label, + "name": "file1.key", + "reason": "Another error message" + } + ) + ] + ) + + def test_communication_failure( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = 
self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.side_effect = NodeConnectionException( + self.node.label, "command", "reason" + ) + assert_raise_library_error( + lambda: lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ), + ( + Severities.ERROR, + report_codes.NODE_COMMUNICATION_ERROR_UNABLE_TO_CONNECT, + { + "node": self.node.label, + "command": "command", + "reason": "reason" + } + ) + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + + def test_wrong_response_format( + self, 
mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing_files": [], + "failed": { + "name1.conf": "Error message", + "file1.key": "Another error message" + }, + "saved": ["name2.conf", "file2.key"] + } + """ + assert_raise_library_error( + lambda: lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ), + ( + Severities.ERROR, + report_codes.INVALID_RESPONSE_FORMAT, + {"node": self.node.label} + ) + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + 
"name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + + def test_response_not_json( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = self.mock_authfile_fn + mock_read_authfile.side_effect = self.mock_read_authfile_fn + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = "not json" + assert_raise_library_error( + lambda: lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ), + ( + Severities.ERROR, + report_codes.INVALID_RESPONSE_FORMAT, + {"node": self.node.label} + ) + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_read_authfile.call_count) + mock_read_authfile.assert_has_calls([ + mock.call(self.mock_reporter, "/path/to/file1.key"), + mock.call(self.mock_reporter, "/path/to/file2.key") + ]) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "file1.key", + "data": to_b64("some key"), + "is_authfile": True + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + 
"name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + + + def test_configs_without_authfiles( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + def mock_authfile_fn(parsed_config): + if parsed_config == "config1": + return None + elif parsed_config == "config2": + return "/path/to/file2.key" + else: + raise AssertionError( + "unexpected input: {0}".format(parsed_config) + ) + + mock_parse.side_effect = self.mock_parse_fn + mock_authfile.side_effect = mock_authfile_fn + mock_read_authfile.return_value = "another key".encode("utf-8") + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": [], + "failed": {}, + "saved": ["name1.conf", "name2.conf", "file2.key"] + } + """ + lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + self.assertEqual(2, mock_authfile.call_count) + mock_authfile.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + mock_read_authfile.assert_called_once_with( + self.mock_reporter, "/path/to/file2.key" + ) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name1.conf", + "data": "config1", + "is_authfile": False + }, + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { 
+ "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, + "name": "name1.conf, name2.conf, file2.key", + "name_list": ["name1.conf", "name2.conf", "file2.key"] + } + ) + ] + ) + + def test_unable_to_parse_config( + self, mock_read_authfile, mock_read_configs, mock_parse, mock_authfile + ): + def mock_parse_fn(config_data): + if config_data == "config1": + raise LibraryError() + elif config_data == "config2": + return "config2" + else: + raise AssertionError( + "unexpected input: {0}".format(config_data) + ) + + mock_parse.side_effect = mock_parse_fn + mock_authfile.return_value = "/path/to/file2.key" + mock_read_authfile.return_value = "another key".encode("utf-8") + mock_read_configs.return_value = { + "name1.conf": "config1", + "name2.conf": "config2" + } + self.mock_communicator.call_node.return_value = """ + { + "existing": [], + "failed": {}, + "saved": ["name2.conf", "file2.key"] + } + """ + lib.send_all_config_to_node( + self.mock_communicator, self.mock_reporter, self.node + ) + self.assertEqual(2, mock_parse.call_count) + mock_parse.assert_has_calls([ + mock.call("config1"), mock.call("config2") + ]) + mock_authfile.assert_called_once_with("config2") + mock_read_authfile.assert_called_once_with( + self.mock_reporter, "/path/to/file2.key" + ) + mock_read_configs.assert_called_once_with(self.mock_reporter, False) + self.assertEqual(1, self.mock_communicator.call_node.call_count) + self.assertEqual( + self.node, self.mock_communicator.call_node.call_args[0][0] + ) + self.assertEqual( + "remote/booth_save_files", + self.mock_communicator.call_node.call_args[0][1] + ) + data = 
url_decode(self.mock_communicator.call_node.call_args[0][2]) + self.assertFalse("rewrite_existing" in data) + self.assertTrue("data_json" in data) + self.assertEqual( + [ + { + "name": "name2.conf", + "data": "config2", + "is_authfile": False + }, + { + "name": "file2.key", + "data": to_b64("another key"), + "is_authfile": True + } + ], + json.loads(data["data_json"][0]) + ) + assert_report_item_list_equal( + self.mock_reporter.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVING_ON_NODE, + {"node": self.node.label} + ), + ( + Severities.WARNING, + report_codes.BOOTH_SKIPPING_CONFIG, + { + "config_file": "name1.conf" + } + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": self.node.label, + "name": "name2.conf, file2.key", + "name_list": ["name2.conf", "file2.key"] + } + ) + ] + ) + + +class PullConfigFromNodeTest(TestCase): + def setUp(self): + self.mock_communicator = mock.MagicMock(spec_set=NodeCommunicator) + self.node = NodeAddresses("node") + + def test_success(self): + self.mock_communicator.call_node.return_value = "{}" + self.assertEqual( + {}, lib.pull_config_from_node( + self.mock_communicator, self.node, "booth" + ) + ) + self.mock_communicator.call_node.assert_called_once_with( + self.node, "remote/booth_get_config", "name=booth" + ) + + def test_not_json(self): + self.mock_communicator.call_node.return_value = "not json" + assert_raise_library_error( + lambda: lib.pull_config_from_node( + self.mock_communicator, self.node, "booth" + ), + ( + Severities.ERROR, + report_codes.INVALID_RESPONSE_FORMAT, + {"node": self.node.label} + ) + ) + + def test_communication_failure(self): + self.mock_communicator.call_node.side_effect = NodeConnectionException( + self.node.label, "command", "reason" + ) + assert_raise_library_error( + lambda: lib.pull_config_from_node( + self.mock_communicator, self.node, "booth" + ), + ( + Severities.ERROR, + report_codes.NODE_COMMUNICATION_ERROR_UNABLE_TO_CONNECT, + { 
+ "node": self.node.label, + "command": "command", + "reason": "reason" + } + ) + ) diff --git a/pcs/lib/cib/tools.py b/pcs/lib/cib/tools.py index f86b63b..d8ce57a 100644 --- a/pcs/lib/cib/tools.py +++ b/pcs/lib/cib/tools.py @@ -100,6 +100,13 @@ def get_constraints(tree): """ return _get_mandatory_section(tree, "configuration/constraints") +def get_resources(tree): + """ + Return 'resources' element from tree + tree cib etree node + """ + return _get_mandatory_section(tree, "configuration/resources") + def find_parent(element, tag_names): candidate = element while True: diff --git a/pcs/lib/commands/booth.py b/pcs/lib/commands/booth.py new file mode 100644 index 0000000..43ea9dd --- /dev/null +++ b/pcs/lib/commands/booth.py @@ -0,0 +1,349 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import base64 +import os.path +from functools import partial + +from pcs import settings +from pcs.lib import external, reports +from pcs.lib.booth import ( + config_exchange, + config_files, + config_structure, + reports as booth_reports, + resource, + status, + sync, +) +from pcs.lib.booth.config_parser import parse, build +from pcs.lib.booth.env import get_config_file_name +from pcs.lib.cib.tools import get_resources +from pcs.lib.errors import LibraryError, ReportItemSeverity +from pcs.lib.node import NodeAddresses + + +def config_setup(env, booth_configuration, overwrite_existing=False): + """ + create boot configuration + list site_list contains site adresses of multisite + list arbitrator_list contains arbitrator adresses of multisite + """ + + config_structure.validate_peers( + booth_configuration.get("sites", []), + booth_configuration.get("arbitrators", []) + ) + config_content = config_exchange.from_exchange_format(booth_configuration) + + env.booth.create_key(config_files.generate_key(), overwrite_existing) + config_content = config_structure.set_authfile( + config_content, + env.booth.key_path + ) + 
env.booth.create_config(build(config_content), overwrite_existing) + +def config_destroy(env, ignore_config_load_problems=False): + env.booth.command_expect_live_env() + env.command_expect_live_corosync_env() + + name = env.booth.name + config_is_used = partial(booth_reports.booth_config_is_used, name) + + report_list = [] + + if(env.is_node_in_cluster() and resource.find_for_config( + get_resources(env.get_cib()), + get_config_file_name(name), + )): + report_list.append(config_is_used("in cluster resource")) + + #Only systemd is currently supported. Initd does not supports multiple + #instances (here specified by name) + if external.is_systemctl(): + if external.is_service_running(env.cmd_runner(), "booth", name): + report_list.append(config_is_used("(running in systemd)")) + + if external.is_service_enabled(env.cmd_runner(), "booth", name): + report_list.append(config_is_used("(enabled in systemd)")) + + if report_list: + raise LibraryError(*report_list) + + authfile_path = None + try: + authfile_path = config_structure.get_authfile( + parse(env.booth.get_config_content()) + ) + except LibraryError: + if not ignore_config_load_problems: + raise LibraryError(booth_reports.booth_cannot_identify_keyfile()) + + #if content not received, not valid,... 
still remove config needed + env.report_processor.process( + booth_reports.booth_cannot_identify_keyfile( + severity=ReportItemSeverity.WARNING + ) + ) + + if( + authfile_path + and + os.path.dirname(authfile_path) == settings.booth_config_dir + ): + env.booth.set_key_path(authfile_path) + env.booth.remove_key() + env.booth.remove_config() + +def config_show(env): + """ + return configuration as tuple of sites list and arbitrators list + """ + return config_exchange.to_exchange_format( + parse(env.booth.get_config_content()) + ) + +def config_ticket_add(env, ticket_name): + """ + add ticket to booth configuration + """ + booth_configuration = config_structure.add_ticket( + parse(env.booth.get_config_content()), + ticket_name + ) + env.booth.push_config(build(booth_configuration)) + +def config_ticket_remove(env, ticket_name): + """ + remove ticket from booth configuration + """ + booth_configuration = config_structure.remove_ticket( + parse(env.booth.get_config_content()), + ticket_name + ) + env.booth.push_config(build(booth_configuration)) + +def create_in_cluster(env, name, ip, resource_create): + #TODO resource_create is provisional hack until resources are not moved to + #lib + resources_section = get_resources(env.get_cib()) + + booth_config_file_path = get_config_file_name(name) + if resource.find_for_config(resources_section, booth_config_file_path): + raise LibraryError(booth_reports.booth_already_in_cib(name)) + + resource.get_creator(resource_create)( + ip, + booth_config_file_path, + create_id = partial( + resource.create_resource_id, + resources_section, + name + ) + ) + +def remove_from_cluster(env, name, resource_remove): + #TODO resource_remove is provisional hack until resources are not moved to + #lib + try: + num_of_removed_booth_resources = resource.get_remover(resource_remove)( + get_resources(env.get_cib()), + get_config_file_name(name), + ) + if num_of_removed_booth_resources > 1: + env.report_processor.process( + 
booth_reports.booth_multiple_times_in_cib( + name, + severity=ReportItemSeverity.WARNING, + ) + ) + except resource.BoothNotFoundInCib: + raise LibraryError(booth_reports.booth_not_exists_in_cib(name)) + except resource.BoothMultipleOccurenceFoundInCib: + raise LibraryError(booth_reports.booth_multiple_times_in_cib(name)) + +def ticket_operation(operation, env, name, ticket, site_ip): + if not site_ip: + site_ip_list = resource.find_bound_ip( + get_resources(env.get_cib()), + get_config_file_name(name) + ) + if len(site_ip_list) != 1: + raise LibraryError( + booth_reports.booth_cannot_determine_local_site_ip() + ) + site_ip = site_ip_list[0] + + command_output, return_code = env.cmd_runner().run([ + settings.booth_binary, operation, + "-s", site_ip, + ticket + ]) + + if return_code != 0: + raise LibraryError( + booth_reports.booth_ticket_operation_failed( + operation, + command_output, + site_ip, + ticket + ) + ) + +ticket_grant = partial(ticket_operation, "grant") +ticket_revoke = partial(ticket_operation, "revoke") + +def config_sync(env, name, skip_offline_nodes=False): + """ + Send specified local booth configuration to all nodes in cluster. + + env -- LibraryEnvironment + name -- booth instance name + skip_offline_nodes -- if True offline nodes will be skipped + """ + config = env.booth.get_config_content() + authfile_path = config_structure.get_authfile(parse(config)) + authfile_content = config_files.read_authfile( + env.report_processor, authfile_path + ) + + sync.send_config_to_all_nodes( + env.node_communicator(), + env.report_processor, + env.get_corosync_conf().get_nodes(), + name, + config, + authfile=authfile_path, + authfile_data=authfile_content, + skip_offline=skip_offline_nodes + ) + + +def enable_booth(env, name=None): + """ + Enable specified instance of booth service. Currently it is supported only + systemd systems. 
+ + env -- LibraryEnvironment + name -- string, name of booth instance + """ + external.ensure_is_systemd() + try: + external.enable_service(env.cmd_runner(), "booth", name) + except external.EnableServiceError as e: + raise LibraryError(reports.service_enable_error( + "booth", e.message, instance=name + )) + env.report_processor.process(reports.service_enable_success( + "booth", instance=name + )) + + +def disable_booth(env, name=None): + """ + Disable specified instance of booth service. Currently it is supported only + systemd systems. + + env -- LibraryEnvironment + name -- string, name of booth instance + """ + external.ensure_is_systemd() + try: + external.disable_service(env.cmd_runner(), "booth", name) + except external.DisableServiceError as e: + raise LibraryError(reports.service_disable_error( + "booth", e.message, instance=name + )) + env.report_processor.process(reports.service_disable_success( + "booth", instance=name + )) + + +def start_booth(env, name=None): + """ + Start specified instance of booth service. Currently it is supported only + systemd systems. On non systems it can be run like this: + BOOTH_CONF_FILE= /etc/initd/booth-arbitrator + + env -- LibraryEnvironment + name -- string, name of booth instance + """ + external.ensure_is_systemd() + try: + external.start_service(env.cmd_runner(), "booth", name) + except external.StartServiceError as e: + raise LibraryError(reports.service_start_error( + "booth", e.message, instance=name + )) + env.report_processor.process(reports.service_start_success( + "booth", instance=name + )) + + +def stop_booth(env, name=None): + """ + Stop specified instance of booth service. Currently it is supported only + systemd systems. 
+ + env -- LibraryEnvironment + name -- string, name of booth instance + """ + external.ensure_is_systemd() + try: + external.stop_service(env.cmd_runner(), "booth", name) + except external.StopServiceError as e: + raise LibraryError(reports.service_stop_error( + "booth", e.message, instance=name + )) + env.report_processor.process(reports.service_stop_success( + "booth", instance=name + )) + + +def pull_config(env, node_name, name): + """ + Get config from specified node and save it on local system. It will + rewrite existing files. + + env -- LibraryEnvironment + node_name -- string, name of node from which config should be fetched + name -- string, name of booth instance of which config should be fetched + """ + env.report_processor.process( + booth_reports.booth_fetching_config_from_node(node_name, name) + ) + output = sync.pull_config_from_node( + env.node_communicator(), NodeAddresses(node_name), name + ) + try: + env.booth.create_config(output["config"]["data"], True) + if ( + output["authfile"]["name"] is not None and + output["authfile"]["data"] + ): + env.booth.set_key_path(os.path.join( + settings.booth_config_dir, output["authfile"]["name"] + )) + env.booth.create_key( + base64.b64decode( + output["authfile"]["data"].encode("utf-8") + ), + True + ) + env.report_processor.process( + booth_reports.booth_config_saved(name_list=[name]) + ) + except KeyError: + raise LibraryError(reports.invalid_response_format(node_name)) + + +def get_status(env, name=None): + return { + "status": status.get_daemon_status(env.cmd_runner(), name), + "ticket": status.get_tickets_status(env.cmd_runner(), name), + "peers": status.get_peers_status(env.cmd_runner(), name), + } diff --git a/pcs/lib/commands/test/test_booth.py b/pcs/lib/commands/test/test_booth.py new file mode 100644 index 0000000..20bf06a --- /dev/null +++ b/pcs/lib/commands/test/test_booth.py @@ -0,0 +1,614 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + 
+import os +import base64 + +from unittest import TestCase + +from pcs.test.tools.pcs_mock import mock +from pcs.test.tools.custom_mock import MockLibraryReportProcessor +from pcs.test.tools.assertions import ( + assert_raise_library_error, + assert_report_item_list_equal, +) + +from pcs import settings +from pcs.common import report_codes +from pcs.lib.booth import resource as booth_resource +from pcs.lib.env import LibraryEnvironment +from pcs.lib.node import NodeAddresses +from pcs.lib.errors import LibraryError, ReportItemSeverity as Severities +from pcs.lib.commands import booth as commands +from pcs.lib.external import ( + NodeCommunicator, + CommandRunner, + EnableServiceError, + DisableServiceError, + StartServiceError, + StopServiceError +) + +def patch_commands(target, *args, **kwargs): + return mock.patch( + "pcs.lib.commands.booth.{0}".format(target), *args, **kwargs + ) + +@mock.patch("pcs.lib.booth.config_files.generate_key", return_value="key value") +@mock.patch("pcs.lib.commands.booth.build", return_value="config content") +@mock.patch("pcs.lib.booth.config_structure.validate_peers") +class ConfigSetupTest(TestCase): + def test_successfuly_build_and_write_to_std_path( + self, mock_validate_peers, mock_build, mock_generate_key + ): + env = mock.MagicMock() + commands.config_setup( + env, + booth_configuration={ + "sites": ["1.1.1.1"], + "arbitrators": ["2.2.2.2"], + }, + ) + env.booth.create_config.assert_called_once_with( + "config content", + False + ) + env.booth.create_key.assert_called_once_with( + "key value", + False + ) + mock_validate_peers.assert_called_once_with( + ["1.1.1.1"], ["2.2.2.2"] + ) + + def test_sanitize_peers_before_validation( + self, mock_validate_peers, mock_build, mock_generate_key + ): + commands.config_setup(env=mock.MagicMock(), booth_configuration={}) + mock_validate_peers.assert_called_once_with([], []) + + +class ConfigDestroyTest(TestCase): + @patch_commands("external.is_systemctl", mock.Mock(return_value=True)) + 
@patch_commands("external.is_service_enabled", mock.Mock(return_value=True)) + @patch_commands("external.is_service_running", mock.Mock(return_value=True)) + @patch_commands("resource.find_for_config", mock.Mock(return_value=[True])) + def test_raises_when_booth_config_in_use(self): + env = mock.MagicMock() + env.booth.name = "somename" + + assert_raise_library_error( + lambda: commands.config_destroy(env), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_IS_USED, + { + "name": "somename", + "detail": "in cluster resource", + } + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_IS_USED, + { + "name": "somename", + "detail": "(enabled in systemd)", + } + ), + ( + Severities.ERROR, + report_codes.BOOTH_CONFIG_IS_USED, + { + "name": "somename", + "detail": "(running in systemd)", + } + ) + ) + + @patch_commands("external.is_systemctl", mock.Mock(return_value=False)) + @patch_commands("resource.find_for_config", mock.Mock(return_value=[])) + @patch_commands("parse", mock.Mock(side_effect=LibraryError())) + def test_raises_when_cannot_get_content_of_config(self): + env = mock.MagicMock() + assert_raise_library_error( + lambda: commands.config_destroy(env), + ( + Severities.ERROR, + report_codes.BOOTH_CANNOT_IDENTIFY_KEYFILE, + {}, + report_codes.FORCE_BOOTH_DESTROY + ) + ) + + @patch_commands("external.is_systemctl", mock.Mock(return_value=False)) + @patch_commands("resource.find_for_config", mock.Mock(return_value=[])) + @patch_commands("parse", mock.Mock(side_effect=LibraryError())) + def test_remove_config_even_if_cannot_get_its_content_when_forced(self): + env = mock.MagicMock() + env.report_processor = MockLibraryReportProcessor() + commands.config_destroy(env, ignore_config_load_problems=True) + env.booth.remove_config.assert_called_once_with() + assert_report_item_list_equal(env.report_processor.report_item_list, [ + ( + Severities.WARNING, + report_codes.BOOTH_CANNOT_IDENTIFY_KEYFILE, + {} + ) + ]) + 
+@mock.patch("pcs.lib.commands.booth.config_structure.get_authfile") +@mock.patch("pcs.lib.commands.booth.parse") +@mock.patch("pcs.lib.booth.config_files.read_authfile") +@mock.patch("pcs.lib.booth.sync.send_config_to_all_nodes") +class ConfigSyncTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock() + self.mock_rep = MockLibraryReportProcessor() + self.mock_env.report_processor = self.mock_rep + self.mock_com = mock.MagicMock(spec_set=NodeCommunicator) + self.mock_env.node_communicator.return_value = self.mock_com + self.node_list = ["node1", "node2", "node3"] + corosync_conf = mock.MagicMock() + corosync_conf.get_nodes.return_value = self.node_list + self.mock_env.get_corosync_conf.return_value = corosync_conf + self.mock_env.booth.get_config_content.return_value = "config" + + def test_skip_offline( + self, mock_sync, mock_read_key, mock_parse, mock_get_authfile + ): + mock_get_authfile.return_value = "/key/path.key" + mock_read_key.return_value = "key" + commands.config_sync(self.mock_env, "name", True) + self.mock_env.booth.get_config_content.assert_called_once_with() + mock_read_key.assert_called_once_with(self.mock_rep, "/key/path.key") + mock_parse.assert_called_once_with("config") + mock_sync.assert_called_once_with( + self.mock_com, + self.mock_rep, + self.node_list, + "name", + "config", + authfile="/key/path.key", + authfile_data="key", + skip_offline=True + ) + + def test_do_not_skip_offline( + self, mock_sync, mock_read_key, mock_parse, mock_get_authfile + ): + mock_get_authfile.return_value = "/key/path.key" + mock_read_key.return_value = "key" + commands.config_sync(self.mock_env, "name") + self.mock_env.booth.get_config_content.assert_called_once_with() + mock_read_key.assert_called_once_with(self.mock_rep, "/key/path.key") + mock_parse.assert_called_once_with("config") + mock_sync.assert_called_once_with( + self.mock_com, + self.mock_rep, + self.node_list, + "name", + "config", + authfile="/key/path.key", + authfile_data="key", + 
skip_offline=False + ) + + +@mock.patch("pcs.lib.commands.booth.external.ensure_is_systemd") +@mock.patch("pcs.lib.external.enable_service") +class EnableBoothTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock(spec_set=LibraryEnvironment) + self.mock_rep = MockLibraryReportProcessor() + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + self.mock_env.cmd_runner.return_value = self.mock_run + self.mock_env.report_processor = self.mock_rep + + def test_success(self, mock_enable, mock_is_systemctl): + commands.enable_booth(self.mock_env, "name") + mock_enable.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.SERVICE_ENABLE_SUCCESS, + { + "service": "booth", + "node": None, + "instance": "name", + } + )] + ) + + def test_failed(self, mock_enable, mock_is_systemctl): + mock_enable.side_effect = EnableServiceError("booth", "msg", "name") + assert_raise_library_error( + lambda: commands.enable_booth(self.mock_env, "name"), + ( + Severities.ERROR, + report_codes.SERVICE_ENABLE_ERROR, + { + "service": "booth", + "reason": "msg", + "node": None, + "instance": "name", + } + ) + ) + mock_enable.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + + +@mock.patch("pcs.lib.commands.booth.external.ensure_is_systemd") +@mock.patch("pcs.lib.external.disable_service") +class DisableBoothTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock(spec_set=LibraryEnvironment) + self.mock_rep = MockLibraryReportProcessor() + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + self.mock_env.cmd_runner.return_value = self.mock_run + self.mock_env.report_processor = self.mock_rep + + def test_success(self, mock_disable, mock_is_systemctl): + commands.disable_booth(self.mock_env, "name") + mock_disable.assert_called_once_with(self.mock_run, "booth", 
"name") + mock_is_systemctl.assert_called_once_with() + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.SERVICE_DISABLE_SUCCESS, + { + "service": "booth", + "node": None, + "instance": "name", + } + )] + ) + + def test_failed(self, mock_disable, mock_is_systemctl): + mock_disable.side_effect = DisableServiceError("booth", "msg", "name") + assert_raise_library_error( + lambda: commands.disable_booth(self.mock_env, "name"), + ( + Severities.ERROR, + report_codes.SERVICE_DISABLE_ERROR, + { + "service": "booth", + "reason": "msg", + "node": None, + "instance": "name", + } + ) + ) + mock_disable.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + + +@mock.patch("pcs.lib.commands.booth.external.ensure_is_systemd") +@mock.patch("pcs.lib.external.start_service") +class StartBoothTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock(spec_set=LibraryEnvironment) + self.mock_rep = MockLibraryReportProcessor() + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + self.mock_env.cmd_runner.return_value = self.mock_run + self.mock_env.report_processor = self.mock_rep + + def test_success(self, mock_start, mock_is_systemctl): + commands.start_booth(self.mock_env, "name") + mock_start.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.SERVICE_START_SUCCESS, + { + "service": "booth", + "node": None, + "instance": "name", + } + )] + ) + + def test_failed(self, mock_start, mock_is_systemctl): + mock_start.side_effect = StartServiceError("booth", "msg", "name") + assert_raise_library_error( + lambda: commands.start_booth(self.mock_env, "name"), + ( + Severities.ERROR, + report_codes.SERVICE_START_ERROR, + { + "service": "booth", + "reason": "msg", + "node": None, + "instance": "name", + } + ) + ) + 
mock_start.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + + +@mock.patch("pcs.lib.commands.booth.external.ensure_is_systemd") +@mock.patch("pcs.lib.external.stop_service") +class StopBoothTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock(spec_set=LibraryEnvironment) + self.mock_rep = MockLibraryReportProcessor() + self.mock_run = mock.MagicMock(spec_set=CommandRunner) + self.mock_env.cmd_runner.return_value = self.mock_run + self.mock_env.report_processor = self.mock_rep + + def test_success(self, mock_stop, mock_is_systemctl): + commands.stop_booth(self.mock_env, "name") + mock_stop.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.SERVICE_STOP_SUCCESS, + { + "service": "booth", + "node": None, + "instance": "name", + } + )] + ) + + def test_failed(self, mock_stop, mock_is_systemctl): + mock_stop.side_effect = StopServiceError("booth", "msg", "name") + assert_raise_library_error( + lambda: commands.stop_booth(self.mock_env, "name"), + ( + Severities.ERROR, + report_codes.SERVICE_STOP_ERROR, + { + "service": "booth", + "reason": "msg", + "node": None, + "instance": "name", + } + ) + ) + mock_stop.assert_called_once_with(self.mock_run, "booth", "name") + mock_is_systemctl.assert_called_once_with() + + +@mock.patch("pcs.lib.booth.sync.pull_config_from_node") +class PullConfigTest(TestCase): + def setUp(self): + self.mock_env = mock.MagicMock(spec_set=LibraryEnvironment) + self.mock_rep = MockLibraryReportProcessor() + self.mock_com = mock.MagicMock(spec_set=NodeCommunicator) + self.mock_env.node_communicator.return_value = self.mock_com + self.mock_env.report_processor = self.mock_rep + + def test_with_authfile(self, mock_pull): + mock_pull.return_value = { + "config": { + "name": "name.conf", + "data": "config" + }, + "authfile": { + 
"name": "name.key", + "data": base64.b64encode("key".encode("utf-8")).decode("utf-8") + } + } + commands.pull_config(self.mock_env, "node", "name") + mock_pull.assert_called_once_with( + self.mock_com, NodeAddresses("node"), "name" + ) + self.mock_env.booth.create_config.called_once_with("config", True) + self.mock_env.booth.set_key_path.called_once_with(os.path.join( + settings.booth_config_dir, "name.key" + )) + self.mock_env.booth.create_key.called_once_with( + "key".encode("utf-8"), True + ) + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_FETCHING_CONFIG_FROM_NODE, + { + "node": "node", + "config": "name" + } + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": None, + "name": "name", + "name_list": ["name"] + } + ) + ] + ) + + def test_without_authfile(self, mock_pull): + mock_pull.return_value = { + "config": { + "name": "name.conf", + "data": "config" + }, + "authfile": { + "name": None, + "data": None + } + } + commands.pull_config(self.mock_env, "node", "name") + mock_pull.assert_called_once_with( + self.mock_com, NodeAddresses("node"), "name" + ) + self.mock_env.booth.create_config.called_once_with("config", True) + self.assertEqual(0, self.mock_env.booth.set_key_path.call_count) + self.assertEqual(0, self.mock_env.booth.create_key.call_count) + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [ + ( + Severities.INFO, + report_codes.BOOTH_FETCHING_CONFIG_FROM_NODE, + { + "node": "node", + "config": "name" + } + ), + ( + Severities.INFO, + report_codes.BOOTH_CONFIGS_SAVED_ON_NODE, + { + "node": None, + "name": "name", + "name_list": ["name"] + } + ) + ] + ) + + def test_invalid_input(self, mock_pull): + mock_pull.return_value = {} + assert_raise_library_error( + lambda: commands.pull_config(self.mock_env, "node", "name"), + ( + Severities.ERROR, + report_codes.INVALID_RESPONSE_FORMAT, + {"node": "node"} + ) + ) + 
mock_pull.assert_called_once_with( + self.mock_com, NodeAddresses("node"), "name" + ) + self.assertEqual(0, self.mock_env.booth.create_config.call_count) + self.assertEqual(0, self.mock_env.booth.set_key_path.call_count) + self.assertEqual(0, self.mock_env.booth.create_key.call_count) + assert_report_item_list_equal( + self.mock_rep.report_item_list, + [( + Severities.INFO, + report_codes.BOOTH_FETCHING_CONFIG_FROM_NODE, + { + "node": "node", + "config": "name" + } + )] + ) + +class TicketOperationTest(TestCase): + @mock.patch("pcs.lib.booth.resource.find_bound_ip") + def test_raises_when_implicit_site_not_found_in_cib( + self, mock_find_bound_ip + ): + mock_find_bound_ip.return_value = [] + assert_raise_library_error( + lambda: commands.ticket_operation( + "grant", mock.Mock(), "booth", "ABC", site_ip=None + ), + ( + Severities.ERROR, + report_codes.BOOTH_CANNOT_DETERMINE_LOCAL_SITE_IP, + {} + ), + ) + + def test_raises_when_command_fail(self): + mock_run = mock.Mock(return_value=("some message", 1)) + mock_env = mock.MagicMock( + cmd_runner=mock.Mock(return_value=mock.MagicMock(run=mock_run)) + ) + assert_raise_library_error( + lambda: commands.ticket_operation( + "grant", mock_env, "booth", "ABC", site_ip="1.2.3.4" + ), + ( + Severities.ERROR, + report_codes.BOOTH_TICKET_OPERATION_FAILED, + { + "operation": "grant", + "reason": "some message", + "site_ip": "1.2.3.4", + "ticket_name": "ABC", + } + ), + ) + +class CreateInClusterTest(TestCase): + @patch_commands("get_resources", mock.MagicMock()) + def test_raises_when_is_created_already(self): + assert_raise_library_error( + lambda: commands.create_in_cluster( + mock.MagicMock(), "somename", ip="1.2.3.4", resource_create=None + ), + ( + Severities.ERROR, + report_codes.BOOTH_ALREADY_IN_CIB, + { + "name": "somename", + } + ), + ) + +class RemoveFromClusterTest(TestCase): + @patch_commands("resource.get_remover", mock.Mock(return_value = mock.Mock( + side_effect=booth_resource.BoothNotFoundInCib() + ))) + def 
test_raises_when_no_booth_resource_found(self): + assert_raise_library_error( + lambda: commands.remove_from_cluster( + mock.MagicMock(), "somename", resource_remove=None + ), + ( + Severities.ERROR, + report_codes.BOOTH_NOT_EXISTS_IN_CIB, + { + 'name': 'somename', + } + ), + ) + + @patch_commands("resource.get_remover", mock.Mock(return_value = mock.Mock( + side_effect=booth_resource.BoothMultipleOccurenceFoundInCib() + ))) + def test_raises_when_multiple_booth_resource_found(self): + assert_raise_library_error( + lambda: commands.remove_from_cluster( + mock.MagicMock(), "somename", resource_remove=None + ), + ( + Severities.ERROR, + report_codes.BOOTH_MULTIPLE_TIMES_IN_CIB, + { + 'name': 'somename', + }, + report_codes.FORCE_BOOTH_REMOVE_FROM_CIB, + ), + ) + + @patch_commands("resource.get_remover", mock.Mock(return_value = mock.Mock( + return_value=2 + ))) + def test_warn_when_multiple_booth_resources_removed(self): + report_processor=MockLibraryReportProcessor() + commands.remove_from_cluster( + mock.MagicMock(report_processor=report_processor), + "somename", + resource_remove=None + ) + assert_report_item_list_equal(report_processor.report_item_list, [( + Severities.WARNING, + report_codes.BOOTH_MULTIPLE_TIMES_IN_CIB, + { + 'name': 'somename', + }, + )]) diff --git a/pcs/lib/commands/test/test_ticket.py b/pcs/lib/commands/test/test_ticket.py index 751001b..d8b8a5f 100644 --- a/pcs/lib/commands/test/test_ticket.py +++ b/pcs/lib/commands/test/test_ticket.py @@ -5,27 +5,22 @@ from __future__ import ( unicode_literals, ) -import logging from unittest import TestCase from pcs.common import report_codes from pcs.lib.commands.constraint import ticket as ticket_command -from pcs.lib.env import LibraryEnvironment as Env from pcs.lib.errors import ReportItemSeverity as severities +from pcs.lib.test.misc import get_mocked_env from pcs.test.tools.assertions import ( assert_xml_equal, assert_raise_library_error ) -from pcs.test.tools.custom_mock import 
MockLibraryReportProcessor from pcs.test.tools.misc import get_test_resource as rc -from pcs.test.tools.pcs_mock import mock from pcs.test.tools.xml import get_xml_manipulation_creator_from_file class CreateTest(TestCase): def setUp(self): - self.mock_logger = mock.MagicMock(logging.Logger) - self.mock_reporter = MockLibraryReportProcessor() self.create_cib = get_xml_manipulation_creator_from_file( rc("cib-empty.xml") ) @@ -37,7 +32,7 @@ class CreateTest(TestCase): .append_to_first_tag_name('resources', resource_xml) ) - env = Env(self.mock_logger, self.mock_reporter, cib_data=str(cib)) + env = get_mocked_env(cib_data=str(cib)) ticket_command.create(env, "ticketA", "resourceA", { "loss-policy": "fence", "rsc-role": "master" @@ -59,11 +54,7 @@ class CreateTest(TestCase): ) def test_refuse_for_nonexisting_resource(self): - env = Env( - self.mock_logger, - self.mock_reporter, - cib_data=str(self.create_cib()) - ) + env = get_mocked_env(cib_data=str(self.create_cib())) assert_raise_library_error( lambda: ticket_command.create( env, "ticketA", "resourceA", "master", {"loss-policy": "fence"} diff --git a/pcs/lib/corosync/live.py b/pcs/lib/corosync/live.py index b49b9f6..1e68c31 100644 --- a/pcs/lib/corosync/live.py +++ b/pcs/lib/corosync/live.py @@ -22,6 +22,9 @@ def get_local_corosync_conf(): except IOError as e: raise LibraryError(reports.corosync_config_read_error(path, e.strerror)) +def exists_local_corosync_conf(): + return os.path.exists(settings.corosync_conf_file) + def set_remote_corosync_conf(node_communicator, node_addr, config_text): """ Send corosync.conf to a node diff --git a/pcs/lib/env.py b/pcs/lib/env.py index 24e4252..b139c58 100644 --- a/pcs/lib/env.py +++ b/pcs/lib/env.py @@ -5,20 +5,27 @@ from __future__ import ( unicode_literals, ) +import os.path + from lxml import etree +from pcs import settings from pcs.lib import reports +from pcs.lib.booth.env import BoothEnv +from pcs.lib.cib.tools import ensure_cib_version +from 
pcs.lib.corosync.config_facade import ConfigFacade as CorosyncConfigFacade +from pcs.lib.corosync.live import ( + exists_local_corosync_conf, + get_local_corosync_conf, + reload_config as reload_corosync_config, +) from pcs.lib.external import ( is_cman_cluster, is_service_running, CommandRunner, NodeCommunicator, ) -from pcs.lib.corosync.config_facade import ConfigFacade as CorosyncConfigFacade -from pcs.lib.corosync.live import ( - get_local_corosync_conf, - reload_config as reload_corosync_config, -) +from pcs.lib.errors import LibraryError from pcs.lib.nodes_task import ( distribute_corosync_conf, check_corosync_offline_on_nodes, @@ -29,7 +36,6 @@ from pcs.lib.pacemaker import ( get_cib_xml, replace_cib_configuration_xml, ) -from pcs.lib.cib.tools import ensure_cib_version class LibraryEnvironment(object): @@ -43,6 +49,7 @@ class LibraryEnvironment(object): user_groups=None, cib_data=None, corosync_conf_data=None, + booth=None, auth_tokens_getter=None, ): self._logger = logger @@ -51,6 +58,9 @@ class LibraryEnvironment(object): self._user_groups = [] if user_groups is None else user_groups self._cib_data = cib_data self._corosync_conf_data = corosync_conf_data + self._booth = ( + BoothEnv(report_processor, booth) if booth is not None else None + ) self._is_cman_cluster = None # TODO tokens probably should not be inserted from outside, but we're # postponing dealing with them, because it's not that easy to move @@ -169,6 +179,24 @@ class LibraryEnvironment(object): else: self._corosync_conf_data = corosync_conf_data + def is_node_in_cluster(self): + if self.is_cman_cluster: + #TODO --cluster_conf is not propagated here. So no live check not + #needed here. But this should not be permanently + return os.path.exists(settings.corosync_conf_file) + + if not self.is_corosync_conf_live: + raise AssertionError( + "Cannot check if node is in cluster with mocked corosync_conf." 
+ ) + return exists_local_corosync_conf() + + def command_expect_live_corosync_env(self): + if not self.is_corosync_conf_live: + raise LibraryError(reports.live_environment_required([ + "--corosync_conf" + ])) + @property def is_corosync_conf_live(self): return self._corosync_conf_data is None @@ -195,3 +223,7 @@ class LibraryEnvironment(object): else: self._auth_tokens = {} return self._auth_tokens + + @property + def booth(self): + return self._booth diff --git a/pcs/lib/env_file.py b/pcs/lib/env_file.py new file mode 100644 index 0000000..e683a57 --- /dev/null +++ b/pcs/lib/env_file.py @@ -0,0 +1,122 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import os.path + +from pcs.common import report_codes +from pcs.common.tools import format_environment_error +from pcs.lib import reports +from pcs.lib.errors import ReportItemSeverity, LibraryError, LibraryEnvError + + +class GhostFile(object): + is_live = False + def __init__(self, file_role, content=None): + self.__file_role = file_role + self.__content = content + self.__no_existing_file_expected = False + self.__can_overwrite_existing_file = False + self.__is_binary = False + + def read(self): + if self.__content is None: + raise LibraryEnvError( + reports.file_does_not_exist(self.__file_role) + ) + + return self.__content + + def remove(self, silence_no_existence): + raise AssertionError("Remove GhostFile is not supported.") + + def write(self, content, file_operation=None, is_binary=False): + """ + callable file_operation is there only for RealFile compatible interface + it has no efect + """ + self.__is_binary = is_binary + self.__content = content + + def assert_no_conflict_with_existing( + self, report_processor, can_overwrite_existing=False + ): + self.__no_existing_file_expected = True + self.__can_overwrite_existing_file = can_overwrite_existing + + def export(self): + return { + "content": self.__content, + "no_existing_file_expected": 
self.__no_existing_file_expected, + "can_overwrite_existing_file": self.__can_overwrite_existing_file, + "is_binary": self.__is_binary, + } + + +class RealFile(object): + is_live = True + def __init__( + self, file_role, file_path, + overwrite_code=report_codes.FORCE_FILE_OVERWRITE + ): + self.__file_role = file_role + self.__file_path = file_path + self.__overwrite_code = overwrite_code + + def assert_no_conflict_with_existing( + self, report_processor, can_overwrite_existing=False + ): + if os.path.exists(self.__file_path): + report_processor.process(reports.file_already_exists( + self.__file_role, + self.__file_path, + ReportItemSeverity.WARNING if can_overwrite_existing + else ReportItemSeverity.ERROR, + forceable=None if can_overwrite_existing + else self.__overwrite_code, + )) + + def write(self, content, file_operation=None, is_binary=False): + """ + callable file_operation takes path and proces operation on it e.g. chmod + """ + mode = "wb" if is_binary else "w" + try: + with open(self.__file_path, mode) as config_file: + config_file.write(content) + if file_operation: + file_operation(self.__file_path) + except EnvironmentError as e: + raise self.__report_io_error(e, "write") + + def read(self): + try: + with open(self.__file_path, "r") as file: + return file.read() + except EnvironmentError as e: + raise self.__report_io_error(e, "read") + + def remove(self, silence_no_existence=False): + if os.path.exists(self.__file_path): + try: + os.remove(self.__file_path) + except EnvironmentError as e: + raise self.__report_io_error(e, "remove") + elif not silence_no_existence: + raise LibraryError(reports.file_io_error( + self.__file_role, + file_path=self.__file_path, + operation="remove", + reason="File does not exist" + )) + + def __report_io_error(self, e, operation): + return LibraryError(reports.file_io_error( + self.__file_role, + file_path=self.__file_path, + operation=operation, + reason=format_environment_error(e) + )) diff --git a/pcs/lib/errors.py 
b/pcs/lib/errors.py index 9cab5e9..0a8f4fa 100644 --- a/pcs/lib/errors.py +++ b/pcs/lib/errors.py @@ -8,6 +8,20 @@ from __future__ import ( class LibraryError(Exception): pass +class LibraryEnvError(LibraryError): + def __init__(self, *args, **kwargs): + super(LibraryEnvError, self).__init__(*args, **kwargs) + self.processed = [] + + def sign_processed(self, report): + self.processed.append(report) + + @property + def unprocessed(self): + return [report for report in self.args if report not in self.processed] + + + class ReportItemSeverity(object): ERROR = 'ERROR' WARNING = 'WARNING' diff --git a/pcs/lib/external.py b/pcs/lib/external.py index c773e5a..25e071f 100644 --- a/pcs/lib/external.py +++ b/pcs/lib/external.py @@ -59,9 +59,10 @@ from pcs import settings class ManageServiceError(Exception): #pylint: disable=super-init-not-called - def __init__(self, service, message=None): + def __init__(self, service, message=None, instance=None): self.service = service self.message = message + self.instance = instance class DisableServiceError(ManageServiceError): pass @@ -91,6 +92,22 @@ def is_dir_nonempty(path): return len(os.listdir(path)) > 0 +def _get_service_name(service, instance=None): + return "{0}{1}.service".format( + service, "" if instance is None else "@{0}".format(instance) + ) + +def ensure_is_systemd(): + """ + Ensure if current system is systemd system. Raises Library error if not. + """ + if not is_systemctl(): + raise LibraryError( + reports.unsupported_operation_on_non_systemd_systems() + ) + + + @simple_cache def is_systemctl(): """ @@ -108,74 +125,82 @@ def is_systemctl(): return False -def disable_service(runner, service): +def disable_service(runner, service, instance=None): """ Disable specified service in local system. Raise DisableServiceError or LibraryError on failure. runner -- CommandRunner service -- name of service + instance -- instance name, it ha no effect on not systemd systems. + If None no instance name will be used. 
""" if is_systemctl(): output, retval = runner.run([ - "systemctl", "disable", service + ".service" + "systemctl", "disable", _get_service_name(service, instance) ]) else: if not is_service_installed(runner, service): return output, retval = runner.run(["chkconfig", service, "off"]) if retval != 0: - raise DisableServiceError(service, output.rstrip()) + raise DisableServiceError(service, output.rstrip(), instance) -def enable_service(runner, service): +def enable_service(runner, service, instance=None): """ Enable specified service in local system. Raise EnableServiceError or LibraryError on failure. runner -- CommandRunner service -- name of service + instance -- instance name, it ha no effect on not systemd systems. + If None no instance name will be used. """ if is_systemctl(): output, retval = runner.run([ - "systemctl", "enable", service + ".service" + "systemctl", "enable", _get_service_name(service, instance) ]) else: output, retval = runner.run(["chkconfig", service, "on"]) if retval != 0: - raise EnableServiceError(service, output.rstrip()) + raise EnableServiceError(service, output.rstrip(), instance) -def start_service(runner, service): +def start_service(runner, service, instance=None): """ Start specified service in local system CommandRunner runner string service service name + string instance instance name, it ha no effect on not systemd systems. + If None no instance name will be used. 
""" if is_systemctl(): output, retval = runner.run([ - "systemctl", "start", "{0}.service".format(service) + "systemctl", "start", _get_service_name(service, instance) ]) else: output, retval = runner.run(["service", service, "start"]) if retval != 0: - raise StartServiceError(service, output.rstrip()) + raise StartServiceError(service, output.rstrip(), instance) -def stop_service(runner, service): +def stop_service(runner, service, instance=None): """ Stop specified service in local system CommandRunner runner string service service name + string instance instance name, it ha no effect on not systemd systems. + If None no instance name will be used. """ if is_systemctl(): output, retval = runner.run([ - "systemctl", "stop", "{0}.service".format(service) + "systemctl", "stop", _get_service_name(service, instance) ]) else: output, retval = runner.run(["service", service, "stop"]) if retval != 0: - raise StopServiceError(service, output.rstrip()) + raise StopServiceError(service, output.rstrip(), instance) def kill_services(runner, services): @@ -196,7 +221,7 @@ def kill_services(runner, services): raise KillServicesError(list(services), output.rstrip()) -def is_service_enabled(runner, service): +def is_service_enabled(runner, service, instance=None): """ Check if specified service is enabled in local system. @@ -205,7 +230,7 @@ def is_service_enabled(runner, service): """ if is_systemctl(): _, retval = runner.run( - ["systemctl", "is-enabled", service + ".service"] + ["systemctl", "is-enabled", _get_service_name(service, instance)] ) else: _, retval = runner.run(["chkconfig", service]) @@ -213,7 +238,7 @@ def is_service_enabled(runner, service): return retval == 0 -def is_service_running(runner, service): +def is_service_running(runner, service, instance=None): """ Check if specified service is currently running on local system. 
@@ -221,7 +246,11 @@ def is_service_running(runner, service): service -- name of service """ if is_systemctl(): - _, retval = runner.run(["systemctl", "is-active", service + ".service"]) + _, retval = runner.run([ + "systemctl", + "is-active", + _get_service_name(service, instance) + ]) else: _, retval = runner.run(["service", service, "status"]) @@ -314,6 +343,9 @@ class CommandRunner(object): self, args, ignore_stderr=False, stdin_string=None, env_extend=None, binary_output=False ): + #Reset environment variables by empty dict is desired here. We need to + #get rid of defaults - we do not know the context and environment of the + #library. So executable must be specified with full path. env_vars = dict(env_extend) if env_extend else dict() env_vars.update(self._env_vars) diff --git a/pcs/lib/reports.py b/pcs/lib/reports.py index fc2670b..eac95c7 100644 --- a/pcs/lib/reports.py +++ b/pcs/lib/reports.py @@ -1153,27 +1153,37 @@ def cman_broadcast_all_rings(): + "broadcast in only one ring" ) -def service_start_started(service): +def service_start_started(service, instance=None): """ system service is being started string service service name or description + string instance instance of service """ + if instance: + msg = "Starting {service}@{instance}..." + else: + msg = "Starting {service}..." 
return ReportItem.info( report_codes.SERVICE_START_STARTED, - "Starting {service}...", + msg, info={ "service": service, + "instance": instance, } ) -def service_start_error(service, reason, node=None): +def service_start_error(service, reason, node=None, instance=None): """ system service start failed string service service name or description string reason error message string node node on which service has been requested to start + string instance instance of service """ - msg = "Unable to start {service}: {reason}" + if instance: + msg = "Unable to start {service}@{instance}: {reason}" + else: + msg = "Unable to start {service}: {reason}" return ReportItem.error( report_codes.SERVICE_START_ERROR, msg if node is None else "{node}: " + msg, @@ -1181,33 +1191,43 @@ def service_start_error(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance, } ) -def service_start_success(service, node=None): +def service_start_success(service, node=None, instance=None): """ system service was started successfully string service service name or description string node node on which service has been requested to start + string instance instance of service """ - msg = "{service} started" + if instance: + msg = "{service}@{instance} started" + else: + msg = "{service} started" return ReportItem.info( report_codes.SERVICE_START_SUCCESS, msg if node is None else "{node}: " + msg, info={ "service": service, "node": node, + "instance": instance, } ) -def service_start_skipped(service, reason, node=None): +def service_start_skipped(service, reason, node=None, instance=None): """ starting system service was skipped, no error occured string service service name or description string reason why the start has been skipped string node node on which service has been requested to start + string instance instance of service """ - msg = "not starting {service} - {reason}" + if instance: + msg = "not starting {service}@{instance} - {reason}" + 
else: + msg = "not starting {service} - {reason}" return ReportItem.info( report_codes.SERVICE_START_SKIPPED, msg if node is None else "{node}: " + msg, @@ -1215,30 +1235,41 @@ def service_start_skipped(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance, } ) -def service_stop_started(service): +def service_stop_started(service, instance=None): """ system service is being stopped string service service name or description + string instance instance of service """ + if instance: + msg = "Stopping {service}@{instance}..." + else: + msg = "Stopping {service}..." return ReportItem.info( report_codes.SERVICE_STOP_STARTED, - "Stopping {service}...", + msg, info={ "service": service, + "instance": instance, } ) -def service_stop_error(service, reason, node=None): +def service_stop_error(service, reason, node=None, instance=None): """ system service stop failed string service service name or description string reason error message string node node on which service has been requested to stop + string instance instance of service """ - msg = "Unable to stop {service}: {reason}" + if instance: + msg = "Unable to stop {service}@{instance}: {reason}" + else: + msg = "Unable to stop {service}: {reason}" return ReportItem.error( report_codes.SERVICE_STOP_ERROR, msg if node is None else "{node}: " + msg, @@ -1246,22 +1277,28 @@ def service_stop_error(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance, } ) -def service_stop_success(service, node=None): +def service_stop_success(service, node=None, instance=None): """ system service was stopped successfully string service service name or description string node node on which service has been requested to stop + string instance instance of service """ - msg = "{service} stopped" + if instance: + msg = "{service}@{instance} stopped" + else: + msg = "{service} stopped" return ReportItem.info( report_codes.SERVICE_STOP_SUCCESS, msg 
if node is None else "{node}: " + msg, info={ "service": service, "node": node, + "instance": instance, } ) @@ -1295,27 +1332,37 @@ def service_kill_success(services): } ) -def service_enable_started(service): +def service_enable_started(service, instance=None): """ system service is being enabled string service service name or description + string instance instance of service """ + if instance: + msg = "Enabling {service}@{instance}..." + else: + msg = "Enabling {service}..." return ReportItem.info( report_codes.SERVICE_ENABLE_STARTED, - "Enabling {service}...", + msg, info={ "service": service, + "instance": instance, } ) -def service_enable_error(service, reason, node=None): +def service_enable_error(service, reason, node=None, instance=None): """ system service enable failed string service service name or description string reason error message string node node on which service was enabled + string instance instance of service """ - msg = "Unable to enable {service}: {reason}" + if instance: + msg = "Unable to enable {service}@{instance}: {reason}" + else: + msg = "Unable to enable {service}: {reason}" return ReportItem.error( report_codes.SERVICE_ENABLE_ERROR, msg if node is None else "{node}: " + msg, @@ -1323,33 +1370,43 @@ def service_enable_error(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance, } ) -def service_enable_success(service, node=None): +def service_enable_success(service, node=None, instance=None): """ system service was enabled successfully string service service name or description string node node on which service has been enabled + string instance instance of service """ - msg = "{service} enabled" + if instance: + msg = "{service}@{instance} enabled" + else: + msg = "{service} enabled" return ReportItem.info( report_codes.SERVICE_ENABLE_SUCCESS, msg if node is None else "{node}: " + msg, info={ "service": service, "node": node, + "instance": instance, } ) -def 
service_enable_skipped(service, reason, node=None): +def service_enable_skipped(service, reason, node=None, instance=None): """ enabling system service was skipped, no error occured string service service name or description string reason why the enabling has been skipped string node node on which service has been requested to enable + string instance instance of service """ - msg = "not enabling {service} - {reason}" + if instance: + msg = "not enabling {service}@{instance} - {reason}" + else: + msg = "not enabling {service} - {reason}" return ReportItem.info( report_codes.SERVICE_ENABLE_SKIPPED, msg if node is None else "{node}: " + msg, @@ -1357,30 +1414,41 @@ def service_enable_skipped(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance } ) -def service_disable_started(service): +def service_disable_started(service, instance=None): """ system service is being disabled string service service name or description + string instance instance of service """ + if instance: + msg = "Disabling {service}@{instance}..." + else: + msg = "Disabling {service}..." 
return ReportItem.info( report_codes.SERVICE_DISABLE_STARTED, - "Disabling {service}...", + msg, info={ "service": service, + "instance": instance, } ) -def service_disable_error(service, reason, node=None): +def service_disable_error(service, reason, node=None, instance=None): """ system service disable failed string service service name or description string reason error message string node node on which service was disabled + string instance instance of service """ - msg = "Unable to disable {service}: {reason}" + if instance: + msg = "Unable to disable {service}@{instance}: {reason}" + else: + msg = "Unable to disable {service}: {reason}" return ReportItem.error( report_codes.SERVICE_DISABLE_ERROR, msg if node is None else "{node}: " + msg, @@ -1388,22 +1456,28 @@ def service_disable_error(service, reason, node=None): "service": service, "reason": reason, "node": node, + "instance": instance, } ) -def service_disable_success(service, node=None): +def service_disable_success(service, node=None, instance=None): """ system service was disabled successfully string service service name or description string node node on which service was disabled + string instance instance of service """ - msg = "{service} disabled" + if instance: + msg = "{service}@{instance} disabled" + else: + msg = "{service} disabled" return ReportItem.info( report_codes.SERVICE_DISABLE_SUCCESS, msg if node is None else "{node}: " + msg, info={ "service": service, "node": node, + "instance": instance, } ) @@ -1742,3 +1816,88 @@ def unable_to_upgrade_cib_to_required_version( "current_version": "{0}.{1}.{2}".format(*current_version) } ) + +def file_already_exists( + file_role, file_path, severity=ReportItemSeverity.ERROR, + forceable=None, node=None + ): + msg = "file {file_path} already exists" + if file_role: + msg = "{file_role} " + msg + if node: + msg = "{node}: " + msg + return ReportItem( + report_codes.FILE_ALREADY_EXISTS, + severity, + msg, + info={ + "file_role": file_role, + 
"file_path": file_path, + "node": node, + }, + forceable=forceable, + ) + +def file_does_not_exist(file_role, file_path=""): + return ReportItem.error( + report_codes.FILE_DOES_NOT_EXIST, + "{file_role} file {file_path} does not exist", + info={ + "file_role": file_role, + "file_path": file_path, + }, + ) + +def file_io_error( + file_role, file_path="", reason="", operation="work with", + severity=ReportItemSeverity.ERROR +): + if file_path: + msg = "unable to {operation} {file_role} '{file_path}': {reason}" + else: + msg = "unable to {operation} {file_role}: {reason}" + return ReportItem( + report_codes.FILE_IO_ERROR, + severity, + msg, + info={ + "file_role": file_role, + "file_path": file_path, + "reason": reason, + "operation": operation + }, + ) + +def unable_to_determine_user_uid(user): + return ReportItem.error( + report_codes.UNABLE_TO_DETERMINE_USER_UID, + "Unable to determine uid of user '{user}'", + info={ + "user": user + } + ) + +def unable_to_determine_group_gid(group): + return ReportItem.error( + report_codes.UNABLE_TO_DETERMINE_GROUP_GID, + "Unable to determine gid of group '{group}'", + info={ + "group": group + } + ) + +def unsupported_operation_on_non_systemd_systems(): + return ReportItem.error( + report_codes.UNSUPPORTED_OPERATION_ON_NON_SYSTEMD_SYSTEMS, + "unsupported operation on non systemd systems" + ) + +def live_environment_required(forbidden_options): + return ReportItem.error( + report_codes.LIVE_ENVIRONMENT_REQUIRED, + "This command does not support {options_string}", + info={ + "forbidden_options": forbidden_options, + "options_string": ", ".join(forbidden_options), + } + ) diff --git a/pcs/lib/test/misc.py b/pcs/lib/test/misc.py new file mode 100644 index 0000000..1b1670a --- /dev/null +++ b/pcs/lib/test/misc.py @@ -0,0 +1,20 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import logging + +from pcs.lib.env import LibraryEnvironment as Env +from pcs.test.tools.custom_mock 
import MockLibraryReportProcessor +from pcs.test.tools.pcs_mock import mock + + +def get_mocked_env(**kwargs): + return Env( + logger=mock.MagicMock(logging.Logger), + report_processor=MockLibraryReportProcessor(), + **kwargs + ) diff --git a/pcs/lib/test/test_env_file.py b/pcs/lib/test/test_env_file.py new file mode 100644 index 0000000..3e27af1 --- /dev/null +++ b/pcs/lib/test/test_env_file.py @@ -0,0 +1,187 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from pcs.common import report_codes +from pcs.lib.env_file import RealFile, GhostFile +from pcs.lib.errors import ReportItemSeverity as severities +from pcs.test.tools.assertions import( + assert_raise_library_error, + assert_report_item_list_equal +) +from pcs.test.tools.custom_mock import MockLibraryReportProcessor +from pcs.test.tools.pcs_mock import mock + + +class GhostFileReadTest(TestCase): + def test_raises_when_trying_read_nonexistent_file(self): + assert_raise_library_error( + lambda: GhostFile("some role", content=None).read(), + ( + severities.ERROR, + report_codes.FILE_DOES_NOT_EXIST, + { + "file_role": "some role", + } + ), + ) + +@mock.patch("pcs.lib.env_file.os.path.exists", return_value=True) +class RealFileAssertNoConflictWithExistingTest(TestCase): + def check(self, report_processor, can_overwrite_existing=False): + real_file = RealFile("some role", "/etc/booth/some-name.conf") + real_file.assert_no_conflict_with_existing( + report_processor, + can_overwrite_existing + ) + + def test_success_when_config_not_exists(self, mock_exists): + mock_exists.return_value = False + report_processor=MockLibraryReportProcessor() + self.check(report_processor) + assert_report_item_list_equal(report_processor.report_item_list, []) + + def test_raises_when_config_exists_and_overwrite_not_allowed(self, mock_ex): + assert_raise_library_error( + lambda: self.check(MockLibraryReportProcessor()), + ( + severities.ERROR, + 
report_codes.FILE_ALREADY_EXISTS, + { + "file_path": "/etc/booth/some-name.conf" + }, + report_codes.FORCE_FILE_OVERWRITE, + ), + ) + + def test_warn_when_config_exists_and_overwrite_allowed(self, mock_exists): + report_processor=MockLibraryReportProcessor() + self.check(report_processor, can_overwrite_existing=True) + assert_report_item_list_equal(report_processor.report_item_list, [( + severities.WARNING, + report_codes.FILE_ALREADY_EXISTS, + { + "file_path": "/etc/booth/some-name.conf" + }, + )]) + +class RealFileWriteTest(TestCase): + def test_success_write_content_to_path(self): + mock_open = mock.mock_open() + mock_file_operation = mock.Mock() + with mock.patch("pcs.lib.env_file.open", mock_open, create=True): + RealFile("some role", "/etc/booth/some-name.conf").write( + "config content", + file_operation=mock_file_operation + ) + mock_open.assert_called_once_with("/etc/booth/some-name.conf", "w") + mock_open().write.assert_called_once_with("config content") + mock_file_operation.assert_called_once_with( + "/etc/booth/some-name.conf" + ) + + def test_success_binary(self): + mock_open = mock.mock_open() + mock_file_operation = mock.Mock() + with mock.patch("pcs.lib.env_file.open", mock_open, create=True): + RealFile("some role", "/etc/booth/some-name.conf").write( + "config content".encode("utf-8"), + file_operation=mock_file_operation, + is_binary=True + ) + mock_open.assert_called_once_with("/etc/booth/some-name.conf", "wb") + mock_open().write.assert_called_once_with( + "config content".encode("utf-8") + ) + mock_file_operation.assert_called_once_with( + "/etc/booth/some-name.conf" + ) + + def test_raises_when_could_not_write(self): + assert_raise_library_error( + lambda: + RealFile("some role", "/no/existing/file.path").write(["content"]), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + "reason": + "No such file or directory: '/no/existing/file.path'" + , + } + ) + ) + +class RealFileReadTest(TestCase): + def 
test_success_read_content_from_file(self): + mock_open = mock.mock_open() + with mock.patch("pcs.lib.env_file.open", mock_open, create=True): + mock_open().read.return_value = "test booth\nconfig" + self.assertEqual( + "test booth\nconfig", + RealFile("some role", "/path/to.file").read() + ) + + def test_raises_when_could_not_read(self): + assert_raise_library_error( + lambda: RealFile("some role", "/no/existing/file.path").read(), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + "reason": + "No such file or directory: '/no/existing/file.path'" + , + } + ) + ) + +class RealFileRemoveTest(TestCase): + @mock.patch("pcs.lib.env_file.os.remove") + @mock.patch("pcs.lib.env_file.os.path.exists", return_value=True) + def test_success_remove_file(self, _, mock_remove): + RealFile("some role", "/path/to.file").remove() + mock_remove.assert_called_once_with("/path/to.file") + + @mock.patch( + "pcs.lib.env_file.os.remove", + side_effect=EnvironmentError(1, "mock remove failed", "/path/to.file") + ) + @mock.patch("pcs.lib.env_file.os.path.exists", return_value=True) + def test_raise_library_error_when_remove_failed(self, _, dummy): + assert_raise_library_error( + lambda: RealFile("some role", "/path/to.file").remove(), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + 'reason': "mock remove failed: '/path/to.file'", + 'file_role': 'some role', + 'file_path': '/path/to.file' + } + ) + ) + + @mock.patch("pcs.lib.env_file.os.path.exists", return_value=False) + def test_existence_is_required(self, _): + assert_raise_library_error( + lambda: RealFile("some role", "/path/to.file").remove(), + ( + severities.ERROR, + report_codes.FILE_IO_ERROR, + { + 'reason': "File does not exist", + 'file_role': 'some role', + 'file_path': '/path/to.file' + } + ) + ) + + @mock.patch("pcs.lib.env_file.os.path.exists", return_value=False) + def test_noexistent_can_be_silenced(self, _): + RealFile("some role", "/path/to.file").remove(silence_no_existence=True) diff --git 
a/pcs/lib/test/test_errors.py b/pcs/lib/test/test_errors.py new file mode 100644 index 0000000..2e99e19 --- /dev/null +++ b/pcs/lib/test/test_errors.py @@ -0,0 +1,20 @@ +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from unittest import TestCase + +from pcs.lib.errors import LibraryEnvError + + +class LibraryEnvErrorTest(TestCase): + def test_can_sign_solved_reports(self): + e = LibraryEnvError("first", "second", "third") + for report in e.args: + if report == "second": + e.sign_processed(report) + + self.assertEqual(["first", "third"], e.unprocessed) diff --git a/pcs/pcs.8 b/pcs/pcs.8 index 09c0235..52497a0 100644 --- a/pcs/pcs.8 +++ b/pcs/pcs.8 @@ -45,6 +45,9 @@ Manage quorum device provider on the local host. quorum Manage cluster quorum settings. .TP +booth +Manage booth (cluster ticket manager). +.TP status View cluster status. .TP @@ -573,6 +576,55 @@ Cancel waiting for all nodes when establishing quorum. Useful in situations whe .TP update [auto_tie_breaker=[0|1]] [last_man_standing=[0|1]] [last_man_standing_window=[