diff --git a/tests/conftest.py b/tests/conftest.py
index d35ad6a..9fd5cfd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,11 +1,14 @@
-import os
-
 import pytest
 import vcr
 from click.testing import CliRunner

 from tim.opensearch import configure_opensearch_client

+EXIT_CODES = {
+    "success": 0,
+    "error": 1,
+    "invalid_command": 2,
+}
 my_vcr = vcr.VCR(
     cassette_library_dir="tests/fixtures/cassettes",
     filter_headers=["authorization"],
@@ -13,23 +16,20 @@


 @pytest.fixture(autouse=True)
-def test_env():
-    os.environ = {
-        "AWS_ACCESS_KEY_ID": "test",
-        "AWS_SECRET_ACCESS_KEY": "test",
-        "AWS_SESSION_TOKEN": "test",
-        "TIMDEX_OPENSEARCH_ENDPOINT": "localhost",
-        "SENTRY_DSN": None,
-        "WORKSPACE": "test",
-    }
-    yield
-
-
-@pytest.fixture()
+def _test_env(monkeypatch):
+    monkeypatch.setenv("SENTRY_DSN", "None")
+    monkeypatch.setenv("WORKSPACE", "test")
+    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "test")
+    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "test")
+    monkeypatch.setenv("AWS_SESSION_TOKEN", "test")
+    monkeypatch.setenv("TIMDEX_OPENSEARCH_ENDPOINT", "localhost")
+
+
+@pytest.fixture
 def test_opensearch_client():
     return configure_opensearch_client("localhost")


-@pytest.fixture()
+@pytest.fixture
 def runner():
     return CliRunner()
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 752475e..104c037 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -4,7 +4,7 @@

 from tim.cli import main

-from .conftest import my_vcr
+from .conftest import EXIT_CODES, my_vcr


 def escape_ansi(line):
@@ -19,7 +19,7 @@ def test_main_group_no_options_configures_correctly_and_invokes_result_callback(
 ):
     monkeypatch.delenv("TIMDEX_OPENSEARCH_ENDPOINT", raising=False)
     result = runner.invoke(main, ["ping"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Logger 'root' configured with level=INFO" in caplog.text
     assert "OpenSearch client configured for endpoint 'localhost'" in caplog.text
     assert "Total time to complete process" in caplog.text
@@ -31,7 +31,7 @@ def test_main_group_all_options_configures_correctly_and_invokes_result_callback
 ):
     monkeypatch.delenv("TIMDEX_OPENSEARCH_ENDPOINT", raising=False)
     result = runner.invoke(main, ["--verbose", "--url", "localhost", "ping"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Logger 'root' configured with level=DEBUG" in caplog.text
     assert "OpenSearch client configured for endpoint 'localhost'" in caplog.text
     assert "Total time to complete process" in caplog.text
@@ -42,7 +42,7 @@ def test_main_group_options_from_env_configures_correctly_and_invokes_result_cal
     caplog, runner
 ):
     result = runner.invoke(main, ["ping"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Logger 'root' configured with level=INFO" in caplog.text
     assert "OpenSearch client configured for endpoint 'localhost'" in caplog.text
     assert "Total time to complete process" in caplog.text
@@ -51,27 +51,27 @@ def test_main_group_options_from_env_configures_correctly_and_invokes_result_cal
 @my_vcr.use_cassette("get_aliases.yaml")
 def test_aliases(runner):
     result = runner.invoke(main, ["aliases"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Alias: alias-with-multiple-indexes" in result.stdout


 @my_vcr.use_cassette("get_indexes.yaml")
 def test_indexes(runner):
     result = runner.invoke(main, ["indexes"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Name: index-with-multiple-aliases" in result.stdout


 @my_vcr.use_cassette("ping_localhost.yaml")
 def test_ping(runner):
     result = runner.invoke(main, ["ping"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Name: docker-cluster" in result.stdout


 def test_create_index_neither_name_nor_source_passed(runner):
     result = runner.invoke(main, ["create"])
-    assert result.exit_code == 2
+    assert result.exit_code == EXIT_CODES["invalid_command"]
     assert "Must provide either a name or source for the new index." in result.stdout


@@ -80,7 +80,7 @@ def test_create_index_name_and_source_passed(runner):
         main,
         ["create", "--index", "aspace-2022-09-01t12-34-56", "--source", "aspace"],
     )
-    assert result.exit_code == 2
+    assert result.exit_code == EXIT_CODES["invalid_command"]
     assert (
         "Only one of --index and --source options is allowed, not both."
         in escape_ansi(result.stdout)
@@ -89,18 +89,18 @@ def test_create_index_name_and_source_passed(runner):

 def test_create_index_invalid_name_passed(runner):
     result = runner.invoke(main, ["create", "--index", "wrong"])
-    assert result.exit_code == 2
+    assert result.exit_code == EXIT_CODES["invalid_command"]


 def test_create_index_invalid_source_passed(runner):
     result = runner.invoke(main, ["create", "--source", "wrong"])
-    assert result.exit_code == 2
+    assert result.exit_code == EXIT_CODES["invalid_command"]


 @my_vcr.use_cassette("cli/create_index_exists.yaml")
 def test_create_index_exists(caplog, runner):
     result = runner.invoke(main, ["create", "--index", "aspace-2022-09-20t15-59-38"])
-    assert result.exit_code == 1
+    assert result.exit_code == EXIT_CODES["error"]
     assert (
         "tim.cli",
         40,
@@ -113,14 +113,14 @@ def test_create_index_exists(caplog, runner):
 @my_vcr.use_cassette("cli/create_index_success.yaml")
 def test_create_index_success(caplog, runner):
     result = runner.invoke(main, ["create", "--source", "aspace"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index 'aspace-2022-09-01t00-00-00' created." in caplog.text


 @my_vcr.use_cassette("delete_success.yaml")
 def test_delete_index_with_force(runner):
     result = runner.invoke(main, ["delete", "-i", "test-index", "-f"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index 'test-index' deleted." in result.stdout


@@ -128,7 +128,7 @@ def test_delete_index_with_force(runner):
 def test_delete_index_with_confirmation(monkeypatch, runner):
     monkeypatch.setattr("builtins.input", lambda _: "y")
     result = runner.invoke(main, ["delete", "-i", "test-index"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index 'test-index' deleted." in result.stdout


@@ -136,14 +136,14 @@ def test_delete_index_with_confirmation(monkeypatch, runner):
 def test_delete_index_without_confirmation(monkeypatch, runner):
     monkeypatch.setattr("builtins.input", lambda _: "n")
     result = runner.invoke(main, ["delete", "-i", "test-index"])
-    assert result.exit_code == 1
+    assert result.exit_code == EXIT_CODES["error"]
     assert "Ok, index will not be deleted." in result.stdout


 @my_vcr.use_cassette("demote_no_aliases_for_index.yaml")
 def test_demote_index_no_aliases_for_index(runner):
     result = runner.invoke(main, ["demote", "-i", "test-index"])
-    assert result.exit_code == 1
+    assert result.exit_code == EXIT_CODES["error"]
     assert (
         "Index 'test-index' has no aliases, please check aliases and try again."
         in result.stdout
@@ -154,7 +154,7 @@ def test_demote_index_no_aliases_for_index(runner):
 def test_demote_index_from_primary_alias_with_confirmation(monkeypatch, runner):
     monkeypatch.setattr("builtins.input", lambda _: "y")
     result = runner.invoke(main, ["demote", "-i", "test-index"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index 'test-index' demoted from aliases: ['all-current']" in result.stdout


@@ -162,21 +162,21 @@ def test_demote_index_from_primary_alias_with_confirmation(monkeypatch, runner):
 def test_demote_index_from_primary_alias_without_confirmation(monkeypatch, runner):
     monkeypatch.setattr("builtins.input", lambda _: "n")
     result = runner.invoke(main, ["demote", "-i", "test-index"])
-    assert result.exit_code == 1
+    assert result.exit_code == EXIT_CODES["error"]
     assert "Ok, index will not be demoted." in result.stdout


 @my_vcr.use_cassette("demote_no_primary_alias.yaml")
 def test_demote_index_no_primary_alias(runner):
     result = runner.invoke(main, ["demote", "-i", "test-index"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index 'test-index' demoted from aliases: ['not-primary']" in result.stdout


 @my_vcr.use_cassette("promote_index.yaml")
 def test_promote_index(caplog, runner):
     result = runner.invoke(main, ["promote", "-i", "testsource-index"])
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert "Index promoted" in caplog.text


@@ -195,7 +195,7 @@ def test_bulk_index_with_index_name_success(caplog, runner):
             "tests/fixtures/sample_records.json",
         ],
     )
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert (
         "Bulk indexing records from file 'tests/fixtures/sample_records.json' into "
         "index 'dspace-2022-09-01t00-00-00'" in caplog.text
@@ -210,7 +210,7 @@ def test_bulk_index_with_source_success(caplog, runner):
         main,
         ["bulk-index", "--source", "dspace", "tests/fixtures/sample_records.json"],
     )
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert (
         "Bulk indexing records from file 'tests/fixtures/sample_records.json' into "
         "index 'dspace-2022-09-01t00-00-00'" in caplog.text
@@ -230,7 +230,7 @@ def test_bulk_delete_with_index_name_success(caplog, runner):
             "tests/fixtures/sample_deleted_records.txt",
         ],
     )
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert (
         "Bulk deleting records in file 'tests/fixtures/sample_deleted_records.txt' "
         "from index 'alma-2022-09-01t00-00-00'" in caplog.text
@@ -250,7 +250,7 @@ def test_bulk_delete_with_source_success(caplog, runner):
             "tests/fixtures/sample_deleted_records.txt",
         ],
     )
-    assert result.exit_code == 0
+    assert result.exit_code == EXIT_CODES["success"]
     assert (
         "Bulk deleting records in file 'tests/fixtures/sample_deleted_records.txt' "
         "from index 'alma-2022-09-01t00-00-00'" in caplog.text
diff --git a/tests/test_config.py b/tests/test_config.py
index efb7e68..b243b86 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -18,14 +18,15 @@ def test_configure_index_settings():
 def test_configure_logger_not_verbose():
     logger = logging.getLogger(__name__)
     result = configure_logger(logger, verbose=False)
-    assert logger.getEffectiveLevel() == 20
+
+    assert logger.getEffectiveLevel() == logging.INFO
     assert result == "Logger 'tests.test_config' configured with level=INFO"


 def test_configure_logger_verbose():
     logger = logging.getLogger(__name__)
     result = configure_logger(logger, verbose=True)
-    assert logger.getEffectiveLevel() == 10
+    assert logger.getEffectiveLevel() == logging.DEBUG
     assert result == "Logger 'tests.test_config' configured with level=DEBUG"


diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index b8776ad..05fd685 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -9,18 +9,18 @@

 def test_confirm_action_yes(monkeypatch):
     monkeypatch.setattr("builtins.input", lambda _: "Y")
-    assert helpers.confirm_action("test-index", "delete") is True
+    assert helpers.confirm_action("delete test-index") is True


 def test_confirm_action_no(monkeypatch):
     monkeypatch.setattr("builtins.input", lambda _: "n")
-    assert helpers.confirm_action("test-index", "delete") is False
+    assert helpers.confirm_action("delete test-index") is False


 def test_confirm_action_invalid(capsys, monkeypatch):
     inputs = iter(["wrong", "y"])
     monkeypatch.setattr("builtins.input", lambda _: next(inputs))
-    assert helpers.confirm_action("test-index", "delete") is True
+    assert helpers.confirm_action("delete test-index") is True
     out, _ = capsys.readouterr()
     assert out == "Invalid input: 'wrong', must be one of 'y' or 'n'.\n"

@@ -54,9 +54,8 @@
 def test_generate_bulk_actions_invalid_action_raises_error():
     records = [{"timdex_record_id": "12345", "other_fields": "some_data"}]
     actions = helpers.generate_bulk_actions("test-index", records, "wrong")
-    with pytest.raises(ValueError) as error:
+    with pytest.raises(ValueError, match="Invalid action parameter"):
         next(actions)
-    assert "Invalid action parameter" in str(error.value)


 def test_get_source_from_index():
@@ -69,7 +68,8 @@ def test_get_source_from_index_without_dash():

 def test_parse_records():
     records = list(helpers.parse_records("tests/fixtures/sample_records.json"))
-    assert len(records) == 6
+    n_sample_records = 6
+    assert len(records) == n_sample_records
     assert isinstance(records[0], dict)


@@ -77,45 +77,47 @@ def test_parse_deleted_records():
     records = list(
         helpers.parse_deleted_records("tests/fixtures/sample_deleted_records.txt")
     )
-    assert len(records) == 3
+    n_sample_deleted_records = 3
+    assert len(records) == n_sample_deleted_records
     assert isinstance(records[0], dict)


 def test_validate_bulk_cli_options_neither_index_nor_source_passed(
     test_opensearch_client,
 ):
-    with pytest.raises(UsageError) as error:
+    with pytest.raises(
+        UsageError, match="Must provide either an existing index name or a valid source."
+    ):
         helpers.validate_bulk_cli_options(None, None, test_opensearch_client)
-    assert "Must provide either an existing index name or a valid source." == str(
-        error.value
-    )


 def test_validate_bulk_cli_options_index_and_source_passed(test_opensearch_client):
-    with pytest.raises(UsageError) as error:
+    with pytest.raises(
+        UsageError, match="Only one of --index and --source options is allowed, not both."
+    ):
         helpers.validate_bulk_cli_options(
             "index-name", "source-name", test_opensearch_client
         )
-    assert "Only one of --index and --source options is allowed, not both." == str(
-        error.value
-    )


 @my_vcr.use_cassette("helpers/bulk_cli_nonexistent_index.yaml")
 def test_validate_bulk_cli_options_nonexistent_index_passed(test_opensearch_client):
-    with pytest.raises(BadParameter) as error:
+    with pytest.raises(
+        BadParameter, match="Index 'wrong' does not exist in the cluster."
+    ):
         helpers.validate_bulk_cli_options("wrong", None, test_opensearch_client)
-    assert "Index 'wrong' does not exist in the cluster." == str(error.value)


 @my_vcr.use_cassette("helpers/bulk_cli_no_primary_index_for_source.yaml")
 def test_validate_bulk_cli_options_no_primary_index_for_source(test_opensearch_client):
-    with pytest.raises(BadParameter) as error:
+    with pytest.raises(
+        BadParameter,
+        match=(
+            "No index name was passed and there is no "
+            "primary-aliased index for source 'dspace'."
+        ),
+    ):
         helpers.validate_bulk_cli_options(None, "dspace", test_opensearch_client)
-    assert (
-        "No index name was passed and there is no primary-aliased index for source "
-        "'dspace'." == str(error.value)
-    )


 def test_validate_index_name_no_value():
diff --git a/tests/test_opensearch.py b/tests/test_opensearch.py
index cac8416..4b4503a 100644
--- a/tests/test_opensearch.py
+++ b/tests/test_opensearch.py
@@ -31,7 +31,7 @@ def test_configure_opensearch_client_for_local_opensearch_host():


 @mock.patch("boto3.session.Session")
-def test_configure_opensearch_client_for_aws(mocked_boto3_session):  # noqa
+def test_configure_opensearch_client_for_aws(mocked_boto3_session):
     result = tim_os.configure_opensearch_client("fake-dev.us-east-1.es.amazonaws.com")
     assert (
         str(result) == "<OpenSearch([{'host': 'fake-dev.us-east-1.es.amazonaws.com', "
diff --git a/tim/cli.py b/tim/cli.py
--- a/tim/cli.py
+++ b/tim/cli.py
-def main(ctx: click.Context, url: str, verbose: bool) -> None:
-    """
-    TIM provides commands for interacting with OpenSearch indexes.
+def main(ctx: click.Context, url: str, *, verbose: bool) -> None:
+    """TIM provides commands for interacting with OpenSearch indexes.

     For more details on a specific command, run tim COMMAND -h.
     """
     ctx.ensure_object(dict)
     ctx.obj["START_TIME"] = perf_counter()
     root_logger = logging.getLogger()
-    logger.info(configure_logger(root_logger, verbose))
+    logger.info(configure_logger(root_logger, verbose=verbose))
     logger.info(configure_sentry())
     ctx.obj["CLIENT"] = tim_os.configure_opensearch_client(url)
     logger.info("OpenSearch client configured for endpoint '%s'", url)
@@ -64,9 +63,7 @@ def main(ctx: click.Context, url: str, verbose: bool) -> None:

 @main.result_callback()
 @click.pass_context
-def log_process_time(
-    ctx: click.Context, result: Optional[object], **kwargs: dict  # noqa
-) -> None:
+def log_process_time(ctx: click.Context, _result: object, **_kwargs: dict) -> None:
     elapsed_time = perf_counter() - ctx.obj["START_TIME"]
     logger.info(
         "Total time to complete process: %s", str(timedelta(seconds=elapsed_time))
@@ -90,8 +87,7 @@ def aliases(ctx: click.Context) -> None:
 @main.command()
 @click.pass_context
 def indexes(ctx: click.Context) -> None:
-    """
-    Display summary information about all indexes in the cluster.
+    """Display summary information about all indexes in the cluster.

     Prints all indexes in the cluster in alphabetical order by name. For each index,
     displays information including its status, health, number of documents, primary
@@ -126,9 +122,8 @@ def ping(ctx: click.Context) -> None:
     "the configured sources list.",
 )
 @click.pass_context
-def create(ctx: click.Context, index: Optional[str], source: Optional[str]) -> None:
-    """
-    Create a new index in the cluster.
+def create(ctx: click.Context, index: str, source: str) -> None:
+    """Create a new index in the cluster.

     Must provide either the index name or source option. If source is provided, will
     create an index named according to our convention with the source and a generated
@@ -149,8 +144,8 @@ def create(ctx: click.Context, index: Optional[str], source: Optional[str]) -> N
     try:
         new_index = tim_os.create_index(ctx.obj["CLIENT"], str(index))
     except errors.IndexExistsError as error:
-        logger.error(error)
-        raise click.Abort()
+        logger.error(error)  # noqa: TRY400
+        raise click.Abort from error
     logger.info("Index '%s' created.", new_index)
     ctx.invoke(indexes)

@@ -169,7 +164,7 @@ def create(ctx: click.Context, index: Optional[str], source: Optional[str]) -> N
     help="Pass to disable user confirmation prompt.",
 )
 @click.pass_context
-def delete(ctx: click.Context, index: str, force: bool) -> None:
+def delete(ctx: click.Context, index: str, *, force: bool) -> None:
     """Delete an index.

     Will prompt for confirmation before index deletion unless the --force option is
@@ -177,14 +172,14 @@ def delete(ctx: click.Context, index: str, force: bool) -> None:
     """
     client = ctx.obj["CLIENT"]
     if force or helpers.confirm_action(
-        index, f"Are you sure you want to delete index '{index}'?"
+        f"Are you sure you want to delete index '{index}'?"
     ):
         tim_os.delete_index(client, index)
         click.echo(f"Index '{index}' deleted.")
         ctx.invoke(indexes)
     else:
         click.echo("Ok, index will not be deleted.")
-        raise click.Abort()
+        raise click.Abort


 @main.command()
@@ -206,15 +201,13 @@ def demote(ctx: click.Context, index: str) -> None:
     index_aliases = tim_os.get_index_aliases(client, index) or []
     if not index_aliases:
         click.echo(f"Index '{index}' has no aliases, please check aliases and try again.")
-        raise click.Abort()
-    if PRIMARY_ALIAS in index_aliases:
-        if not helpers.confirm_action(
-            index,
-            f"Are you sure you want to demote index '{index}' from the primary alias "
-            "without promoting another index for the source?",
-        ):
-            click.echo("Ok, index will not be demoted.")
-            raise click.Abort()
+        raise click.Abort
+    if PRIMARY_ALIAS in index_aliases and not helpers.confirm_action(
+        f"Are you sure you want to demote index '{index}' from the primary alias "
+        "without promoting another index for the source?",
+    ):
+        click.echo("Ok, index will not be demoted.")
+        raise click.Abort
     for alias in index_aliases:
         tim_os.remove_alias(client, index, alias)
     click.echo(f"Index '{index}' demoted from aliases: {index_aliases}")
@@ -236,9 +229,8 @@ def demote(ctx: click.Context, index: str) -> None:
     "be repeated to promote the index to multiple aliases at once.",
 )
 @click.pass_context
-def promote(ctx: click.Context, index: str, alias: Optional[list[str]]) -> None:
-    """
-    Promote an index to the primary alias and add it to any additional provided aliases.
+def promote(ctx: click.Context, index: str, alias: list[str]) -> None:
+    """Promote index as the primary alias and add it to any additional provided aliases.

     This command promotes an index to the primary alias, any alias that already has an
     index for the same source, and any additional alias(es) passed to the command. If
@@ -270,11 +262,8 @@ def promote(ctx: click.Context, index: str, alias: Optional[list[str]]) -> None:
 )
 @click.argument("filepath", type=click.Path())
 @click.pass_context
-def bulk_index(
-    ctx: click.Context, index: Optional[str], source: Optional[str], filepath: str
-) -> None:
-    """
-    Bulk index records into an index.
+def bulk_index(ctx: click.Context, index: str, source: str, filepath: str) -> None:
+    """Bulk index records into an index.

     Must provide either the name of an existing index in the cluster or a valid source.
     If source is provided, will index records into the primary-aliased index for the
@@ -316,11 +305,8 @@ def bulk_index(
 )
 @click.argument("filepath", type=click.Path())
 @click.pass_context
-def bulk_delete(
-    ctx: click.Context, index: Optional[str], source: Optional[str], filepath: str
-) -> None:
-    """
-    Bulk delete records from an index.
+def bulk_delete(ctx: click.Context, index: str, source: str, filepath: str) -> None:
+    """Bulk delete records from an index.

     Must provide either the name of an existing index in the cluster or a valid source.
     If source is provided, will delete records from the primary-aliased index for the
diff --git a/tim/config.py b/tim/config.py
index fc24cb8..1131dc2 100644
--- a/tim/config.py
+++ b/tim/config.py
@@ -26,12 +26,12 @@


 def configure_index_settings() -> tuple:
-    with open("config/opensearch_mappings.json", "r", encoding="utf-8") as file:
+    with open("config/opensearch_mappings.json", encoding="utf-8") as file:
         all_settings = json.load(file)
         return all_settings["mappings"], all_settings["settings"]


-def configure_logger(logger: logging.Logger, verbose: bool) -> str:
+def configure_logger(logger: logging.Logger, *, verbose: bool) -> str:
     if verbose:
         logging.basicConfig(
             format="%(asctime)s %(levelname)s %(name)s.%(funcName)s() line %(lineno)d: "
diff --git a/tim/helpers.py b/tim/helpers.py
index 098c772..2ded170 100644
--- a/tim/helpers.py
+++ b/tim/helpers.py
@@ -1,5 +1,5 @@
-from datetime import datetime
-from typing import Generator, Iterator, Optional
+from collections.abc import Generator, Iterator
+from datetime import UTC, datetime

 import click
 import ijson
@@ -9,15 +9,15 @@
 from tim.config import VALID_BULK_OPERATIONS, VALID_SOURCES


-def confirm_action(index: str, input_prompt: str) -> bool:
+def confirm_action(input_prompt: str) -> bool:
     """Get user confirmation via the provided input prompt."""
     check = input(f"{input_prompt} [y/n]: ")
     if check.lower() == "y":
         return True
     if check.lower() == "n":
         return False
-    print(f"Invalid input: '{check}', must be one of 'y' or 'n'.")
-    return confirm_action(index, input_prompt)
+    click.echo(f"Invalid input: '{check}', must be one of 'y' or 'n'.")
+    return confirm_action(input_prompt)


 def generate_index_name(source: str) -> str:
@@ -27,7 +27,7 @@ def generate_index_name(source: str) -> str:
     'source-YYYY-MM-DDthh-mm-ss' where the datetime is the datetime this operation is
     run.
     """
-    return f"{source}-{datetime.now().strftime('%Y-%m-%dt%H-%M-%S')}"
+    return f"{source}-{datetime.now(tz=UTC).strftime('%Y-%m-%dt%H-%M-%S')}"


 def generate_bulk_actions(
@@ -41,10 +41,11 @@
     Each record must contain the "timdex_record_id" field.
     """
     if action not in VALID_BULK_OPERATIONS:
-        raise ValueError(
+        message = (
             f"Invalid action parameter, must be one of {VALID_BULK_OPERATIONS}. Action "
             f"passed was '{action}'"
         )
+        raise ValueError(message)
     for record in records:
         doc = {
             "_op_type": action,
@@ -67,7 +68,7 @@ def parse_records(filepath: str) -> Generator[dict, None, None]:
     representing individual records in our standard TIMDEX record JSON format.
""" with smart_open.open(filepath, "rb") as json_input: - for item in ijson.items(json_input, "item"): + for item in ijson.items(json_input, "item"): # noqa: UP028 yield item @@ -83,32 +84,32 @@ def parse_deleted_records(filepath: str) -> Generator[dict, None, None]: def validate_bulk_cli_options( - index: Optional[str], source: Optional[str], client: tim_os.OpenSearch + index: str | None, source: str, client: tim_os.OpenSearch ) -> str: options = [index, source] if all(options): - raise click.UsageError( - "Only one of --index and --source options is allowed, not both." - ) + message = "Only one of --index and --source options is allowed, not both." + raise click.UsageError(message) if not any(options): - raise click.UsageError( - "Must provide either an existing index name or a valid source." - ) + message = "Must provide either an existing index name or a valid source." + raise click.UsageError(message) if index and not client.indices.exists(index): - raise click.BadParameter(f"Index '{index}' does not exist in the cluster.") + message = f"Index '{index}' does not exist in the cluster." + raise click.BadParameter(message) if source: index = tim_os.get_primary_index_for_source(client, source) if not index: - raise click.BadParameter( + message = ( "No index name was passed and there is no primary-aliased index for " f"source '{source}'." ) + raise click.BadParameter(message) return index def validate_index_name( - ctx: click.Context, parameter_name: str, value: Optional[str] # noqa -) -> Optional[str]: + ctx: click.Context, parameter_name: str, value: str # noqa: ARG001 +) -> str: """Click callback to validate a provided index name against our business rules.""" if value is None: return value @@ -116,20 +117,23 @@ def validate_index_name( source_end = value.index("-") date_start = source_end + 1 except ValueError as error: - raise click.BadParameter( + message = ( "Index name must be in the format -, e.g. " "'aspace-2022-01-01t12:34:56'." - ) from error + ) + raise click.BadParameter(message) from error if value[:source_end] not in VALID_SOURCES: - raise click.BadParameter( + message = ( "Source in index name must be a valid configured source, one of: " f"{VALID_SOURCES}" ) + raise click.BadParameter(message) try: - datetime.strptime(value[date_start:], "%Y-%m-%dt%H-%M-%S") + datetime.strptime(value[date_start:], "%Y-%m-%dt%H-%M-%S").astimezone() except ValueError as error: - raise click.BadParameter( + message = ( "Date in index name must be in the format 'YYYY-MM-DDthh-mm-ss', e.g. " "'aspace_2022-01-01t12:34:56'." - ) from error + ) + raise click.BadParameter(message) from error return value diff --git a/tim/opensearch.py b/tim/opensearch.py index 525cdba..5b29643 100644 --- a/tim/opensearch.py +++ b/tim/opensearch.py @@ -1,7 +1,7 @@ import json import logging import os -from typing import Iterator, Optional +from collections.abc import Iterator import boto3 from opensearchpy import AWSV4SignerAuth, OpenSearch, RequestsHttpConnection @@ -70,7 +70,7 @@ def get_formatted_info(client: OpenSearch) -> str: ) -def get_aliases(client: OpenSearch) -> Optional[dict[str, list[str]]]: +def get_aliases(client: OpenSearch) -> dict[str, list[str]] | None: """Return all aliases with their associated indexes. Returns None if there are no aliases in the cluster. 
@@ -95,7 +95,7 @@ def get_formatted_aliases(client: OpenSearch) -> str:
     return output


-def get_indexes(client: OpenSearch) -> Optional[dict[str, dict]]:
+def get_indexes(client: OpenSearch) -> dict[str, dict] | None:
     """Return all indexes with their summary information."""
     response = client.cat.indices(format="json")
     logger.debug(response)
@@ -131,7 +131,7 @@ def get_formatted_indexes(client: OpenSearch) -> str:

 def get_all_aliased_indexes_for_source(
     client: OpenSearch, source: str
-) -> Optional[dict[str, list[str]]]:
+) -> dict[str, list[str]] | None:
     """Return all aliased indexes for the source, grouped by alias.

     Returns a dict of the aliases with a list of the source index(es) for each. There
@@ -180,7 +180,7 @@ def create_index(client: OpenSearch, name: str) -> str:
             == "resource_already_exists_exception"
         ):
             raise IndexExistsError(name) from error
-        raise error
+        raise
     logger.debug(response)
     return response["index"]

@@ -195,7 +195,7 @@ def delete_index(client: OpenSearch, index: str) -> None:


 def get_or_create_index_from_source(
-    client: OpenSearch, source: str, new: bool
+    client: OpenSearch, source: str, *, new: bool
 ) -> tuple[str, bool]:
     """Get the primary index for the provided source or create a new index.

@@ -217,7 +217,7 @@ def get_or_create_index_from_source(
     return create_index(client, new_index_name), True


-def get_index_aliases(client: OpenSearch, index: str) -> Optional[list[str]]:
+def get_index_aliases(client: OpenSearch, index: str) -> list[str] | None:
     """Return a sorted list of aliases assigned to an index.

     Returns None if the index has no aliases.
@@ -231,7 +231,7 @@ def get_index_aliases(client: OpenSearch, index: str) -> Optional[list[str]]:
     return sorted(aliases.keys()) or None


-def get_primary_index_for_source(client: OpenSearch, source: str) -> Optional[str]:
+def get_primary_index_for_source(client: OpenSearch, source: str) -> str | None:
     """Get the primary index for the provided source.

     Returns None if there is no primary index for the source.
@@ -241,7 +241,7 @@ def get_primary_index_for_source(client: OpenSearch, source: str) -> Optional[st


 def promote_index(
-    client: OpenSearch, index: str, extra_aliases: Optional[list[str]] = None
+    client: OpenSearch, index: str, extra_aliases: list[str] | None = None
 ) -> None:
     """Promote an index to all relevant aliases.

@@ -261,9 +261,7 @@ def promote_index(
         get_all_aliased_indexes_for_source(client, source) or {}
     )
     new_aliases = list(extra_aliases) if extra_aliases else []
-    all_aliases = set(
-        [PRIMARY_ALIAS] + list(current_aliased_source_indexes.keys()) + new_aliases
-    )
+    all_aliases = {PRIMARY_ALIAS, *current_aliased_source_indexes.keys(), *new_aliases}
     request_body = {
         "actions": [{"add": {"index": index, "alias": alias}} for alias in all_aliases]
     }
@@ -289,7 +287,7 @@ def promote_index(
             and error.info.get("error", []).get("type") == "index_not_found_exception"
         ):
             raise IndexNotFoundError(index=index) from error
-        raise error
+        raise


 def remove_alias(client: OpenSearch, index: str, alias: str) -> None:
@@ -308,7 +306,7 @@ def remove_alias(client: OpenSearch, index: str, alias: str) -> None:
             and error.info.get("error", []).get("type") == "aliases_not_found_exception"
         ):
             raise AliasNotFoundError(alias=alias, index=index) from error
-        raise error
+        raise


 # Record functions
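
Note (editor's addition, not part of the diff above): the changes standardize three patterns across the codebase -- keyword-only boolean parameters (the bare `*` in signatures), `pytest.raises(..., match=...)` instead of inspecting `error.value` by hand, and assigning an error message to a variable before raising. A minimal, self-contained sketch of those patterns follows; the function and test names are hypothetical and do not exist in this repository:

    # sketch.py -- illustrative only; these names are not part of tim/
    import pytest


    def confirm(prompt: str, *, force: bool = False) -> bool:
        """Keyword-only flag: callers must write confirm(prompt, force=True)."""
        if force:
            return True
        return input(f"{prompt} [y/n]: ").strip().lower() == "y"


    def require_index(index: str | None) -> str:
        """Assign the message to a variable before raising, per the ruff EM rules."""
        if index is None:
            message = "An index name is required."
            raise ValueError(message)
        return index


    def test_require_index_rejects_none():
        # match= is a regex searched against str(exception)
        with pytest.raises(ValueError, match="index name is required"):
            require_index(None)

Keyword-only booleans keep call sites self-describing (`delete(index, force=True)` rather than a bare `True`), and `match=` folds the old two-step raise-then-assert into a single context manager.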