From 6b0518b106f0804e2db000d5caf7dee36968c20b Mon Sep 17 00:00:00 2001 From: dblock Date: Thu, 26 Oct 2023 12:32:40 -0400 Subject: [PATCH 01/17] Merged types into .py code. Signed-off-by: dblock --- MANIFEST.in | 2 +- benchmarks/bench_async.py | 16 +- benchmarks/bench_info_sync.py | 10 +- benchmarks/bench_sync.py | 10 +- noxfile.py | 21 +- opensearchpy/__init__.py | 18 +- opensearchpy/__init__.pyi | 132 -- opensearchpy/_async/client/__init__.py | 348 ++++- opensearchpy/_async/client/__init__.pyi | 1139 ----------------- opensearchpy/_async/client/_patch.py | 23 +- opensearchpy/_async/client/_patch.pyi | 71 - opensearchpy/_async/client/cat.py | 466 ++++--- opensearchpy/_async/client/cat.pyi | 601 --------- opensearchpy/_async/client/client.py | 42 + opensearchpy/_async/client/cluster.py | 147 ++- opensearchpy/_async/client/cluster.pyi | 456 ------- .../_async/client/dangling_indices.py | 22 +- .../_async/client/dangling_indices.pyi | 99 -- opensearchpy/_async/client/features.py | 6 +- opensearchpy/_async/client/features.pyi | 66 - opensearchpy/_async/client/indices.py | 365 +++++- opensearchpy/_async/client/indices.pyi | 1097 ---------------- opensearchpy/_async/client/ingest.py | 38 +- opensearchpy/_async/client/ingest.pyi | 136 -- opensearchpy/_async/client/nodes.py | 42 +- opensearchpy/_async/client/nodes.pyi | 149 --- opensearchpy/_async/client/plugins.py | 9 +- opensearchpy/_async/client/plugins.pyi | 19 - opensearchpy/_async/client/remote.py | 4 +- opensearchpy/_async/client/remote.pyi | 46 - opensearchpy/_async/client/remote_store.py | 28 +- opensearchpy/_async/client/remote_store.pyi | 42 - opensearchpy/_async/client/security.py | 357 +++++- opensearchpy/_async/client/security.pyi | 821 ------------ opensearchpy/_async/client/snapshot.py | 90 +- opensearchpy/_async/client/snapshot.pyi | 272 ---- opensearchpy/_async/client/tasks.py | 21 +- opensearchpy/_async/client/tasks.pyi | 104 -- opensearchpy/_async/client/utils.py | 10 + opensearchpy/_async/client/utils.pyi | 41 - opensearchpy/_async/compat.py | 2 +- opensearchpy/_async/compat.pyi | 30 - opensearchpy/_async/helpers/actions.py | 163 ++- opensearchpy/_async/helpers/actions.pyi | 115 -- opensearchpy/_async/helpers/document.py | 130 +- opensearchpy/_async/helpers/document.pyi | 15 - opensearchpy/_async/helpers/faceted_search.py | 42 +- .../_async/helpers/faceted_search.pyi | 11 - opensearchpy/_async/helpers/index.py | 125 +- opensearchpy/_async/helpers/index.pyi | 12 - opensearchpy/_async/helpers/mapping.py | 50 +- opensearchpy/_async/helpers/mapping.pyi | 11 - opensearchpy/_async/helpers/search.py | 71 +- opensearchpy/_async/helpers/search.pyi | 14 - opensearchpy/_async/helpers/test.py | 8 +- opensearchpy/_async/helpers/test.pyi | 20 - .../_async/helpers/update_by_query.py | 24 +- .../_async/helpers/update_by_query.pyi | 13 - opensearchpy/_async/http_aiohttp.py | 98 +- opensearchpy/_async/http_aiohttp.pyi | 73 -- opensearchpy/_async/plugins/__init__.pyi | 9 - opensearchpy/_async/plugins/alerting.py | 86 +- opensearchpy/_async/plugins/alerting.pyi | 83 -- .../_async/plugins/index_management.py | 34 +- .../_async/plugins/index_management.pyi | 72 -- opensearchpy/_async/transport.py | 84 +- opensearchpy/_async/transport.pyi | 91 -- opensearchpy/_version.py | 2 +- opensearchpy/client/__init__.py | 347 ++++- opensearchpy/client/__init__.pyi | 1139 ----------------- opensearchpy/client/_patch.py | 21 +- opensearchpy/client/_patch.pyi | 71 - opensearchpy/client/cat.py | 466 ++++--- opensearchpy/client/cat.pyi | 601 --------- 
opensearchpy/client/client.py | 42 + opensearchpy/client/cluster.py | 147 ++- opensearchpy/client/cluster.pyi | 456 ------- opensearchpy/client/dangling_indices.py | 22 +- opensearchpy/client/dangling_indices.pyi | 99 -- opensearchpy/client/features.py | 6 +- opensearchpy/client/features.pyi | 66 - opensearchpy/client/indices.py | 365 +++++- opensearchpy/client/indices.pyi | 1097 ---------------- opensearchpy/client/ingest.py | 38 +- opensearchpy/client/ingest.pyi | 136 -- opensearchpy/client/nodes.py | 42 +- opensearchpy/client/nodes.pyi | 149 --- opensearchpy/client/plugins.py | 9 +- opensearchpy/client/plugins.pyi | 19 - opensearchpy/client/remote.py | 4 +- opensearchpy/client/remote.pyi | 46 - opensearchpy/client/remote_store.py | 28 +- opensearchpy/client/remote_store.pyi | 42 - opensearchpy/client/security.py | 357 +++++- opensearchpy/client/security.pyi | 821 ------------ opensearchpy/client/snapshot.py | 90 +- opensearchpy/client/snapshot.pyi | 272 ---- opensearchpy/client/tasks.py | 21 +- opensearchpy/client/tasks.pyi | 104 -- opensearchpy/client/utils.py | 29 +- opensearchpy/client/utils.pyi | 68 - opensearchpy/compat.py | 14 +- opensearchpy/compat.pyi | 54 - opensearchpy/connection/__init__.pyi | 32 - opensearchpy/connection/async_connections.py | 18 +- opensearchpy/connection/async_connections.pyi | 11 - opensearchpy/connection/base.py | 136 +- opensearchpy/connection/base.pyi | 119 -- opensearchpy/connection/connections.py | 18 +- opensearchpy/connection/connections.pyi | 30 - opensearchpy/connection/http_async.py | 66 +- opensearchpy/connection/http_async.pyi | 38 - opensearchpy/connection/http_requests.py | 65 +- opensearchpy/connection/http_requests.pyi | 51 - opensearchpy/connection/http_urllib3.py | 69 +- opensearchpy/connection/http_urllib3.pyi | 65 - opensearchpy/connection/pooling.py | 14 +- opensearchpy/connection/pooling.pyi | 34 - opensearchpy/connection_pool.py | 73 +- opensearchpy/connection_pool.pyi | 86 -- opensearchpy/exceptions.py | 27 +- opensearchpy/exceptions.pyi | 64 - opensearchpy/helpers/__init__.pyi | 46 - opensearchpy/helpers/actions.py | 172 +-- opensearchpy/helpers/actions.pyi | 137 -- opensearchpy/helpers/aggs.py | 65 +- opensearchpy/helpers/aggs.pyi | 105 -- opensearchpy/helpers/analysis.py | 75 +- opensearchpy/helpers/analysis.pyi | 50 - opensearchpy/helpers/asyncsigner.py | 16 +- opensearchpy/helpers/asyncsigner.pyi | 19 - opensearchpy/helpers/document.py | 128 +- opensearchpy/helpers/document.pyi | 37 - opensearchpy/helpers/errors.py | 12 +- opensearchpy/helpers/errors.pyi | 38 - opensearchpy/helpers/faceted_search.py | 99 +- opensearchpy/helpers/faceted_search.pyi | 37 - opensearchpy/helpers/field.py | 218 ++-- opensearchpy/helpers/field.pyi | 71 - opensearchpy/helpers/function.py | 18 +- opensearchpy/helpers/function.pyi | 41 - opensearchpy/helpers/index.py | 128 +- opensearchpy/helpers/index.pyi | 29 - opensearchpy/helpers/mapping.py | 65 +- opensearchpy/helpers/mapping.pyi | 31 - opensearchpy/helpers/query.py | 47 +- opensearchpy/helpers/query.pyi | 96 -- opensearchpy/helpers/response/__init__.py | 41 +- opensearchpy/helpers/response/__init__.pyi | 32 - opensearchpy/helpers/response/aggs.py | 21 +- opensearchpy/helpers/response/aggs.pyi | 35 - opensearchpy/helpers/response/hit.py | 15 +- opensearchpy/helpers/response/hit.pyi | 30 - opensearchpy/helpers/search.py | 153 ++- opensearchpy/helpers/search.pyi | 36 - opensearchpy/helpers/signer.py | 11 +- opensearchpy/helpers/test.py | 17 +- opensearchpy/helpers/test.pyi | 44 - 
opensearchpy/helpers/update_by_query.py | 24 +- opensearchpy/helpers/update_by_query.pyi | 30 - opensearchpy/helpers/utils.py | 164 +-- opensearchpy/helpers/utils.pyi | 33 - opensearchpy/helpers/wrappers.py | 16 +- opensearchpy/helpers/wrappers.pyi | 30 - opensearchpy/plugins/__init__.pyi | 9 - opensearchpy/plugins/alerting.py | 50 +- opensearchpy/plugins/alerting.pyi | 73 -- opensearchpy/plugins/index_management.py | 30 +- opensearchpy/plugins/index_management.pyi | 72 -- opensearchpy/serializer.py | 36 +- opensearchpy/serializer.pyi | 56 - opensearchpy/transport.py | 111 +- opensearchpy/transport.pyi | 95 -- .../advanced_index_actions_sample.py | 2 +- setup.py | 9 +- test_opensearchpy/TestHttpServer.py | 6 +- test_opensearchpy/run_tests.py | 4 +- .../test_async/test_connection.py | 77 +- .../test_async/test_helpers/conftest.py | 3 +- .../test_async/test_helpers/test_document.py | 69 +- .../test_helpers/test_faceted_search.py | 11 +- .../test_async/test_helpers/test_index.py | 23 +- .../test_async/test_helpers/test_mapping.py | 10 +- .../test_async/test_helpers/test_search.py | 45 +- .../test_helpers/test_update_by_query.py | 11 +- .../test_async/test_http_connection.py | 13 +- .../test_async/test_plugins_client.py | 2 +- .../test_async/test_server/__init__.py | 4 +- .../test_async/test_server/conftest.py | 3 +- .../test_async/test_server/test_clients.py | 11 +- .../test_server/test_helpers/conftest.py | 2 +- .../test_server/test_helpers/test_actions.py | 42 +- .../test_server/test_helpers/test_data.py | 6 +- .../test_server/test_helpers/test_document.py | 72 +- .../test_helpers/test_faceted_search.py | 21 +- .../test_server/test_helpers/test_index.py | 15 +- .../test_server/test_helpers/test_mapping.py | 9 +- .../test_server/test_helpers/test_search.py | 19 +- .../test_helpers/test_update_by_query.py | 9 +- .../test_server/test_plugins/test_alerting.py | 11 +- .../test_plugins/test_index_management.py | 11 +- .../test_server/test_rest_api_spec.py | 19 +- .../test_security_plugin.py | 35 +- test_opensearchpy/test_async/test_signer.py | 15 +- .../test_async/test_transport.py | 62 +- test_opensearchpy/test_cases.py | 14 +- test_opensearchpy/test_client/__init__.py | 39 +- test_opensearchpy/test_client/test_cluster.py | 6 +- test_opensearchpy/test_client/test_indices.py | 8 +- .../test_client/test_overrides.py | 30 +- .../test_client/test_plugins/test_alerting.py | 24 +- .../test_plugins/test_index_management.py | 18 +- .../test_plugins/test_plugins_client.py | 2 +- .../test_client/test_point_in_time.py | 16 +- .../test_client/test_remote_store.py | 2 +- .../test_client/test_requests.py | 4 +- test_opensearchpy/test_client/test_urllib3.py | 6 +- test_opensearchpy/test_client/test_utils.py | 32 +- .../test_connection/test_base_connection.py | 28 +- .../test_requests_http_connection.py | 82 +- .../test_urllib3_http_connection.py | 58 +- test_opensearchpy/test_connection_pool.py | 26 +- test_opensearchpy/test_exceptions.py | 4 +- .../test_helpers/test_actions.py | 18 +- test_opensearchpy/test_helpers/test_aggs.py | 36 +- .../test_helpers/test_analysis.py | 10 +- .../test_helpers/test_document.py | 66 +- .../test_helpers/test_faceted_search.py | 8 +- test_opensearchpy/test_helpers/test_field.py | 32 +- test_opensearchpy/test_helpers/test_index.py | 22 +- .../test_helpers/test_mapping.py | 10 +- test_opensearchpy/test_helpers/test_query.py | 106 +- test_opensearchpy/test_helpers/test_result.py | 34 +- test_opensearchpy/test_helpers/test_search.py | 50 +- .../test_helpers/test_update_by_query.py | 
10 +- test_opensearchpy/test_helpers/test_utils.py | 20 +- .../test_helpers/test_validation.py | 22 +- .../test_helpers/test_wrappers.py | 8 +- test_opensearchpy/test_serializer.py | 50 +- test_opensearchpy/test_server/__init__.py | 2 +- test_opensearchpy/test_server/test_clients.py | 6 +- .../test_server/test_helpers/conftest.py | 5 +- .../test_server/test_helpers/test_actions.py | 43 +- .../test_server/test_helpers/test_analysis.py | 6 +- .../test_server/test_helpers/test_count.py | 6 +- .../test_server/test_helpers/test_data.py | 6 +- .../test_server/test_helpers/test_document.py | 68 +- .../test_helpers/test_faceted_search.py | 18 +- .../test_server/test_helpers/test_index.py | 12 +- .../test_server/test_helpers/test_mapping.py | 8 +- .../test_server/test_helpers/test_search.py | 18 +- .../test_helpers/test_update_by_query.py | 6 +- .../test_server/test_plugins/test_alerting.py | 8 +- .../test_plugins/test_index_management.py | 8 +- .../test_server/test_rest_api_spec.py | 46 +- .../test_server_secured/test_clients.py | 2 +- .../test_security_plugin.py | 32 +- test_opensearchpy/test_transport.py | 50 +- test_opensearchpy/utils.py | 24 +- utils/build-dists.py | 6 +- utils/generate-api.py | 58 +- utils/license-headers.py | 4 +- utils/templates/base | 4 +- utils/templates/base_pyi | 2 - utils/templates/func_params | 10 +- utils/templates/func_params_pyi | 26 - 266 files changed, 6102 insertions(+), 16156 deletions(-) delete mode 100644 opensearchpy/__init__.pyi delete mode 100644 opensearchpy/_async/client/__init__.pyi delete mode 100644 opensearchpy/_async/client/_patch.pyi delete mode 100644 opensearchpy/_async/client/cat.pyi create mode 100644 opensearchpy/_async/client/client.py delete mode 100644 opensearchpy/_async/client/cluster.pyi delete mode 100644 opensearchpy/_async/client/dangling_indices.pyi delete mode 100644 opensearchpy/_async/client/features.pyi delete mode 100644 opensearchpy/_async/client/indices.pyi delete mode 100644 opensearchpy/_async/client/ingest.pyi delete mode 100644 opensearchpy/_async/client/nodes.pyi delete mode 100644 opensearchpy/_async/client/plugins.pyi delete mode 100644 opensearchpy/_async/client/remote.pyi delete mode 100644 opensearchpy/_async/client/remote_store.pyi delete mode 100644 opensearchpy/_async/client/security.pyi delete mode 100644 opensearchpy/_async/client/snapshot.pyi delete mode 100644 opensearchpy/_async/client/tasks.pyi delete mode 100644 opensearchpy/_async/client/utils.pyi delete mode 100644 opensearchpy/_async/compat.pyi delete mode 100644 opensearchpy/_async/helpers/actions.pyi delete mode 100644 opensearchpy/_async/helpers/document.pyi delete mode 100644 opensearchpy/_async/helpers/faceted_search.pyi delete mode 100644 opensearchpy/_async/helpers/index.pyi delete mode 100644 opensearchpy/_async/helpers/mapping.pyi delete mode 100644 opensearchpy/_async/helpers/search.pyi delete mode 100644 opensearchpy/_async/helpers/test.pyi delete mode 100644 opensearchpy/_async/helpers/update_by_query.pyi delete mode 100644 opensearchpy/_async/http_aiohttp.pyi delete mode 100644 opensearchpy/_async/plugins/__init__.pyi delete mode 100644 opensearchpy/_async/plugins/alerting.pyi delete mode 100644 opensearchpy/_async/plugins/index_management.pyi delete mode 100644 opensearchpy/_async/transport.pyi delete mode 100644 opensearchpy/client/__init__.pyi delete mode 100644 opensearchpy/client/_patch.pyi delete mode 100644 opensearchpy/client/cat.pyi create mode 100644 opensearchpy/client/client.py delete mode 100644 opensearchpy/client/cluster.pyi 
delete mode 100644 opensearchpy/client/dangling_indices.pyi delete mode 100644 opensearchpy/client/features.pyi delete mode 100644 opensearchpy/client/indices.pyi delete mode 100644 opensearchpy/client/ingest.pyi delete mode 100644 opensearchpy/client/nodes.pyi delete mode 100644 opensearchpy/client/plugins.pyi delete mode 100644 opensearchpy/client/remote.pyi delete mode 100644 opensearchpy/client/remote_store.pyi delete mode 100644 opensearchpy/client/security.pyi delete mode 100644 opensearchpy/client/snapshot.pyi delete mode 100644 opensearchpy/client/tasks.pyi delete mode 100644 opensearchpy/client/utils.pyi delete mode 100644 opensearchpy/compat.pyi delete mode 100644 opensearchpy/connection/__init__.pyi delete mode 100644 opensearchpy/connection/async_connections.pyi delete mode 100644 opensearchpy/connection/base.pyi delete mode 100644 opensearchpy/connection/connections.pyi delete mode 100644 opensearchpy/connection/http_async.pyi delete mode 100644 opensearchpy/connection/http_requests.pyi delete mode 100644 opensearchpy/connection/http_urllib3.pyi delete mode 100644 opensearchpy/connection/pooling.pyi delete mode 100644 opensearchpy/connection_pool.pyi delete mode 100644 opensearchpy/exceptions.pyi delete mode 100644 opensearchpy/helpers/__init__.pyi delete mode 100644 opensearchpy/helpers/actions.pyi delete mode 100644 opensearchpy/helpers/aggs.pyi delete mode 100644 opensearchpy/helpers/analysis.pyi delete mode 100644 opensearchpy/helpers/asyncsigner.pyi delete mode 100644 opensearchpy/helpers/document.pyi delete mode 100644 opensearchpy/helpers/errors.pyi delete mode 100644 opensearchpy/helpers/faceted_search.pyi delete mode 100644 opensearchpy/helpers/field.pyi delete mode 100644 opensearchpy/helpers/function.pyi delete mode 100644 opensearchpy/helpers/index.pyi delete mode 100644 opensearchpy/helpers/mapping.pyi delete mode 100644 opensearchpy/helpers/query.pyi delete mode 100644 opensearchpy/helpers/response/__init__.pyi delete mode 100644 opensearchpy/helpers/response/aggs.pyi delete mode 100644 opensearchpy/helpers/response/hit.pyi delete mode 100644 opensearchpy/helpers/search.pyi delete mode 100644 opensearchpy/helpers/test.pyi delete mode 100644 opensearchpy/helpers/update_by_query.pyi delete mode 100644 opensearchpy/helpers/utils.pyi delete mode 100644 opensearchpy/helpers/wrappers.pyi delete mode 100644 opensearchpy/plugins/__init__.pyi delete mode 100644 opensearchpy/plugins/alerting.pyi delete mode 100644 opensearchpy/plugins/index_management.pyi delete mode 100644 opensearchpy/serializer.pyi delete mode 100644 opensearchpy/transport.pyi delete mode 100644 utils/templates/base_pyi delete mode 100644 utils/templates/func_params_pyi diff --git a/MANIFEST.in b/MANIFEST.in index 40d49135..9f446e08 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,7 +5,7 @@ include LICENSE include MANIFEST.in include README.md include setup.py -recursive-include opensearch* py.typed *.pyi +recursive-include opensearch* py.typed prune test_opensearch recursive-exclude * __pycache__ diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index c7eb5714..a27a126c 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -22,7 +22,7 @@ item_count = 100 -async def index_records(client, item_count): +async def index_records(client, item_count) -> None: await asyncio.gather( *[ client.index( @@ -71,34 +71,34 @@ async def test_async(client_count=1, item_count=1): await asyncio.gather(*[client.close() for client in clients]) -def test(item_count=1, client_count=1): 
+def test(item_count: int = 1, client_count: int = 1) -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(test_async(item_count, client_count)) loop.close() -def test_1(): +def test_1() -> None: test(1, 32 * item_count) -def test_2(): +def test_2() -> None: test(2, 16 * item_count) -def test_4(): +def test_4() -> None: test(4, 8 * item_count) -def test_8(): +def test_8() -> None: test(8, 4 * item_count) -def test_16(): +def test_16() -> None: test(16, 2 * item_count) -def test_32(): +def test_32() -> None: test(32, item_count) diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 229a2e4d..29b289cd 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -75,23 +75,23 @@ def test(thread_count=1, request_count=1, client_count=1): print(f"latency={latency}") -def test_1(): +def test_1() -> None: test(1, 32 * request_count, 1) -def test_2(): +def test_2() -> None: test(2, 16 * request_count, 2) -def test_4(): +def test_4() -> None: test(4, 8 * request_count, 3) -def test_8(): +def test_8() -> None: test(8, 4 * request_count, 8) -def test_32(): +def test_32() -> None: test(32, request_count, 32) diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index e201eaba..83225ef9 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -112,23 +112,23 @@ def test(thread_count=1, item_count=1, client_count=1): print(f"{count}, latency={latency}") -def test_1(): +def test_1() -> None: test(1, 32 * item_count, 1) -def test_2(): +def test_2() -> None: test(2, 16 * item_count, 2) -def test_4(): +def test_4() -> None: test(4, 8 * item_count, 3) -def test_8(): +def test_8() -> None: test(8, 4 * item_count, 8) -def test_32(): +def test_32() -> None: test(32, item_count, 32) diff --git a/noxfile.py b/noxfile.py index a9cd9068..d503b261 100644 --- a/noxfile.py +++ b/noxfile.py @@ -41,7 +41,7 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) -def test(session): +def test(session) -> None: session.install(".") session.install("-r", "dev-requirements.txt") @@ -49,7 +49,7 @@ def test(session): @nox.session() -def format(session): +def format(session) -> None: session.install("black", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) @@ -60,8 +60,17 @@ def format(session): @nox.session() -def lint(session): - session.install("flake8", "black", "mypy", "isort", "types-requests", "types-six") +def lint(session) -> None: + session.install( + "flake8", + "black", + "mypy", + "isort", + "types-requests", + "types-six", + "types-simplejson", + "types-python-dateutil", + ) session.run("isort", "--check", "--profile=black", *SOURCE_FILES) session.run("black", "--target-version=py33", "--check", *SOURCE_FILES) @@ -85,7 +94,7 @@ def lint(session): @nox.session() -def docs(session): +def docs(session) -> None: session.install(".") session.install( "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" @@ -94,7 +103,7 @@ def docs(session): @nox.session() -def generate(session): +def generate(session) -> None: session.install("-rdev-requirements.txt") session.run("python", "utils/generate-api.py") format(session) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 8116d60a..1a5fe5cd 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -31,21 +31,25 @@ import logging import re -import sys import warnings from ._version import __versionstr__ _major, _minor, _patch = [ - int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", 
__versionstr__).groups() + int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore ] + VERSION = __version__ = (_major, _minor, _patch) logger = logging.getLogger("opensearch") logger.addHandler(logging.NullHandler()) +from ._async.client import AsyncOpenSearch +from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection +from ._async.transport import AsyncTransport from .client import OpenSearch from .connection import ( + AsyncHttpConnection, Connection, RequestsHttpConnection, Urllib3HttpConnection, @@ -142,7 +146,7 @@ from .serializer import JSONSerializer from .transport import Transport -# Only raise one warning per deprecation message so as not +# Only raise one warning per deprecation message so # to spam up the user if the same action is done multiple times. warnings.simplefilter("default", category=OpenSearchDeprecationWarning, append=True) @@ -247,14 +251,6 @@ "normalizer", "token_filter", "tokenizer", -] - -from ._async.client import AsyncOpenSearch -from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection -from ._async.transport import AsyncTransport -from .connection import AsyncHttpConnection - -__all__ += [ "AIOHttpConnection", "AsyncConnection", "AsyncTransport", diff --git a/opensearchpy/__init__.pyi b/opensearchpy/__init__.pyi deleted file mode 100644 index 96c17075..00000000 --- a/opensearchpy/__init__.pyi +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import sys -from typing import Tuple - -from ._async.client import AsyncOpenSearch as AsyncOpenSearch -from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection -from ._async.http_aiohttp import AsyncConnection as AsyncConnection -from ._async.transport import AsyncTransport as AsyncTransport -from .client import OpenSearch as OpenSearch -from .connection import AsyncHttpConnection as AsyncHttpConnection -from .connection import Connection as Connection -from .connection import RequestsHttpConnection as RequestsHttpConnection -from .connection import Urllib3HttpConnection as Urllib3HttpConnection -from .connection import connections as connections -from .connection_pool import ConnectionPool as ConnectionPool -from .connection_pool import ConnectionSelector as ConnectionSelector -from .connection_pool import RoundRobinSelector as RoundRobinSelector -from .exceptions import AuthenticationException as AuthenticationException -from .exceptions import AuthorizationException as AuthorizationException -from .exceptions import ConflictError as ConflictError -from .exceptions import ConnectionError as ConnectionError -from .exceptions import ConnectionTimeout as ConnectionTimeout -from .exceptions import IllegalOperation as IllegalOperation -from .exceptions import ImproperlyConfigured as ImproperlyConfigured -from .exceptions import NotFoundError as NotFoundError -from .exceptions import OpenSearchDeprecationWarning as OpenSearchDeprecationWarning -from .exceptions import OpenSearchDslException as OpenSearchDslException -from .exceptions import OpenSearchException as OpenSearchException -from .exceptions import OpenSearchWarning as OpenSearchWarning -from .exceptions import RequestError as RequestError -from .exceptions import SerializationError as SerializationError -from .exceptions import SSLError as SSLError -from .exceptions import TransportError as TransportError -from .exceptions import UnknownDslObject as UnknownDslObject -from .exceptions import ValidationException as ValidationException -from .helpers import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth -from .helpers import AWSV4SignerAuth as AWSV4SignerAuth -from .helpers.aggs import A as A -from .helpers.analysis import Analyzer, CharFilter, Normalizer, TokenFilter, Tokenizer -from .helpers.document import Document as Document -from .helpers.document import InnerDoc as InnerDoc -from .helpers.document import MetaField as MetaField -from .helpers.faceted_search import DateHistogramFacet as DateHistogramFacet -from .helpers.faceted_search import Facet as Facet -from .helpers.faceted_search import FacetedResponse as FacetedResponse -from .helpers.faceted_search import FacetedSearch as FacetedSearch -from .helpers.faceted_search import HistogramFacet as HistogramFacet -from .helpers.faceted_search import NestedFacet as NestedFacet -from .helpers.faceted_search import RangeFacet as RangeFacet -from .helpers.faceted_search import TermsFacet as TermsFacet -from .helpers.field import Binary as Binary -from .helpers.field import Boolean as Boolean -from .helpers.field import Byte as Byte -from .helpers.field import Completion as Completion -from .helpers.field import CustomField as CustomField -from .helpers.field import Date as Date -from .helpers.field import DateRange as DateRange -from .helpers.field import DenseVector as DenseVector -from .helpers.field import Double as Double -from .helpers.field import DoubleRange as DoubleRange -from .helpers.field import Field as Field -from .helpers.field import Float as Float -from 
.helpers.field import FloatRange as FloatRange -from .helpers.field import GeoPoint as GeoPoint -from .helpers.field import GeoShape as GeoShape -from .helpers.field import HalfFloat as HalfFloat -from .helpers.field import Integer as Integer -from .helpers.field import IntegerRange as IntegerRange -from .helpers.field import Ip as Ip -from .helpers.field import IpRange as IpRange -from .helpers.field import Join as Join -from .helpers.field import Keyword as Keyword -from .helpers.field import Long as Long -from .helpers.field import LongRange as LongRange -from .helpers.field import Murmur3 as Murmur3 -from .helpers.field import Nested as Nested -from .helpers.field import Object as Object -from .helpers.field import Percolator as Percolator -from .helpers.field import RangeField as RangeField -from .helpers.field import RankFeature as RankFeature -from .helpers.field import RankFeatures as RankFeatures -from .helpers.field import ScaledFloat as ScaledFloat -from .helpers.field import SearchAsYouType as SearchAsYouType -from .helpers.field import Short as Short -from .helpers.field import SparseVector as SparseVector -from .helpers.field import Text as Text -from .helpers.field import TokenCount as TokenCount -from .helpers.field import construct_field as construct_field -from .helpers.function import SF as SF -from .helpers.index import Index as Index -from .helpers.index import IndexTemplate as IndexTemplate -from .helpers.mapping import Mapping as Mapping -from .helpers.query import Q as Q -from .helpers.search import MultiSearch as MultiSearch -from .helpers.search import Search as Search -from .helpers.update_by_query import UpdateByQuery as UpdateByQuery -from .helpers.utils import AttrDict as AttrDict -from .helpers.utils import AttrList as AttrList -from .helpers.utils import DslBase as DslBase -from .helpers.wrappers import Range as Range -from .serializer import JSONSerializer as JSONSerializer -from .transport import Transport as Transport - -VERSION: Tuple[int, int, int] -__version__: Tuple[int, int, int] -__versionstr__: str diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index a7587f82..279fda37 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -39,9 +39,11 @@ from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import AsyncTransport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -54,12 +56,12 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class AsyncOpenSearch(object): +class AsyncOpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. 
@@ -184,13 +186,19 @@ def default(self, obj): """ - from ._patch import ( + # include PIT functions inside _patch.py + from ._patch import ( # type: ignore create_point_in_time, delete_point_in_time, list_all_point_in_time, ) - def __init__(self, hosts=None, transport_class=AsyncTransport, **kwargs): + def __init__( + self, + hosts: Any = None, + transport_class: Type[AsyncTransport] = AsyncTransport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -205,7 +213,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. """ - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -224,10 +232,10 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -236,21 +244,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(AsyncOpenSearch, self).__repr__() - async def __aenter__(self): + async def __aenter__(self) -> Any: if hasattr(self.transport, "_async_call"): await self.transport._async_call() return self - async def __aexit__(self, *_): + async def __aexit__(self, *_: Any) -> None: await self.close() - async def close(self): + async def close(self) -> None: """Closes the Transport and all internal connections""" await self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - async def ping(self, params=None, headers=None): + async def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -263,7 +275,11 @@ async def ping(self, params=None, headers=None): return False @query_params() - async def info(self, params=None, headers=None): + async def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -281,7 +297,14 @@ async def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def create(self, index, id, body, params=None, headers=None): + async def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. @@ -330,7 +353,14 @@ async def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def index(self, index, body, id=None, params=None, headers=None): + async def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a document in an index. 
@@ -387,7 +417,13 @@ async def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def bulk(self, body, index=None, params=None, headers=None): + async def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. @@ -431,7 +467,13 @@ async def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + async def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. @@ -467,7 +509,13 @@ async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=Non "routing", "terminate_after", ) - async def count(self, body=None, index=None, params=None, headers=None): + async def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. @@ -523,7 +571,13 @@ async def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def delete(self, index, id, params=None, headers=None): + async def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. @@ -592,7 +646,13 @@ async def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def delete_by_query(self, index, body, params=None, headers=None): + async def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. @@ -685,7 +745,12 @@ async def delete_by_query(self, index, body, params=None, headers=None): ) @query_params("requests_per_second") - async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): + async def delete_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Delete By Query operation. @@ -706,7 +771,12 @@ async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_script(self, id, params=None, headers=None): + async def delete_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a script. @@ -738,7 +808,13 @@ async def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - async def exists(self, index, id, params=None, headers=None): + async def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. @@ -783,7 +859,13 @@ async def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - async def exists_source(self, index, id, params=None, headers=None): + async def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. 
@@ -831,7 +913,14 @@ async def exists_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - async def explain(self, index, id, body=None, params=None, headers=None): + async def explain( + self, + index: Any, + id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about why a specific matches (or doesn't match) a query. @@ -878,7 +967,13 @@ async def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - async def field_caps(self, body=None, index=None, params=None, headers=None): + async def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. @@ -919,7 +1014,13 @@ async def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - async def get(self, index, id, params=None, headers=None): + async def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. @@ -954,7 +1055,12 @@ async def get(self, index, id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - async def get_script(self, id, params=None, headers=None): + async def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. @@ -984,7 +1090,13 @@ async def get_script(self, id, params=None, headers=None): "version", "version_type", ) - async def get_source(self, index, id, params=None, headers=None): + async def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. @@ -1028,7 +1140,13 @@ async def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - async def mget(self, body, index=None, params=None, headers=None): + async def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. @@ -1073,7 +1191,13 @@ async def mget(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def msearch(self, body, index=None, params=None, headers=None): + async def msearch( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search operations in one request. @@ -1125,7 +1249,13 @@ async def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def msearch_template(self, body, index=None, params=None, headers=None): + async def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. @@ -1173,7 +1303,13 @@ async def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def mtermvectors(self, body=None, index=None, params=None, headers=None): + async def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. 
@@ -1221,7 +1357,14 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_script(self, id, body, context=None, params=None, headers=None): + async def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a script. @@ -1251,7 +1394,13 @@ async def put_script(self, id, body, context=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" ) - async def rank_eval(self, body, index=None, params=None, headers=None): + async def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to evaluate the quality of ranked search results over a set of typical search queries. @@ -1293,7 +1442,12 @@ async def rank_eval(self, body, index=None, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def reindex(self, body, params=None, headers=None): + async def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or @@ -1330,7 +1484,12 @@ async def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - async def reindex_rethrottle(self, task_id, params=None, headers=None): + async def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. @@ -1351,8 +1510,12 @@ async def reindex_rethrottle(self, task_id, params=None, headers=None): @query_params() async def render_search_template( - self, body=None, id=None, params=None, headers=None - ): + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1369,7 +1532,12 @@ async def render_search_template( ) @query_params() - async def scripts_painless_execute(self, body=None, params=None, headers=None): + async def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows an arbitrary script to be executed and a result to be returned. @@ -1385,7 +1553,13 @@ async def scripts_painless_execute(self, body=None, params=None, headers=None): ) @query_params("rest_total_hits_as_int", "scroll") - async def scroll(self, body=None, scroll_id=None, params=None, headers=None): + async def scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to retrieve a large numbers of results from a single search request. @@ -1454,7 +1628,13 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None): "typed_keys", "version", ) - async def search(self, body=None, index=None, params=None, headers=None): + async def search( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns results matching a query. 
@@ -1574,7 +1754,12 @@ async def search(self, body=None, index=None, params=None, headers=None): "preference", "routing", ) - async def search_shards(self, index=None, params=None, headers=None): + async def search_shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the indices and shards that a search request would be executed against. @@ -1615,7 +1800,13 @@ async def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def search_template(self, body, index=None, params=None, headers=None): + async def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1677,7 +1868,14 @@ async def search_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def termvectors(self, index, body=None, id=None, params=None, headers=None): + async def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information and statistics about terms in the fields of a particular document. @@ -1732,7 +1930,14 @@ async def termvectors(self, index, body=None, id=None, params=None, headers=None "timeout", "wait_for_active_shards", ) - async def update(self, index, id, body, params=None, headers=None): + async def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. @@ -1814,7 +2019,13 @@ async def update(self, index, id, body, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def update_by_query(self, index, body=None, params=None, headers=None): + async def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. @@ -1908,7 +2119,12 @@ async def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - async def update_by_query_rethrottle(self, task_id, params=None, headers=None): + async def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. @@ -1929,7 +2145,11 @@ async def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - async def get_script_context(self, params=None, headers=None): + async def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. @@ -1939,7 +2159,11 @@ async def get_script_context(self, params=None, headers=None): ) @query_params() - async def get_script_languages(self, params=None, headers=None): + async def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns available script types, languages and contexts. @@ -1955,7 +2179,12 @@ async def get_script_languages(self, params=None, headers=None): "preference", "routing", ) - async def create_pit(self, index, params=None, headers=None): + async def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates point in time context. 
@@ -1983,7 +2212,11 @@ async def create_pit(self, index, params=None, headers=None): ) @query_params() - async def delete_all_pits(self, params=None, headers=None): + async def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all active point in time searches. @@ -1993,7 +2226,12 @@ async def delete_all_pits(self, params=None, headers=None): ) @query_params() - async def delete_pit(self, body=None, params=None, headers=None): + async def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes one or more point in time searches based on the IDs passed. @@ -2009,7 +2247,11 @@ async def delete_pit(self, body=None, params=None, headers=None): ) @query_params() - async def get_all_pits(self, params=None, headers=None): + async def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Lists all active point in time searches. diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi deleted file mode 100644 index 32ea967e..00000000 --- a/opensearchpy/_async/client/__init__.pyi +++ /dev/null @@ -1,1139 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import AsyncTransport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .remote_store import RemoteStoreClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class AsyncOpenSearch(object): - transport: AsyncTransport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - remote_store: RemoteStoreClient - def __init__( - self, - hosts: Any = ..., - transport_class: Type[AsyncTransport] = ..., - **kwargs: Any, - ) -> None: ... - def __repr__(self) -> str: ... - async def __aenter__(self) -> "AsyncOpenSearch": ... - async def __aexit__(self, *_: Any) -> None: ... - async def close(self) -> None: ... - # AUTO-GENERATED-API-DEFINITIONS # - async def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def mtermvectors( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - ids: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_script( - self, - id: Any, - *, - body: Any, - context: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def rank_eval( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - search_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def reindex( - self, - *, - body: Any, - max_docs: Optional[Any] = ..., - refresh: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - scroll: Optional[Any] = ..., - slices: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reindex_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def render_search_template( - self, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def scripts_painless_execute( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - scroll: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def search( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - allow_partial_search_results: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - batched_reduce_size: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - docvalue_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - seq_no_primary_term: Optional[Any] = ..., - size: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - suggest_field: Optional[Any] = ..., - suggest_mode: Optional[Any] = ..., - suggest_size: Optional[Any] = ..., - suggest_text: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - track_scores: Optional[Any] = ..., - track_total_hits: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - version: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def search_shards( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def search_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - preference: Optional[Any] = ..., - profile: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def termvectors( - self, - index: Any, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def update( - self, - index: Any, - id: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - lang: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - retry_on_conflict: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_by_query( - self, - index: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def update_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script_context( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script_languages( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_pit( - self, - index: Any, - *, - allow_partial_pit_creation: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_pit( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py index f3a953c0..cbf24e0b 100644 --- a/opensearchpy/_async/client/_patch.py +++ b/opensearchpy/_async/client/_patch.py @@ -9,12 +9,15 @@ # GitHub history for details. import warnings +from typing import Any from .utils import SKIP_IN_PATH, query_params @query_params() -async def list_all_point_in_time(self, params=None, headers=None): +async def list_all_point_in_time( + self: Any, params: Any = None, headers: Any = None +) -> Any: """ Returns the list of active point in times searches @@ -35,7 +38,9 @@ async def list_all_point_in_time(self, params=None, headers=None): @query_params( "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" ) -async def create_point_in_time(self, index, params=None, headers=None): +async def create_point_in_time( + self: Any, index: Any, params: Any = None, headers: Any = None +) -> Any: """ Create a point in time that can be used in subsequent searches @@ -68,7 +73,13 @@ async def create_point_in_time(self, index, params=None, headers=None): @query_params() -async def delete_point_in_time(self, body=None, all=False, params=None, headers=None): +async def delete_point_in_time( + self: Any, + body: Any = None, + all: bool = False, + params: Any = None, + headers: Any = None, +) -> Any: """ Delete a point in time @@ -94,7 +105,7 @@ async def delete_point_in_time(self, body=None, all=False, params=None, headers= @query_params() -async def health_check(self, params=None, headers=None): +async def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: """ Checks to see if the Security plugin is up and running. @@ -113,7 +124,9 @@ async def health_check(self, params=None, headers=None): @query_params() -async def update_audit_config(self, body, params=None, headers=None): +async def update_audit_config( + self: Any, body: Any, params: Any = None, headers: Any = None +) -> Any: """ A PUT call updates the audit configuration. 
diff --git a/opensearchpy/_async/client/_patch.pyi b/opensearchpy/_async/client/_patch.pyi deleted file mode 100644 index d49a7fec..00000000 --- a/opensearchpy/_async/client/_patch.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -async def list_all_point_in_time( - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -async def create_point_in_time( - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -async def delete_point_in_time( - *, - body: Optional[Any] = ..., - all: Optional[bool] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -async def health_check( - params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... -async def update_audit_config( - body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index 2c2b01c0..bd2c8b6a 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -26,7 +26,6 @@ # under the License. -# ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -36,12 +35,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") - async def aliases(self, name=None, params=None, headers=None): + async def aliases( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Shows information about currently configured aliases to indices including filter and routing infos. @@ -65,6 +71,20 @@ async def aliases(self, name=None, params=None, headers=None): "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) + @query_params() + async def all_pit_segments( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Lists all active point-in-time segments. + + """ + return await self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + @query_params( "bytes", "cluster_manager_timeout", @@ -76,7 +96,12 @@ async def aliases(self, name=None, params=None, headers=None): "s", "v", ) - async def allocation(self, node_id=None, params=None, headers=None): + async def allocation( + self, + node_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. @@ -108,8 +133,51 @@ async def allocation(self, node_id=None, params=None, headers=None): headers=headers, ) + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + async def cluster_manager( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Returns information about the cluster-manager node. + + + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return await self.transport.perform_request( + "GET", "/_cat/cluster_manager", params=params, headers=headers + ) + @query_params("format", "h", "help", "s", "v") - async def count(self, index=None, params=None, headers=None): + async def count( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides quick access to the document count of the entire cluster, or individual indices. 
@@ -129,8 +197,43 @@ async def count(self, index=None, params=None, headers=None): "GET", _make_path("_cat", "count", index), params=params, headers=headers ) + @query_params("bytes", "format", "h", "help", "s", "v") + async def fielddata( + self, + fields: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Shows how much heap memory is currently being used by fielddata on every data + node in the cluster. + + + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return await self.transport.perform_request( + "GET", + _make_path("_cat", "fielddata", fields), + params=params, + headers=headers, + ) + @query_params("format", "h", "help", "s", "time", "ts", "v") - async def health(self, params=None, headers=None): + async def health( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a concise representation of the cluster health. @@ -151,7 +254,11 @@ async def health(self, params=None, headers=None): ) @query_params("help", "s") - async def help(self, params=None, headers=None): + async def help( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns help for the Cat APIs. @@ -180,7 +287,12 @@ async def help(self, params=None, headers=None): "time", "v", ) - async def indices(self, index=None, params=None, headers=None): + async def indices( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... @@ -232,7 +344,11 @@ async def indices(self, index=None, params=None, headers=None): "s", "v", ) - async def master(self, params=None, headers=None): + async def master( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the cluster-manager node. @@ -271,9 +387,13 @@ async def master(self, params=None, headers=None): "s", "v", ) - async def cluster_manager(self, params=None, headers=None): + async def nodeattrs( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about the cluster-manager node. + Returns information about custom node attributes. :arg cluster_manager_timeout: Operation timeout for connection @@ -292,7 +412,7 @@ async def cluster_manager(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( - "GET", "/_cat/cluster_manager", params=params, headers=headers + "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( @@ -308,7 +428,11 @@ async def cluster_manager(self, params=None, headers=None): "time", "v", ) - async def nodes(self, params=None, headers=None): + async def nodes( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns basic statistics about performance of cluster nodes. @@ -340,37 +464,6 @@ async def nodes(self, params=None, headers=None): ) @query_params( - "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" - ) - async def recovery(self, index=None, params=None, headers=None): - """ - Returns information about index shard recoveries, both on-going completed. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - shard recoveries. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg detailed: If `true`, the response includes detailed - information about shard recoveries. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. - :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. - """ - return await self.transport.perform_request( - "GET", _make_path("_cat", "recovery", index), params=params, headers=headers - ) - - @query_params( - "bytes", "cluster_manager_timeout", "format", "h", @@ -381,15 +474,15 @@ async def recovery(self, index=None, params=None, headers=None): "time", "v", ) - async def shards(self, index=None, params=None, headers=None): + async def pending_tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Provides a detailed view of shard allocation on nodes. + Returns a concise representation of the cluster pending tasks. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -408,34 +501,52 @@ async def shards(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", _make_path("_cat", "shards", index), params=params, headers=headers + "GET", "/_cat/pending_tasks", params=params, headers=headers + ) + + @query_params() + async def pit_segments( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + List segments for one or several PITs. 
+ + + """ + return await self.transport.perform_request( + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body ) @query_params( - "bytes", "cluster_manager_timeout", "format", "h", "help", + "local", "master_timeout", "s", "v", ) - async def segments(self, index=None, params=None, headers=None): + async def plugins( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Provides low-level information about the segments in the shards of an index. + Returns information about installed plugins across nodes node. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -444,36 +555,34 @@ async def segments(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", _make_path("_cat", "segments", index), params=params, headers=headers + "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( - "cluster_manager_timeout", - "format", - "h", - "help", - "local", - "master_timeout", - "s", - "time", - "v", + "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" ) - async def pending_tasks(self, params=None, headers=None): + async def recovery( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns a concise representation of the cluster pending tasks. + Returns information about index shard recoveries, both on-going completed. - :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg detailed: If `true`, the response includes detailed + information about shard recoveries. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. :arg time: The unit in which to display time values. Valid @@ -481,7 +590,7 @@ async def pending_tasks(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( - "GET", "/_cat/pending_tasks", params=params, headers=headers + "GET", _make_path("_cat", "recovery", index), params=params, headers=headers ) @query_params( @@ -492,17 +601,17 @@ async def pending_tasks(self, params=None, headers=None): "local", "master_timeout", "s", - "size", "v", ) - async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): + async def repositories( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns cluster-wide thread pool statistics per node. By default the active, - queue and rejected statistics are returned for all thread pools. + Returns information about snapshot repositories registered in the cluster. - :arg thread_pool_patterns: Comma-separated list of regular- - expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -516,65 +625,94 @@ async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", - _make_path("_cat", "thread_pool", thread_pool_patterns), - params=params, - headers=headers, + "GET", "/_cat/repositories", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - async def fielddata(self, fields=None, params=None, headers=None): + @query_params( + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", + ) + async def segment_replication( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Shows how much heap memory is currently being used by fielddata on every data - node in the cluster. + Returns information about both on-going and latest completed Segment + Replication events. - :arg fields: Comma-separated list of fields to return in the - output. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + segment replication events. Default is false. :arg bytes: The unit in which to display byte values. Valid choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. Default is false. + :arg detailed: If `true`, the response includes detailed + information about segment replications. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( "GET", - _make_path("_cat", "fielddata", fields), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", "help", - "local", "master_timeout", "s", "v", ) - async def plugins(self, params=None, headers=None): + async def segments( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about installed plugins across nodes node. + Provides low-level information about the segments in the shards of an index. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -583,10 +721,11 @@ async def plugins(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/plugins", params=params, headers=headers + "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", @@ -594,13 +733,23 @@ async def plugins(self, params=None, headers=None): "local", "master_timeout", "s", + "time", "v", ) - async def nodeattrs(self, params=None, headers=None): + async def shards( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about custom node attributes. + Provides a detailed view of shard allocation on nodes. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -614,10 +763,12 @@ async def nodeattrs(self, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( - "GET", "/_cat/nodeattrs", params=params, headers=headers + "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( @@ -628,13 +779,22 @@ async def nodeattrs(self, params=None, headers=None): "local", "master_timeout", "s", + "size", "v", ) - async def repositories(self, params=None, headers=None): + async def thread_pool( + self, + thread_pool_patterns: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about snapshot repositories registered in the cluster. + Returns cluster-wide thread pool statistics per node. By default the active, + queue and rejected statistics are returned for all thread pools. + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -648,10 +808,14 @@ async def repositories(self, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/repositories", params=params, headers=headers + "GET", + _make_path("_cat", "thread_pool", thread_pool_patterns), + params=params, + headers=headers, ) @query_params( @@ -665,7 +829,12 @@ async def repositories(self, params=None, headers=None): "time", "v", ) - async def snapshots(self, repository=None, params=None, headers=None): + async def snapshots( + self, + repository: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns all snapshots in a specific repository. @@ -708,7 +877,11 @@ async def snapshots(self, repository=None, params=None, headers=None): "time", "v", ) - async def tasks(self, params=None, headers=None): + async def tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in the cluster. @@ -748,7 +921,12 @@ async def tasks(self, params=None, headers=None): "s", "v", ) - async def templates(self, name=None, params=None, headers=None): + async def templates( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about existing templates. @@ -772,71 +950,3 @@ async def templates(self, name=None, params=None, headers=None): return await self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) - - @query_params() - async def all_pit_segments(self, params=None, headers=None): - """ - Lists all active point-in-time segments. - - """ - return await self.transport.perform_request( - "GET", "/_cat/pit_segments/_all", params=params, headers=headers - ) - - @query_params() - async def pit_segments(self, body=None, params=None, headers=None): - """ - List segments for one or several PITs. 
- - - """ - return await self.transport.perform_request( - "GET", "/_cat/pit_segments", params=params, headers=headers, body=body - ) - - @query_params( - "active_only", - "bytes", - "completed_only", - "detailed", - "format", - "h", - "help", - "s", - "shards", - "time", - "v", - ) - async def segment_replication(self, index=None, params=None, headers=None): - """ - Returns information about both on-going and latest completed Segment - Replication events. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - segment replication events. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg completed_only: If `true`, the response only includes - latest completed segment replication events. Default is false. - :arg detailed: If `true`, the response includes detailed - information about segment replications. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. - :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg shards: Comma-separated list of shards to display. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. - """ - return await self.transport.perform_request( - "GET", - _make_path("_cat", "segment_replication", index), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/cat.pyi b/opensearchpy/_async/client/cat.pyi deleted file mode 100644 index 404400cd..00000000 --- a/opensearchpy/_async/client/cat.pyi +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - async def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def master( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def cluster_manager( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodes( - self, - *, - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def plugins( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodeattrs( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def repositories( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def snapshots( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def tasks( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def templates( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def all_pit_segments( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pit_segments( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def segment_replication( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - completed_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - shards: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/client.py b/opensearchpy/_async/client/client.py new file mode 100644 index 00000000..7f0b67c6 --- /dev/null +++ b/opensearchpy/_async/client/client.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Optional, Type + +from opensearchpy.client.utils import _normalize_hosts +from opensearchpy.transport import Transport + + +class Client(object): + """ + A generic async OpenSearch client. + """ + + def __init__( + self, + hosts: Optional[str] = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: + """ + :arg hosts: list of nodes, or a single node, we should connect to. + Node should be a dictionary ({"host": "localhost", "port": 9200}), + the entire dictionary will be passed to the :class:`~opensearchpy.Connection` + class as kwargs, or a string in the format of ``host[:port]`` which will be + translated to a dictionary automatically. If no value is given the + :class:`~opensearchpy.Connection` class defaults will be used. + + :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. + + :arg kwargs: any additional arguments will be passed on to the + :class:`~opensearchpy.Transport` class and, subsequently, to the + :class:`~opensearchpy.Connection` instances. + """ + self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 8bd55390..a76a05fb 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -55,7 +57,12 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) - async def health(self, index=None, params=None, headers=None): + async def health( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns basic information about the health of the cluster. 
@@ -99,7 +106,11 @@ async def health(self, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def pending_tasks(self, params=None, headers=None): + async def pending_tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. @@ -128,7 +139,13 @@ async def pending_tasks(self, params=None, headers=None): "wait_for_metadata_version", "wait_for_timeout", ) - async def state(self, metric=None, index=None, params=None, headers=None): + async def state( + self, + metric: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a comprehensive information about the state of the cluster. @@ -171,7 +188,12 @@ async def state(self, metric=None, index=None, params=None, headers=None): ) @query_params("flat_settings", "timeout") - async def stats(self, node_id=None, params=None, headers=None): + async def stats( + self, + node_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns high-level overview of cluster statistics. @@ -202,7 +224,12 @@ async def stats(self, node_id=None, params=None, headers=None): "retry_failed", "timeout", ) - async def reroute(self, body=None, params=None, headers=None): + async def reroute( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -235,7 +262,11 @@ async def reroute(self, body=None, params=None, headers=None): "master_timeout", "timeout", ) - async def get_settings(self, params=None, headers=None): + async def get_settings( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns cluster settings. @@ -258,7 +289,12 @@ async def get_settings(self, params=None, headers=None): @query_params( "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) - async def put_settings(self, body, params=None, headers=None): + async def put_settings( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the cluster settings. @@ -282,7 +318,11 @@ async def put_settings(self, body, params=None, headers=None): ) @query_params() - async def remote_info(self, params=None, headers=None): + async def remote_info( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns the information about configured remote clusters. @@ -292,7 +332,12 @@ async def remote_info(self, params=None, headers=None): ) @query_params("include_disk_info", "include_yes_decisions") - async def allocation_explain(self, body=None, params=None, headers=None): + async def allocation_explain( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides explanations for shard allocations in the cluster. 
@@ -313,7 +358,12 @@ async def allocation_explain(self, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_component_template(self, name, params=None, headers=None): + async def delete_component_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a component template. @@ -337,7 +387,12 @@ async def delete_component_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def get_component_template(self, name=None, params=None, headers=None): + async def get_component_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns one or more component templates. @@ -359,7 +414,13 @@ async def get_component_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") - async def put_component_template(self, name, body, params=None, headers=None): + async def put_component_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a component template. @@ -388,7 +449,12 @@ async def put_component_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def exists_component_template(self, name, params=None, headers=None): + async def exists_component_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular component template exist. @@ -413,7 +479,11 @@ async def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - async def delete_voting_config_exclusions(self, params=None, headers=None): + async def delete_voting_config_exclusions( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clears cluster voting config exclusions. @@ -430,7 +500,11 @@ async def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - async def post_voting_config_exclusions(self, params=None, headers=None): + async def post_voting_config_exclusions( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. @@ -448,7 +522,11 @@ async def post_voting_config_exclusions(self, params=None, headers=None): ) @query_params() - async def delete_decommission_awareness(self, params=None, headers=None): + async def delete_decommission_awareness( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete any existing decommission. 
@@ -461,7 +539,11 @@ async def delete_decommission_awareness(self, params=None, headers=None): ) @query_params() - async def delete_weighted_routing(self, params=None, headers=None): + async def delete_weighted_routing( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete weighted shard routing weights. @@ -475,8 +557,11 @@ async def delete_weighted_routing(self, params=None, headers=None): @query_params() async def get_decommission_awareness( - self, awareness_attribute_name, params=None, headers=None - ): + self, + awareness_attribute_name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Get details and status of decommissioned attribute. @@ -502,7 +587,12 @@ async def get_decommission_awareness( ) @query_params() - async def get_weighted_routing(self, attribute, params=None, headers=None): + async def get_weighted_routing( + self, + attribute: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Fetches weighted shard routing weights. @@ -522,11 +612,11 @@ async def get_weighted_routing(self, attribute, params=None, headers=None): @query_params() async def put_decommission_awareness( self, - awareness_attribute_name, - awareness_attribute_value, - params=None, - headers=None, - ): + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Decommissions an awareness attribute. @@ -552,7 +642,12 @@ async def put_decommission_awareness( ) @query_params() - async def put_weighted_routing(self, attribute, params=None, headers=None): + async def put_weighted_routing( + self, + attribute: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi deleted file mode 100644 index 74f88694..00000000 --- a/opensearchpy/_async/client/cluster.pyi +++ /dev/null @@ -1,456 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - async def health( - self, - *, - index: Optional[Any] = ..., - awareness_attribute: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def state( - self, - *, - metric: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_metadata_version: Optional[Any] = ..., - wait_for_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def stats( - self, - *, - node_id: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reroute( - self, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - explain: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - metric: Optional[Any] = ..., - retry_failed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_settings( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_settings( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def remote_info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation_explain( - self, - *, - body: Optional[Any] = ..., - include_disk_info: Optional[Any] = ..., - include_yes_decisions: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_component_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_component_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def delete_voting_config_exclusions( - self, - *, - wait_for_removal: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def post_voting_config_exclusions( - self, - *, - node_ids: Optional[Any] = ..., - node_names: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_decommission_awareness( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_weighted_routing( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_decommission_awareness( - self, - awareness_attribute_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_decommission_awareness( - self, - awareness_attribute_name: Any, - awareness_attribute_value: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index bc886d65..19b0e5c4 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -43,7 +45,12 @@ class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def delete_dangling_index(self, index_uuid, params=None, headers=None): + async def delete_dangling_index( + self, + index_uuid: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes the specified dangling index. @@ -71,7 +78,12 @@ async def delete_dangling_index(self, index_uuid, params=None, headers=None): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def import_dangling_index(self, index_uuid, params=None, headers=None): + async def import_dangling_index( + self, + index_uuid: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Imports the specified dangling index. @@ -94,7 +106,11 @@ async def import_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params() - async def list_dangling_indices(self, params=None, headers=None): + async def list_dangling_indices( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/_async/client/dangling_indices.pyi b/opensearchpy/_async/client/dangling_indices.pyi deleted file mode 100644 index d9dea8a1..00000000 --- a/opensearchpy/_async/client/dangling_indices.pyi +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class DanglingIndicesClient(NamespacedClient): - async def delete_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def import_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def list_dangling_indices( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
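# A minimal, illustrative usage sketch for the newly annotated async
# DanglingIndicesClient methods shown in the hunks above. This is a sketch under
# assumptions, not part of the patch: it assumes the AsyncOpenSearch entry point
# and its `dangling_indices` namespace attribute wire up to DanglingIndicesClient
# as in the rest of the client, and the host address and index UUID below are
# placeholders, not real values.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        # list_dangling_indices() now takes only the optional params/headers
        # mappings declared in its annotated signature.
        listing = await client.dangling_indices.list_dangling_indices()
        print(listing)

        # delete_dangling_index() requires an index UUID; accept_data_loss is one
        # of the query parameters registered via @query_params above.
        # "aBcDeFgHiJkLmNoPqRsTuV" is a placeholder UUID.
        await client.dangling_indices.delete_dangling_index(
            index_uuid="aBcDeFgHiJkLmNoPqRsTuV",
            params={"accept_data_loss": "true"},
        )
    finally:
        await client.close()


if __name__ == "__main__":
    asyncio.run(main())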
diff --git a/opensearchpy/_async/client/features.py b/opensearchpy/_async/client/features.py index e2c1bb7d..1b69aa04 100644 --- a/opensearchpy/_async/client/features.py +++ b/opensearchpy/_async/client/features.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params("master_timeout", "cluster_manager_timeout") - async def get_features(self, params=None, headers=None): + async def get_features(self, params: Any = None, headers: Any = None) -> Any: """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot @@ -47,7 +49,7 @@ async def get_features(self, params=None, headers=None): ) @query_params() - async def reset_features(self, params=None, headers=None): + async def reset_features(self, params: Any = None, headers: Any = None) -> Any: """ Resets the internal state of features, usually by deleting system indices diff --git a/opensearchpy/_async/client/features.pyi b/opensearchpy/_async/client/features.pyi deleted file mode 100644 index 38fb992e..00000000 --- a/opensearchpy/_async/client/features.pyi +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class FeaturesClient(NamespacedClient): - async def get_features( - self, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def reset_features( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index b83cb73c..09d3b802 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -36,12 +36,20 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params() - async def analyze(self, body=None, index=None, params=None, headers=None): + async def analyze( + self, + body: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the text. @@ -60,7 +68,12 @@ async def analyze(self, body=None, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def refresh(self, index=None, params=None, headers=None): + async def refresh( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the refresh operation in one or more indices. @@ -87,7 +100,12 @@ async def refresh(self, index=None, params=None, headers=None): "ignore_unavailable", "wait_if_ongoing", ) - async def flush(self, index=None, params=None, headers=None): + async def flush( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the flush operation on one or more indices. @@ -119,7 +137,13 @@ async def flush(self, index=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def create(self, index, body=None, params=None, headers=None): + async def create( + self, + index: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates an index with optional settings and mappings. @@ -146,7 +170,14 @@ async def create(self, index, body=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def clone(self, index, target, body=None, params=None, headers=None): + async def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clones an index. 
@@ -186,7 +217,12 @@ async def clone(self, index, target, body=None, params=None, headers=None): "local", "master_timeout", ) - async def get(self, index, params=None, headers=None): + async def get( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about one or more indices. @@ -229,7 +265,12 @@ async def get(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def open(self, index, params=None, headers=None): + async def open( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Opens an index. @@ -268,7 +309,12 @@ async def open(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def close(self, index, params=None, headers=None): + async def close( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Closes an index. @@ -306,7 +352,12 @@ async def close(self, index, params=None, headers=None): "master_timeout", "timeout", ) - async def delete(self, index, params=None, headers=None): + async def delete( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index. @@ -344,7 +395,12 @@ async def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - async def exists(self, index, params=None, headers=None): + async def exists( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index exists. @@ -382,7 +438,13 @@ async def exists(self, index, params=None, headers=None): "timeout", "write_index_only", ) - async def put_mapping(self, body, index=None, params=None, headers=None): + async def put_mapping( + self, + body: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the index mappings. @@ -429,7 +491,12 @@ async def put_mapping(self, body, index=None, params=None, headers=None): "local", "master_timeout", ) - async def get_mapping(self, index=None, params=None, headers=None): + async def get_mapping( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns mappings for one or more indices. @@ -463,7 +530,13 @@ async def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - async def get_field_mapping(self, fields, index=None, params=None, headers=None): + async def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns mapping for one or more fields. 
@@ -494,7 +567,14 @@ async def get_field_mapping(self, fields, index=None, params=None, headers=None) ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_alias(self, index, name, body=None, params=None, headers=None): + async def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an alias. @@ -524,7 +604,13 @@ async def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def exists_alias(self, name, index=None, params=None, headers=None): + async def exists_alias( + self, + name: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular alias exists. @@ -550,7 +636,13 @@ async def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def get_alias(self, index=None, name=None, params=None, headers=None): + async def get_alias( + self, + index: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an alias. @@ -573,7 +665,12 @@ async def get_alias(self, index=None, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def update_aliases(self, body, params=None, headers=None): + async def update_aliases( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates index aliases. @@ -594,7 +691,13 @@ async def update_aliases(self, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_alias(self, index, name, params=None, headers=None): + async def delete_alias( + self, + index: Any, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an alias. @@ -619,7 +722,13 @@ async def delete_alias(self, index, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "order") - async def put_template(self, name, body, params=None, headers=None): + async def put_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an index template. @@ -650,7 +759,12 @@ async def put_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def exists_template(self, name, params=None, headers=None): + async def exists_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index template exists. 
@@ -674,7 +788,12 @@ async def exists_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def get_template(self, name=None, params=None, headers=None): + async def get_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an index template. @@ -695,7 +814,12 @@ async def get_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_template(self, name, params=None, headers=None): + async def delete_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index template. @@ -725,7 +849,13 @@ async def delete_template(self, name, params=None, headers=None): "local", "master_timeout", ) - async def get_settings(self, index=None, name=None, params=None, headers=None): + async def get_settings( + self, + index: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns settings for one or more indices. @@ -767,7 +897,13 @@ async def get_settings(self, index=None, name=None, params=None, headers=None): "preserve_existing", "timeout", ) - async def put_settings(self, body, index=None, params=None, headers=None): + async def put_settings( + self, + body: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the index settings. @@ -817,7 +953,13 @@ async def put_settings(self, body, index=None, params=None, headers=None): "include_unloaded_segments", "level", ) - async def stats(self, index=None, metric=None, params=None, headers=None): + async def stats( + self, + index: Any = None, + metric: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides statistics on operations happening in an index. @@ -858,7 +1000,12 @@ async def stats(self, index=None, metric=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - async def segments(self, index=None, params=None, headers=None): + async def segments( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. @@ -894,7 +1041,13 @@ async def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - async def validate_query(self, body=None, index=None, params=None, headers=None): + async def validate_query( + self, + body: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. 
@@ -943,7 +1096,12 @@ async def validate_query(self, body=None, index=None, params=None, headers=None) "query", "request", ) - async def clear_cache(self, index=None, params=None, headers=None): + async def clear_cache( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clears all or specific caches for one or more indices. @@ -969,7 +1127,12 @@ async def clear_cache(self, index=None, params=None, headers=None): ) @query_params("active_only", "detailed") - async def recovery(self, index=None, params=None, headers=None): + async def recovery( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -992,7 +1155,12 @@ async def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - async def upgrade(self, index=None, params=None, headers=None): + async def upgrade( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1017,7 +1185,12 @@ async def upgrade(self, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def get_upgrade(self, index=None, params=None, headers=None): + async def get_upgrade( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1040,7 +1213,12 @@ async def get_upgrade(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - async def shard_stores(self, index=None, params=None, headers=None): + async def shard_stores( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides store information for shard copies of indices. @@ -1070,7 +1248,12 @@ async def shard_stores(self, index=None, params=None, headers=None): "max_num_segments", "only_expunge_deletes", ) - async def forcemerge(self, index=None, params=None, headers=None): + async def forcemerge( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1103,7 +1286,14 @@ async def forcemerge(self, index=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def shrink(self, index, target, body=None, params=None, headers=None): + async def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. 
@@ -1142,7 +1332,14 @@ async def shrink(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def split(self, index, target, body=None, params=None, headers=None): + async def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. @@ -1183,8 +1380,13 @@ async def split(self, index, target, body=None, params=None, headers=None): "wait_for_active_shards", ) async def rollover( - self, alias, body=None, new_index=None, params=None, headers=None - ): + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. @@ -1219,7 +1421,13 @@ async def rollover( ) @query_params() - async def create_data_stream(self, name, body=None, params=None, headers=None): + async def create_data_stream( + self, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a data stream. @@ -1239,7 +1447,12 @@ async def create_data_stream(self, name, body=None, params=None, headers=None): ) @query_params() - async def delete_data_stream(self, name, params=None, headers=None): + async def delete_data_stream( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a data stream. @@ -1255,7 +1468,12 @@ async def delete_data_stream(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_index_template(self, name, params=None, headers=None): + async def delete_index_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index template. @@ -1279,7 +1497,12 @@ async def delete_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def exists_index_template(self, name, params=None, headers=None): + async def exists_index_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -1303,7 +1526,12 @@ async def exists_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def get_index_template(self, name=None, params=None, headers=None): + async def get_index_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an index template. 
@@ -1324,7 +1552,13 @@ async def get_index_template(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def put_index_template(self, name, body, params=None, headers=None): + async def put_index_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an index template. @@ -1354,7 +1588,13 @@ async def put_index_template(self, name, body, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def simulate_index_template(self, name, body=None, params=None, headers=None): + async def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the system. @@ -1387,7 +1627,12 @@ async def simulate_index_template(self, name, body=None, params=None, headers=No ) @query_params() - async def get_data_stream(self, name=None, params=None, headers=None): + async def get_data_stream( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns data streams. @@ -1400,7 +1645,13 @@ async def get_data_stream(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def simulate_template(self, body=None, name=None, params=None, headers=None): + async def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Simulate resolving the given template name or body. @@ -1428,7 +1679,12 @@ async def simulate_template(self, body=None, name=None, params=None, headers=Non ) @query_params("expand_wildcards") - async def resolve_index(self, name, params=None, headers=None): + async def resolve_index( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1454,7 +1710,13 @@ async def resolve_index(self, name, params=None, headers=None): "master_timeout", "timeout", ) - async def add_block(self, index, block, params=None, headers=None): + async def add_block( + self, + index: Any, + block: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds a block to an index. @@ -1486,7 +1748,12 @@ async def add_block(self, index, block, params=None, headers=None): ) @query_params() - async def data_streams_stats(self, name=None, params=None, headers=None): + async def data_streams_stats( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. 
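The same pattern applies across IndicesClient: required path parts stay positional, bodies default to None, and every method accepts the optional params/headers mappings inline rather than via a stub. A short sketch of the annotated surface, assuming an existing AsyncOpenSearch instance `client` and a placeholder index name:

    from typing import Any, MutableMapping, Optional

    from opensearchpy import AsyncOpenSearch


    async def ensure_index(
        client: AsyncOpenSearch,
        index: str,
        headers: Optional[MutableMapping[str, str]] = None,
    ) -> Any:
        # exists() and create() carry their annotations in indices.py itself,
        # so mypy checks these calls without consulting a .pyi file.
        if not await client.indices.exists(index, headers=headers):
            await client.indices.create(
                index,
                body={"settings": {"index": {"number_of_shards": 1}}},
            )
        # Query-string options declared via @query_params (e.g. flat_settings)
        # are still passed as keyword arguments.
        return await client.indices.get_settings(index=index, flat_settings=True)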
diff --git a/opensearchpy/_async/client/indices.pyi b/opensearchpy/_async/client/indices.pyi deleted file mode 100644 index 1a5c0912..00000000 --- a/opensearchpy/_async/client/indices.pyi +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - async def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- async def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_aliases( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_alias( - self, - index: Any, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_data_stream( - self, - name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_index_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_data_stream( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
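The index-template stubs deleted above now exist only as inline annotations in the corresponding indices.py modules. A minimal usage sketch of the merged, typed async API follows; the host, template name, and settings are illustrative assumptions and are not part of this patch.

    import asyncio

    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        # Hypothetical local cluster; adjust hosts/auth for a real environment.
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # put_index_template / exists_index_template now carry their parameter and
        # return annotations directly in indices.py instead of a separate .pyi stub.
        await client.indices.put_index_template(
            name="logs-template",
            body={
                "index_patterns": ["logs-*"],
                "template": {"settings": {"number_of_shards": 1}},
            },
        )
        exists: bool = await client.indices.exists_index_template(name="logs-template")
        print("template present:", exists)
        await client.close()

    asyncio.run(main())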
- async def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 0d56f7e1..64cbdbb6 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -36,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") - async def get_pipeline(self, id=None, params=None, headers=None): + async def get_pipeline( + self, + id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a pipeline. @@ -59,7 +66,13 @@ async def get_pipeline(self, id=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_pipeline(self, id, body, params=None, headers=None): + async def put_pipeline( + self, + id: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a pipeline. 
@@ -86,7 +99,12 @@ async def put_pipeline(self, id, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_pipeline(self, id, params=None, headers=None): + async def delete_pipeline( + self, + id: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a pipeline. @@ -110,7 +128,13 @@ async def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - async def simulate(self, body, id=None, params=None, headers=None): + async def simulate( + self, + body: Any, + id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. @@ -132,7 +156,11 @@ async def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - async def processor_grok(self, params=None, headers=None): + async def processor_grok( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of the built-in patterns. diff --git a/opensearchpy/_async/client/ingest.pyi b/opensearchpy/_async/client/ingest.pyi deleted file mode 100644 index 9dd4fc2b..00000000 --- a/opensearchpy/_async/client/ingest.pyi +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - async def get_pipeline( - self, - *, - id: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_pipeline( - self, - id: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_pipeline( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
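With the ingest stubs merged into ingest.py above, pipeline calls are typed at the call site. A short sketch, assuming a reachable cluster and a hypothetical pipeline id:

    import asyncio

    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])  # assumed endpoint
        pipeline_id = "demo-pipeline"  # hypothetical id
        # put_pipeline(id, body) and simulate(body, id=None) match the annotated
        # signatures added to opensearchpy/_async/client/ingest.py in this patch.
        await client.ingest.put_pipeline(
            id=pipeline_id,
            body={
                "description": "adds a static field",
                "processors": [{"set": {"field": "ingested", "value": True}}],
            },
        )
        result = await client.ingest.simulate(
            body={"docs": [{"_source": {"message": "hello"}}]},
            id=pipeline_id,
        )
        print(result)
        await client.close()

    asyncio.run(main())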
- async def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index a89fee94..36146fad 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -36,14 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") async def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. @@ -64,7 +70,13 @@ async def reload_secure_settings( ) @query_params("flat_settings", "timeout") - async def info(self, node_id=None, metric=None, params=None, headers=None): + async def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. @@ -95,8 +107,13 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): "types", ) async def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. @@ -140,7 +157,12 @@ async def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - async def hot_threads(self, node_id=None, params=None, headers=None): + async def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. @@ -173,7 +195,13 @@ async def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - async def usage(self, node_id=None, metric=None, params=None, headers=None): + async def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. diff --git a/opensearchpy/_async/client/nodes.pyi b/opensearchpy/_async/client/nodes.pyi deleted file mode 100644 index c18afb83..00000000 --- a/opensearchpy/_async/client/nodes.pyi +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. 
Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - async def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
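The nodes client follows the same pattern: annotations move from nodes.pyi into nodes.py, so node_id, metric, and the return value are typed inline. A brief sketch against an assumed local cluster:

    import asyncio

    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])  # assumed endpoint
        # info() and stats() return Any per the annotations added to nodes.py.
        info = await client.nodes.info()
        stats = await client.nodes.stats(metric="indices")
        print(len(info.get("nodes", {})), "nodes reported")
        print(list(stats.get("nodes", {}).keys()))
        await client.close()

    asyncio.run(main())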
- async def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index 19570be4..b12214d7 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -9,14 +9,19 @@ # GitHub history for details. 
import warnings +from typing import Any from ..plugins.alerting import AlertingClient from ..plugins.index_management import IndexManagementClient +from .client import Client from .utils import NamespacedClient class PluginsClient(NamespacedClient): - def __init__(self, client): + alerting: Any + index_management: Any + + def __init__(self, client: Client) -> None: super(PluginsClient, self).__init__(client) # self.query_workbench = QueryWorkbenchClient(client) # self.reporting = ReportingClient(client) @@ -28,7 +33,7 @@ def __init__(self, client): self._dynamic_lookup(client) - def _dynamic_lookup(self, client): + def _dynamic_lookup(self, client: Any) -> None: # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 plugins = [ diff --git a/opensearchpy/_async/client/plugins.pyi b/opensearchpy/_async/client/plugins.pyi deleted file mode 100644 index 44576c74..00000000 --- a/opensearchpy/_async/client/plugins.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any - -from ..client import AsyncOpenSearch -from ..plugins.alerting import AlertingClient as AlertingClient -from .utils import NamespacedClient as NamespacedClient - -class PluginsClient(NamespacedClient): - alerting: Any - index_management: Any - def __init__(self, client: AsyncOpenSearch) -> None: ... diff --git a/opensearchpy/_async/client/remote.py b/opensearchpy/_async/client/remote.py index eee7319d..433c9fa5 100644 --- a/opensearchpy/_async/client/remote.py +++ b/opensearchpy/_async/client/remote.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() - async def info(self, params=None, headers=None): + async def info(self, params: Any = None, headers: Any = None) -> Any: return await self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) diff --git a/opensearchpy/_async/client/remote.pyi b/opensearchpy/_async/client/remote.pyi deleted file mode 100644 index a2d7dc51..00000000 --- a/opensearchpy/_async/client/remote.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteClient(NamespacedClient): - async def info( - self, - *, - timeout: Optional[Any] = None, - pretty: Optional[bool] = None, - human: Optional[bool] = None, - error_trace: Optional[bool] = None, - format: Optional[str] = None, - filter_path: Optional[Union[str, Collection[str]]] = None, - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, - ) -> Any: ... diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index e59d1870..ed06763b 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -7,6 +7,25 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # @@ -17,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, query_params class RemoteStoreClient(NamespacedClient): @query_params("cluster_manager_timeout", "wait_for_completion") - async def restore(self, body, params=None, headers=None): + async def restore( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Restores from remote store. diff --git a/opensearchpy/_async/client/remote_store.pyi b/opensearchpy/_async/client/remote_store.pyi deleted file mode 100644 index b14866ef..00000000 --- a/opensearchpy/_async/client/remote_store.pyi +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteStoreClient(NamespacedClient): - async def restore( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 43265506..1dce2dfa 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -7,6 +7,23 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. # ---------------------------------------------------- @@ -19,14 +36,29 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + class SecurityClient(NamespacedClient): - from ._patch import health_check, update_audit_config + from ._patch import health_check, update_audit_config # type: ignore @query_params() - async def get_account_details(self, params=None, headers=None): + async def get_account_details( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns account details for the current user. @@ -36,7 +68,12 @@ async def get_account_details(self, params=None, headers=None): ) @query_params() - async def change_password(self, body, params=None, headers=None): + async def change_password( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Changes the password for the current user. @@ -54,7 +91,12 @@ async def change_password(self, body, params=None, headers=None): ) @query_params() - async def get_action_group(self, action_group, params=None, headers=None): + async def get_action_group( + self, + action_group: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one action group. @@ -74,7 +116,11 @@ async def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def get_action_groups(self, params=None, headers=None): + async def get_action_groups( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all action groups. @@ -87,7 +133,12 @@ async def get_action_groups(self, params=None, headers=None): ) @query_params() - async def delete_action_group(self, action_group, params=None, headers=None): + async def delete_action_group( + self, + action_group: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete a specified action group. @@ -107,7 +158,13 @@ async def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def create_action_group(self, action_group, body, params=None, headers=None): + async def create_action_group( + self, + action_group: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified action group. @@ -128,7 +185,13 @@ async def create_action_group(self, action_group, body, params=None, headers=Non ) @query_params() - async def patch_action_group(self, action_group, body, params=None, headers=None): + async def patch_action_group( + self, + action_group: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of an action group. 
@@ -147,7 +210,12 @@ async def patch_action_group(self, action_group, body, params=None, headers=None ) @query_params() - async def patch_action_groups(self, body, params=None, headers=None): + async def patch_action_groups( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. @@ -165,7 +233,12 @@ async def patch_action_groups(self, body, params=None, headers=None): ) @query_params() - async def get_user(self, username, params=None, headers=None): + async def get_user( + self, + username: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieve one internal user. @@ -182,7 +255,11 @@ async def get_user(self, username, params=None, headers=None): ) @query_params() - async def get_users(self, params=None, headers=None): + async def get_users( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieve all internal users. @@ -195,7 +272,12 @@ async def get_users(self, params=None, headers=None): ) @query_params() - async def delete_user(self, username, params=None, headers=None): + async def delete_user( + self, + username: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified user. @@ -212,7 +294,13 @@ async def delete_user(self, username, params=None, headers=None): ) @query_params() - async def create_user(self, username, body, params=None, headers=None): + async def create_user( + self, + username: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified user. @@ -231,7 +319,13 @@ async def create_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_user(self, username, body, params=None, headers=None): + async def patch_user( + self, + username: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of an internal user. @@ -250,7 +344,12 @@ async def patch_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_users(self, body, params=None, headers=None): + async def patch_users( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -268,7 +367,12 @@ async def patch_users(self, body, params=None, headers=None): ) @query_params() - async def get_role(self, role, params=None, headers=None): + async def get_role( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one role. @@ -285,7 +389,11 @@ async def get_role(self, role, params=None, headers=None): ) @query_params() - async def get_roles(self, params=None, headers=None): + async def get_roles( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all roles. 
@@ -295,7 +403,12 @@ async def get_roles(self, params=None, headers=None): ) @query_params() - async def delete_role(self, role, params=None, headers=None): + async def delete_role( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified role. @@ -312,7 +425,13 @@ async def delete_role(self, role, params=None, headers=None): ) @query_params() - async def create_role(self, role, body, params=None, headers=None): + async def create_role( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified role. @@ -331,7 +450,13 @@ async def create_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role(self, role, body, params=None, headers=None): + async def patch_role( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of a role. @@ -350,7 +475,12 @@ async def patch_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_roles(self, body, params=None, headers=None): + async def patch_roles( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -368,7 +498,12 @@ async def patch_roles(self, body, params=None, headers=None): ) @query_params() - async def get_role_mapping(self, role, params=None, headers=None): + async def get_role_mapping( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one role mapping. @@ -385,7 +520,11 @@ async def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def get_role_mappings(self, params=None, headers=None): + async def get_role_mappings( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all role mappings. @@ -398,7 +537,12 @@ async def get_role_mappings(self, params=None, headers=None): ) @query_params() - async def delete_role_mapping(self, role, params=None, headers=None): + async def delete_role_mapping( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes the specified role mapping. @@ -415,7 +559,13 @@ async def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def create_role_mapping(self, role, body, params=None, headers=None): + async def create_role_mapping( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified role mapping. @@ -434,7 +584,13 @@ async def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mapping(self, role, body, params=None, headers=None): + async def patch_role_mapping( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of a role mapping. 
@@ -453,7 +609,12 @@ async def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mappings(self, body, params=None, headers=None): + async def patch_role_mappings( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -471,7 +632,12 @@ async def patch_role_mappings(self, body, params=None, headers=None): ) @query_params() - async def get_tenant(self, tenant, params=None, headers=None): + async def get_tenant( + self, + tenant: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one tenant. @@ -488,7 +654,11 @@ async def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def get_tenants(self, params=None, headers=None): + async def get_tenants( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all tenants. @@ -498,7 +668,12 @@ async def get_tenants(self, params=None, headers=None): ) @query_params() - async def delete_tenant(self, tenant, params=None, headers=None): + async def delete_tenant( + self, + tenant: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified tenant. @@ -515,7 +690,13 @@ async def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def create_tenant(self, tenant, body, params=None, headers=None): + async def create_tenant( + self, + tenant: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified tenant. @@ -534,7 +715,13 @@ async def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenant(self, tenant, body, params=None, headers=None): + async def patch_tenant( + self, + tenant: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Add, delete, or modify a single tenant. @@ -553,7 +740,12 @@ async def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenants(self, body, params=None, headers=None): + async def patch_tenants( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -571,7 +763,11 @@ async def patch_tenants(self, body, params=None, headers=None): ) @query_params() - async def get_configuration(self, params=None, headers=None): + async def get_configuration( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns the current Security plugin configuration in JSON format. @@ -584,7 +780,12 @@ async def get_configuration(self, params=None, headers=None): ) @query_params() - async def update_configuration(self, body, params=None, headers=None): + async def update_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds or updates the existing configuration using the REST API. 
@@ -602,7 +803,12 @@ async def update_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_configuration(self, body, params=None, headers=None): + async def patch_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -621,8 +827,11 @@ async def patch_configuration(self, body, params=None, headers=None): @query_params() async def get_distinguished_names( - self, cluster_name=None, params=None, headers=None - ): + self, + cluster_name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. @@ -637,8 +846,12 @@ async def get_distinguished_names( @query_params() async def update_distinguished_names( - self, cluster_name, body=None, params=None, headers=None - ): + self, + cluster_name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s allow list. @@ -659,7 +872,12 @@ async def update_distinguished_names( ) @query_params() - async def delete_distinguished_names(self, cluster_name, params=None, headers=None): + async def delete_distinguished_names( + self, + cluster_name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow list. @@ -679,7 +897,11 @@ async def delete_distinguished_names(self, cluster_name, params=None, headers=No ) @query_params() - async def get_certificates(self, params=None, headers=None): + async def get_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves the cluster’s security certificates. @@ -689,7 +911,11 @@ async def get_certificates(self, params=None, headers=None): ) @query_params() - async def reload_transport_certificates(self, params=None, headers=None): + async def reload_transport_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Reload transport layer communication certificates. @@ -702,7 +928,11 @@ async def reload_transport_certificates(self, params=None, headers=None): ) @query_params() - async def reload_http_certificates(self, params=None, headers=None): + async def reload_http_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Reload HTTP layer communication certificates. @@ -715,7 +945,11 @@ async def reload_http_certificates(self, params=None, headers=None): ) @query_params() - async def flush_cache(self, params=None, headers=None): + async def flush_cache( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. 
@@ -725,7 +959,11 @@ async def flush_cache(self, params=None, headers=None): ) @query_params() - async def health(self, params=None, headers=None): + async def health( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. @@ -735,7 +973,11 @@ async def health(self, params=None, headers=None): ) @query_params() - async def get_audit_configuration(self, params=None, headers=None): + async def get_audit_configuration( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves the audit configuration. @@ -745,7 +987,12 @@ async def get_audit_configuration(self, params=None, headers=None): ) @query_params() - async def update_audit_configuration(self, body, params=None, headers=None): + async def update_audit_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the audit configuration. @@ -763,7 +1010,12 @@ async def update_audit_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_audit_configuration(self, body, params=None, headers=None): + async def patch_audit_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. @@ -781,7 +1033,12 @@ async def patch_audit_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_distinguished_names(self, body, params=None, headers=None): + async def patch_distinguished_names( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Bulk update of distinguished names. diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi deleted file mode 100644 index b3010b3b..00000000 --- a/opensearchpy/_async/client/security.pyi +++ /dev/null @@ -1,821 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SecurityClient(NamespacedClient): - async def get_account_details( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def change_password( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_action_groups( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_action_groups( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_users( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_users( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
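The user-management signatures above keep the same calling convention once the annotations move inline into security.py. A minimal usage sketch, assuming a local cluster with the security plugin and admin/admin credentials; all host, user, and payload values are illustrative, not part of this patch:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        # Assumed local test cluster; adjust hosts/credentials for your setup.
        client = AsyncOpenSearch(
            hosts=[{"host": "localhost", "port": 9200}],
            http_auth=("admin", "admin"),
            use_ssl=True,
            verify_certs=False,
        )
        try:
            # create_user(username, body=...) follows the security plugin's user API.
            await client.security.create_user(
                "example-user",
                body={"password": "Example-Password-1", "backend_roles": ["readall"]},
            )
            print(await client.security.get_user("example-user"))
            # patch_user takes a JSON-Patch style body.
            await client.security.patch_user(
                "example-user",
                body=[{"op": "replace", "path": "/backend_roles", "value": ["readall"]}],
            )
            await client.security.delete_user("example-user")
        finally:
            await client.close()

    asyncio.run(main())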
- async def get_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_roles( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def patch_roles( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_role_mappings( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
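The role and role-mapping stubs map one-to-one onto the security plugin's REST API, so the typed methods are exercised the same way. A hedged sketch under the same local-cluster assumption; the role name and permission payload are illustrative only:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(
            hosts=[{"host": "localhost", "port": 9200}],
            http_auth=("admin", "admin"),
            use_ssl=True,
            verify_certs=False,
        )
        try:
            # create_role(role, body=...): cluster/index permissions per the security plugin.
            await client.security.create_role(
                "logs-reader",
                body={
                    "cluster_permissions": ["cluster_composite_ops_ro"],
                    "index_permissions": [
                        {"index_patterns": ["logs-*"], "allowed_actions": ["read"]}
                    ],
                },
            )
            # create_role_mapping(role, body=...): attach backend roles and users to the role.
            await client.security.create_role_mapping(
                "logs-reader",
                body={"backend_roles": ["log-readers"], "users": ["example-user"]},
            )
            print(await client.security.get_role("logs-reader"))
        finally:
            await client.close()

    asyncio.run(main())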
- async def patch_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_role_mappings( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_tenants( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_tenants( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
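The tenant and configuration endpoints follow the same pattern; since update_configuration replaces the whole security configuration, the configuration is only read here. A short sketch, again assuming a local secured cluster and an illustrative tenant name:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(
            hosts=[{"host": "localhost", "port": 9200}],
            http_auth=("admin", "admin"),
            use_ssl=True,
            verify_certs=False,
        )
        try:
            # create_tenant(tenant, body=...): the body carries only a description.
            await client.security.create_tenant(
                "analytics", body={"description": "Shared analytics tenant"}
            )
            print(await client.security.get_tenants())
            # Treat the security configuration as read-only in this sketch.
            config = await client.security.get_configuration()
            print(sorted(config))
        finally:
            await client.close()

    asyncio.run(main())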
- async def patch_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_distinguished_names( - self, - *, - cluster_name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_distinguished_names( - self, - cluster_name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_distinguished_names( - self, - cluster_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def reload_transport_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reload_http_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def flush_cache( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def health( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_audit_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
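The operational endpoints (health, certificate info, audit configuration, cache flush, certificate reloads) take no positional arguments, which the stubs above make explicit. A minimal sketch; the reload calls are left commented out because they only succeed on nodes configured to allow hot certificate reloads:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(
            hosts=[{"host": "localhost", "port": 9200}],
            http_auth=("admin", "admin"),
            use_ssl=True,
            verify_certs=False,
        )
        try:
            print(await client.security.health())
            print(await client.security.get_certificates())
            print(await client.security.get_audit_configuration())
            await client.security.flush_cache()
            # Only valid when the node allows hot certificate reloads:
            # await client.security.reload_http_certificates()
            # await client.security.reload_transport_certificates()
        finally:
            await client.close()

    asyncio.run(main())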
- async def update_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_distinguished_names( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index f9960b64..212b778d 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -36,12 +36,21 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - async def create(self, repository, snapshot, body=None, params=None, headers=None): + async def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates a snapshot in a repository. @@ -70,7 +79,13 @@ async def create(self, repository, snapshot, body=None, params=None, headers=Non ) @query_params("cluster_manager_timeout", "master_timeout") - async def delete(self, repository, snapshot, params=None, headers=None): + async def delete( + self, + repository: Any, + snapshot: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a snapshot. 
@@ -97,7 +112,13 @@ async def delete(self, repository, snapshot, params=None, headers=None): @query_params( "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - async def get(self, repository, snapshot, params=None, headers=None): + async def get( + self, + repository: Any, + snapshot: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a snapshot. @@ -127,7 +148,12 @@ async def get(self, repository, snapshot, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_repository(self, repository, params=None, headers=None): + async def delete_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a repository. @@ -152,7 +178,12 @@ async def delete_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def get_repository(self, repository=None, params=None, headers=None): + async def get_repository( + self, + repository: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a repository. @@ -171,7 +202,13 @@ async def get_repository(self, repository=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") - async def create_repository(self, repository, body, params=None, headers=None): + async def create_repository( + self, + repository: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates a repository. @@ -199,7 +236,14 @@ async def create_repository(self, repository, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - async def restore(self, repository, snapshot, body=None, params=None, headers=None): + async def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Restores a snapshot. @@ -228,7 +272,13 @@ async def restore(self, repository, snapshot, body=None, params=None, headers=No ) @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") - async def status(self, repository=None, snapshot=None, params=None, headers=None): + async def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the status of a snapshot. @@ -252,7 +302,12 @@ async def status(self, repository=None, snapshot=None, params=None, headers=None ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def verify_repository(self, repository, params=None, headers=None): + async def verify_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Verifies a repository. 
@@ -276,7 +331,12 @@ async def verify_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def cleanup_repository(self, repository, params=None, headers=None): + async def cleanup_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Removes stale data from repository. @@ -301,8 +361,14 @@ async def cleanup_repository(self, repository, params=None, headers=None): @query_params("cluster_manager_timeout", "master_timeout") async def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. diff --git a/opensearchpy/_async/client/snapshot.pyi b/opensearchpy/_async/client/snapshot.pyi deleted file mode 100644 index b219a323..00000000 --- a/opensearchpy/_async/client/snapshot.pyi +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - async def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_repository( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_repository( - self, - repository: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def verify_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cleanup_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
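Taken together, the typed SnapshotClient methods cover the usual register, snapshot, and restore cycle. A minimal sketch, assuming a cluster whose path.repo permits a filesystem repository at /mnt/snapshots; the repository, snapshot, and index names are illustrative:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        try:
            # create_repository(repository, body): body is the repository definition.
            await client.snapshot.create_repository(
                "backups", body={"type": "fs", "settings": {"location": "/mnt/snapshots"}}
            )
            # create(repository, snapshot, body=None): wait_for_completion is declared via
            # @query_params, so it is passed as a keyword argument.
            await client.snapshot.create(
                "backups",
                "snapshot-1",
                body={"indices": "logs-*", "include_global_state": False},
                wait_for_completion=True,
            )
            print(await client.snapshot.get("backups", "snapshot-1"))
            print(await client.snapshot.status(repository="backups", snapshot="snapshot-1"))
            # restore(repository, snapshot, body=None) mirrors create:
            # await client.snapshot.restore("backups", "snapshot-1", body={"indices": "logs-*"})
        finally:
            await client.close()

    asyncio.run(main())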
diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 7efce482..f6c8701f 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -37,6 +37,7 @@ import warnings +from typing import Any, MutableMapping, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -51,7 +52,11 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - async def list(self, params=None, headers=None): + async def list( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of tasks. @@ -77,7 +82,12 @@ async def list(self, params=None, headers=None): ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - async def cancel(self, task_id=None, params=None, headers=None): + async def cancel( + self, + task_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -103,7 +113,12 @@ async def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - async def get(self, task_id=None, params=None, headers=None): + async def get( + self, + task_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/_async/client/tasks.pyi b/opensearchpy/_async/client/tasks.pyi deleted file mode 100644 index f3cf05d0..00000000 --- a/opensearchpy/_async/client/tasks.pyi +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - async def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
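The TasksClient changes follow the same pattern: the path parameter task_id becomes an explicit optional, and the query parameters declared via @query_params remain keyword arguments at the call site. A short sketch; the commented task id is a placeholder for a real "node_id:task_number" value, such as one returned by a reindex submitted with wait_for_completion=False:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        try:
            # list(): query params such as detailed/actions are passed as keywords.
            tasks = await client.tasks.list(detailed=True, actions="*reindex")
            print(tasks.get("nodes", {}))
            # get(task_id=...) and cancel(task_id=...) address a single task:
            # await client.tasks.get(task_id="<node_id>:<task_number>")
            # await client.tasks.cancel(task_id="<node_id>:<task_number>")
        finally:
            await client.close()

    asyncio.run(main())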
diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py index 59bedb8e..45ad552b 100644 --- a/opensearchpy/_async/client/utils.py +++ b/opensearchpy/_async/client/utils.py @@ -35,3 +35,13 @@ _normalize_hosts, query_params, ) + +__all__ = [ + "SKIP_IN_PATH", + "NamespacedClient", + "_make_path", + "query_params", + "_bulk_body", + "_escape", + "_normalize_hosts", +] diff --git a/opensearchpy/_async/client/utils.pyi b/opensearchpy/_async/client/utils.pyi deleted file mode 100644 index e175d5e2..00000000 --- a/opensearchpy/_async/client/utils.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ...client.utils import SKIP_IN_PATH as SKIP_IN_PATH -from ...client.utils import _bulk_body as _bulk_body -from ...client.utils import _escape as _escape -from ...client.utils import _make_path as _make_path # noqa -from ...client.utils import _normalize_hosts as _normalize_hosts -from ...client.utils import query_params as query_params -from ..client import AsyncOpenSearch -from ..transport import AsyncTransport - -class NamespacedClient: - client: AsyncOpenSearch - def __init__(self, client: AsyncOpenSearch) -> None: ... - @property - def transport(self) -> AsyncTransport: ... diff --git a/opensearchpy/_async/compat.py b/opensearchpy/_async/compat.py index 66c2eca8..2ba1b980 100644 --- a/opensearchpy/_async/compat.py +++ b/opensearchpy/_async/compat.py @@ -39,7 +39,7 @@ from asyncio import get_running_loop except ImportError: - def get_running_loop(): + def get_running_loop() -> asyncio.AbstractEventLoop: loop = asyncio.get_event_loop() if not loop.is_running(): raise RuntimeError("no running event loop") diff --git a/opensearchpy/_async/compat.pyi b/opensearchpy/_async/compat.pyi deleted file mode 100644 index 290396de..00000000 --- a/opensearchpy/_async/compat.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import asyncio - -def get_running_loop() -> asyncio.AbstractEventLoop: ... diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 1f49220f..94eade34 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -32,6 +32,18 @@ import asyncio import logging +from typing import ( + Any, + AsyncGenerator, + AsyncIterable, + Collection, + Iterable, + List, + Optional, + Tuple, + TypeVar, + Union, +) from ...compat import map from ...exceptions import TransportError @@ -43,10 +55,16 @@ ) from ...helpers.errors import ScanError -logger = logging.getLogger("opensearchpy.helpers") +# from opensearchpy._async.client import Any +# from opensearchpy.serializer import Serializer + + +logger: logging.Logger = logging.getLogger("opensearchpy.helpers") -async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +async def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> AsyncGenerator[Any, None]: """ Split actions into chunks by number or size, serialize them into strings in the process. @@ -64,15 +82,15 @@ async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): async def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Send a bulk request to opensearch and process the output. 
""" @@ -101,21 +119,26 @@ async def _process_bulk_chunk( yield item -def aiter(x): +T = TypeVar("T") + + +def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> Any: """Turns an async iterable or iterable into an async iterator""" if hasattr(x, "__anext__"): return x elif hasattr(x, "__aiter__"): return x.__aiter__() - async def f(): + async def f() -> Any: for item in x: yield item return f().__aiter__() -async def azip(*iterables): +async def azip( + *iterables: Union[Iterable[T], AsyncIterable[T]] +) -> AsyncGenerator[Tuple[T, ...], None]: """Zips async iterables and iterables into an async iterator with the same behavior as zip() """ @@ -128,21 +151,21 @@ async def azip(*iterables): async def async_streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: Union[float, int] = 2, + max_backoff: Union[float, int] = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use @@ -156,7 +179,7 @@ async def async_streaming_bulk( every subsequent rejection for the same chunk, for double the time every time up to ``max_backoff`` seconds. - :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use + :arg client: instance of :class:`~opensearchpy.Any` to use :arg actions: iterable or async iterable containing the actions to be executed :arg chunk_size: number of docs in one chunk sent to client (default: 500) :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB) @@ -177,7 +200,7 @@ async def async_streaming_bulk( :arg ignore_status: list of HTTP status code that you want to ignore """ - async def map_actions(): + async def map_actions() -> Any: async for item in aiter(actions): yield expand_action_callback(item) @@ -185,7 +208,8 @@ async def map_actions(): map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: await asyncio.sleep( min(max_backoff, initial_backoff * 2 ** (attempt - 1)) @@ -237,10 +261,15 @@ async def map_actions(): async def async_bulk( - client, actions, stats_only=False, ignore_status=(), *args, **kwargs -): + client: Any, + actions: Union[Iterable[Any], AsyncIterable[Any]], + stats_only: bool = False, + ignore_status: Optional[Union[int, Collection[int]]] = (), + *args: Any, + **kwargs: Any +) -> Tuple[int, Union[int, List[Any]]]: """ - Helper for the :meth:`~opensearchpy.AsyncOpenSearch.bulk` api that provides + Helper for the :meth:`~opensearchpy.Any.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and sends them to opensearch in chunks. It returns a tuple with summary information - number of successfully executed actions and either list of @@ -256,7 +285,7 @@ async def async_bulk( just return the errors and not store them in memory. 
- :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use + :arg client: instance of :class:`~opensearchpy.Any` to use :arg actions: iterator containing the actions :arg stats_only: if `True` only report number of successful/failed operations instead of just number of successful and a list of error responses @@ -275,7 +304,7 @@ async def async_bulk( # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True async for ok, item in async_streaming_bulk( - client, actions, ignore_status=ignore_status, *args, **kwargs + client, actions, ignore_status=ignore_status, *args, **kwargs # type: ignore ): # go through request-response pairs and detect failures if not ok: @@ -289,20 +318,20 @@ async def async_bulk( async def async_scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: str = "5m", + raise_on_error: bool = True, + preserve_order: bool = False, + size: int = 1000, + request_timeout: Any = None, + clear_scroll: bool = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> AsyncGenerator[dict[str, Any], None]: """ Simple abstraction on top of the - :meth:`~opensearchpy.AsyncOpenSearch.scroll` api - a simple iterator that + :meth:`~opensearchpy.Any.scroll` api - a simple iterator that yields all hits as returned by underlining scroll requests. By default scan does not return results in any pre-determined order. To @@ -311,8 +340,8 @@ async def async_scan( may be an expensive operation and will negate the performance benefits of using ``scan``. - :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use - :arg query: body for the :meth:`~opensearchpy.AsyncOpenSearch.search` api + :arg client: instance of :class:`~opensearchpy.Any` to use + :arg query: body for the :meth:`~opensearchpy.Any.search` api :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg raise_on_error: raises an exception (``ScanError``) if an error is @@ -327,10 +356,10 @@ async def async_scan( scroll API at the end of the method on completion or error, defaults to true. :arg scroll_kwargs: additional kwargs to be passed to - :meth:`~opensearchpy.AsyncOpenSearch.scroll` + :meth:`~opensearchpy.Any.scroll` Any additional keyword arguments will be passed to the initial - :meth:`~opensearchpy.AsyncOpenSearch.search` call:: + :meth:`~opensearchpy.Any.search` call:: async_scan(client, query={"query": {"match": {"title": "python"}}}, @@ -409,22 +438,22 @@ async def async_scan( async def async_reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Union[str, Collection[str]], + target_index: str, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Tuple[int, Union[int, List[Any]]]: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. If you don't specify the query you will reindex all the documents. - Since ``2.3`` a :meth:`~opensearchpy.AsyncOpenSearch.reindex` api is + Since ``2.3`` a :meth:`~opensearchpy.Any.reindex` api is available as part of opensearch itself. 
     It is recommended to use the api
     instead of this helper wherever possible. The helper is here mostly for
     backwards compatibility and for situations where more flexibility is
@@ -434,11 +463,11 @@ async def async_reindex(

     This helper doesn't transfer mappings, just the data.

-    :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use (for
+    :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use (for
         read if `target_client` is specified as well)
     :arg source_index: index (or list of indices) to read documents from
     :arg target_index: name of the index in the target cluster to populate
-    :arg query: body for the :meth:`~opensearchpy.AsyncOpenSearch.search` api
+    :arg query: body for the :meth:`~opensearchpy.AsyncOpenSearch.search` api
     :arg target_client: optional, is specified will be used for writing (thus
         enabling reindex between clusters)
     :arg chunk_size: number of docs in one chunk sent to client (default: 500)
@@ -454,7 +483,7 @@ async def async_reindex(
         client, query=query, index=source_index, scroll=scroll, **scan_kwargs
     )

-    async def _change_doc_index(hits, index):
+    async def _change_doc_index(hits: Any, index: Any) -> Any:
         async for h in hits:
             h["_index"] = index
             if "fields" in h:
diff --git a/opensearchpy/_async/helpers/actions.pyi b/opensearchpy/_async/helpers/actions.pyi
deleted file mode 100644
index 20cc0661..00000000
--- a/opensearchpy/_async/helpers/actions.pyi
+++ /dev/null
@@ -1,115 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import logging
-from typing import (
-    Any,
-    AsyncGenerator,
-    AsyncIterable,
-    Callable,
-    Collection,
-    Dict,
-    Iterable,
-    List,
-    Mapping,
-    Optional,
-    Tuple,
-    TypeVar,
-    Union,
-)
-
-from ...serializer import Serializer
-from ..client import AsyncOpenSearch
-
-logger: logging.Logger
-
-T = TypeVar("T")
-
-def _chunk_actions(
-    actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer
-) -> AsyncGenerator[Any, None]: ...
-def _process_bulk_chunk(
-    client: AsyncOpenSearch,
-    bulk_actions: Any,
-    bulk_data: Any,
-    raise_on_exception: bool = ...,
-    raise_on_error: bool = ...,
-    ignore_status: Optional[Union[int, Collection[int]]] = ...,
-    *args: Any,
-    **kwargs: Any
-) -> AsyncGenerator[Tuple[bool, Any], None]: ...
-def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ...
-def azip(
-    *iterables: Union[Iterable[T], AsyncIterable[T]]
-) -> AsyncGenerator[Tuple[T, ...], None]: ...
-def async_streaming_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> AsyncGenerator[Tuple[bool, Any], None]: ... -async def async_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - stats_only: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, Union[int, List[Any]]]: ... -def async_scan( - client: AsyncOpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> AsyncGenerator[dict[str, Any], None]: ... -async def async_reindex( - client: AsyncOpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[AsyncOpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index e71bef46..18612187 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -8,15 +8,15 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from __future__ import annotations +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Optional, Sequence, Tuple, Type from six import add_metaclass +from opensearchpy._async.client import AsyncOpenSearch from opensearchpy._async.helpers.index import AsyncIndex from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy.connection.async_connections import get_connection @@ -35,7 +35,12 @@ class AsyncIndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(AsyncIndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -46,7 +51,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -72,25 +77,27 @@ class AsyncDocument(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> bool: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - async def _get_connection(cls, using=None): + async def _get_connection(cls, using: Optional[AsyncOpenSearch] = None) -> Any: return await get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return index or cls._index._name @classmethod - async def init(cls, index=None, using=None): + async def init( + cls: Any, index: Optional[str] = None, using: Optional[AsyncOpenSearch] = None + ) -> None: """ Create the index and populate the mappings in opensearch. """ @@ -99,7 +106,9 @@ async def init(cls, index=None, using=None): i = i.clone(name=index) await i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index( + self, index: Optional[str] = None, required: Optional[bool] = True + ) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -110,7 +119,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -121,7 +130,9 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search( + cls, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None + ) -> AsyncSearch: """ Create an :class:`~opensearchpy.AsyncSearch` instance that will search over this ``Document``. @@ -131,7 +142,13 @@ def search(cls, using=None, index=None): ) @classmethod - async def get(cls, id, using=None, index=None, **kwargs): + async def get( # type: ignore + cls, + id: str, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any + ) -> Any: """ Retrieve a single document from opensearch using its ``id``. 
@@ -150,7 +167,13 @@ async def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - async def exists(cls, id, using=None, index=None, **kwargs): + async def exists( + cls, + id: str, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any + ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -167,13 +190,19 @@ async def exists(cls, id, using=None, index=None, **kwargs): @classmethod async def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Sequence[str], + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + raise_on_error: Optional[bool] = True, + missing: Optional[str] = "none", + **kwargs: Any + ) -> Any: + """ + Retrieve multiple document by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -197,7 +226,9 @@ async def mget( } results = await opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -230,7 +261,12 @@ async def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - async def delete(self, using=None, index=None, **kwargs): + async def delete( + self, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any + ) -> Any: """ Delete the instance in opensearch. @@ -253,7 +289,9 @@ async def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) await opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict( # type: ignore + self, include_meta: Optional[bool] = False, skip_empty: Optional[bool] = True + ) -> Any: """ Serialize the instance into a dictionary so that it can be saved in opensearch. @@ -264,7 +302,7 @@ def to_dict(self, include_meta=False, skip_empty=True): ``[]``, ``{}``) to be left on the document. Those values will be stripped out otherwise as they make no difference in opensearch. 
""" - d = super(AsyncDocument, self).to_dict(skip_empty=skip_empty) + d = super(AsyncDocument, self).to_dict(skip_empty) if not include_meta: return d @@ -280,19 +318,19 @@ def to_dict(self, include_meta=False, skip_empty=True): async def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + detect_noop: Optional[bool] = True, + doc_as_upsert: Optional[bool] = False, + refresh: Optional[bool] = False, + retry_on_conflict: Optional[bool] = None, + script: Any = None, + script_id: Optional[str] = None, + scripted_upsert: Optional[bool] = False, + upsert: Optional[bool] = None, + return_doc_meta: Optional[bool] = False, + **fields: Any + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -321,7 +359,7 @@ async def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -385,13 +423,13 @@ async def update( async def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + validate: Optional[bool] = True, + skip_empty: Optional[bool] = True, + return_doc_meta: Optional[bool] = False, + **kwargs: Any + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this diff --git a/opensearchpy/_async/helpers/document.pyi b/opensearchpy/_async/helpers/document.pyi deleted file mode 100644 index f39d5471..00000000 --- a/opensearchpy/_async/helpers/document.pyi +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.document import DocumentMeta -from opensearchpy.helpers.utils import ObjectBase - -class AsyncIndexMeta(DocumentMeta): ... -class AsyncDocument(ObjectBase): ... diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py index 86f22e00..1eb5a677 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -9,6 +9,8 @@ # GitHub history for details. 
+from typing import Any + from six import iteritems, itervalues from opensearchpy._async.helpers.search import AsyncSearch @@ -58,38 +60,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} - using = "default" + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} + using: str = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.AsyncSearch` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - async def count(self): + async def count(self) -> Any: return await self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> None: """ Add a filter for a facet. """ @@ -111,7 +113,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -121,7 +123,7 @@ def search(self): s = AsyncSearch(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. @@ -134,7 +136,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. @@ -150,7 +152,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -163,7 +165,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -171,7 +173,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -179,7 +181,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``AsyncSearch`` object. """ @@ -192,7 +194,7 @@ def build_search(self): self.aggregate(s) return s - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return the response. 
""" diff --git a/opensearchpy/_async/helpers/faceted_search.pyi b/opensearchpy/_async/helpers/faceted_search.pyi deleted file mode 100644 index 0e79f1f6..00000000 --- a/opensearchpy/_async/helpers/faceted_search.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncFacetedSearch(object): ... diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index 51082dc6..ea06f316 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy._async.helpers.mapping import AsyncMapping from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery @@ -18,7 +20,14 @@ class AsyncIndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = AsyncIndex(template, **kwargs) else: @@ -32,17 +41,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: opensearch = await get_connection(using or self._index._using) return await opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -50,25 +59,27 @@ async def save(self, using=None): class AsyncIndex(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: str = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = AsyncMapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # AsyncDocument._index be that? 
@@ -76,7 +87,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -85,7 +96,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -94,12 +105,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - async def load_mappings(self, using=None): + async def load_mappings(self, using: Any = None) -> None: await self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. Useful for creating multiple indices with shared configuration:: @@ -123,14 +134,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - async def _get_connection(self, using=None): + async def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return await get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> None: """ Associate a mapping (an instance of :class:`~opensearchpy.AsyncMapping`) with this index. @@ -139,7 +150,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.AsyncDocument` subclass with an index. This means that, when this index is created, it will contain the @@ -170,7 +181,7 @@ class Post(AsyncDocument): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> "AsyncIndex": """ Add settings to the index:: @@ -183,7 +194,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> "AsyncIndex": """ Add aliases to the index definition:: @@ -193,7 +204,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. 
@@ -220,14 +231,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -239,7 +250,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncSearch` object searching over the index (or all the indices belonging to this template) and its @@ -249,7 +260,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncUpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -263,7 +274,7 @@ def updateByQuery(self, using=None): index=self._name, ) - async def create(self, using=None, **kwargs): + async def create(self, using: Any = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. @@ -274,13 +285,13 @@ async def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - async def is_closed(self, using=None): + async def is_closed(self, using: Any = None) -> Any: state = await (await self._get_connection(using)).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -334,7 +345,7 @@ async def save(self, using=None): if mappings: await self.put_mapping(using=using, body=mappings) - async def analyze(self, using=None, **kwargs): + async def analyze(self, using: Any = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -346,7 +357,7 @@ async def analyze(self, using=None, **kwargs): index=self._name, **kwargs ) - async def refresh(self, using=None, **kwargs): + async def refresh(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -357,7 +368,7 @@ async def refresh(self, using=None, **kwargs): index=self._name, **kwargs ) - async def flush(self, using=None, **kwargs): + async def flush(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -368,7 +379,7 @@ async def flush(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get(self, using=None, **kwargs): + async def get(self, using: Any = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -379,7 +390,7 @@ async def get(self, using=None, **kwargs): index=self._name, **kwargs ) - async def open(self, using=None, **kwargs): + async def open(self, using: Any = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. 
@@ -390,7 +401,7 @@ async def open(self, using=None, **kwargs): index=self._name, **kwargs ) - async def close(self, using=None, **kwargs): + async def close(self, using: Any = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -401,7 +412,7 @@ async def close(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete(self, using=None, **kwargs): + async def delete(self, using: Any = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -412,7 +423,7 @@ async def delete(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists(self, using=None, **kwargs): + async def exists(self, using: Any = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -423,7 +434,7 @@ async def exists(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_mapping(self, using=None, **kwargs): + async def put_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. @@ -434,7 +445,7 @@ async def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_mapping(self, using=None, **kwargs): + async def get_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -445,7 +456,7 @@ async def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_field_mapping(self, using=None, **kwargs): + async def get_field_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve mapping definition of a specific field. @@ -456,7 +467,7 @@ async def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_alias(self, using=None, **kwargs): + async def put_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -467,7 +478,7 @@ async def put_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists_alias(self, using=None, **kwargs): + async def exists_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -478,7 +489,7 @@ async def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_alias(self, using=None, **kwargs): + async def get_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -489,7 +500,7 @@ async def get_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete_alias(self, using=None, **kwargs): + async def delete_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -500,7 +511,7 @@ async def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_settings(self, using=None, **kwargs): + async def get_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -511,7 +522,7 @@ async def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_settings(self, using=None, **kwargs): + async def put_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -522,7 +533,7 @@ async def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def stats(self, using=None, **kwargs): + async def stats(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. 
@@ -533,7 +544,7 @@ async def stats(self, using=None, **kwargs): index=self._name, **kwargs ) - async def segments(self, using=None, **kwargs): + async def segments(self, using: Any = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -545,7 +556,7 @@ async def segments(self, using=None, **kwargs): index=self._name, **kwargs ) - async def validate_query(self, using=None, **kwargs): + async def validate_query(self, using: Any = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. @@ -556,7 +567,7 @@ async def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - async def clear_cache(self, using=None, **kwargs): + async def clear_cache(self, using: Any = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -567,7 +578,7 @@ async def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - async def recovery(self, using=None, **kwargs): + async def recovery(self, using: Any = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -579,7 +590,7 @@ async def recovery(self, using=None, **kwargs): index=self._name, **kwargs ) - async def upgrade(self, using=None, **kwargs): + async def upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -590,7 +601,7 @@ async def upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_upgrade(self, using=None, **kwargs): + async def get_upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -601,7 +612,7 @@ async def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shard_stores(self, using=None, **kwargs): + async def shard_stores(self, using: Any = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -615,7 +626,7 @@ async def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - async def forcemerge(self, using=None, **kwargs): + async def forcemerge(self, using: Any = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -633,7 +644,7 @@ async def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shrink(self, using=None, **kwargs): + async def shrink(self, using: Any = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/_async/helpers/index.pyi b/opensearchpy/_async/helpers/index.pyi deleted file mode 100644 index 6a89f0d1..00000000 --- a/opensearchpy/_async/helpers/index.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncIndexTemplate(object): ... -class AsyncIndex(object): ... 
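For context, a minimal sketch of how the AsyncIndex API annotated above can be exercised. This is illustrative only and not part of the patch: it assumes an async ``default`` connection has already been registered via ``opensearchpy.connection.async_connections``, and the ``blogs`` index name is a placeholder::

    import asyncio

    from opensearchpy._async.helpers.index import AsyncIndex


    async def main() -> None:
        # settings()/aliases() return the AsyncIndex itself, so calls can chain
        i = AsyncIndex("blogs", using="default").settings(
            number_of_shards=1, number_of_replicas=0
        )
        if not await i.exists():
            # create() builds the request body from to_dict(): settings, aliases, mappings
            await i.create()
        print(await i.get_settings())


    asyncio.run(main())

The type annotations do not change this flow; they only make the fluent return types ("AsyncIndex") and awaitable results explicit.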
diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index 967c74c8..c89bf70a 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -8,12 +8,11 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from __future__ import annotations +import collections.abc as collections_abc from itertools import chain +from typing import Any, Optional from six import iteritems @@ -23,25 +22,28 @@ class AsyncMapping(object): - def __init__(self): + _meta: Any + properties: Properties + + def __init__(self) -> None: self.properties = Properties() self._meta = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = AsyncMapping() m.properties._params = self.properties._params.copy() return m @classmethod - async def from_opensearch(cls, index, using="default"): + async def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() await m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: str) -> Any: field = self nested = [] parts = field_path.split(".") @@ -54,18 +56,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Optional[AsyncMapping]: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -91,20 +93,20 @@ def _collect_analysis(self): return analysis - async def save(self, index, using="default"): + async def save(self, index: Any, using: str = "default") -> Any: from opensearchpy._async.helpers.index import AsyncIndex index = AsyncIndex(index, using=using) index.mapping(self) return await index.save() - async def update_from_opensearch(self, index, using="default"): + async def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = await get_connection(using) raw = await opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -116,7 +118,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -133,20 +135,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> 
"AsyncMapping": self.properties.field(*args, **kwargs) return self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "AsyncMapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -156,7 +158,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/_async/helpers/mapping.pyi b/opensearchpy/_async/helpers/mapping.pyi deleted file mode 100644 index 91b8d64b..00000000 --- a/opensearchpy/_async/helpers/mapping.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncMapping(object): ... diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index 73c52971..d844ba29 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -9,6 +9,7 @@ # GitHub history for details. import copy +from typing import Any, Sequence from six import iteritems, string_types @@ -26,7 +27,7 @@ class AsyncSearch(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. @@ -40,24 +41,24 @@ def __init__(self, **kwargs): super(AsyncSearch, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} - self._response_class = Response + self._sort: Sequence[Any] = [] + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} + self._response_class: Any = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `AsyncSearch` instance for pagination. @@ -92,7 +93,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncSearch` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -113,7 +114,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -136,7 +137,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. 
""" @@ -144,7 +145,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncSearch": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -179,7 +180,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -205,7 +206,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -250,7 +251,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -283,7 +284,7 @@ def sort(self, *keys): s._sort.append(k) return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -295,7 +296,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -335,7 +336,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: str, text: str, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -352,7 +353,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -396,7 +397,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def count(self): + async def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. @@ -412,7 +413,7 @@ async def count(self): "count" ] - async def execute(self, ignore_cache=False): + async def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -431,7 +432,7 @@ async def execute(self, ignore_cache=False): ) return self._response - async def scan(self): + async def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -449,7 +450,7 @@ async def scan(self): ): yield self._get_result(hit) - async def delete(self): + async def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -469,22 +470,22 @@ class AsyncMultiSearch(Request): request. 
""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(AsyncMultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(AsyncMultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.AsyncSearch` object to the request:: @@ -496,7 +497,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -509,7 +510,9 @@ def to_dict(self): return out - async def execute(self, ignore_cache=False, raise_on_error=True): + async def execute( + self, ignore_cache: bool = False, raise_on_error: bool = True + ) -> Any: """ Execute the multi search request and return a list of search results. """ diff --git a/opensearchpy/_async/helpers/search.pyi b/opensearchpy/_async/helpers/search.pyi deleted file mode 100644 index 3413c889..00000000 --- a/opensearchpy/_async/helpers/search.pyi +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.search import Request - -class AsyncSearch(Request): ... -class AsyncMultiSearch(Request): ... diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py index 895ae991..c0a261c8 100644 --- a/opensearchpy/_async/helpers/test.py +++ b/opensearchpy/_async/helpers/test.py @@ -10,18 +10,22 @@ import os import time +from typing import Any from unittest import SkipTest +from _typeshed import Incomplete + from opensearchpy import AsyncOpenSearch from opensearchpy.exceptions import ConnectionError +OPENSEARCH_URL: Incomplete if "OPENSEARCH_URL" in os.environ: OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] else: OPENSEARCH_URL = "https://admin:admin@localhost:9200" -async def get_test_client(nowait=False, **kwargs): +async def get_test_client(nowait: bool = False, **kwargs: Any) -> Any: # construct kwargs from the environment kw = {"timeout": 30} @@ -32,7 +36,7 @@ async def get_test_client(nowait=False, **kwargs): kw["connection_class"] = getattr(async_connection, "AIOHttpConnection") kw.update(kwargs) - client = AsyncOpenSearch(OPENSEARCH_URL, **kw) + client = AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(1 if nowait else 100): diff --git a/opensearchpy/_async/helpers/test.pyi b/opensearchpy/_async/helpers/test.pyi deleted file mode 100644 index 497d8caf..00000000 --- a/opensearchpy/_async/helpers/test.pyi +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- -from typing import Any - -from _typeshed import Incomplete - -from opensearchpy import AsyncOpenSearch as AsyncOpenSearch -from opensearchpy.exceptions import ConnectionError as ConnectionError - -OPENSEARCH_URL: Incomplete - -async def get_test_client(nowait: bool = ..., **kwargs: Any) -> Any: ... diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py index fc9eef54..aeb8e3d2 100644 --- a/opensearchpy/_async/helpers/update_by_query.py +++ b/opensearchpy/_async/helpers/update_by_query.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy.connection.async_connections import get_connection from opensearchpy.helpers.query import Bool, Q from opensearchpy.helpers.response import UpdateByQueryResponse @@ -18,7 +20,7 @@ class AsyncUpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -32,17 +34,17 @@ def __init__(self, **kwargs): """ super(AsyncUpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncUpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -63,7 +65,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -76,7 +78,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -84,7 +86,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncUpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -97,7 +99,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -118,7 +120,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request'ubq body. @@ -136,7 +138,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. 
diff --git a/opensearchpy/_async/helpers/update_by_query.pyi b/opensearchpy/_async/helpers/update_by_query.pyi deleted file mode 100644 index 57d692c6..00000000 --- a/opensearchpy/_async/helpers/update_by_query.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.search import Request - -class AsyncUpdateByQuery(Request): ... diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index cab7782e..9d12066d 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -31,7 +31,12 @@ import ssl import warnings -import urllib3 # type: ignore +# sentinel value for `verify_certs`. +# This is used to detect if a user is passing in a value +# for SSL kwargs if also using an SSLContext. +from typing import Any, Collection, Mapping, Optional, Union + +import urllib3 from ..compat import reraise_exceptions, urlencode from ..connection.base import Connection @@ -41,12 +46,9 @@ ImproperlyConfigured, SSLError, ) -from ._extra_imports import aiohttp, aiohttp_exceptions, yarl +from ._extra_imports import aiohttp, aiohttp_exceptions, yarl # type: ignore from .compat import get_running_loop -# sentinel value for `verify_certs`. -# This is used to detect if a user is passing in a value -# for SSL kwargs if also using an SSLContext. VERIFY_CERTS_DEFAULT = object() SSL_SHOW_WARN_DEFAULT = object() @@ -56,45 +58,48 @@ class AsyncConnection(Connection): async def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() - async def close(self): + async def close(self) -> None: raise NotImplementedError() class AIOHttpConnection(AsyncConnection): + session: Optional[aiohttp.ClientSession] + ssl_assert_fingerprint: Optional[str] + def __init__( self, - host="localhost", - port=None, - url_prefix="", - timeout=10, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - trust_env=False, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + url_prefix: str = "", + timeout: int = 10, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Any = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + trust_env: Optional[bool] = False, + **kwargs: Any + ) -> None: """ Default connection class for ``AsyncOpenSearch`` using the `aiohttp` library and the http protocol. 
@@ -224,8 +229,15 @@ def __init__( self._trust_env = trust_env async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -346,14 +358,14 @@ async def perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection """ if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -374,9 +386,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/_async/http_aiohttp.pyi b/opensearchpy/_async/http_aiohttp.pyi deleted file mode 100644 index d641a5eb..00000000 --- a/opensearchpy/_async/http_aiohttp.pyi +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from asyncio import AbstractEventLoop -from typing import Any, Collection, Mapping, Optional, Tuple, Union - -from ..connection import Connection -from ._extra_imports import aiohttp # type: ignore - -class AsyncConnection(Connection): - async def perform_request( # type: ignore - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... - async def close(self) -> None: ... 
- -class AIOHttpConnection(AsyncConnection): - session: Optional[aiohttp.ClientSession] - ssl_assert_fingerprint: Optional[str] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: int = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Optional[AbstractEventLoop] = ..., - trust_env: bool = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/_async/plugins/__init__.pyi b/opensearchpy/_async/plugins/__init__.pyi deleted file mode 100644 index 22c54ac8..00000000 --- a/opensearchpy/_async/plugins/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/_async/plugins/alerting.py b/opensearchpy/_async/plugins/alerting.py index be79ed02..f1cf3ac9 100644 --- a/opensearchpy/_async/plugins/alerting.py +++ b/opensearchpy/_async/plugins/alerting.py @@ -8,12 +8,19 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any, Union + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - async def search_monitor(self, body, params=None, headers=None): + async def search_monitor( + self, + body: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the search result for a monitor. @@ -28,7 +35,12 @@ async def search_monitor(self, body, params=None, headers=None): ) @query_params() - async def get_monitor(self, monitor_id, params=None, headers=None): + async def get_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific monitor. @@ -42,7 +54,12 @@ async def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - async def run_monitor(self, monitor_id, params=None, headers=None): + async def run_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Runs/Executes a specific monitor. @@ -57,7 +74,12 @@ async def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def create_monitor(self, body=None, params=None, headers=None): + async def create_monitor( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a monitor with inputs, triggers, and actions. 
@@ -72,7 +94,13 @@ async def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - async def update_monitor(self, monitor_id, body=None, params=None, headers=None): + async def update_monitor( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a monitor's inputs, triggers, and actions. @@ -88,7 +116,12 @@ async def update_monitor(self, monitor_id, body=None, params=None, headers=None) ) @query_params() - async def delete_monitor(self, monitor_id, params=None, headers=None): + async def delete_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific monitor. @@ -102,7 +135,12 @@ async def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def get_destination(self, destination_id=None, params=None, headers=None): + async def get_destination( + self, + destination_id: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific destination. @@ -118,7 +156,12 @@ async def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - async def create_destination(self, body=None, params=None, headers=None): + async def create_destination( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a destination for slack, mail, or custom-webhook. @@ -134,8 +177,12 @@ async def create_destination(self, body=None, params=None, headers=None): @query_params() async def update_destination( - self, destination_id, body=None, params=None, headers=None - ): + self, + destination_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a destination's inputs, triggers, and actions. @@ -151,7 +198,12 @@ async def update_destination( ) @query_params() - async def delete_destination(self, destination_id, params=None, headers=None): + async def delete_destination( + self, + destination_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific destination. @@ -165,7 +217,9 @@ async def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - async def get_alerts(self, params=None, headers=None): + async def get_alerts( + self, params: Union[Any, None] = None, headers: Union[Any, None] = None + ) -> Union[bool, Any]: """ Returns all alerts. @@ -178,7 +232,13 @@ async def get_alerts(self, params=None, headers=None): ) @query_params() - async def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + async def acknowledge_alert( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Acknowledges an alert. 
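A minimal usage sketch for the now-typed async AlertingClient, assuming an already configured AsyncOpenSearch client; the import path follows the module shown above and the query body is illustrative:

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.plugins.alerting import AlertingClient


async def find_monitors(client: AsyncOpenSearch) -> None:
    alerting = AlertingClient(client)
    # Illustrative search body; any valid query against the alerting
    # monitors endpoint works here.
    result = await alerting.search_monitor(
        body={"query": {"match": {"monitor.name": "sample-monitor"}}}
    )
    print(result)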
diff --git a/opensearchpy/_async/plugins/alerting.pyi b/opensearchpy/_async/plugins/alerting.pyi deleted file mode 100644 index 7629df93..00000000 --- a/opensearchpy/_async/plugins/alerting.pyi +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def run_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, - destination_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/plugins/index_management.py b/opensearchpy/_async/plugins/index_management.py index ea654bc2..bbca4e2f 100644 --- a/opensearchpy/_async/plugins/index_management.py +++ b/opensearchpy/_async/plugins/index_management.py @@ -9,12 +9,16 @@ # GitHub history for details. +from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - async def put_policy(self, policy, body=None, params=None, headers=None): + async def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. 
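A hedged sketch of calling the annotated put_policy, assuming an existing AsyncOpenSearch client; the policy id and body are illustrative and follow the ISM API rather than anything in this patch:

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.plugins.index_management import IndexManagementClient


async def create_policy(client: AsyncOpenSearch) -> None:
    ism = IndexManagementClient(client)
    # Illustrative minimal policy body; consult the ISM documentation for
    # the full set of required fields.
    await ism.put_policy(
        policy="example-policy",
        body={
            "policy": {
                "description": "placeholder policy",
                "default_state": "hot",
                "states": [{"name": "hot", "actions": [], "transitions": []}],
            }
        },
    )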
@@ -32,7 +36,9 @@ async def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - async def add_policy(self, index, body=None, params=None, headers=None): + async def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -50,7 +56,9 @@ async def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def get_policy(self, policy, params=None, headers=None): + async def get_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the policy by `policy_id`. @@ -67,7 +75,9 @@ async def get_policy(self, policy, params=None, headers=None): ) @query_params() - async def remove_policy_from_index(self, index, params=None, headers=None): + async def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -84,7 +94,9 @@ async def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - async def change_policy(self, index, body=None, params=None, headers=None): + async def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -102,7 +114,9 @@ async def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def retry(self, index, body=None, params=None, headers=None): + async def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -120,7 +134,9 @@ async def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - async def explain_index(self, index, params=None, headers=None): + async def explain_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the current state of the index. @@ -137,7 +153,9 @@ async def explain_index(self, index, params=None, headers=None): ) @query_params() - async def delete_policy(self, policy, params=None, headers=None): + async def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/_async/plugins/index_management.pyi b/opensearchpy/_async/plugins/index_management.pyi deleted file mode 100644 index 98d50097..00000000 --- a/opensearchpy/_async/plugins/index_management.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - async def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- async def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index bf1b77d6..417c6805 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -30,6 +30,10 @@ import logging import sys from itertools import chain +from typing import Any, Collection, Mapping, Optional, Tuple, Type, Union + +from opensearchpy.connection.base import Connection +from opensearchpy.serializer import Serializer from ..connection_pool import ConnectionPool from ..exceptions import ( @@ -56,25 +60,27 @@ class AsyncTransport(Transport): DEFAULT_CONNECTION_CLASS = AIOHttpConnection + sniffing_task: Any = None + def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Any = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Any = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Any = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Any = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + retry_on_status: Tuple[int, ...] = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -113,9 +119,9 @@ def __init__( options provided as part of the hosts parameter. """ self.sniffing_task = None - self.loop = None + self.loop: Any = None self._async_init_called = False - self._sniff_on_start_event = None # type: asyncio.Event + self._sniff_on_start_event: Optional[asyncio.Event] = None super(AsyncTransport, self).__init__( hosts=[], @@ -142,7 +148,7 @@ def __init__( self.hosts = hosts self.sniff_on_start = sniff_on_start - async def _async_init(self): + async def _async_init(self) -> None: """This is our stand-in for an async constructor. Everything that was deferred within __init__() should be done here now. @@ -171,7 +177,7 @@ async def _async_init(self): # Since this is the first one we wait for it to complete # in case there's an error it'll get raised here. 
- await self.sniffing_task + await self.sniffing_task # type: ignore # If the task gets cancelled here it likely means the # transport got closed. @@ -184,7 +190,7 @@ async def _async_init(self): finally: self._sniff_on_start_event.set() - async def _async_call(self): + async def _async_call(self) -> None: """This method is called within any async method of AsyncTransport where the transport is not closing. This will check to see if we should call our _async_init() or create a new sniffing task @@ -205,7 +211,7 @@ async def _async_call(self): if self.loop.time() >= self.last_sniff + self.sniffer_timeout: self.create_sniff_task() - async def _get_node_info(self, conn, initial): + async def _get_node_info(self, conn: Any, initial: Any) -> Any: try: # use small timeout for the sniffing request, should be a fast api call _, headers, node_info = await conn.perform_request( @@ -218,7 +224,7 @@ async def _get_node_info(self, conn, initial): pass return None - async def _get_sniff_data(self, initial=False): + async def _get_sniff_data(self, initial: Any = False) -> Any: previous_sniff = self.last_sniff # reset last_sniff timestamp @@ -227,7 +233,7 @@ async def _get_sniff_data(self, initial=False): # use small timeout for the sniffing request, should be a fast api call timeout = self.sniff_timeout if not initial else None - def _sniff_request(conn): + def _sniff_request(conn: Any) -> Any: return self.loop.create_task( conn.perform_request("GET", "/_nodes/_all/http", timeout=timeout) ) @@ -243,7 +249,7 @@ def _sniff_request(conn): continue tasks.append(_sniff_request(conn)) - done = () + done: Any = () try: while tasks: # The 'loop' keyword is deprecated in 3.8+ so don't @@ -283,7 +289,7 @@ def _sniff_request(conn): for task in chain(done, tasks): task.cancel() - async def sniff_hosts(self, initial=False): + async def sniff_hosts(self, initial: bool = False) -> Any: """Either spawns a sniffing_task which does regular sniffing over time or does a single sniffing session and awaits the results. """ @@ -294,7 +300,7 @@ async def sniff_hosts(self, initial=False): return node_info = await self._get_sniff_data(initial) - hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) + hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes, maybe using an incompatible # transport_schema or host_info_callback blocked all - raise error. @@ -311,7 +317,7 @@ async def sniff_hosts(self, initial=False): if c not in self.connection_pool.connections: await c.close() - def create_sniff_task(self, initial=False): + def create_sniff_task(self, initial: bool = False) -> None: """ Initiate a sniffing task. Make sure we only have one sniff request running at any given time. If a finished sniffing request is around, @@ -327,7 +333,7 @@ def create_sniff_task(self, initial=False): if self.sniffing_task is None: self.sniffing_task = self.loop.create_task(self.sniff_hosts(initial)) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. 
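The sniffing-related parameters typed above are normally supplied through the client constructor and forwarded to AsyncTransport; a minimal sketch with illustrative values (the endpoint is a placeholder):

from opensearchpy import AsyncOpenSearch

client = AsyncOpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],  # placeholder endpoint
    sniff_on_start=True,
    sniff_on_connection_fail=True,
    sniffer_timeout=60,  # seconds between opportunistic re-sniffs
    max_retries=3,
    retry_on_status=(502, 503, 504),
)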
@@ -338,10 +344,19 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.create_sniff_task() - def get_connection(self): + def get_connection(self) -> Any: return self.connection_pool.get_connection() - async def perform_request(self, method, url, headers=None, params=None, body=None): + async def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to its perform_request method and @@ -425,7 +440,7 @@ async def perform_request(self, method, url, headers=None, params=None, body=Non ) return data - async def close(self): + async def close(self) -> None: # type: ignore """ Explicitly closes connections """ @@ -439,3 +454,6 @@ async def close(self): for connection in self.connection_pool.connections: await connection.close() + + +__all__ = ["TransportError"] diff --git a/opensearchpy/_async/transport.pyi b/opensearchpy/_async/transport.pyi deleted file mode 100644 index 5d66514d..00000000 --- a/opensearchpy/_async/transport.pyi +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from ..connection import Connection -from ..connection_pool import ConnectionPool -from ..serializer import Deserializer, Serializer - -class AsyncTransport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Dict[str, Any] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Dict[str, Any]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - async def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - async def close(self) -> None: ... diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 2410b9f5..13c8d5c9 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -25,4 +25,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__ = "2.3.2" +__versionstr__: str = "2.3.2" diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 6a8dffb6..142f3af2 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -39,9 +39,11 @@ from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import Transport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -54,12 +56,12 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class OpenSearch(object): +class OpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. 
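A minimal sketch of constructing the typed synchronous client and exercising ping(), info(), and close(); the endpoint is a placeholder and the snippet is illustrative rather than part of the patch:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],  # placeholder endpoint
    http_compress=True,
)
try:
    if client.ping():
        print(client.info()["version"]["number"])
finally:
    client.close()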
@@ -184,13 +186,18 @@ def default(self, obj): """ - from ._patch import ( + from ._patch import ( # type: ignore create_point_in_time, delete_point_in_time, list_all_point_in_time, ) - def __init__(self, hosts=None, transport_class=Transport, **kwargs): + def __init__( + self, + hosts: Any = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -205,7 +212,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. """ - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -224,10 +231,10 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -236,21 +243,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(OpenSearch, self).__repr__() - def __enter__(self): + def __enter__(self) -> "OpenSearch": if hasattr(self.transport, "_async_call"): self.transport._async_call() return self - def __exit__(self, *_): + def __exit__(self, *_: Any) -> None: self.close() - def close(self): + def close(self) -> None: """Closes the Transport and all internal connections""" self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - def ping(self, params=None, headers=None): + def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -263,7 +274,11 @@ def ping(self, params=None, headers=None): return False @query_params() - def info(self, params=None, headers=None): + def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -281,7 +296,14 @@ def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def create(self, index, id, body, params=None, headers=None): + def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. @@ -330,7 +352,14 @@ def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def index(self, index, body, id=None, params=None, headers=None): + def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a document in an index. @@ -387,7 +416,13 @@ def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def bulk(self, body, index=None, params=None, headers=None): + def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. 
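A short sketch of calling the annotated bulk() with a pre-serialized NDJSON body; the index name and documents are placeholders:

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])  # placeholder

# Pre-serialized NDJSON: each action line is followed by its document line
# and the payload ends with a newline.
bulk_body = (
    '{"index": {"_index": "logs", "_id": "1"}}\n'
    '{"message": "hello"}\n'
    '{"index": {"_index": "logs", "_id": "2"}}\n'
    '{"message": "world"}\n'
)
response = client.bulk(body=bulk_body)
print("errors:", response["errors"])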
@@ -431,7 +466,13 @@ def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. @@ -467,7 +508,13 @@ def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): "routing", "terminate_after", ) - def count(self, body=None, index=None, params=None, headers=None): + def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. @@ -523,7 +570,13 @@ def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def delete(self, index, id, params=None, headers=None): + def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. @@ -592,7 +645,13 @@ def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def delete_by_query(self, index, body, params=None, headers=None): + def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. @@ -685,7 +744,12 @@ def delete_by_query(self, index, body, params=None, headers=None): ) @query_params("requests_per_second") - def delete_by_query_rethrottle(self, task_id, params=None, headers=None): + def delete_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Delete By Query operation. @@ -706,7 +770,12 @@ def delete_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_script(self, id, params=None, headers=None): + def delete_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a script. @@ -738,7 +807,13 @@ def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - def exists(self, index, id, params=None, headers=None): + def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. @@ -783,7 +858,13 @@ def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - def exists_source(self, index, id, params=None, headers=None): + def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. @@ -831,7 +912,14 @@ def exists_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - def explain(self, index, id, body=None, params=None, headers=None): + def explain( + self, + index: Any, + id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about why a specific matches (or doesn't match) a query. 
@@ -878,7 +966,13 @@ def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - def field_caps(self, body=None, index=None, params=None, headers=None): + def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. @@ -919,7 +1013,13 @@ def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - def get(self, index, id, params=None, headers=None): + def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. @@ -954,7 +1054,12 @@ def get(self, index, id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - def get_script(self, id, params=None, headers=None): + def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. @@ -984,7 +1089,13 @@ def get_script(self, id, params=None, headers=None): "version", "version_type", ) - def get_source(self, index, id, params=None, headers=None): + def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. @@ -1028,7 +1139,13 @@ def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - def mget(self, body, index=None, params=None, headers=None): + def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. @@ -1073,7 +1190,13 @@ def mget(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def msearch(self, body, index=None, params=None, headers=None): + def msearch( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search operations in one request. @@ -1125,7 +1248,13 @@ def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def msearch_template(self, body, index=None, params=None, headers=None): + def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. @@ -1173,7 +1302,13 @@ def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - def mtermvectors(self, body=None, index=None, params=None, headers=None): + def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. @@ -1221,7 +1356,14 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_script(self, id, body, context=None, params=None, headers=None): + def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a script. 
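A minimal sketch of the annotated put_script()/get_script() pair using a stored painless script; the id and script source are illustrative:

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])  # placeholder

client.put_script(
    id="rating-boost",  # illustrative script id
    body={
        "script": {
            "lang": "painless",
            "source": "doc['rating'].value * params.factor",
        }
    },
)
print(client.get_script(id="rating-boost"))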
@@ -1251,7 +1393,13 @@ def put_script(self, id, body, context=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" ) - def rank_eval(self, body, index=None, params=None, headers=None): + def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to evaluate the quality of ranked search results over a set of typical search queries. @@ -1293,7 +1441,12 @@ def rank_eval(self, body, index=None, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def reindex(self, body, params=None, headers=None): + def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or @@ -1330,7 +1483,12 @@ def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - def reindex_rethrottle(self, task_id, params=None, headers=None): + def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. @@ -1350,7 +1508,13 @@ def reindex_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def render_search_template(self, body=None, id=None, params=None, headers=None): + def render_search_template( + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1367,7 +1531,12 @@ def render_search_template(self, body=None, id=None, params=None, headers=None): ) @query_params() - def scripts_painless_execute(self, body=None, params=None, headers=None): + def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows an arbitrary script to be executed and a result to be returned. @@ -1383,7 +1552,13 @@ def scripts_painless_execute(self, body=None, params=None, headers=None): ) @query_params("rest_total_hits_as_int", "scroll") - def scroll(self, body=None, scroll_id=None, params=None, headers=None): + def scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to retrieve a large numbers of results from a single search request. @@ -1452,7 +1627,13 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None): "typed_keys", "version", ) - def search(self, body=None, index=None, params=None, headers=None): + def search( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns results matching a query. @@ -1572,7 +1753,12 @@ def search(self, body=None, index=None, params=None, headers=None): "preference", "routing", ) - def search_shards(self, index=None, params=None, headers=None): + def search_shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the indices and shards that a search request would be executed against. 
@@ -1613,7 +1799,13 @@ def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def search_template(self, body, index=None, params=None, headers=None): + def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1675,7 +1867,14 @@ def search_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - def termvectors(self, index, body=None, id=None, params=None, headers=None): + def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information and statistics about terms in the fields of a particular document. @@ -1730,7 +1929,14 @@ def termvectors(self, index, body=None, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def update(self, index, id, body, params=None, headers=None): + def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. @@ -1812,7 +2018,13 @@ def update(self, index, id, body, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def update_by_query(self, index, body=None, params=None, headers=None): + def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. @@ -1906,7 +2118,12 @@ def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - def update_by_query_rethrottle(self, task_id, params=None, headers=None): + def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. @@ -1927,7 +2144,11 @@ def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def get_script_context(self, params=None, headers=None): + def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. @@ -1937,7 +2158,11 @@ def get_script_context(self, params=None, headers=None): ) @query_params() - def get_script_languages(self, params=None, headers=None): + def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns available script types, languages and contexts. @@ -1953,7 +2178,12 @@ def get_script_languages(self, params=None, headers=None): "preference", "routing", ) - def create_pit(self, index, params=None, headers=None): + def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates point in time context. @@ -1981,7 +2211,11 @@ def create_pit(self, index, params=None, headers=None): ) @query_params() - def delete_all_pits(self, params=None, headers=None): + def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all active point in time searches. 
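A hedged sketch of the point-in-time helpers typed above; the index name, keep_alive value, and response field names are assumptions based on the OpenSearch PIT API rather than this patch:

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])  # placeholder

# keep_alive is one of the query parameters declared above; @query_params
# moves it into the request's query string.
pit = client.create_pit(index="my-index", keep_alive="10m")
try:
    # run searches that reference pit["pit_id"] here
    pass
finally:
    client.delete_pit(body={"pit_id": [pit["pit_id"]]})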
@@ -1991,7 +2225,12 @@ def delete_all_pits(self, params=None, headers=None): ) @query_params() - def delete_pit(self, body=None, params=None, headers=None): + def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes one or more point in time searches based on the IDs passed. @@ -2007,7 +2246,11 @@ def delete_pit(self, body=None, params=None, headers=None): ) @query_params() - def get_all_pits(self, params=None, headers=None): + def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Lists all active point in time searches. diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi deleted file mode 100644 index 9ad72a83..00000000 --- a/opensearchpy/client/__init__.pyi +++ /dev/null @@ -1,1139 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import Transport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .remote_store import RemoteStoreClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class OpenSearch(object): - transport: Transport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - remote_store: RemoteStoreClient - def __init__( - self, - hosts: Any = ..., - transport_class: Type[Transport] = ..., - **kwargs: Any, - ) -> None: ... 
- def __repr__(self) -> str: ... - def __enter__(self) -> "OpenSearch": ... - def __exit__(self, *_: Any) -> None: ... - def close(self) -> None: ... - # AUTO-GENERATED-API-DEFINITIONS # - def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def mtermvectors( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - ids: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_script( - self, - id: Any, - *, - body: Any, - context: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rank_eval( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - search_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def reindex( - self, - *, - body: Any, - max_docs: Optional[Any] = ..., - refresh: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - scroll: Optional[Any] = ..., - slices: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reindex_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def render_search_template( - self, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def scripts_painless_execute( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - scroll: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def search( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - allow_partial_search_results: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - batched_reduce_size: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - docvalue_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - seq_no_primary_term: Optional[Any] = ..., - size: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - suggest_field: Optional[Any] = ..., - suggest_mode: Optional[Any] = ..., - suggest_size: Optional[Any] = ..., - suggest_text: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - track_scores: Optional[Any] = ..., - track_total_hits: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - version: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
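For reference, a hedged sketch of calling search with the signature above (hypothetical index and query; size and _source_includes are forwarded as query-string parameters):

    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["https://localhost:9200"])

    resp = client.search(
        index="my-index",
        body={"query": {"match_all": {}}},
        size=5,
        _source_includes="title",
    )
    for hit in resp["hits"]["hits"]:
        print(hit["_id"], hit.get("_source", {}))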
- def search_shards( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def search_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - preference: Optional[Any] = ..., - profile: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def termvectors( - self, - index: Any, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update( - self, - index: Any, - id: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - lang: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - retry_on_conflict: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_by_query( - self, - index: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script_context( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script_languages( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_pit( - self, - index: Any, - *, - allow_partial_pit_creation: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_pit( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py index bbb69d52..3f156906 100644 --- a/opensearchpy/client/_patch.py +++ b/opensearchpy/client/_patch.py @@ -9,12 +9,13 @@ # GitHub history for details. import warnings +from typing import Any from .utils import SKIP_IN_PATH, query_params @query_params() -def list_all_point_in_time(self, params=None, headers=None): +def list_all_point_in_time(self: Any, params: Any = None, headers: Any = None) -> Any: """ Returns the list of active point in times searches @@ -35,7 +36,9 @@ def list_all_point_in_time(self, params=None, headers=None): @query_params( "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" ) -def create_point_in_time(self, index, params=None, headers=None): +def create_point_in_time( + self: Any, index: Any, params: Any = None, headers: Any = None +) -> Any: """ Create a point in time that can be used in subsequent searches @@ -68,7 +71,13 @@ def create_point_in_time(self, index, params=None, headers=None): @query_params() -def delete_point_in_time(self, body=None, all=False, params=None, headers=None): +def delete_point_in_time( + self: Any, + body: Any = None, + all: bool = False, + params: Any = None, + headers: Any = None, +) -> Any: """ Delete a point in time @@ -94,7 +103,7 @@ def delete_point_in_time(self, body=None, all=False, params=None, headers=None): @query_params() -def health_check(self, params=None, headers=None): +def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: """ Checks to see if the Security plugin is up and running. @@ -113,7 +122,9 @@ def health_check(self, params=None, headers=None): @query_params() -def update_audit_config(self, body, params=None, headers=None): +def update_audit_config( + self: Any, body: Any, params: Any = None, headers: Any = None +) -> Any: """ A PUT call updates the audit configuration. diff --git a/opensearchpy/client/_patch.pyi b/opensearchpy/client/_patch.pyi deleted file mode 100644 index b1819682..00000000 --- a/opensearchpy/client/_patch.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. 
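The _patch.py hunk above adds annotations to the point-in-time helpers that are attached to the client class. A minimal sketch of how they are typically called, assuming a local cluster and a hypothetical "my-index" index; the "pit_id" response key follows the OpenSearch Create PIT API:

    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["https://localhost:9200"])

    # keep_alive is collected into the query string by the @query_params decorator
    pit = client.create_point_in_time(index="my-index", keep_alive="1m")
    print(client.list_all_point_in_time())

    # delete one PIT by id, or pass all=True to drop every active PIT
    client.delete_point_in_time(body={"pit_id": [pit["pit_id"]]})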
-# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -def list_all_point_in_time( - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def create_point_in_time( - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def delete_point_in_time( - *, - body: Optional[Any] = ..., - all: Optional[bool] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def health_check( - params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... -def update_audit_config( - body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 49d797ca..aac5d8a6 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -26,7 +26,6 @@ # under the License. -# ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -36,12 +35,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") - def aliases(self, name=None, params=None, headers=None): + def aliases( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Shows information about currently configured aliases to indices including filter and routing infos. 
@@ -65,6 +71,20 @@ def aliases(self, name=None, params=None, headers=None): "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) + @query_params() + def all_pit_segments( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Lists all active point-in-time segments. + + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + @query_params( "bytes", "cluster_manager_timeout", @@ -76,7 +96,12 @@ def aliases(self, name=None, params=None, headers=None): "s", "v", ) - def allocation(self, node_id=None, params=None, headers=None): + def allocation( + self, + node_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. @@ -108,8 +133,51 @@ def allocation(self, node_id=None, params=None, headers=None): headers=headers, ) + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + def cluster_manager( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Returns information about the cluster-manager node. + + + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", "/_cat/cluster_manager", params=params, headers=headers + ) + @query_params("format", "h", "help", "s", "v") - def count(self, index=None, params=None, headers=None): + def count( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides quick access to the document count of the entire cluster, or individual indices. @@ -129,8 +197,43 @@ def count(self, index=None, params=None, headers=None): "GET", _make_path("_cat", "count", index), params=params, headers=headers ) + @query_params("bytes", "format", "h", "help", "s", "v") + def fielddata( + self, + fields: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + Shows how much heap memory is currently being used by fielddata on every data + node in the cluster. + + + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. 
+ :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", + _make_path("_cat", "fielddata", fields), + params=params, + headers=headers, + ) + @query_params("format", "h", "help", "s", "time", "ts", "v") - def health(self, params=None, headers=None): + def health( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a concise representation of the cluster health. @@ -151,7 +254,11 @@ def health(self, params=None, headers=None): ) @query_params("help", "s") - def help(self, params=None, headers=None): + def help( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns help for the Cat APIs. @@ -180,7 +287,12 @@ def help(self, params=None, headers=None): "time", "v", ) - def indices(self, index=None, params=None, headers=None): + def indices( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... @@ -232,7 +344,11 @@ def indices(self, index=None, params=None, headers=None): "s", "v", ) - def master(self, params=None, headers=None): + def master( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the cluster-manager node. @@ -271,9 +387,13 @@ def master(self, params=None, headers=None): "s", "v", ) - def cluster_manager(self, params=None, headers=None): + def nodeattrs( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about the cluster-manager node. + Returns information about custom node attributes. :arg cluster_manager_timeout: Operation timeout for connection @@ -292,7 +412,7 @@ def cluster_manager(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/cluster_manager", params=params, headers=headers + "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( @@ -308,7 +428,11 @@ def cluster_manager(self, params=None, headers=None): "time", "v", ) - def nodes(self, params=None, headers=None): + def nodes( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns basic statistics about performance of cluster nodes. @@ -340,37 +464,6 @@ def nodes(self, params=None, headers=None): ) @query_params( - "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" - ) - def recovery(self, index=None, params=None, headers=None): - """ - Returns information about index shard recoveries, both on-going completed. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - shard recoveries. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg detailed: If `true`, the response includes detailed - information about shard recoveries. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. 
- :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. - """ - return self.transport.perform_request( - "GET", _make_path("_cat", "recovery", index), params=params, headers=headers - ) - - @query_params( - "bytes", "cluster_manager_timeout", "format", "h", @@ -381,15 +474,15 @@ def recovery(self, index=None, params=None, headers=None): "time", "v", ) - def shards(self, index=None, params=None, headers=None): + def pending_tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Provides a detailed view of shard allocation on nodes. + Returns a concise representation of the cluster pending tasks. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -408,34 +501,52 @@ def shards(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", _make_path("_cat", "shards", index), params=params, headers=headers + "GET", "/_cat/pending_tasks", params=params, headers=headers + ) + + @query_params() + def pit_segments( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: + """ + List segments for one or several PITs. + + + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body ) @query_params( - "bytes", "cluster_manager_timeout", "format", "h", "help", + "local", "master_timeout", "s", "v", ) - def segments(self, index=None, params=None, headers=None): + def plugins( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Provides low-level information about the segments in the shards of an index. + Returns information about installed plugins across nodes node. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -444,36 +555,34 @@ def segments(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. 
""" return self.transport.perform_request( - "GET", _make_path("_cat", "segments", index), params=params, headers=headers + "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( - "cluster_manager_timeout", - "format", - "h", - "help", - "local", - "master_timeout", - "s", - "time", - "v", + "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" ) - def pending_tasks(self, params=None, headers=None): + def recovery( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns a concise representation of the cluster pending tasks. + Returns information about index shard recoveries, both on-going completed. - :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg detailed: If `true`, the response includes detailed + information about shard recoveries. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. :arg time: The unit in which to display time values. Valid @@ -481,7 +590,7 @@ def pending_tasks(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/pending_tasks", params=params, headers=headers + "GET", _make_path("_cat", "recovery", index), params=params, headers=headers ) @query_params( @@ -492,17 +601,17 @@ def pending_tasks(self, params=None, headers=None): "local", "master_timeout", "s", - "size", "v", ) - def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): + def repositories( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns cluster-wide thread pool statistics per node. By default the active, - queue and rejected statistics are returned for all thread pools. + Returns information about snapshot repositories registered in the cluster. - :arg thread_pool_patterns: Comma-separated list of regular- - expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -516,65 +625,94 @@ def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. 
""" return self.transport.perform_request( - "GET", - _make_path("_cat", "thread_pool", thread_pool_patterns), - params=params, - headers=headers, + "GET", "/_cat/repositories", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - def fielddata(self, fields=None, params=None, headers=None): + @query_params( + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", + ) + def segment_replication( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Shows how much heap memory is currently being used by fielddata on every data - node in the cluster. + Returns information about both on-going and latest completed Segment + Replication events. - :arg fields: Comma-separated list of fields to return in the - output. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + segment replication events. Default is false. :arg bytes: The unit in which to display byte values. Valid choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. Default is false. + :arg detailed: If `true`, the response includes detailed + information about segment replications. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", - _make_path("_cat", "fielddata", fields), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", "help", - "local", "master_timeout", "s", "v", ) - def plugins(self, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about installed plugins across nodes node. + Provides low-level information about the segments in the shards of an index. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -583,10 +721,11 @@ def plugins(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. 
""" return self.transport.perform_request( - "GET", "/_cat/plugins", params=params, headers=headers + "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", @@ -594,13 +733,23 @@ def plugins(self, params=None, headers=None): "local", "master_timeout", "s", + "time", "v", ) - def nodeattrs(self, params=None, headers=None): + def shards( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about custom node attributes. + Provides a detailed view of shard allocation on nodes. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -614,10 +763,12 @@ def nodeattrs(self, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/nodeattrs", params=params, headers=headers + "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( @@ -628,13 +779,22 @@ def nodeattrs(self, params=None, headers=None): "local", "master_timeout", "s", + "size", "v", ) - def repositories(self, params=None, headers=None): + def thread_pool( + self, + thread_pool_patterns: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ - Returns information about snapshot repositories registered in the cluster. + Returns cluster-wide thread pool statistics per node. By default the active, + queue and rejected statistics are returned for all thread pools. + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -648,10 +808,14 @@ def repositories(self, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/repositories", params=params, headers=headers + "GET", + _make_path("_cat", "thread_pool", thread_pool_patterns), + params=params, + headers=headers, ) @query_params( @@ -665,7 +829,12 @@ def repositories(self, params=None, headers=None): "time", "v", ) - def snapshots(self, repository=None, params=None, headers=None): + def snapshots( + self, + repository: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns all snapshots in a specific repository. 
@@ -708,7 +877,11 @@ def snapshots(self, repository=None, params=None, headers=None): "time", "v", ) - def tasks(self, params=None, headers=None): + def tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in the cluster. @@ -748,7 +921,12 @@ def tasks(self, params=None, headers=None): "s", "v", ) - def templates(self, name=None, params=None, headers=None): + def templates( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about existing templates. @@ -772,71 +950,3 @@ def templates(self, name=None, params=None, headers=None): return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) - - @query_params() - def all_pit_segments(self, params=None, headers=None): - """ - Lists all active point-in-time segments. - - """ - return self.transport.perform_request( - "GET", "/_cat/pit_segments/_all", params=params, headers=headers - ) - - @query_params() - def pit_segments(self, body=None, params=None, headers=None): - """ - List segments for one or several PITs. - - - """ - return self.transport.perform_request( - "GET", "/_cat/pit_segments", params=params, headers=headers, body=body - ) - - @query_params( - "active_only", - "bytes", - "completed_only", - "detailed", - "format", - "h", - "help", - "s", - "shards", - "time", - "v", - ) - def segment_replication(self, index=None, params=None, headers=None): - """ - Returns information about both on-going and latest completed Segment - Replication events. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - segment replication events. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg completed_only: If `true`, the response only includes - latest completed segment replication events. Default is false. - :arg detailed: If `true`, the response includes detailed - information about segment replications. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. - :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg shards: Comma-separated list of shards to display. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. - """ - return self.transport.perform_request( - "GET", - _make_path("_cat", "segment_replication", index), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/cat.pyi b/opensearchpy/client/cat.pyi deleted file mode 100644 index 0d690dda..00000000 --- a/opensearchpy/client/cat.pyi +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. 
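With cat.pyi deleted below, the annotations shown in the cat.py hunks above become the single source of typing information. A hedged sketch of a few of those cat helpers (hypothetical index name; format="json" asks the _cat APIs for structured output instead of tab-separated text):

    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["https://localhost:9200"])

    print(client.cat.count(index="my-index", format="json"))
    print(client.cat.shards(index="my-index", format="json", bytes="mb"))
    print(client.cat.segment_replication(index="my-index", format="json"))
    print(client.cat.all_pit_segments())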
under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def master( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cluster_manager( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def nodes( - self, - *, - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def plugins( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def nodeattrs( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def repositories( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def snapshots( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def tasks( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def templates( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def all_pit_segments( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
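For orientation, a minimal usage sketch of the cat methods whose merged signatures appear above; it is not part of the patch and assumes a hypothetical, security-disabled cluster on localhost:9200.

from typing import Any, MutableMapping

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# params and headers are ordinary mutable mappings, matching the
# Optional[MutableMapping[...]] annotations merged into cat.py.
params: MutableMapping[str, Any] = {"format": "json", "detailed": "true"}
headers: MutableMapping[str, str] = {"accept": "application/json"}

print(client.cat.tasks(params=params, headers=headers))
print(client.cat.templates(name="logs-*", params={"format": "json"}))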
- def pit_segments( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def segment_replication( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - completed_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - shards: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/client.py b/opensearchpy/client/client.py new file mode 100644 index 00000000..7f0b67c6 --- /dev/null +++ b/opensearchpy/client/client.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Optional, Type + +from opensearchpy.client.utils import _normalize_hosts +from opensearchpy.transport import Transport + + +class Client(object): + """ + A generic async OpenSearch client. + """ + + def __init__( + self, + hosts: Optional[str] = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: + """ + :arg hosts: list of nodes, or a single node, we should connect to. + Node should be a dictionary ({"host": "localhost", "port": 9200}), + the entire dictionary will be passed to the :class:`~opensearchpy.Connection` + class as kwargs, or a string in the format of ``host[:port]`` which will be + translated to a dictionary automatically. If no value is given the + :class:`~opensearchpy.Connection` class defaults will be used. + + :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. + + :arg kwargs: any additional arguments will be passed on to the + :class:`~opensearchpy.Transport` class and, subsequently, to the + :class:`~opensearchpy.Connection` instances. 
+ """ + self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index 248c7ce3..0354e42c 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -55,7 +57,12 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) - def health(self, index=None, params=None, headers=None): + def health( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns basic information about the health of the cluster. @@ -99,7 +106,11 @@ def health(self, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def pending_tasks(self, params=None, headers=None): + def pending_tasks( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. @@ -128,7 +139,13 @@ def pending_tasks(self, params=None, headers=None): "wait_for_metadata_version", "wait_for_timeout", ) - def state(self, metric=None, index=None, params=None, headers=None): + def state( + self, + metric: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a comprehensive information about the state of the cluster. @@ -171,7 +188,12 @@ def state(self, metric=None, index=None, params=None, headers=None): ) @query_params("flat_settings", "timeout") - def stats(self, node_id=None, params=None, headers=None): + def stats( + self, + node_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns high-level overview of cluster statistics. @@ -202,7 +224,12 @@ def stats(self, node_id=None, params=None, headers=None): "retry_failed", "timeout", ) - def reroute(self, body=None, params=None, headers=None): + def reroute( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -235,7 +262,11 @@ def reroute(self, body=None, params=None, headers=None): "master_timeout", "timeout", ) - def get_settings(self, params=None, headers=None): + def get_settings( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns cluster settings. @@ -258,7 +289,12 @@ def get_settings(self, params=None, headers=None): @query_params( "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) - def put_settings(self, body, params=None, headers=None): + def put_settings( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the cluster settings. 
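A short sketch of the cluster client annotated above, again against a hypothetical local node; keyword arguments named in @query_params are folded into the query string, so no explicit params mapping is required.

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# Block until the cluster reaches at least yellow health.
health = client.cluster.health(wait_for_status="yellow", timeout="30s")
print(health["status"])

# put_settings takes a required body, mirroring the non-optional `body: Any`.
client.cluster.put_settings(
    body={"persistent": {"cluster.routing.allocation.enable": "all"}}
)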
@@ -282,7 +318,11 @@ def put_settings(self, body, params=None, headers=None): ) @query_params() - def remote_info(self, params=None, headers=None): + def remote_info( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns the information about configured remote clusters. @@ -292,7 +332,12 @@ def remote_info(self, params=None, headers=None): ) @query_params("include_disk_info", "include_yes_decisions") - def allocation_explain(self, body=None, params=None, headers=None): + def allocation_explain( + self, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides explanations for shard allocations in the cluster. @@ -313,7 +358,12 @@ def allocation_explain(self, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_component_template(self, name, params=None, headers=None): + def delete_component_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a component template. @@ -337,7 +387,12 @@ def delete_component_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def get_component_template(self, name=None, params=None, headers=None): + def get_component_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns one or more component templates. @@ -359,7 +414,13 @@ def get_component_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") - def put_component_template(self, name, body, params=None, headers=None): + def put_component_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a component template. @@ -388,7 +449,12 @@ def put_component_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def exists_component_template(self, name, params=None, headers=None): + def exists_component_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular component template exist. @@ -413,7 +479,11 @@ def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - def delete_voting_config_exclusions(self, params=None, headers=None): + def delete_voting_config_exclusions( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clears cluster voting config exclusions. @@ -430,7 +500,11 @@ def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - def post_voting_config_exclusions(self, params=None, headers=None): + def post_voting_config_exclusions( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. 
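The component-template methods typed above can be exercised end to end; the template name and settings below are illustrative only.

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# Create or update a component template, confirm it exists, then read it back.
client.cluster.put_component_template(
    name="shard-settings",
    body={"template": {"settings": {"index.number_of_shards": 1}}},
)
if client.cluster.exists_component_template(name="shard-settings"):
    print(client.cluster.get_component_template(name="shard-settings"))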
@@ -448,7 +522,11 @@ def post_voting_config_exclusions(self, params=None, headers=None): ) @query_params() - def delete_decommission_awareness(self, params=None, headers=None): + def delete_decommission_awareness( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete any existing decommission. @@ -461,7 +539,11 @@ def delete_decommission_awareness(self, params=None, headers=None): ) @query_params() - def delete_weighted_routing(self, params=None, headers=None): + def delete_weighted_routing( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete weighted shard routing weights. @@ -475,8 +557,11 @@ def delete_weighted_routing(self, params=None, headers=None): @query_params() def get_decommission_awareness( - self, awareness_attribute_name, params=None, headers=None - ): + self, + awareness_attribute_name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Get details and status of decommissioned attribute. @@ -502,7 +587,12 @@ def get_decommission_awareness( ) @query_params() - def get_weighted_routing(self, attribute, params=None, headers=None): + def get_weighted_routing( + self, + attribute: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Fetches weighted shard routing weights. @@ -522,11 +612,11 @@ def get_weighted_routing(self, attribute, params=None, headers=None): @query_params() def put_decommission_awareness( self, - awareness_attribute_name, - awareness_attribute_value, - params=None, - headers=None, - ): + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Decommissions an awareness attribute. @@ -552,7 +642,12 @@ def put_decommission_awareness( ) @query_params() - def put_weighted_routing(self, attribute, params=None, headers=None): + def put_weighted_routing( + self, + attribute: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi deleted file mode 100644 index ad2d3fac..00000000 --- a/opensearchpy/client/cluster.pyi +++ /dev/null @@ -1,456 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - def health( - self, - *, - index: Optional[Any] = ..., - awareness_attribute: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def state( - self, - *, - metric: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_metadata_version: Optional[Any] = ..., - wait_for_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - node_id: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reroute( - self, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - explain: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - metric: Optional[Any] = ..., - retry_failed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_settings( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_settings( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def remote_info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def allocation_explain( - self, - *, - body: Optional[Any] = ..., - include_disk_info: Optional[Any] = ..., - include_yes_decisions: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_component_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_component_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def delete_voting_config_exclusions( - self, - *, - wait_for_removal: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def post_voting_config_exclusions( - self, - *, - node_ids: Optional[Any] = ..., - node_names: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_decommission_awareness( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_weighted_routing( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_decommission_awareness( - self, - awareness_attribute_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_decommission_awareness( - self, - awareness_attribute_name: Any, - awareness_attribute_value: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index adc4aea3..8734a3ac 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -43,7 +45,12 @@ class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - def delete_dangling_index(self, index_uuid, params=None, headers=None): + def delete_dangling_index( + self, + index_uuid: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes the specified dangling index. @@ -71,7 +78,12 @@ def delete_dangling_index(self, index_uuid, params=None, headers=None): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - def import_dangling_index(self, index_uuid, params=None, headers=None): + def import_dangling_index( + self, + index_uuid: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Imports the specified dangling index. @@ -94,7 +106,11 @@ def import_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params() - def list_dangling_indices(self, params=None, headers=None): + def list_dangling_indices( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/client/dangling_indices.pyi b/opensearchpy/client/dangling_indices.pyi deleted file mode 100644 index b48ba830..00000000 --- a/opensearchpy/client/dangling_indices.pyi +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
-# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class DanglingIndicesClient(NamespacedClient): - def delete_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def import_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
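A hedged sketch of the dangling-indices workflow typed above; it assumes the documented response shape with a top-level "dangling_indices" list, and that discarding the listed data is acceptable.

from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# Each entry carries the index_uuid expected by the import/delete calls.
listing = client.dangling_indices.list_dangling_indices()
for entry in listing.get("dangling_indices", []):
    client.dangling_indices.delete_dangling_index(
        index_uuid=entry["index_uuid"],
        accept_data_loss=True,
    )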
- def list_dangling_indices( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/features.py b/opensearchpy/client/features.py index b96ea308..c6520fa1 100644 --- a/opensearchpy/client/features.py +++ b/opensearchpy/client/features.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params("master_timeout", "cluster_manager_timeout") - def get_features(self, params=None, headers=None): + def get_features(self, params: Any = None, headers: Any = None) -> Any: """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot @@ -47,7 +49,7 @@ def get_features(self, params=None, headers=None): ) @query_params() - def reset_features(self, params=None, headers=None): + def reset_features(self, params: Any = None, headers: Any = None) -> Any: """ Resets the internal state of features, usually by deleting system indices diff --git a/opensearchpy/client/features.pyi b/opensearchpy/client/features.pyi deleted file mode 100644 index 6abcd79e..00000000 --- a/opensearchpy/client/features.pyi +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class FeaturesClient(NamespacedClient): - def get_features( - self, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reset_features( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 3f8df6c6..b826e99f 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -36,12 +36,20 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params() - def analyze(self, body=None, index=None, params=None, headers=None): + def analyze( + self, + body: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the text. @@ -60,7 +68,12 @@ def analyze(self, body=None, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def refresh(self, index=None, params=None, headers=None): + def refresh( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the refresh operation in one or more indices. @@ -87,7 +100,12 @@ def refresh(self, index=None, params=None, headers=None): "ignore_unavailable", "wait_if_ongoing", ) - def flush(self, index=None, params=None, headers=None): + def flush( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the flush operation on one or more indices. 
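The analyze, refresh, and flush signatures above accept an optional body or index expression; a small sketch with made-up index names follows.

from opensearchpy import OpenSearch

# Same hypothetical local client as in the earlier sketches.
client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# analyze works without an index; the body names the analyzer and sample text.
tokens = client.indices.analyze(
    body={"analyzer": "standard", "text": "merging type hints into the client"}
)
print([t["token"] for t in tokens["tokens"]])

# refresh and flush take an optional index expression plus query params.
client.indices.refresh(index="logs-*")
client.indices.flush(index="logs-*", wait_if_ongoing=True)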
@@ -119,7 +137,13 @@ def flush(self, index=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def create(self, index, body=None, params=None, headers=None): + def create( + self, + index: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates an index with optional settings and mappings. @@ -146,7 +170,14 @@ def create(self, index, body=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def clone(self, index, target, body=None, params=None, headers=None): + def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clones an index. @@ -186,7 +217,12 @@ def clone(self, index, target, body=None, params=None, headers=None): "local", "master_timeout", ) - def get(self, index, params=None, headers=None): + def get( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about one or more indices. @@ -229,7 +265,12 @@ def get(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def open(self, index, params=None, headers=None): + def open( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Opens an index. @@ -268,7 +309,12 @@ def open(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def close(self, index, params=None, headers=None): + def close( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Closes an index. @@ -306,7 +352,12 @@ def close(self, index, params=None, headers=None): "master_timeout", "timeout", ) - def delete(self, index, params=None, headers=None): + def delete( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index. @@ -344,7 +395,12 @@ def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - def exists(self, index, params=None, headers=None): + def exists( + self, + index: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index exists. @@ -382,7 +438,13 @@ def exists(self, index, params=None, headers=None): "timeout", "write_index_only", ) - def put_mapping(self, body, index=None, params=None, headers=None): + def put_mapping( + self, + body: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the index mappings. @@ -429,7 +491,12 @@ def put_mapping(self, body, index=None, params=None, headers=None): "local", "master_timeout", ) - def get_mapping(self, index=None, params=None, headers=None): + def get_mapping( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns mappings for one or more indices. 
@@ -463,7 +530,13 @@ def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - def get_field_mapping(self, fields, index=None, params=None, headers=None): + def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns mapping for one or more fields. @@ -494,7 +567,14 @@ def get_field_mapping(self, fields, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_alias(self, index, name, body=None, params=None, headers=None): + def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an alias. @@ -524,7 +604,13 @@ def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def exists_alias(self, name, index=None, params=None, headers=None): + def exists_alias( + self, + name: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular alias exists. @@ -550,7 +636,13 @@ def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def get_alias(self, index=None, name=None, params=None, headers=None): + def get_alias( + self, + index: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an alias. @@ -573,7 +665,12 @@ def get_alias(self, index=None, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def update_aliases(self, body, params=None, headers=None): + def update_aliases( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates index aliases. @@ -594,7 +691,13 @@ def update_aliases(self, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_alias(self, index, name, params=None, headers=None): + def delete_alias( + self, + index: Any, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an alias. @@ -619,7 +722,13 @@ def delete_alias(self, index, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "order") - def put_template(self, name, body, params=None, headers=None): + def put_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an index template. 
@@ -650,7 +759,12 @@ def put_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def exists_template(self, name, params=None, headers=None): + def exists_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -674,7 +788,12 @@ def exists_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def get_template(self, name=None, params=None, headers=None): + def get_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an index template. @@ -695,7 +814,12 @@ def get_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_template(self, name, params=None, headers=None): + def delete_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index template. @@ -725,7 +849,13 @@ def delete_template(self, name, params=None, headers=None): "local", "master_timeout", ) - def get_settings(self, index=None, name=None, params=None, headers=None): + def get_settings( + self, + index: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns settings for one or more indices. @@ -767,7 +897,13 @@ def get_settings(self, index=None, name=None, params=None, headers=None): "preserve_existing", "timeout", ) - def put_settings(self, body, index=None, params=None, headers=None): + def put_settings( + self, + body: Any, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the index settings. @@ -817,7 +953,13 @@ def put_settings(self, body, index=None, params=None, headers=None): "include_unloaded_segments", "level", ) - def stats(self, index=None, metric=None, params=None, headers=None): + def stats( + self, + index: Any = None, + metric: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides statistics on operations happening in an index. @@ -858,7 +1000,12 @@ def stats(self, index=None, metric=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - def segments(self, index=None, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. @@ -894,7 +1041,13 @@ def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - def validate_query(self, body=None, index=None, params=None, headers=None): + def validate_query( + self, + body: Any = None, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. 
@@ -943,7 +1096,12 @@ def validate_query(self, body=None, index=None, params=None, headers=None): "query", "request", ) - def clear_cache(self, index=None, params=None, headers=None): + def clear_cache( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clears all or specific caches for one or more indices. @@ -969,7 +1127,12 @@ def clear_cache(self, index=None, params=None, headers=None): ) @query_params("active_only", "detailed") - def recovery(self, index=None, params=None, headers=None): + def recovery( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -992,7 +1155,12 @@ def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - def upgrade(self, index=None, params=None, headers=None): + def upgrade( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1017,7 +1185,12 @@ def upgrade(self, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def get_upgrade(self, index=None, params=None, headers=None): + def get_upgrade( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1040,7 +1213,12 @@ def get_upgrade(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - def shard_stores(self, index=None, params=None, headers=None): + def shard_stores( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides store information for shard copies of indices. @@ -1070,7 +1248,12 @@ def shard_stores(self, index=None, params=None, headers=None): "max_num_segments", "only_expunge_deletes", ) - def forcemerge(self, index=None, params=None, headers=None): + def forcemerge( + self, + index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1103,7 +1286,14 @@ def forcemerge(self, index=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def shrink(self, index, target, body=None, params=None, headers=None): + def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. @@ -1142,7 +1332,14 @@ def shrink(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def split(self, index, target, body=None, params=None, headers=None): + def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. 
@@ -1182,7 +1379,14 @@ def split(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def rollover(self, alias, body=None, new_index=None, params=None, headers=None): + def rollover( + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. @@ -1217,7 +1421,13 @@ def rollover(self, alias, body=None, new_index=None, params=None, headers=None): ) @query_params() - def create_data_stream(self, name, body=None, params=None, headers=None): + def create_data_stream( + self, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a data stream. @@ -1237,7 +1447,12 @@ def create_data_stream(self, name, body=None, params=None, headers=None): ) @query_params() - def delete_data_stream(self, name, params=None, headers=None): + def delete_data_stream( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a data stream. @@ -1253,7 +1468,12 @@ def delete_data_stream(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_index_template(self, name, params=None, headers=None): + def delete_index_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes an index template. @@ -1277,7 +1497,12 @@ def delete_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def exists_index_template(self, name, params=None, headers=None): + def exists_index_template( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -1301,7 +1526,12 @@ def exists_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def get_index_template(self, name=None, params=None, headers=None): + def get_index_template( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns an index template. @@ -1322,7 +1552,13 @@ def get_index_template(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def put_index_template(self, name, body, params=None, headers=None): + def put_index_template( + self, + name: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates an index template. 
@@ -1352,7 +1588,13 @@ def put_index_template(self, name, body, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def simulate_index_template(self, name, body=None, params=None, headers=None): + def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the system. @@ -1385,7 +1627,12 @@ def simulate_index_template(self, name, body=None, params=None, headers=None): ) @query_params() - def get_data_stream(self, name=None, params=None, headers=None): + def get_data_stream( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns data streams. @@ -1398,7 +1645,13 @@ def get_data_stream(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def simulate_template(self, body=None, name=None, params=None, headers=None): + def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Simulate resolving the given template name or body. @@ -1426,7 +1679,12 @@ def simulate_template(self, body=None, name=None, params=None, headers=None): ) @query_params("expand_wildcards") - def resolve_index(self, name, params=None, headers=None): + def resolve_index( + self, + name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1452,7 +1710,13 @@ def resolve_index(self, name, params=None, headers=None): "master_timeout", "timeout", ) - def add_block(self, index, block, params=None, headers=None): + def add_block( + self, + index: Any, + block: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds a block to an index. @@ -1484,7 +1748,12 @@ def add_block(self, index, block, params=None, headers=None): ) @query_params() - def data_streams_stats(self, name=None, params=None, headers=None): + def data_streams_stats( + self, + name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. diff --git a/opensearchpy/client/indices.pyi b/opensearchpy/client/indices.pyi deleted file mode 100644 index 87048693..00000000 --- a/opensearchpy/client/indices.pyi +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
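To illustrate the annotated IndicesClient signatures above, here is a short sketch under assumed defaults; the index name logs-demo, alias logs, template name logs-template, and connection settings are hypothetical examples chosen for this sketch, not taken from the patch.

from typing import MutableMapping, Optional

from opensearchpy import OpenSearch

# Placeholder connection; the cluster location and credentials are assumptions.
client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

headers: Optional[MutableMapping[str, str]] = {"X-Opaque-Id": "indices-demo"}

# These methods now declare params: Optional[MutableMapping[str, Any]] and
# headers: Optional[MutableMapping[str, str]] directly in indices.py.
client.indices.put_index_template(
    name="logs-template",
    body={"index_patterns": ["logs-*"], "template": {"settings": {"number_of_shards": 1}}},
)
client.indices.create(
    index="logs-demo",
    body={"settings": {"index": {"number_of_shards": 1}}},
    headers=headers,
)
client.indices.put_mapping(index="logs-demo", body={"properties": {"message": {"type": "text"}}})
client.indices.put_alias(index="logs-demo", name="logs")

if client.indices.exists(index="logs-demo"):
    client.indices.refresh(index="logs-demo")

client.indices.delete(index="logs-demo")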
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_aliases( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_alias( - self, - index: Any, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def exists_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_data_stream( - self, - name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_index_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_data_stream( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 6282c7b8..a2d0fa0d 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -36,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") - def get_pipeline(self, id=None, params=None, headers=None): + def get_pipeline( + self, + id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a pipeline. @@ -59,7 +66,13 @@ def get_pipeline(self, id=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_pipeline(self, id, body, params=None, headers=None): + def put_pipeline( + self, + id: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates a pipeline. @@ -86,7 +99,12 @@ def put_pipeline(self, id, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_pipeline(self, id, params=None, headers=None): + def delete_pipeline( + self, + id: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a pipeline. @@ -110,7 +128,13 @@ def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - def simulate(self, body, id=None, params=None, headers=None): + def simulate( + self, + body: Any, + id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. 
@@ -132,7 +156,11 @@ def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - def processor_grok(self, params=None, headers=None): + def processor_grok( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of the built-in patterns. diff --git a/opensearchpy/client/ingest.pyi b/opensearchpy/client/ingest.pyi deleted file mode 100644 index c7531f0e..00000000 --- a/opensearchpy/client/ingest.pyi +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - def get_pipeline( - self, - *, - id: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_pipeline( - self, - id: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_pipeline( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 28ea1357..6a7b5db1 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -36,14 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. 
@@ -64,7 +70,13 @@ def reload_secure_settings( ) @query_params("flat_settings", "timeout") - def info(self, node_id=None, metric=None, params=None, headers=None): + def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. @@ -95,8 +107,13 @@ def info(self, node_id=None, metric=None, params=None, headers=None): "types", ) def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. @@ -140,7 +157,12 @@ def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - def hot_threads(self, node_id=None, params=None, headers=None): + def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. @@ -173,7 +195,13 @@ def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - def usage(self, node_id=None, metric=None, params=None, headers=None): + def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. diff --git a/opensearchpy/client/nodes.pyi b/opensearchpy/client/nodes.pyi deleted file mode 100644 index 78465481..00000000 --- a/opensearchpy/client/nodes.pyi +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index 19570be4..b12214d7 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -9,14 +9,19 @@ # GitHub history for details. import warnings +from typing import Any from ..plugins.alerting import AlertingClient from ..plugins.index_management import IndexManagementClient +from .client import Client from .utils import NamespacedClient class PluginsClient(NamespacedClient): - def __init__(self, client): + alerting: Any + index_management: Any + + def __init__(self, client: Client) -> None: super(PluginsClient, self).__init__(client) # self.query_workbench = QueryWorkbenchClient(client) # self.reporting = ReportingClient(client) @@ -28,7 +33,7 @@ def __init__(self, client): self._dynamic_lookup(client) - def _dynamic_lookup(self, client): + def _dynamic_lookup(self, client: Any) -> None: # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 plugins = [ diff --git a/opensearchpy/client/plugins.pyi b/opensearchpy/client/plugins.pyi deleted file mode 100644 index da9a7488..00000000 --- a/opensearchpy/client/plugins.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any - -from ..client import OpenSearch -from ..plugins.alerting import AlertingClient as AlertingClient -from .utils import NamespacedClient as NamespacedClient - -class PluginsClient(NamespacedClient): - alerting: Any - index_management: Any - def __init__(self, client: OpenSearch) -> None: ... 
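The ingest, nodes, and plugins clients above now carry their annotations inline, so a type checker reads them straight from the .py modules rather than from the deleted .pyi stubs. Below is a minimal usage sketch, not part of the patch: the host settings, pipeline id, and pipeline body are illustrative assumptions, and only signatures shown in the hunks above are exercised.

from typing import Any

from opensearchpy import OpenSearch

# Assumed local cluster; host/port are placeholders, not part of this patch.
client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# put_pipeline(id, body, params=None, headers=None) -> Any, as annotated above.
pipeline_body: Any = {
    "description": "example pipeline",
    "processors": [{"set": {"field": "ingested", "value": True}}],
}
client.ingest.put_pipeline(id="example-pipeline", body=pipeline_body)

# get_pipeline and the nodes stats/info calls likewise take and return Any.
print(client.ingest.get_pipeline(id="example-pipeline"))
print(client.nodes.stats(metric="jvm"))
print(client.nodes.info())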
diff --git a/opensearchpy/client/remote.py b/opensearchpy/client/remote.py index eba66927..5c1c0f0c 100644 --- a/opensearchpy/client/remote.py +++ b/opensearchpy/client/remote.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() - def info(self, params=None, headers=None): + def info(self, params: Any = None, headers: Any = None) -> Any: return self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) diff --git a/opensearchpy/client/remote.pyi b/opensearchpy/client/remote.pyi deleted file mode 100644 index 93e8c067..00000000 --- a/opensearchpy/client/remote.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteClient(NamespacedClient): - def info( - self, - *, - timeout: Optional[Any] = None, - pretty: Optional[bool] = None, - human: Optional[bool] = None, - error_trace: Optional[bool] = None, - format: Optional[str] = None, - filter_path: Optional[Union[str, Collection[str]]] = None, - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, - ) -> Any: ... diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index 8f4313b7..fc45c1e8 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -7,6 +7,25 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + + # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # @@ -17,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, query_params class RemoteStoreClient(NamespacedClient): @query_params("cluster_manager_timeout", "wait_for_completion") - def restore(self, body, params=None, headers=None): + def restore( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Restores from remote store. diff --git a/opensearchpy/client/remote_store.pyi b/opensearchpy/client/remote_store.pyi deleted file mode 100644 index 50358e63..00000000 --- a/opensearchpy/client/remote_store.pyi +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteStoreClient(NamespacedClient): - def restore( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 14bc0229..f074574e 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -7,6 +7,23 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. # ---------------------------------------------------- @@ -19,14 +36,29 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + class SecurityClient(NamespacedClient): - from ._patch import health_check, update_audit_config + from ._patch import health_check, update_audit_config # type: ignore @query_params() - def get_account_details(self, params=None, headers=None): + def get_account_details( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns account details for the current user. @@ -36,7 +68,12 @@ def get_account_details(self, params=None, headers=None): ) @query_params() - def change_password(self, body, params=None, headers=None): + def change_password( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Changes the password for the current user. @@ -54,7 +91,12 @@ def change_password(self, body, params=None, headers=None): ) @query_params() - def get_action_group(self, action_group, params=None, headers=None): + def get_action_group( + self, + action_group: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one action group. @@ -74,7 +116,11 @@ def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - def get_action_groups(self, params=None, headers=None): + def get_action_groups( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all action groups. @@ -87,7 +133,12 @@ def get_action_groups(self, params=None, headers=None): ) @query_params() - def delete_action_group(self, action_group, params=None, headers=None): + def delete_action_group( + self, + action_group: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete a specified action group. @@ -107,7 +158,13 @@ def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - def create_action_group(self, action_group, body, params=None, headers=None): + def create_action_group( + self, + action_group: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified action group. 
@@ -128,7 +185,13 @@ def create_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_group(self, action_group, body, params=None, headers=None): + def patch_action_group( + self, + action_group: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of an action group. @@ -147,7 +210,12 @@ def patch_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_groups(self, body, params=None, headers=None): + def patch_action_groups( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. @@ -165,7 +233,12 @@ def patch_action_groups(self, body, params=None, headers=None): ) @query_params() - def get_user(self, username, params=None, headers=None): + def get_user( + self, + username: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieve one internal user. @@ -182,7 +255,11 @@ def get_user(self, username, params=None, headers=None): ) @query_params() - def get_users(self, params=None, headers=None): + def get_users( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieve all internal users. @@ -195,7 +272,12 @@ def get_users(self, params=None, headers=None): ) @query_params() - def delete_user(self, username, params=None, headers=None): + def delete_user( + self, + username: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified user. @@ -212,7 +294,13 @@ def delete_user(self, username, params=None, headers=None): ) @query_params() - def create_user(self, username, body, params=None, headers=None): + def create_user( + self, + username: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified user. @@ -231,7 +319,13 @@ def create_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_user(self, username, body, params=None, headers=None): + def patch_user( + self, + username: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of an internal user. @@ -250,7 +344,12 @@ def patch_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_users(self, body, params=None, headers=None): + def patch_users( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -268,7 +367,12 @@ def patch_users(self, body, params=None, headers=None): ) @query_params() - def get_role(self, role, params=None, headers=None): + def get_role( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one role. 
@@ -285,7 +389,11 @@ def get_role(self, role, params=None, headers=None): ) @query_params() - def get_roles(self, params=None, headers=None): + def get_roles( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all roles. @@ -295,7 +403,12 @@ def get_roles(self, params=None, headers=None): ) @query_params() - def delete_role(self, role, params=None, headers=None): + def delete_role( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified role. @@ -312,7 +425,13 @@ def delete_role(self, role, params=None, headers=None): ) @query_params() - def create_role(self, role, body, params=None, headers=None): + def create_role( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified role. @@ -331,7 +450,13 @@ def create_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_role(self, role, body, params=None, headers=None): + def patch_role( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of a role. @@ -350,7 +475,12 @@ def patch_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_roles(self, body, params=None, headers=None): + def patch_roles( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -368,7 +498,12 @@ def patch_roles(self, body, params=None, headers=None): ) @query_params() - def get_role_mapping(self, role, params=None, headers=None): + def get_role_mapping( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one role mapping. @@ -385,7 +520,11 @@ def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - def get_role_mappings(self, params=None, headers=None): + def get_role_mappings( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all role mappings. @@ -398,7 +537,12 @@ def get_role_mappings(self, params=None, headers=None): ) @query_params() - def delete_role_mapping(self, role, params=None, headers=None): + def delete_role_mapping( + self, + role: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes the specified role mapping. @@ -415,7 +559,13 @@ def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - def create_role_mapping(self, role, body, params=None, headers=None): + def create_role_mapping( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified role mapping. 
@@ -434,7 +584,13 @@ def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mapping(self, role, body, params=None, headers=None): + def patch_role_mapping( + self, + role: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates individual attributes of a role mapping. @@ -453,7 +609,12 @@ def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mappings(self, body, params=None, headers=None): + def patch_role_mappings( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -471,7 +632,12 @@ def patch_role_mappings(self, body, params=None, headers=None): ) @query_params() - def get_tenant(self, tenant, params=None, headers=None): + def get_tenant( + self, + tenant: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves one tenant. @@ -488,7 +654,11 @@ def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - def get_tenants(self, params=None, headers=None): + def get_tenants( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all tenants. @@ -498,7 +668,12 @@ def get_tenants(self, params=None, headers=None): ) @query_params() - def delete_tenant(self, tenant, params=None, headers=None): + def delete_tenant( + self, + tenant: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Delete the specified tenant. @@ -515,7 +690,13 @@ def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - def create_tenant(self, tenant, body, params=None, headers=None): + def create_tenant( + self, + tenant: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates or replaces the specified tenant. @@ -534,7 +715,13 @@ def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenant(self, tenant, body, params=None, headers=None): + def patch_tenant( + self, + tenant: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Add, delete, or modify a single tenant. @@ -553,7 +740,12 @@ def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenants(self, body, params=None, headers=None): + def patch_tenants( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -571,7 +763,11 @@ def patch_tenants(self, body, params=None, headers=None): ) @query_params() - def get_configuration(self, params=None, headers=None): + def get_configuration( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns the current Security plugin configuration in JSON format. 
@@ -584,7 +780,12 @@ def get_configuration(self, params=None, headers=None): ) @query_params() - def update_configuration(self, body, params=None, headers=None): + def update_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds or updates the existing configuration using the REST API. @@ -602,7 +803,12 @@ def update_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_configuration(self, body, params=None, headers=None): + def patch_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -620,7 +826,12 @@ def patch_configuration(self, body, params=None, headers=None): ) @query_params() - def get_distinguished_names(self, cluster_name=None, params=None, headers=None): + def get_distinguished_names( + self, + cluster_name: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. @@ -635,8 +846,12 @@ def get_distinguished_names(self, cluster_name=None, params=None, headers=None): @query_params() def update_distinguished_names( - self, cluster_name, body=None, params=None, headers=None - ): + self, + cluster_name: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s allow list. @@ -657,7 +872,12 @@ def update_distinguished_names( ) @query_params() - def delete_distinguished_names(self, cluster_name, params=None, headers=None): + def delete_distinguished_names( + self, + cluster_name: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow list. @@ -677,7 +897,11 @@ def delete_distinguished_names(self, cluster_name, params=None, headers=None): ) @query_params() - def get_certificates(self, params=None, headers=None): + def get_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves the cluster’s security certificates. @@ -687,7 +911,11 @@ def get_certificates(self, params=None, headers=None): ) @query_params() - def reload_transport_certificates(self, params=None, headers=None): + def reload_transport_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Reload transport layer communication certificates. @@ -700,7 +928,11 @@ def reload_transport_certificates(self, params=None, headers=None): ) @query_params() - def reload_http_certificates(self, params=None, headers=None): + def reload_http_certificates( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Reload HTTP layer communication certificates. 
@@ -713,7 +945,11 @@ def reload_http_certificates(self, params=None, headers=None): ) @query_params() - def flush_cache(self, params=None, headers=None): + def flush_cache( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. @@ -723,7 +959,11 @@ def flush_cache(self, params=None, headers=None): ) @query_params() - def health(self, params=None, headers=None): + def health( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. @@ -733,7 +973,11 @@ def health(self, params=None, headers=None): ) @query_params() - def get_audit_configuration(self, params=None, headers=None): + def get_audit_configuration( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Retrieves the audit configuration. @@ -743,7 +987,12 @@ def get_audit_configuration(self, params=None, headers=None): ) @query_params() - def update_audit_configuration(self, body, params=None, headers=None): + def update_audit_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Updates the audit configuration. @@ -761,7 +1010,12 @@ def update_audit_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_audit_configuration(self, body, params=None, headers=None): + def patch_audit_configuration( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. @@ -779,7 +1033,12 @@ def patch_audit_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_distinguished_names(self, body, params=None, headers=None): + def patch_distinguished_names( + self, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Bulk update of distinguished names. diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi deleted file mode 100644 index 99e009d9..00000000 --- a/opensearchpy/client/security.pyi +++ /dev/null @@ -1,821 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SecurityClient(NamespacedClient): - def get_account_details( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def change_password( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_action_groups( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_action_groups( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_users( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_users( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_roles( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_roles( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_role_mappings( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_role_mappings( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_tenants( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def create_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_tenants( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_distinguished_names( - self, - *, - cluster_name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_distinguished_names( - self, - cluster_name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_distinguished_names( - self, - cluster_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def reload_transport_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reload_http_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def flush_cache( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def health( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_audit_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_distinguished_names( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index 313f7dd3..a931a231 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -36,12 +36,21 @@ # ----------------------------------------------------- +from typing import Any, MutableMapping, Optional + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - def create(self, repository, snapshot, body=None, params=None, headers=None): + def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates a snapshot in a repository. @@ -70,7 +79,13 @@ def create(self, repository, snapshot, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - def delete(self, repository, snapshot, params=None, headers=None): + def delete( + self, + repository: Any, + snapshot: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a snapshot. 
@@ -97,7 +112,13 @@ def delete(self, repository, snapshot, params=None, headers=None): @query_params( "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - def get(self, repository, snapshot, params=None, headers=None): + def get( + self, + repository: Any, + snapshot: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a snapshot. @@ -127,7 +148,12 @@ def get(self, repository, snapshot, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_repository(self, repository, params=None, headers=None): + def delete_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Deletes a repository. @@ -152,7 +178,12 @@ def delete_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def get_repository(self, repository=None, params=None, headers=None): + def get_repository( + self, + repository: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a repository. @@ -171,7 +202,13 @@ def get_repository(self, repository=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") - def create_repository(self, repository, body, params=None, headers=None): + def create_repository( + self, + repository: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Creates a repository. @@ -199,7 +236,14 @@ def create_repository(self, repository, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - def restore(self, repository, snapshot, body=None, params=None, headers=None): + def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Restores a snapshot. @@ -228,7 +272,13 @@ def restore(self, repository, snapshot, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") - def status(self, repository=None, snapshot=None, params=None, headers=None): + def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about the status of a snapshot. @@ -252,7 +302,12 @@ def status(self, repository=None, snapshot=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def verify_repository(self, repository, params=None, headers=None): + def verify_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Verifies a repository. 
@@ -276,7 +331,12 @@ def verify_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def cleanup_repository(self, repository, params=None, headers=None): + def cleanup_repository( + self, + repository: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Removes stale data from repository. @@ -301,8 +361,14 @@ def cleanup_repository(self, repository, params=None, headers=None): @query_params("cluster_manager_timeout", "master_timeout") def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. diff --git a/opensearchpy/client/snapshot.pyi b/opensearchpy/client/snapshot.pyi deleted file mode 100644 index fd239fad..00000000 --- a/opensearchpy/client/snapshot.pyi +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_repository( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_repository( - self, - repository: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def verify_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cleanup_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 90c4e731..908a790e 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -37,6 +37,7 @@ import warnings +from typing import Any, MutableMapping, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -51,7 +52,11 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - def list(self, params=None, headers=None): + def list( + self, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns a list of tasks. @@ -77,7 +82,12 @@ def list(self, params=None, headers=None): ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - def cancel(self, task_id=None, params=None, headers=None): + def cancel( + self, + task_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -103,7 +113,12 @@ def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - def get(self, task_id=None, params=None, headers=None): + def get( + self, + task_id: Any = None, + params: Optional[MutableMapping[str, Any]] = None, + headers: Optional[MutableMapping[str, str]] = None, + ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/client/tasks.pyi b/opensearchpy/client/tasks.pyi deleted file mode 100644 index 0aeed153..00000000 --- a/opensearchpy/client/tasks.pyi +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index 7b7366de..8e2c3510 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -32,14 +32,17 @@ import weakref from datetime import date, datetime from functools import wraps +from typing import Any, Callable + +from opensearchpy.serializer import Serializer from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted -SKIP_IN_PATH = (None, "", b"", [], ()) +SKIP_IN_PATH: Any = (None, "", b"", [], ()) -def _normalize_hosts(hosts): +def _normalize_hosts(hosts: Any) -> Any: """ Helper function to transform hosts argument to :class:`~opensearchpy.OpenSearch` to a list of dicts. @@ -57,7 +60,7 @@ def _normalize_hosts(hosts): for host in hosts: if isinstance(host, string_types): if "://" not in host: host = "//%s" % host parsed_url = urlparse(host) h = {"host": parsed_url.hostname} @@ -84,7 +87,7 @@ def _normalize_hosts(hosts): return out -def _escape(value): +def _escape(value: Any) -> Any: """ Escape a single value of a URL string or a query parameter. If it is a list or tuple, turn it into a comma-separated string first. @@ -114,7 +117,7 @@ def _escape(value): return str(value) -def _make_path(*parts): +def _make_path(*parts: Any) -> str: """ Create a URL string from parts, omit all `None` values and empty strings. Convert lists and tuples to comma separated values. @@ -132,15 +135,15 @@ def _make_path(*parts): GLOBAL_PARAMS = ("pretty", "human", "error_trace", "format", "filter_path") -def query_params(*opensearch_query_params): +def query_params(*opensearch_query_params: Any) -> Callable: # type: ignore """ Decorator that pops all accepted parameters from method's kwargs and puts them in the params argument. """ - def _wrapper(func): + def _wrapper(func: Any) -> Any: @wraps(func) - def _wrapped(*args, **kwargs): + def _wrapped(*args: Any, **kwargs: Any) -> Any: params = (kwargs.pop("params", None) or {}).copy() headers = { k.lower(): v @@ -182,7 +185,7 @@ def _wrapped(*args, **kwargs): return _wrapper -def _bulk_body(serializer, body): +def _bulk_body(serializer: Serializer, body: str) -> str: # if not passed in a string, serialize items and join by newline if not isinstance(body, string_types): body = "\n".join(map(serializer.dumps, body)) @@ -197,7 +200,7 @@ def _bulk_body(serializer, body): return body -def _base64_auth_header(auth_value): +def _base64_auth_header(auth_value: Any) -> str: """Takes either a 2-tuple or a base64-encoded string and returns a base64-encoded string to be used as an HTTP authorization header. 
@@ -208,17 +211,17 @@ def _base64_auth_header(auth_value): class NamespacedClient(object): - def __init__(self, client): + def __init__(self, client: Any) -> None: self.client = client @property - def transport(self): + def transport(self) -> Any: return self.client.transport class AddonClient(NamespacedClient): @classmethod - def infect_client(cls, client): + def infect_client(cls: Any, client: NamespacedClient) -> NamespacedClient: addon = cls(weakref.proxy(client)) setattr(client, cls.namespace, addon) return client diff --git a/opensearchpy/client/utils.pyi b/opensearchpy/client/utils.pyi deleted file mode 100644 index 2aa263fc..00000000 --- a/opensearchpy/client/utils.pyi +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import unicode_literals - -from typing import ( - Any, - Callable, - Collection, - Dict, - List, - Optional, - Tuple, - TypeVar, - Union, -) - -from ..client import OpenSearch -from ..serializer import Serializer -from ..transport import Transport - -T = TypeVar("T") -SKIP_IN_PATH: Collection[Any] - -def _normalize_hosts( - hosts: Optional[Union[str, Collection[Union[str, Dict[str, Any]]]]] -) -> List[Dict[str, Any]]: ... -def _escape(value: Any) -> str: ... -def _make_path(*parts: Any) -> str: ... - -GLOBAL_PARAMS: Tuple[str, ...] - -def query_params( - *es_query_params: str, -) -> Callable[[Callable[..., T]], Callable[..., T]]: ... -def _bulk_body( - serializer: Serializer, body: Union[str, bytes, Collection[Any]] -) -> str: ... - -class NamespacedClient: - client: OpenSearch - def __init__(self, client: OpenSearch) -> None: ... - @property - def transport(self) -> Transport: ... diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index 57a88a74..cb8bc7d7 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -26,33 +26,29 @@ # under the License. 
+from collections.abc import Mapping from queue import Queue +from typing import Tuple, Type, Union from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse string_types = str, bytes map = map -def to_str(x, encoding="ascii"): +def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: if not isinstance(x, str): return x.decode(encoding) return x -def to_bytes(x, encoding="ascii"): +def to_bytes(x: Union[str, bytes], encoding: str = "ascii") -> bytes: if not isinstance(x, bytes): return x.encode(encoding) return x try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - - -try: - reraise_exceptions = (RecursionError,) + reraise_exceptions: Tuple[Type[BaseException], ...] = (RecursionError,) except NameError: reraise_exceptions = () diff --git a/opensearchpy/compat.pyi b/opensearchpy/compat.pyi deleted file mode 100644 index 2606c723..00000000 --- a/opensearchpy/compat.pyi +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys -from typing import Callable, Tuple, Type, Union - -string_types: Tuple[type, ...] - -to_str: Callable[[Union[str, bytes]], str] -to_bytes: Callable[[Union[str, bytes]], bytes] -reraise_exceptions: Tuple[Type[Exception], ...] - -if sys.version_info[0] == 2: - from itertools import imap as map - from urllib import quote as quote - from urllib import quote_plus as quote_plus - from urllib import unquote as unquote - from urllib import urlencode as urlencode - - from Queue import Queue as Queue - from urlparse import urlparse as urlparse -else: - from urllib.parse import quote as quote - from urllib.parse import quote_plus as quote_plus - from urllib.parse import unquote as unquote - from urllib.parse import urlencode as urlencode - from urllib.parse import urlparse as urlparse - - map = map - from queue import Queue as Queue diff --git a/opensearchpy/connection/__init__.pyi b/opensearchpy/connection/__init__.pyi deleted file mode 100644 index f3f31016..00000000 --- a/opensearchpy/connection/__init__.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .base import Connection as Connection -from .http_async import AsyncHttpConnection as AsyncHttpConnection -from .http_requests import RequestsHttpConnection as RequestsHttpConnection -from .http_urllib3 import Urllib3HttpConnection as Urllib3HttpConnection -from .http_urllib3 import create_ssl_context as create_ssl_context diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 87dd22d7..87467ae0 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from six import string_types import opensearchpy @@ -21,11 +23,11 @@ class AsyncConnections(object): singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - async def configure(self, **kwargs): + async def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -48,13 +50,13 @@ async def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - async def add_connection(self, alias, conn): + async def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - async def remove_connection(self, alias): + async def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -69,7 +71,7 @@ async def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - async def create_connection(self, alias="default", **kwargs): + async def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.AsyncOpenSearch`` and register it under given alias. @@ -78,7 +80,7 @@ async def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.AsyncOpenSearch(**kwargs) return conn - async def get_connection(self, alias="default"): + async def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). 
If a non-string alias has been passed through we diff --git a/opensearchpy/connection/async_connections.pyi b/opensearchpy/connection/async_connections.pyi deleted file mode 100644 index eb310cdf..00000000 --- a/opensearchpy/connection/async_connections.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncConnections: ... diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index ee8d934f..59418bfa 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -25,7 +25,6 @@ # specific language governing permissions and limitations # under the License. - import gzip import io import logging @@ -33,13 +32,14 @@ import re import warnings from platform import python_version +from typing import Any, Collection, Dict, Mapping, Optional, Union try: import simplejson as json except ImportError: - import json + import json # type: ignore -from .. import __versionstr__ +from .._version import __versionstr__ from ..exceptions import HTTP_EXCEPTIONS, OpenSearchWarning, TransportError logger = logging.getLogger("opensearch") @@ -74,16 +74,16 @@ class Connection(object): def __init__( self, - host="localhost", - port=None, - use_ssl=False, - url_prefix="", - timeout=10, - headers=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + use_ssl: bool = False, + url_prefix: str = "", + timeout: int = 10, + headers: Optional[Dict[str, str]] = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + **kwargs: Any + ) -> None: if port is None: port = 9200 @@ -130,24 +130,24 @@ def __init__( self.url_prefix = url_prefix self.timeout = timeout - def __repr__(self): + def __repr__(self) -> str: return "<%s: %s>" % (self.__class__.__name__, self.host) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, Connection): raise TypeError("Unsupported equality check for %s and %s" % (self, other)) return self.__hash__() == other.__hash__() - def __hash__(self): + def __hash__(self) -> int: return id(self) - def _gzip_compress(self, body): + def _gzip_compress(self, body: Any) -> bytes: buf = io.BytesIO() with gzip.GzipFile(fileobj=buf, mode="wb") as f: f.write(body) return buf.getvalue() - def _raise_warnings(self, warning_headers): + def _raise_warnings(self, warning_headers: Any) -> None: """If 'headers' contains a 'Warning' header raise the warnings to be seen by the user. Takes an iterable of string values from any number of 'Warning' headers. 
@@ -174,7 +174,7 @@ def _raise_warnings(self, warning_headers): for message in warning_messages: warnings.warn(message, category=OpenSearchWarning) - def _pretty_json(self, data): + def _pretty_json(self, data: Union[str, bytes]) -> str: # pretty JSON in tracer curl logs try: return json.dumps( @@ -182,9 +182,17 @@ def _pretty_json(self, data): ).replace("'", r"\u0027") except (ValueError, TypeError): # non-json data or a bulk request - return data + return data # type: ignore - def _log_trace(self, method, path, body, status_code, response, duration): + def _log_trace( + self, + method: str, + path: str, + body: Optional[Union[str, bytes]], + status_code: Optional[int], + response: Optional[str], + duration: Optional[float], + ) -> None: if not tracer.isEnabledFor(logging.INFO) or not tracer.handlers: return @@ -210,29 +218,33 @@ def _log_trace(self, method, path, body, status_code, response, duration): def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() def log_request_success( - self, method, full_url, path, body, status_code, response, duration - ): + self, + method: str, + full_url: str, + path: str, + body: Any, + status_code: int, + response: str, + duration: float, + ) -> None: """Log a successful API call.""" # TODO: optionally pass in params instead of full_url and do urlencode only when needed # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration @@ -244,15 +256,15 @@ def log_request_success( def log_request_fail( self, - method, - full_url, - path, - body, - duration, - status_code=None, - response=None, - exception=None, - ): + method: str, + full_url: str, + path: str, + body: Any, + duration: float, + status_code: Optional[int] = None, + response: Optional[str] = None, + exception: Optional[Exception] = None, + ) -> None: """Log an unsuccessful API call.""" # do not log 404s on HEAD requests if method == "HEAD" and status_code == 404: @@ -268,11 +280,8 @@ def log_request_fail( # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.debug("> %s", body) @@ -281,7 +290,12 @@ def log_request_fail( if response is not None: logger.debug("< %s", response) - def _raise_error(self, status_code, raw_data, content_type=None): + def _raise_error( + self, + status_code: int, + raw_data: Union[str, bytes], + content_type: Optional[str] = None, + ) -> None: """Locate appropriate exception and raise it.""" error_message = raw_data additional_info = None @@ -303,11 +317,11 @@ def _raise_error(self, status_code, raw_data, content_type=None): status_code, error_message, additional_info ) - def _get_default_user_agent(self): + def _get_default_user_agent(self) 
-> str: return "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()) @staticmethod - def default_ca_certs(): + def default_ca_certs() -> Union[str, None]: """ Get the default CA certificate bundle, preferring those configured in the standard OpenSSL environment variables before those provided by @@ -315,12 +329,12 @@ def default_ca_certs(): """ ca_certs = os.environ.get("SSL_CERT_FILE") or os.environ.get("SSL_CERT_DIR") - if ca_certs: - return ca_certs + if not ca_certs: + try: + import certifi - try: - import certifi - except ImportError: - pass - else: - return certifi.where() + ca_certs = certifi.where() + except ImportError: + pass + + return ca_certs diff --git a/opensearchpy/connection/base.pyi b/opensearchpy/connection/base.pyi deleted file mode 100644 index 333f4a70..00000000 --- a/opensearchpy/connection/base.pyi +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import ( - Any, - AnyStr, - Collection, - Dict, - List, - Mapping, - NoReturn, - Optional, - Sequence, - Tuple, - Union, -) - -logger: logging.Logger -tracer: logging.Logger - -class Connection(object): - headers: Dict[str, str] - use_ssl: bool - http_compress: bool - scheme: str - hostname: str - port: Optional[int] - host: str - url_prefix: str - timeout: Optional[Union[float, int]] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - use_ssl: bool = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... - def __repr__(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def _gzip_compress(self, body: bytes) -> bytes: ... - def _raise_warnings(self, warning_headers: Sequence[str]) -> None: ... - def _pretty_json(self, data: Any) -> str: ... - def _log_trace( - self, - method: Any, - path: Any, - body: Any, - status_code: Any, - response: Any, - duration: Any, - ) -> None: ... - def perform_request( - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... 
- def log_request_success( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - status_code: int, - response: str, - duration: float, - ) -> None: ... - def log_request_fail( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - duration: float, - status_code: Optional[int] = ..., - response: Optional[str] = ..., - exception: Optional[Exception] = ..., - ) -> None: ... - def _raise_error( - self, status_code: int, raw_data: str, content_type: Optional[str] - ) -> NoReturn: ... - def _get_default_user_agent(self) -> str: ... - @staticmethod - def default_ca_certs() -> Optional[str]: ... diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 4401ade0..5b1e9a9c 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from six import string_types import opensearchpy @@ -37,11 +39,11 @@ class Connections(object): singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - def configure(self, **kwargs): + def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -64,13 +66,13 @@ def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - def add_connection(self, alias, conn): + def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - def remove_connection(self, alias): + def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -85,7 +87,7 @@ def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - def create_connection(self, alias="default", **kwargs): + def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.OpenSearch`` and register it under given alias. @@ -94,7 +96,7 @@ def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.OpenSearch(**kwargs) return conn - def get_connection(self, alias="default"): + def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). If a non-string alias has been passed through we diff --git a/opensearchpy/connection/connections.pyi b/opensearchpy/connection/connections.pyi deleted file mode 100644 index d763f57c..00000000 --- a/opensearchpy/connection/connections.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -class Connections(object): ... diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py index b7288005..d6ee57ee 100644 --- a/opensearchpy/connection/http_async.py +++ b/opensearchpy/connection/http_async.py @@ -14,8 +14,9 @@ import os import ssl import warnings +from typing import Any, Collection, Mapping, Optional, Union -from .._async._extra_imports import aiohttp, aiohttp_exceptions +from .._async._extra_imports import aiohttp, aiohttp_exceptions # type: ignore from .._async.compat import get_running_loop from .._async.http_aiohttp import AIOHttpConnection from ..compat import reraise_exceptions, string_types, urlencode @@ -31,27 +32,29 @@ class AsyncHttpConnection(AIOHttpConnection): + session: Optional[aiohttp.ClientSession] + def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Optional[Mapping[str, str]] = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + **kwargs: Any + ) -> None: self.headers = {} super().__init__( @@ -68,7 +71,7 @@ def __init__( if isinstance(http_auth, (tuple, list)): http_auth = aiohttp.BasicAuth(login=http_auth[0], password=http_auth[1]) elif isinstance(http_auth, string_types): - login, password = http_auth.split(":", 1) + login, password = http_auth.split(":", 1) # type: ignore http_auth = aiohttp.BasicAuth(login=login, password=password) # if providing an SSL context, raise error if any other SSL related flag is used @@ -146,8 +149,15 @@ def __init__( self._ssl_context = ssl_context async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -262,14 +272,14 @@ async def perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection 
""" if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -289,9 +299,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/connection/http_async.pyi b/opensearchpy/connection/http_async.pyi deleted file mode 100644 index 9fcfb246..00000000 --- a/opensearchpy/connection/http_async.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Mapping, Optional - -from .._async._extra_imports import aiohttp # type: ignore -from .._async.http_aiohttp import AIOHttpConnection - -class AsyncHttpConnection(AIOHttpConnection): - session: Optional[aiohttp.ClientSession] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: Optional[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Optional[Any] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py index f9e9b1a1..a966631d 100644 --- a/opensearchpy/connection/http_requests.py +++ b/opensearchpy/connection/http_requests.py @@ -28,6 +28,7 @@ import time import warnings +from typing import Any, Collection, Mapping, Optional, Union try: import requests @@ -73,21 +74,21 @@ class RequestsHttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=True, - ssl_show_warn=True, - ca_certs=None, - client_cert=None, - client_key=None, - headers=None, - http_compress=None, - opaque_id=None, - pool_maxsize=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: bool = True, + ssl_show_warn: bool = True, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + headers: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + pool_maxsize: Any = None, + **kwargs: Any + ) -> None: if not REQUESTS_AVAILABLE: raise ImproperlyConfigured( "Please install requests to use RequestsHttpConnection." @@ -116,13 +117,13 @@ def __init__( if not self.http_compress: # Need to set this to 'None' otherwise Requests adds its own. 
- self.session.headers["accept-encoding"] = None + self.session.headers["accept-encoding"] = None # type: ignore if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = tuple(http_auth) elif isinstance(http_auth, string_types): - http_auth = tuple(http_auth.split(":", 1)) + http_auth = tuple(http_auth.split(":", 1)) # type: ignore self.session.auth = http_auth self.base_url = "%s%s" % ( @@ -147,7 +148,7 @@ def __init__( self.session.verify = ca_certs if not ssl_show_warn: - requests.packages.urllib3.disable_warnings() + requests.packages.urllib3.disable_warnings() # type: ignore if self.use_ssl and not verify_certs and ssl_show_warn: warnings.warn( @@ -155,17 +156,17 @@ def __init__( % self.host ) - def perform_request( + def perform_request( # type: ignore self, - method, - url, - params=None, - body=None, - timeout=None, - allow_redirects=True, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + allow_redirects: Optional[bool] = True, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.base_url + url headers = headers or {} if params: @@ -174,7 +175,7 @@ def perform_request( orig_body = body if self.http_compress and body: body = self._gzip_compress(body) - headers["content-encoding"] = "gzip" + headers["content-encoding"] = "gzip" # type: ignore start = time.time() request = requests.Request(method=method, headers=headers, url=url, data=body) @@ -182,7 +183,7 @@ def perform_request( settings = self.session.merge_environment_settings( prepared_request.url, {}, None, None, None ) - send_kwargs = { + send_kwargs: Any = { "timeout": timeout or self.timeout, "allow_redirects": allow_redirects, } @@ -247,10 +248,10 @@ def perform_request( return response.status_code, response.headers, raw_data @property - def headers(self): + def headers(self) -> Any: # type: ignore return self.session.headers - def close(self): + def close(self) -> None: """ Explicitly closes connections """ diff --git a/opensearchpy/connection/http_requests.pyi b/opensearchpy/connection/http_requests.pyi deleted file mode 100644 index 61b6d496..00000000 --- a/opensearchpy/connection/http_requests.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Mapping, Optional - -import requests - -from .base import Connection - -class RequestsHttpConnection(Connection): - session: requests.Session - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index bde689ae..2a5ccd3b 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -28,12 +28,12 @@ import ssl import time import warnings -from typing import Callable +from typing import Any, Callable, Collection, Mapping, Optional, Union -import urllib3 # type: ignore +import urllib3 from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as UrllibSSLError # type: ignore -from urllib3.util.retry import Retry # type: ignore +from urllib3.exceptions import SSLError as UrllibSSLError +from urllib3.util.retry import Retry from ..compat import reraise_exceptions, urlencode from ..exceptions import ( @@ -51,7 +51,7 @@ SSL_SHOW_WARN_DEFAULT = object() -def create_ssl_context(**kwargs): +def create_ssl_context(**kwargs: Any) -> Any: """ A helper function around creating an SSL context @@ -99,25 +99,25 @@ class Urllib3HttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_hostname=None, - ssl_assert_fingerprint=None, - pool_maxsize=None, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_hostname: Any = None, + ssl_assert_fingerprint: Any = None, + pool_maxsize: Any = None, + headers: Any = None, + ssl_context: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + **kwargs: Any + ) -> None: # Initialize headers before calling super().__init__(). 
self.headers = urllib3.make_headers(keep_alive=True) @@ -133,7 +133,7 @@ def __init__( self.http_auth = http_auth if self.http_auth is not None: - if isinstance(self.http_auth, Callable): + if isinstance(self.http_auth, Callable): # type: ignore pass elif isinstance(self.http_auth, (tuple, list)): self.headers.update( @@ -142,7 +142,7 @@ def __init__( else: self.headers.update(urllib3.make_headers(basic_auth=http_auth)) - pool_class = urllib3.HTTPConnectionPool + pool_class: Any = urllib3.HTTPConnectionPool kw = {} # if providing an SSL context, raise error if any other SSL related flag is used @@ -220,8 +220,15 @@ def __init__( ) def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.url_prefix + url if params: url = "%s?%s" % (url, urlencode(params)) @@ -251,7 +258,7 @@ def perform_request( request_headers["content-encoding"] = "gzip" if self.http_auth is not None: - if isinstance(self.http_auth, Callable): + if isinstance(self.http_auth, Callable): # type: ignore request_headers.update(self.http_auth(method, full_url, body)) response = self.pool.urlopen( @@ -292,10 +299,10 @@ def perform_request( return response.status, response.headers, raw_data - def get_response_headers(self, response): + def get_response_headers(self, response: Any) -> Any: return {header.lower(): value for header, value in response.headers.items()} - def close(self): + def close(self) -> None: """ Explicitly closes connection """ diff --git a/opensearchpy/connection/http_urllib3.pyi b/opensearchpy/connection/http_urllib3.pyi deleted file mode 100644 index 7fe27617..00000000 --- a/opensearchpy/connection/http_urllib3.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import ssl -from typing import Any, Mapping, Optional, Union - -import urllib3 - -from .base import Connection - -def create_ssl_context( - cafile: Any = ..., - capath: Any = ..., - cadata: Any = ..., -) -> ssl.SSLContext: ... 
- -class Urllib3HttpConnection(Connection): - pool: urllib3.HTTPConnectionPool - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - http_auth: Any = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_hostname: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py index 48503a58..87bd8c72 100644 --- a/opensearchpy/connection/pooling.py +++ b/opensearchpy/connection/pooling.py @@ -26,6 +26,8 @@ # under the License. +from typing import Any + from .base import Connection try: @@ -35,6 +37,8 @@ class PoolingConnection(Connection): + _free_connections: queue.Queue[Connection] + """ Base connection class for connections that use libraries without thread safety and no capacity for connection pooling. To use this just implement a @@ -42,23 +46,23 @@ class PoolingConnection(Connection): it. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self._free_connections = queue.Queue() super(PoolingConnection, self).__init__(*args, **kwargs) - def _make_connection(self): + def _make_connection(self) -> Connection: raise NotImplementedError - def _get_connection(self): + def _get_connection(self) -> Connection: try: return self._free_connections.get_nowait() except queue.Empty: return self._make_connection() - def _release_connection(self, con): + def _release_connection(self, con: Connection) -> None: self._free_connections.put(con) - def close(self): + def close(self) -> None: """ Explicitly close connection """ diff --git a/opensearchpy/connection/pooling.pyi b/opensearchpy/connection/pooling.pyi deleted file mode 100644 index 53e38f40..00000000 --- a/opensearchpy/connection/pooling.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .base import Connection - -class PoolingConnection(Connection): - def _make_connection(self) -> Connection: ... - def _get_connection(self) -> Connection: ... 
- def _release_connection(self, con: Connection) -> None: ... - def close(self) -> None: ... diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index 7ff15512..defef6f5 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -30,15 +30,13 @@ import random import threading import time +from queue import Empty, PriorityQueue +from typing import Any, Dict, Optional, Sequence, Tuple, Type -try: - from Queue import Empty, PriorityQueue -except ImportError: - from queue import PriorityQueue, Empty - +from .connection import Connection from .exceptions import ImproperlyConfigured -logger = logging.getLogger("opensearch") +logger: logging.Logger = logging.getLogger("opensearch") class ConnectionSelector(object): @@ -60,13 +58,13 @@ class ConnectionSelector(object): connections where there would be none in its zones. """ - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: """ :arg opts: dictionary of connection instances and their options """ self.connection_opts = opts - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> None: """ Select a connection from the given list. @@ -80,7 +78,7 @@ class RandomSelector(ConnectionSelector): Select a connection at random """ - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: return random.choice(connections) @@ -89,11 +87,11 @@ class RoundRobinSelector(ConnectionSelector): Selector using round-robin. """ - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: super(RoundRobinSelector, self).__init__(opts) self.data = threading.local() - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: self.data.rr = getattr(self.data, "rr", -1) + 1 self.data.rr %= len(connections) return connections[self.data.rr] @@ -122,15 +120,24 @@ class ConnectionPool(object): succeeds will be marked as live (its fail count will be deleted). """ + connections_opts: Sequence[Tuple[Connection, Any]] + connections: Any + orig_connections: Tuple[Connection, ...] + dead: Any + dead_count: Dict[Connection, int] + dead_timeout: float + timeout_cutoff: int + selector: Any + def __init__( self, - connections, - dead_timeout=60, - timeout_cutoff=5, - selector_class=RoundRobinSelector, - randomize_hosts=True, - **kwargs - ): + connections: Any, + dead_timeout: float = 60, + timeout_cutoff: int = 5, + selector_class: Type[ConnectionSelector] = RoundRobinSelector, + randomize_hosts: bool = True, + **kwargs: Any + ) -> None: """ :arg connections: list of tuples containing the :class:`~opensearchpy.Connection` instance and its options @@ -164,9 +171,9 @@ def __init__( self.dead_timeout = dead_timeout self.timeout_cutoff = timeout_cutoff - self.selector = selector_class(dict(connections)) + self.selector = selector_class(dict(connections)) # type: ignore - def mark_dead(self, connection, now=None): + def mark_dead(self, connection: Connection, now: Optional[float] = None) -> None: """ Mark the connection as dead (failed). Remove it from the live pool and put it on a timeout. @@ -196,7 +203,7 @@ def mark_dead(self, connection, now=None): timeout, ) - def mark_live(self, connection): + def mark_live(self, connection: Connection) -> None: """ Mark connection as healthy after a resurrection. Resets the fail counter for the connection. 
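
The ConnectionPool API annotated in these connection_pool.py hunks is normally driven by the transport, but it can also be exercised directly. A minimal sketch follows; the hosts, ports and empty option dicts are placeholders and not part of this patch:

    from opensearchpy.connection.http_urllib3 import Urllib3HttpConnection
    from opensearchpy.connection_pool import ConnectionPool, RoundRobinSelector

    # ConnectionPool expects a sequence of (Connection, options) tuples
    conns = [
        (Urllib3HttpConnection(host="localhost", port=9200), {}),
        (Urllib3HttpConnection(host="localhost", port=9201), {}),
    ]
    pool = ConnectionPool(conns, dead_timeout=60, selector_class=RoundRobinSelector)

    conn = pool.get_connection()   # selector picks one of the live connections
    pool.mark_dead(conn)           # fail count incremented, pushed onto the dead queue
    pool.resurrect(force=True)     # force a connection out of the dead queue and back into rotation
    pool.close()                   # close every underlying connection
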
@@ -209,7 +216,7 @@ def mark_live(self, connection): # race condition, safe to ignore pass - def resurrect(self, force=False): + def resurrect(self, force: bool = False) -> Any: """ Attempt to resurrect a connection from the dead pool. It will try to locate one (not all) eligible (its timeout is over) connection to @@ -251,7 +258,7 @@ def resurrect(self, force=False): logger.info("Resurrecting connection %r (force=%s).", connection, force) return connection - def get_connection(self): + def get_connection(self) -> Any: """ Return a connection from the pool using the `ConnectionSelector` instance. @@ -276,38 +283,38 @@ def get_connection(self): # only one connection, no need for a selector return connections[0] - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ for conn in self.connections: conn.close() - def __repr__(self): + def __repr__(self) -> str: return "<%s: %r>" % (type(self).__name__, self.connections) class DummyConnectionPool(ConnectionPool): - def __init__(self, connections, **kwargs): + def __init__(self, connections: Any, **kwargs: Any) -> None: if len(connections) != 1: raise ImproperlyConfigured( "DummyConnectionPool needs exactly one " "connection defined." ) # we need connection opts for sniffing logic self.connection_opts = connections - self.connection = connections[0][0] + self.connection: Any = connections[0][0] self.connections = (self.connection,) - def get_connection(self): + def get_connection(self) -> Any: return self.connection - def close(self): + def close(self) -> None: """ Explicitly closes connections """ self.connection.close() - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass mark_dead = mark_live = resurrect = _noop @@ -316,14 +323,14 @@ def _noop(self, *args, **kwargs): class EmptyConnectionPool(ConnectionPool): """A connection pool that is empty. Errors out if used.""" - def __init__(self, *_, **__): + def __init__(self, *_: Any, **__: Any) -> None: self.connections = [] self.connection_opts = [] - def get_connection(self): + def get_connection(self) -> Connection: raise ImproperlyConfigured("No connections were configured") - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/connection_pool.pyi b/opensearchpy/connection_pool.pyi deleted file mode 100644 index e219591c..00000000 --- a/opensearchpy/connection_pool.pyi +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union - -from .connection import Connection - -try: - from Queue import PriorityQueue -except ImportError: - from queue import PriorityQueue - -logger: logging.Logger - -class ConnectionSelector(object): - connection_opts: Sequence[Tuple[Connection, Any]] - def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: ... - def select(self, connections: Sequence[Connection]) -> Connection: ... - -class RandomSelector(ConnectionSelector): ... -class RoundRobinSelector(ConnectionSelector): ... - -class ConnectionPool(object): - connections_opts: Sequence[Tuple[Connection, Any]] - connections: Sequence[Connection] - orig_connections: Tuple[Connection, ...] - dead: PriorityQueue - dead_count: Dict[Connection, int] - dead_timeout: float - timeout_cutoff: int - selector: ConnectionSelector - def __init__( - self, - connections: Sequence[Tuple[Connection, Any]], - dead_timeout: float = ..., - timeout_cutoff: int = ..., - selector_class: Type[ConnectionSelector] = ..., - randomize_hosts: bool = ..., - **kwargs: Any - ) -> None: ... - def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ... - def mark_live(self, connection: Connection) -> None: ... - def resurrect(self, force: bool = ...) -> Optional[Connection]: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def __repr__(self) -> str: ... - -class DummyConnectionPool(ConnectionPool): - def __init__( - self, connections: Sequence[Tuple[Connection, Any]], **kwargs: Any - ) -> None: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - mark_dead = mark_live = resurrect = _noop - -class EmptyConnectionPool(ConnectionPool): - def __init__(self, *_: Any, **__: Any) -> None: ... - def get_connection(self) -> Connection: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py index f2f994ca..58d29bdf 100644 --- a/opensearchpy/exceptions.py +++ b/opensearchpy/exceptions.py @@ -26,6 +26,8 @@ # under the License. +from typing import Any, Dict, Type, Union + __all__ = [ "ImproperlyConfigured", "OpenSearchException", @@ -76,32 +78,33 @@ class TransportError(OpenSearchException): """ @property - def status_code(self): + def status_code(self) -> Union[str, int]: """ The HTTP status code of the response that precipitated the error or ``'N/A'`` if not applicable. """ - return self.args[0] + return self.args[0] # type: ignore @property - def error(self): + def error(self) -> str: """A string error message.""" - return self.args[1] + return self.args[1] # type: ignore @property - def info(self): + def info(self) -> Union[Dict[str, Any], Exception, Any]: """ Dict of returned error info from OpenSearch, where available, underlying exception when not. 
""" return self.args[2] - def __str__(self): + def __str__(self) -> str: cause = "" try: - if self.info and "error" in self.info: - if isinstance(self.info["error"], dict): - root_cause = self.info["error"]["root_cause"][0] + if self.info and isinstance(self.info, dict) and "error" in self.info: + error = self.info["error"] + if isinstance(error, dict): + root_cause = error["root_cause"][0] cause = ", ".join( filter( None, @@ -128,7 +131,7 @@ class ConnectionError(TransportError): implementation is available as ``.info``. """ - def __str__(self): + def __str__(self) -> str: return "ConnectionError(%s) caused by: %s(%s)" % ( self.error, self.info.__class__.__name__, @@ -143,7 +146,7 @@ class SSLError(ConnectionError): class ConnectionTimeout(ConnectionError): """A network timeout. Doesn't cause a node retry by default.""" - def __str__(self): + def __str__(self) -> str: return "ConnectionTimeout caused by - %s(%s)" % ( self.info.__class__.__name__, self.info, @@ -199,7 +202,7 @@ class OpenSearchWarning(Warning): # more generic mappings from status_code to python exceptions -HTTP_EXCEPTIONS = { +HTTP_EXCEPTIONS: Dict[int, Type[OpenSearchException]] = { 400: RequestError, 401: AuthenticationException, 403: AuthorizationException, diff --git a/opensearchpy/exceptions.pyi b/opensearchpy/exceptions.pyi deleted file mode 100644 index 0ecacc6f..00000000 --- a/opensearchpy/exceptions.pyi +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Union - -class ImproperlyConfigured(Exception): ... -class OpenSearchException(Exception): ... -class SerializationError(OpenSearchException): ... - -class TransportError(OpenSearchException): - @property - def status_code(self) -> Union[str, int]: ... - @property - def error(self) -> str: ... - @property - def info(self) -> Union[Dict[str, Any], Exception, Any]: ... - def __str__(self) -> str: ... - -class ConnectionError(TransportError): - def __str__(self) -> str: ... - -class SSLError(ConnectionError): ... - -class ConnectionTimeout(ConnectionError): - def __str__(self) -> str: ... - -class NotFoundError(TransportError): ... -class ConflictError(TransportError): ... -class RequestError(TransportError): ... -class AuthenticationException(TransportError): ... -class AuthorizationException(TransportError): ... -class OpenSearchDslException(Exception): ... -class UnknownDslObject(OpenSearchDslException): ... 
-class ValidationException(ValueError, OpenSearchDslException): ... -class IllegalOperation(OpenSearchDslException): ... -class OpenSearchWarning(Warning): ... - -OpenSearchDeprecationWarning = OpenSearchWarning - -HTTP_EXCEPTIONS: Dict[int, OpenSearchException] diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi deleted file mode 100644 index 24c0d13d..00000000 --- a/opensearchpy/helpers/__init__.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys - -from .._async.helpers.actions import async_bulk as async_bulk -from .._async.helpers.actions import async_reindex as async_reindex -from .._async.helpers.actions import async_scan as async_scan -from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk -from .actions import _chunk_actions as _chunk_actions -from .actions import _process_bulk_chunk as _process_bulk_chunk -from .actions import bulk as bulk -from .actions import expand_action as expand_action -from .actions import parallel_bulk as parallel_bulk -from .actions import reindex as reindex -from .actions import scan as scan -from .actions import streaming_bulk as streaming_bulk -from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth -from .errors import BulkIndexError as BulkIndexError -from .errors import ScanError as ScanError -from .signer import AWSV4SignerAuth as AWSV4SignerAuth -from .signer import RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index 587444a3..a02e13a5 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -29,6 +29,7 @@ import logging import time from operator import methodcaller +from typing import Any, Optional from ..compat import Mapping, Queue, map, string_types from ..exceptions import TransportError @@ -37,7 +38,7 @@ logger = logging.getLogger("opensearchpy.helpers") -def expand_action(data): +def expand_action(data: Any) -> Any: """ From one document or action definition passed in by the user extract the action/data lines needed for opensearch's @@ -50,7 +51,7 @@ def expand_action(data): # make sure we don't alter the action data = data.copy() op_type = data.pop("_op_type", "index") - action = {op_type: {}} + action: Any = {op_type: {}} # If '_source' is a dict use it for source # otherwise if op_type == 'update' then @@ -105,17 +106,17 @@ def expand_action(data): class _ActionChunker: - def 
__init__(self, chunk_size, max_chunk_bytes, serializer): + def __init__(self, chunk_size: int, max_chunk_bytes: int, serializer: Any) -> None: self.chunk_size = chunk_size self.max_chunk_bytes = max_chunk_bytes self.serializer = serializer self.size = 0 self.action_count = 0 - self.bulk_actions = [] - self.bulk_data = [] + self.bulk_actions: Any = [] + self.bulk_data: Any = [] - def feed(self, action, data): + def feed(self, action: Any, data: Any) -> Any: ret = None raw_data, raw_action = data, action action = self.serializer.dumps(action) @@ -146,7 +147,7 @@ def feed(self, action, data): self.action_count += 1 return ret - def flush(self): + def flush(self) -> Any: ret = None if self.bulk_actions: ret = (self.bulk_data, self.bulk_actions) @@ -154,7 +155,9 @@ def flush(self): return ret -def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> Any: """ Split actions into chunks by number or size, serialize them into strings in the process. @@ -171,7 +174,9 @@ def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): yield ret -def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=True): +def _process_bulk_chunk_success( + resp: Any, bulk_data: Any, ignore_status: Any = (), raise_on_error: bool = True +) -> Any: # if raise on error is set, we need to collect errors per chunk before raising them errors = [] @@ -198,8 +203,12 @@ def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=T def _process_bulk_chunk_error( - error, bulk_data, ignore_status, raise_on_exception=True, raise_on_error=True -): + error: Any, + bulk_data: Any, + ignore_status: Any = (), + raise_on_exception: bool = True, + raise_on_error: bool = True, +) -> Any: # default behavior - just propagate exception if raise_on_exception and error.status_code not in ignore_status: raise error @@ -228,15 +237,15 @@ def _process_bulk_chunk_error( def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Send a bulk request to opensearch and process the output. """ @@ -266,21 +275,21 @@ def _process_bulk_chunk( def streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: int = 2, + max_backoff: int = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. 
For non-streaming usecases use @@ -320,7 +329,8 @@ def streaming_bulk( actions, chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: time.sleep(min(max_backoff, initial_backoff * 2 ** (attempt - 1))) @@ -369,7 +379,14 @@ def streaming_bulk( bulk_actions, bulk_data = to_retry, to_retry_data -def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): +def bulk( + client: Any, + actions: Any, + stats_only: bool = False, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Helper for the :meth:`~opensearchpy.OpenSearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and @@ -405,9 +422,7 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - for ok, item in streaming_bulk( - client, actions, ignore_status=ignore_status, *args, **kwargs - ): + for ok, item in streaming_bulk(client, actions, ignore_status, *args, **kwargs): # go through request-response pairs and detect failures if not ok: if not stats_only: @@ -420,17 +435,17 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): def parallel_bulk( - client, - actions, - thread_count=4, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - queue_size=4, - expand_action_callback=expand_action, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + thread_count: int = 4, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + queue_size: int = 4, + expand_action_callback: Any = expand_action, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Parallel version of the bulk helper run in multiple threads at once. @@ -457,11 +472,11 @@ def parallel_bulk( actions = map(expand_action_callback, actions) class BlockingPool(ThreadPool): - def _setup_queues(self): + def _setup_queues(self) -> None: super(BlockingPool, self)._setup_queues() # type: ignore # The queue must be at least the size of the number of threads to # prevent hanging when inserting sentinel values during teardown. 
- self._inqueue = Queue(max(queue_size, thread_count)) + self._inqueue: Any = Queue(max(queue_size, thread_count)) self._quick_put = self._inqueue.put pool = BlockingPool(thread_count) @@ -470,12 +485,7 @@ def _setup_queues(self): for result in pool.imap( lambda bulk_chunk: list( _process_bulk_chunk( - client, - bulk_chunk[1], - bulk_chunk[0], - ignore_status=ignore_status, - *args, - **kwargs + client, bulk_chunk[1], bulk_chunk[0], ignore_status, *args, **kwargs ) ), _chunk_actions( @@ -491,17 +501,17 @@ def _setup_queues(self): def scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: str = "5m", + raise_on_error: bool = True, + preserve_order: bool = False, + size: int = 1000, + request_timeout: Optional[int] = None, + clear_scroll: bool = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> Any: """ Simple abstraction on top of the :meth:`~opensearchpy.OpenSearch.scroll` api - a simple iterator that @@ -609,16 +619,16 @@ def scan( def reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Any, + target_index: Any, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Any: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. @@ -652,7 +662,7 @@ def reindex( target_client = client if target_client is None else target_client docs = scan(client, query=query, index=source_index, scroll=scroll, **scan_kwargs) - def _change_doc_index(hits, index): + def _change_doc_index(hits: Any, index: Any) -> Any: for h in hits: h["_index"] = index if "fields" in h: diff --git a/opensearchpy/helpers/actions.pyi b/opensearchpy/helpers/actions.pyi deleted file mode 100644 index e1ee4254..00000000 --- a/opensearchpy/helpers/actions.pyi +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -import sys -from typing import ( - Any, - AsyncIterable, - Callable, - Collection, - Dict, - Generator, - Iterable, - List, - Mapping, - Optional, - Tuple, - Union, - overload, -) - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - -from ..client import OpenSearch -from ..serializer import Serializer - -logger: logging.Logger - -def expand_action(data: Any) -> Tuple[Dict[str, Any], Optional[Any]]: ... -def _chunk_actions( - actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer -) -> Generator[Any, None, None]: ... -def _process_bulk_chunk( - client: OpenSearch, - bulk_actions: Any, - bulk_data: Any, - raise_on_exception: bool = ..., - raise_on_error: bool = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def streaming_bulk( - client: OpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[True] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, int]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[False], - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, List[Any]]: ... -def parallel_bulk( - client: OpenSearch, - actions: Iterable[Any], - thread_count: int = ..., - chunk_size: int = ..., - max_chunk_bytes: int = ..., - queue_size: int = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def scan( - client: OpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> Generator[Any, None, None]: ... -def reindex( - client: OpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[OpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index db7d2c28..59795614 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -25,16 +25,15 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc + +import collections.abc as collections_abc +from typing import Any, Optional from .response.aggs import AggResponse, BucketData, FieldBucketData, TopHitsData from .utils import DslBase -def A(name_or_agg, filter=None, **params): +def A(name_or_agg: Any, filter: Any = None, **params: Any) -> Any: if filter is not None: if name_or_agg != "filter": raise ValueError( @@ -48,7 +47,7 @@ def A(name_or_agg, filter=None, **params): if params: raise ValueError("A() cannot accept parameters when passing in a dict.") # copy to avoid modifying in-place - agg = name_or_agg.copy() + agg = name_or_agg.copy() # type: ignore # pop out nested aggs aggs = agg.pop("aggs", None) # pop out meta data @@ -81,20 +80,20 @@ def A(name_or_agg, filter=None, **params): class Agg(DslBase): - _type_name = "agg" + _type_name: str = "agg" _type_shortcut = staticmethod(A) - name = None + name: Optional[str] = None - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return False - def to_dict(self): + def to_dict(self) -> Any: d = super(Agg, self).to_dict() if "meta" in d[self.name]: d["meta"] = d[self.name].pop("meta") return d - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return AggResponse(self, search, data) @@ -103,10 +102,10 @@ class AggBase(object): "aggs": {"type": "agg", "hash": True}, } - def __contains__(self, key): + def __contains__(self: Any, key: Any) -> bool: return key in self._params.get("aggs", {}) - def __getitem__(self, agg_name): + def __getitem__(self: Any, agg_name: Any) -> Any: agg = self._params.setdefault("aggs", {})[agg_name] # propagate KeyError # make sure we're not mutating a shared state - whenever accessing a @@ -118,13 +117,15 @@ def __getitem__(self, agg_name): return agg - def __setitem__(self, agg_name, agg): + def __setitem__(self: Any, agg_name: str, agg: Any) -> None: self.aggs[agg_name] = A(agg) - def __iter__(self): + def __iter__(self: Any) -> Any: return iter(self.aggs) - def _agg(self, bucket, name, agg_type, *args, **params): + def _agg( + self: Any, bucket: Any, name: Any, agg_type: Any, *args: Any, **params: Any + ) -> Any: agg = self[name] = A(agg_type, *args, **params) # For chaining - when creating new buckets return them... 
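
The A() factory and the bucket/metric chaining typed in these aggs.py hunks compose aggregations without writing the raw request dict by hand. A small sketch, with illustrative field and bucket names only:

    from opensearchpy.helpers.aggs import A

    a = A("terms", field="category")
    # metric aggs return the enclosing bucket, bucket aggs return the new bucket
    a.metric("avg_price", "avg", field="price")
    a.bucket("per_month", "date_histogram", field="timestamp", interval="month")

    # serializes to roughly:
    # {"terms": {"field": "category"},
    #  "aggs": {"avg_price": {"avg": {"field": "price"}},
    #           "per_month": {"date_histogram": {"field": "timestamp", "interval": "month"}}}}
    print(a.to_dict())
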
@@ -134,26 +135,26 @@ def _agg(self, bucket, name, agg_type, *args, **params): else: return self._base - def metric(self, name, agg_type, *args, **params): + def metric(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def bucket(self, name, agg_type, *args, **params): + def bucket(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(True, name, agg_type, *args, **params) - def pipeline(self, name, agg_type, *args, **params): + def pipeline(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def result(self, search, data): + def result(self: Any, search: Any, data: Any) -> Any: return BucketData(self, search, data) class Bucket(AggBase, Agg): - def __init__(self, **params): + def __init__(self, **params: Any) -> None: super(Bucket, self).__init__(**params) # remember self for chaining self._base = self - def to_dict(self): + def to_dict(self) -> Any: d = super(AggBase, self).to_dict() if "aggs" in d[self.name]: d["aggs"] = d[self.name].pop("aggs") @@ -161,18 +162,18 @@ def to_dict(self): class Filter(Bucket): - name = "filter" + name: Optional[str] = "filter" _param_defs = { "filter": {"type": "query"}, "aggs": {"type": "agg", "hash": True}, } - def __init__(self, filter=None, **params): + def __init__(self, filter: Any = None, **params: Any) -> None: if filter is not None: params["filter"] = filter super(Filter, self).__init__(**params) - def to_dict(self): + def to_dict(self) -> Any: d = super(Filter, self).to_dict() d[self.name].update(d[self.name].pop("filter", {})) return d @@ -184,7 +185,7 @@ class Pipeline(Agg): # bucket aggregations class Filters(Bucket): - name = "filters" + name: str = "filters" _param_defs = { "filters": {"type": "query", "hash": True}, "aggs": {"type": "agg", "hash": True}, @@ -202,7 +203,7 @@ class Parent(Bucket): class DateHistogram(Bucket): name = "date_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -237,7 +238,7 @@ class Global(Bucket): class Histogram(Bucket): name = "histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -260,7 +261,7 @@ class Range(Bucket): class RareTerms(Bucket): name = "rare_terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -279,7 +280,7 @@ class SignificantText(Bucket): class Terms(Bucket): name = "terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -302,7 +303,7 @@ class Composite(Bucket): class VariableWidthHistogram(Bucket): name = "variable_width_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -310,7 +311,7 @@ def result(self, search, data): class TopHits(Agg): name = "top_hits" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return TopHitsData(self, search, data) diff --git a/opensearchpy/helpers/aggs.pyi b/opensearchpy/helpers/aggs.pyi deleted file mode 100644 index 08b74a3a..00000000 --- a/opensearchpy/helpers/aggs.pyi +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require 
contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from typing import Any - -from _typeshed import Incomplete - -from ..response.aggs import AggResponse as AggResponse -from ..response.aggs import BucketData as BucketData -from ..response.aggs import FieldBucketData as FieldBucketData -from ..response.aggs import TopHitsData as TopHitsData -from .utils import DslBase -from .utils import DslBase as DslBase - -def A(name_or_agg: Any, filter: Incomplete | None = ..., **params: Any) -> Any: ... - -class Agg(DslBase): ... -class AggBase(object): ... -class Bucket(AggBase, Agg): ... -class Filter(Bucket): ... -class Pipeline(Agg): ... -class Filters(Bucket): ... -class Children(Bucket): ... -class Parent(Bucket): ... -class DateHistogram(Bucket): ... -class AutoDateHistogram(DateHistogram): ... -class DateRange(Bucket): ... -class GeoDistance(Bucket): ... -class GeohashGrid(Bucket): ... -class GeotileGrid(Bucket): ... -class GeoCentroid(Bucket): ... -class Global(Bucket): ... -class Histogram(Bucket): ... -class IPRange(Bucket): ... -class Missing(Bucket): ... -class Nested(Bucket): ... -class Range(Bucket): ... -class RareTerms(Bucket): ... -class ReverseNested(Bucket): ... -class SignificantTerms(Bucket): ... -class SignificantText(Bucket): ... -class Terms(Bucket): ... -class Sampler(Bucket): ... -class DiversifiedSampler(Bucket): ... -class Composite(Bucket): ... -class VariableWidthHistogram(Bucket): ... -class TopHits(Agg): ... -class Avg(Agg): ... -class WeightedAvg(Agg): ... -class Cardinality(Agg): ... -class ExtendedStats(Agg): ... -class Boxplot(Agg): ... -class GeoBounds(Agg): ... -class Max(Agg): ... -class MedianAbsoluteDeviation(Agg): ... -class Min(Agg): ... -class Percentiles(Agg): ... -class PercentileRanks(Agg): ... -class ScriptedMetric(Agg): ... -class Stats(Agg): ... -class Sum(Agg): ... -class TTest(Agg): ... -class ValueCount(Agg): ... -class AvgBucket(Pipeline): ... -class BucketScript(Pipeline): ... -class BucketSelector(Pipeline): ... -class CumulativeSum(Pipeline): ... -class CumulativeCardinality(Pipeline): ... -class Derivative(Pipeline): ... -class ExtendedStatsBucket(Pipeline): ... -class Inference(Pipeline): ... -class MaxBucket(Pipeline): ... -class MinBucket(Pipeline): ... -class MovingFn(Pipeline): ... -class MovingAvg(Pipeline): ... -class MovingPercentiles(Pipeline): ... -class Normalize(Pipeline): ... -class PercentilesBucket(Pipeline): ... -class SerialDiff(Pipeline): ... -class StatsBucket(Pipeline): ... -class SumBucket(Pipeline): ... -class BucketSort(Pipeline): ... 
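As a quick illustration of the A() factory and the bucket()/metric() chaining that aggs.py now annotates, a minimal sketch; the index and field names are invented:

    from opensearchpy.helpers.aggs import A
    from opensearchpy.helpers.search import Search

    s = Search(index="my-index")  # hypothetical index
    s.aggs.bucket("per_tag", "terms", field="tags").metric(
        "max_lines", "max", field="lines"
    )

    # A() also accepts a raw dict and builds the equivalent Agg object
    assert A({"terms": {"field": "tags"}}) == A("terms", field="tags")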
diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index 4e2646d7..c228acd1 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -25,17 +25,20 @@ # specific language governing permissions and limitations # under the License. +from typing import Any, Optional + import six from opensearchpy.connection.connections import get_connection -from opensearchpy.helpers.utils import AttrDict, DslBase, merge -__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] +from .utils import AttrDict, DslBase, merge class AnalysisBase(object): @classmethod - def _type_shortcut(cls, name_or_instance, type=None, **kwargs): + def _type_shortcut( + cls: Any, name_or_instance: Any, type: Any = None, **kwargs: Any + ) -> Any: if isinstance(name_or_instance, cls): if type or kwargs: raise ValueError("%s() cannot accept parameters." % cls.__name__) @@ -50,29 +53,31 @@ def _type_shortcut(cls, name_or_instance, type=None, **kwargs): class CustomAnalysis(object): - name = "custom" + name: Optional[str] = "custom" - def __init__(self, filter_name, builtin_type="custom", **kwargs): + def __init__( + self, filter_name: str, builtin_type: str = "custom", **kwargs: Any + ) -> None: self._builtin_type = builtin_type self._name = filter_name super(CustomAnalysis, self).__init__(**kwargs) - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name - def get_definition(self): - d = super(CustomAnalysis, self).to_dict() + def get_definition(self) -> Any: + d = super(CustomAnalysis, self).to_dict() # type: ignore d = d.pop(self.name) d["type"] = self._builtin_type return d class CustomAnalysisDefinition(CustomAnalysis): - def get_analysis_definition(self): + def get_analysis_definition(self: Any) -> Any: out = {self._type_name: {self._name: self.get_definition()}} - t = getattr(self, "tokenizer", None) + t: Any = getattr(self, "tokenizer", None) if "tokenizer" in self._param_defs and hasattr(t, "get_definition"): out["tokenizer"] = {t._name: t.get_definition()} @@ -103,24 +108,24 @@ def get_analysis_definition(self): class BuiltinAnalysis(object): - name = "builtin" + name: Optional[str] = "builtin" - def __init__(self, name): + def __init__(self, name: Any) -> None: self._name = name super(BuiltinAnalysis, self).__init__() - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name class Analyzer(AnalysisBase, DslBase): - _type_name = "analyzer" - name = None + _type_name: str = "analyzer" + name: Optional[str] = None class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -131,7 +136,13 @@ class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): "tokenizer": {"type": "tokenizer"}, } - def simulate(self, text, using="default", explain=False, attributes=None): + def simulate( + self, + text: Any, + using: str = "default", + explain: bool = False, + attributes: Any = None, + ) -> Any: """ Use the Analyze API of opensearch to test the outcome of this analyzer. 
@@ -172,12 +183,12 @@ def simulate(self, text, using="default", explain=False, attributes=None): class Normalizer(AnalysisBase, DslBase): - _type_name = "normalizer" - name = None + _type_name: str = "normalizer" + name: Optional[str] = None class BuiltinNormalizer(BuiltinAnalysis, Normalizer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -189,8 +200,8 @@ class CustomNormalizer(CustomAnalysisDefinition, Normalizer): class Tokenizer(AnalysisBase, DslBase): - _type_name = "tokenizer" - name = None + _type_name: str = "tokenizer" + name: Optional[str] = None class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): @@ -202,8 +213,8 @@ class CustomTokenizer(CustomAnalysis, Tokenizer): class TokenFilter(AnalysisBase, DslBase): - _type_name = "token_filter" - name = None + _type_name: str = "token_filter" + name: Optional[str] = None class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): @@ -217,7 +228,7 @@ class CustomTokenFilter(CustomAnalysis, TokenFilter): class MultiplexerTokenFilter(CustomTokenFilter): name = "multiplexer" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filters" in d: @@ -230,11 +241,11 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filters"): return {} - fs = {} + fs: Any = {} d = {"filter": fs} for filters in self.filters: if isinstance(filters, six.string_types): @@ -252,7 +263,7 @@ def get_analysis_definition(self): class ConditionalTokenFilter(CustomTokenFilter): name = "condition" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filter" in d: d["filter"] = [ @@ -260,7 +271,7 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filter"): return {} @@ -274,8 +285,8 @@ def get_analysis_definition(self): class CharFilter(AnalysisBase, DslBase): - _type_name = "char_filter" - name = None + _type_name: str = "char_filter" + name: Optional[str] = None class BuiltinCharFilter(BuiltinAnalysis, CharFilter): @@ -292,3 +303,5 @@ class CustomCharFilter(CustomAnalysis, CharFilter): token_filter = TokenFilter._type_shortcut char_filter = CharFilter._type_shortcut normalizer = Normalizer._type_shortcut + +__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] diff --git a/opensearchpy/helpers/analysis.pyi b/opensearchpy/helpers/analysis.pyi deleted file mode 100644 index 364a6ea5..00000000 --- a/opensearchpy/helpers/analysis.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
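For context, a small sketch of the analysis shortcuts (analyzer, tokenizer, token_filter) whose definitions are annotated above; the analyzer name, n-gram settings, and stopword list are made up:

    from opensearchpy.helpers.analysis import analyzer, token_filter, tokenizer

    trigram = tokenizer("trigram", "ngram", min_gram=3, max_gram=3)
    my_stop = token_filter("my_stop", "stop", stopwords=["a", "the"])

    my_analyzer = analyzer(
        "my_analyzer",
        tokenizer=trigram,
        filter=["lowercase", my_stop],
    )

    # renders the analysis section of an index settings payload
    print(my_analyzer.get_analysis_definition())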
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class AnalysisBase(object): ... -class CustomAnalysis(object): ... -class CustomAnalysisDefinition(CustomAnalysis): ... -class BuiltinAnalysis(object): ... -class Analyzer(AnalysisBase, DslBase): ... -class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): ... -class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): ... -class Normalizer(AnalysisBase, DslBase): ... -class BuiltinNormalizer(BuiltinAnalysis, Normalizer): ... -class CustomNormalizer(CustomAnalysisDefinition, Normalizer): ... -class Tokenizer(AnalysisBase, DslBase): ... -class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): ... -class CustomTokenizer(CustomAnalysis, Tokenizer): ... -class TokenFilter(AnalysisBase, DslBase): ... -class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): ... -class CustomTokenFilter(CustomAnalysis, TokenFilter): ... -class MultiplexerTokenFilter(CustomTokenFilter): ... -class ConditionalTokenFilter(CustomTokenFilter): ... -class CharFilter(AnalysisBase, DslBase): ... -class BuiltinCharFilter(BuiltinAnalysis, CharFilter): ... -class CustomCharFilter(CustomAnalysis, CharFilter): ... diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py index 7f063c9f..bd84e09e 100644 --- a/opensearchpy/helpers/asyncsigner.py +++ b/opensearchpy/helpers/asyncsigner.py @@ -8,9 +8,7 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys - -PY3 = sys.version_info[0] == 3 +from typing import Dict, Union class AWSV4SignerAsyncAuth: @@ -18,7 +16,7 @@ class AWSV4SignerAsyncAuth: AWS V4 Request Signer for Async Requests. """ - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region: str, service: str = "es") -> None: # type: ignore if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -31,10 +29,14 @@ def __init__(self, credentials, region, service="es"): # type: ignore raise ValueError("Service name cannot be empty") self.service = service - def __call__(self, method, url, query_string, body): # type: ignore - return self._sign_request(method, url, query_string, body) # type: ignore + def __call__( + self, method: str, url: str, query_string: str, body: Union[str, bytes] + ) -> Dict[str, str]: + return self._sign_request(method, url, query_string, body) - def _sign_request(self, method, url, query_string, body): + def _sign_request( + self, method: str, url: str, query_string: str, body: Union[str, bytes] + ) -> Dict[str, str]: """ This method helps in signing the request by injecting the required headers. :param prepared_request: unsigned headers diff --git a/opensearchpy/helpers/asyncsigner.pyi b/opensearchpy/helpers/asyncsigner.pyi deleted file mode 100644 index e0b5a7b5..00000000 --- a/opensearchpy/helpers/asyncsigner.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. 
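A hedged sketch of how the now fully typed AWSV4SignerAsyncAuth is commonly wired into an async client; the boto3 session, region, and domain endpoint are assumptions:

    import boto3

    from opensearchpy import AsyncHttpConnection, AsyncOpenSearch
    from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth

    # assumption: AWS credentials are available via the default boto3 chain
    credentials = boto3.Session().get_credentials()
    auth = AWSV4SignerAsyncAuth(credentials, "us-east-1", "es")

    client = AsyncOpenSearch(
        hosts=[{"host": "search-mydomain.us-east-1.es.amazonaws.com", "port": 443}],
        http_auth=auth,
        use_ssl=True,
        verify_certs=True,
        connection_class=AsyncHttpConnection,
    )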
-# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Dict, List - -class AWSV4SignerAsyncAuth: - @property - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - @property - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - @property - def _sign_request(self, *args: Any, **kwargs: Any) -> Dict[str, List[str]]: ... diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index de9891bc..19fda41e 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -25,12 +25,11 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from __future__ import annotations +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Tuple, Type, Union from six import add_metaclass, iteritems @@ -46,15 +45,20 @@ class MetaField(object): - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.args, self.kwargs = args, kwargs class DocumentMeta(type): - def __new__(cls, name, bases, attrs): + def __new__( + cls: Union[Type[DocumentMeta], Type[IndexMeta]], + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: # DocumentMeta filters attrs in place attrs["_doc_type"] = DocumentOptions(name, bases, attrs) - return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) + return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) # type: ignore class IndexMeta(DocumentMeta): @@ -62,7 +66,12 @@ class IndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls: Type[IndexMeta], + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(IndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -73,7 +82,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -91,7 +100,12 @@ def construct_index(cls, opts, bases): class DocumentOptions(object): - def __init__(self, name, bases, attrs): + def __init__( + self, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> None: meta = attrs.pop("Meta", None) # create the mapping instance @@ -115,7 +129,7 @@ def __init__(self, name, bases, attrs): self.mapping.update(b._doc_type.mapping, update_only=True) @property - def name(self): + def name(self) -> Any: return self.mapping.properties.name @@ -126,7 +140,7 @@ class InnerDoc(ObjectBase): """ @classmethod - def from_opensearch(cls, data, data_only=False): + def from_opensearch(cls, data: Any, data_only: bool = False) -> Any: if data_only: data = {"_source": data} return super(InnerDoc, cls).from_opensearch(data) @@ -139,25 +153,25 @@ class Document(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> Any: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - def _get_connection(cls, using=None): + def 
_get_connection(cls, using: Any = None) -> Any: return get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return index or cls._index._name @classmethod - def init(cls, index=None, using=None): + def init(cls: Any, index: Any = None, using: Any = None) -> None: """ Create the index and populate the mappings in opensearch. """ @@ -166,7 +180,7 @@ def init(cls, index=None, using=None): i = i.clone(name=index) i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index(self, index: Any = None, required: bool = True) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -177,7 +191,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -188,7 +202,7 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search(cls, using: Any = None, index: Any = None) -> Any: """ Create an :class:`~opensearchpy.Search` instance that will search over this ``Document``. @@ -198,7 +212,7 @@ def search(cls, using=None, index=None): ) @classmethod - def get(cls, id, using=None, index=None, **kwargs): + def get(cls: Any, id: Any, using: Any = None, index: Any = None, **kwargs: Any) -> Any: # type: ignore """ Retrieve a single document from opensearch using its ``id``. @@ -217,7 +231,9 @@ def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - def exists(cls, id, using=None, index=None, **kwargs): + def exists( + cls, id: Any, using: Any = None, index: Any = None, **kwargs: Any + ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -234,13 +250,19 @@ def exists(cls, id, using=None, index=None, **kwargs): @classmethod def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Any, + using: Any = None, + index: Any = None, + raise_on_error: bool = True, + missing: str = "none", + **kwargs: Any + ) -> Any: + """ + Retrieve multiple document by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -264,7 +286,9 @@ def mget( } results = opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -297,7 +321,7 @@ def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - def delete(self, using=None, index=None, **kwargs): + def delete(self, using: Any = None, index: Any = None, **kwargs: Any) -> Any: """ Delete the instance in opensearch. 
@@ -320,7 +344,7 @@ def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict(self, include_meta: bool = False, skip_empty: bool = True) -> Any: # type: ignore """ Serialize the instance into a dictionary so that it can be saved in opensearch. @@ -348,19 +372,19 @@ def to_dict(self, include_meta=False, skip_empty=True): def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Any = None, + index: Any = None, + detect_noop: bool = True, + doc_as_upsert: bool = False, + refresh: bool = False, + retry_on_conflict: Any = None, + script: Any = None, + script_id: Any = None, + scripted_upsert: bool = False, + upsert: Any = None, + return_doc_meta: bool = False, + **fields: Any, + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -389,7 +413,7 @@ def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -453,13 +477,13 @@ def update( def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Any = None, + index: Any = None, + validate: bool = True, + skip_empty: bool = True, + return_doc_meta: bool = False, + **kwargs: Any, + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this @@ -496,7 +520,7 @@ def save( meta = opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/helpers/document.pyi b/opensearchpy/helpers/document.pyi deleted file mode 100644 index 89ca6426..00000000 --- a/opensearchpy/helpers/document.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -from .utils import ObjectBase - -class MetaField(object): ... -class DocumentMeta(type): ... -class IndexMeta(DocumentMeta): ... -class DocumentOptions(object): ... 
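To ground the Document surface annotated above (init, get, save, update), a usage sketch; the connection host, document class, and field names are illustrative:

    from opensearchpy.connection.connections import connections
    from opensearchpy.helpers.document import Document
    from opensearchpy.helpers.field import Date, Keyword, Text

    # assumption: a local cluster; registers the "default" connection alias
    connections.create_connection(hosts=["https://localhost:9200"], verify_certs=False)

    class Article(Document):
        title = Text()
        author = Keyword()
        published = Date()

        class Index:
            name = "articles"

    Article.init()                               # create index and mapping
    art = Article(meta={"id": 1}, title="Typed helpers", author="dblock")
    art.save()
    art.update(title="Typed helpers, revised")   # partial update
    again = Article.get(id=1)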
-class InnerDoc(ObjectBase): ... -class Document(ObjectBase): ... diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py index 5d05bd23..220b6b31 100644 --- a/opensearchpy/helpers/errors.py +++ b/opensearchpy/helpers/errors.py @@ -26,17 +26,21 @@ # under the License. +from typing import Any, List + from ..exceptions import OpenSearchException class BulkIndexError(OpenSearchException): @property - def errors(self): + def errors(self) -> List[Any]: """List of errors from execution of the last chunk.""" - return self.args[1] + return self.args[1] # type: ignore class ScanError(OpenSearchException): - def __init__(self, scroll_id, *args, **kwargs): - super(ScanError, self).__init__(*args, **kwargs) # type: ignore + scroll_id: str + + def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: + super(ScanError, self).__init__(*args, **kwargs) self.scroll_id = scroll_id diff --git a/opensearchpy/helpers/errors.pyi b/opensearchpy/helpers/errors.pyi deleted file mode 100644 index 9572d68f..00000000 --- a/opensearchpy/helpers/errors.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, List - -from ..exceptions import OpenSearchException - -class BulkIndexError(OpenSearchException): - @property - def errors(self) -> List[Any]: ... - -class ScanError(OpenSearchException): - scroll_id: str - def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: ... diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index e1bf9c0e..e9ae14ef 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -26,6 +26,7 @@ # under the License. from datetime import datetime, timedelta +from typing import Any, Optional from six import iteritems, itervalues @@ -53,16 +54,18 @@ class Facet(object): from the result of the aggregation. """ - agg_type = None + agg_type: Optional[str] = None - def __init__(self, metric=None, metric_sort="desc", **kwargs): + def __init__( + self, metric: Any = None, metric_sort: str = "desc", **kwargs: Any + ) -> None: self.filter_values = () self._params = kwargs self._metric = metric if metric and metric_sort: self._params["order"] = {"metric": metric_sort} - def get_aggregation(self): + def get_aggregation(self) -> Any: """ Return the aggregation object. 
""" @@ -71,7 +74,7 @@ def get_aggregation(self): agg.metric("metric", self._metric) return agg - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """ Construct a filter. """ @@ -83,25 +86,25 @@ def add_filter(self, filter_values): f |= self.get_value_filter(v) return f - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: """ Construct a filter for an individual value """ pass - def is_filtered(self, key, filter_values): + def is_filtered(self, key: Any, filter_values: Any) -> bool: """ Is a filter active on the given key. """ return key in filter_values - def get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: """ return a value representing a bucket. Its key as default. """ return bucket["key"] - def get_metric(self, bucket): + def get_metric(self, bucket: Any) -> Any: """ Return a metric, by default doc_count for a bucket. """ @@ -109,7 +112,7 @@ def get_metric(self, bucket): return bucket["metric"]["value"] return bucket["doc_count"] - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: """ Turn the raw bucket data into a list of tuples containing the key, number of documents and a flag indicating whether this value has been @@ -125,9 +128,9 @@ def get_values(self, data, filter_values): class TermsFacet(Facet): - agg_type = "terms" + agg_type: Optional[str] = "terms" - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """Create a terms filter instead of bool containing term filters.""" if filter_values: return Terms( @@ -138,7 +141,7 @@ def add_filter(self, filter_values): class RangeFacet(Facet): agg_type = "range" - def _range_to_dict(self, range): + def _range_to_dict(self, range: Any) -> Any: key, range = range out = {"key": key} if range[0] is not None: @@ -147,13 +150,13 @@ def _range_to_dict(self, range): out["to"] = range[1] return out - def __init__(self, ranges, **kwargs): + def __init__(self, ranges: Any, **kwargs: Any) -> None: super(RangeFacet, self).__init__(**kwargs) self._params["ranges"] = list(map(self._range_to_dict, ranges)) self._params["keyed"] = False self._ranges = dict(ranges) - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: f, t = self._ranges[filter_value] limits = {} if f is not None: @@ -167,7 +170,7 @@ def get_value_filter(self, filter_value): class HistogramFacet(Facet): agg_type = "histogram" - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: return Range( _expand__to_dot=False, **{ @@ -179,25 +182,25 @@ def get_value_filter(self, filter_value): ) -def _date_interval_year(d): +def _date_interval_year(d: Any) -> Any: return d.replace( year=d.year + 1, day=(28 if d.month == 2 and d.day == 29 else d.day) ) -def _date_interval_month(d): +def _date_interval_month(d: Any) -> Any: return (d + timedelta(days=32)).replace(day=1) -def _date_interval_week(d): +def _date_interval_week(d: Any) -> Any: return d + timedelta(days=7) -def _date_interval_day(d): +def _date_interval_day(d: Any) -> Any: return d + timedelta(days=1) -def _date_interval_hour(d): +def _date_interval_hour(d: Any) -> Any: return d + timedelta(hours=1) @@ -217,22 +220,22 @@ class DateHistogramFacet(Facet): "1h": _date_interval_hour, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: kwargs.setdefault("min_doc_count", 0) super(DateHistogramFacet, self).__init__(**kwargs) - def 
get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: if not isinstance(bucket["key"], datetime): # OpenSearch returns key=None instead of 0 for date 1970-01-01, # so we need to set key to 0 to avoid TypeError exception if bucket["key"] is None: bucket["key"] = 0 # Preserve milliseconds in the datetime - return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) + return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) # type: ignore else: return bucket["key"] - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: for interval_type in ("calendar_interval", "fixed_interval"): if interval_type in self._params: break @@ -255,17 +258,17 @@ def get_value_filter(self, filter_value): class NestedFacet(Facet): agg_type = "nested" - def __init__(self, path, nested_facet): + def __init__(self, path: Any, nested_facet: Any) -> None: self._path = path self._inner = nested_facet super(NestedFacet, self).__init__( path=path, aggs={"inner": nested_facet.get_aggregation()} ) - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: return self._inner.get_values(data.inner, filter_values) - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: inner_q = self._inner.add_filter(filter_values) if inner_q: return Nested(path=self._path, query=inner_q) @@ -273,11 +276,11 @@ def add_filter(self, filter_values): class FacetedResponse(Response): @property - def query_string(self): + def query_string(self) -> Any: return self._faceted_search._query @property - def facets(self): + def facets(self) -> Any: if not hasattr(self, "_facets"): super(AttrDict, self).__setattr__("_facets", AttrDict({})) for name, facet in iteritems(self._faceted_search.facets): @@ -330,38 +333,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} using = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.Search` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - def count(self): + def count(self) -> Any: return self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> Any: """ Add a filter for a facet. """ @@ -383,7 +386,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -393,7 +396,7 @@ def search(self): s = Search(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. 
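For reference, a short sketch of the FacetedSearch subclassing pattern being annotated here; the index, fields, and facet definitions are hypothetical, and a default connection is assumed (as in the Document sketch above):

    from opensearchpy.helpers.faceted_search import (
        DateHistogramFacet,
        FacetedSearch,
        TermsFacet,
    )

    class BlogSearch(FacetedSearch):
        index = "blogs"                 # hypothetical index
        fields = ["title^5", "body"]
        facets = {
            "tags": TermsFacet(field="tags"),
            "monthly": DateHistogramFacet(field="published", calendar_interval="month"),
        }

    bs = BlogSearch("opensearch", filters={"tags": "python"})
    response = bs.execute()
    for tag, count, selected in response.facets.tags:
        print(tag, count, selected)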
@@ -406,7 +409,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. @@ -422,7 +425,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -435,7 +438,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -443,7 +446,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -451,7 +454,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``Search`` object. """ @@ -464,7 +467,7 @@ def build_search(self): self.aggregate(s) return s - def execute(self): + def execute(self) -> Any: """ Execute the search and return the response. """ diff --git a/opensearchpy/helpers/faceted_search.pyi b/opensearchpy/helpers/faceted_search.pyi deleted file mode 100644 index d3ff998d..00000000 --- a/opensearchpy/helpers/faceted_search.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from opensearchpy.helpers.response import Response - -class Facet(object): ... -class TermsFacet(Facet): ... -class RangeFacet(Facet): ... -class HistogramFacet(Facet): ... -class DateHistogramFacet(Facet): ... -class NestedFacet(Facet): ... -class FacetedResponse(Response): ... -class FacetedSearch(object): ... diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index edeaecf0..4881e819 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -26,15 +26,11 @@ # under the License. 
import base64 +import collections.abc as collections_abc import copy import ipaddress - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - from datetime import date, datetime +from typing import Any, Optional, Type from dateutil import parser, tz from six import integer_types, iteritems, string_types @@ -45,17 +41,17 @@ from .utils import AttrDict, AttrList, DslBase from .wrappers import Range -unicode = type("") +unicode: Type[str] = type("") -def construct_field(name_or_field, **params): +def construct_field(name_or_field: Any, **params: Any) -> Any: # {"type": "text", "analyzer": "snowball"} if isinstance(name_or_field, collections_abc.Mapping): if params: raise ValueError( "construct_field() cannot accept parameters when passing in a dict." ) - params = name_or_field.copy() + params = name_or_field.copy() # type: ignore if "type" not in params: # inner object can be implicitly defined if "properties" in params: @@ -80,14 +76,16 @@ def construct_field(name_or_field, **params): class Field(DslBase): - _type_name = "field" + _type_name: str = "field" _type_shortcut = staticmethod(construct_field) # all fields can be multifields _param_defs = {"fields": {"type": "field", "hash": True}} - name = None - _coerce = False + name: Optional[str] = None + _coerce: bool = False - def __init__(self, multi=False, required=False, *args, **kwargs): + def __init__( + self, multi: bool = False, required: bool = False, *args: Any, **kwargs: Any + ) -> None: """ :arg bool multi: specifies whether field can contain array of values :arg bool required: specifies whether field is required @@ -96,29 +94,29 @@ def __init__(self, multi=False, required=False, *args, **kwargs): self._required = required super(Field, self).__init__(*args, **kwargs) - def __getitem__(self, subfield): + def __getitem__(self, subfield: Any) -> Any: return self._params.get("fields", {})[subfield] - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return data - def _empty(self): + def _empty(self) -> None: return None - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([]) return self._empty() - def serialize(self, data): + def serialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): return list(map(self._serialize, data)) return self._serialize(data) - def deserialize(self, data): + def deserialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): data = [None if d is None else self._deserialize(d) for d in data] return data @@ -126,14 +124,14 @@ def deserialize(self, data): return None return self._deserialize(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data in (None, [], {}) and self._required: raise ValidationException("Value required for this field.") return data - def to_dict(self): + def to_dict(self) -> Any: d = super(Field, self).to_dict() name, value = d.popitem() value["type"] = name @@ -144,7 +142,7 @@ class CustomField(Field): name = "custom" _coerce = True - def to_dict(self): + def to_dict(self) -> Any: if isinstance(self.builtin_type, Field): return self.builtin_type.to_dict() @@ -154,10 +152,16 @@ def to_dict(self): class Object(Field): - name = "object" - _coerce = True - - def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): + name: Optional[str] = 
"object" + _coerce: bool = True + + def __init__( + self, + doc_class: Any = None, + dynamic: Any = None, + properties: Any = None, + **kwargs: Any + ) -> None: """ :arg document.InnerDoc doc_class: base doc class that handles mapping. If no `doc_class` is provided, new instance of `InnerDoc` will be created, @@ -173,7 +177,7 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): "doc_class and properties/dynamic should not be provided together" ) if doc_class: - self._doc_class = doc_class + self._doc_class: Any = doc_class else: # FIXME import from opensearchpy.helpers.document import InnerDoc @@ -181,39 +185,39 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): # no InnerDoc subclass, creating one instead... self._doc_class = type("InnerDoc", (InnerDoc,), {}) for name, field in iteritems(properties or {}): - self._doc_class._doc_type.mapping.field(name, field) + self._doc_class._doc_type.mapping.field(name, field) # type: ignore if dynamic is not None: - self._doc_class._doc_type.mapping.meta("dynamic", dynamic) + self._doc_class._doc_type.mapping.meta("dynamic", dynamic) # type: ignore self._mapping = copy.deepcopy(self._doc_class._doc_type.mapping) super(Object, self).__init__(**kwargs) - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self._mapping[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self._mapping - def _empty(self): + def _empty(self) -> Any: return self._wrap({}) - def _wrap(self, data): + def _wrap(self, data: Any) -> Any: return self._doc_class.from_opensearch(data, data_only=True) - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([], self._wrap) return self._empty() - def to_dict(self): + def to_dict(self) -> Any: d = self._mapping.to_dict() d.update(super(Object, self).to_dict()) return d - def _collect_fields(self): + def _collect_fields(self) -> Any: return self._mapping.properties._collect_fields() - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # don't wrap already wrapped data if isinstance(data, self._doc_class): return data @@ -223,7 +227,7 @@ def _deserialize(self, data): return self._wrap(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None @@ -233,7 +237,7 @@ def _serialize(self, data): return data.to_dict() - def clean(self, data): + def clean(self, data: Any) -> Any: data = super(Object, self).clean(data) if data is None: return None @@ -244,7 +248,7 @@ def clean(self, data): data.full_clean() return data - def update(self, other, update_only=False): + def update(self, other: "Object", update_only: bool = False) -> None: if not isinstance(other, Object): # not an inner/nested object, no merge possible return @@ -253,18 +257,20 @@ def update(self, other, update_only=False): class Nested(Object): - name = "nested" + name: Optional[str] = "nested" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: kwargs.setdefault("multi", True) super(Nested, self).__init__(*args, **kwargs) class Date(Field): - name = "date" - _coerce = True + name: Optional[str] = "date" + _coerce: bool = True - def __init__(self, default_timezone=None, *args, **kwargs): + def __init__( + self, default_timezone: None = None, *args: Any, **kwargs: Any + ) -> None: """ :arg default_timezone: timezone that will be automatically used for tz-naive values May be instance of `datetime.tzinfo` or string 
containing TZ offset @@ -274,7 +280,7 @@ def __init__(self, default_timezone=None, *args, **kwargs): self._default_timezone = tz.gettz(self._default_timezone) super(Date, self).__init__(*args, **kwargs) - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, string_types): try: data = parser.parse(data) @@ -303,7 +309,7 @@ class Text(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "text" + name: Optional[str] = "text" class SearchAsYouType(Field): @@ -312,7 +318,7 @@ class SearchAsYouType(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "search_as_you_type" + name: Optional[str] = "search_as_you_type" class Keyword(Field): @@ -321,23 +327,23 @@ class Keyword(Field): "search_analyzer": {"type": "analyzer"}, "normalizer": {"type": "normalizer"}, } - name = "keyword" + name: Optional[str] = "keyword" class ConstantKeyword(Keyword): - name = "constant_keyword" + name: Optional[str] = "constant_keyword" class Boolean(Field): - name = "boolean" - _coerce = True + name: Optional[str] = "boolean" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if data == "false": return False return bool(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data is None and self._required: @@ -346,108 +352,108 @@ def clean(self, data): class Float(Field): - name = "float" - _coerce = True + name: Optional[str] = "float" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return float(data) class DenseVector(Float): - name = "dense_vector" + name: Optional[str] = "dense_vector" - def __init__(self, dims, **kwargs): + def __init__(self, dims: Any, **kwargs: Any) -> None: kwargs["multi"] = True super(DenseVector, self).__init__(dims=dims, **kwargs) class SparseVector(Field): - name = "sparse_vector" + name: Optional[str] = "sparse_vector" class HalfFloat(Float): - name = "half_float" + name: Optional[str] = "half_float" class ScaledFloat(Float): - name = "scaled_float" + name: Optional[str] = "scaled_float" - def __init__(self, scaling_factor, *args, **kwargs): + def __init__(self, scaling_factor: Any, *args: Any, **kwargs: Any) -> None: super(ScaledFloat, self).__init__( scaling_factor=scaling_factor, *args, **kwargs ) class Double(Float): - name = "double" + name: Optional[str] = "double" class RankFeature(Float): - name = "rank_feature" + name: Optional[str] = "rank_feature" class RankFeatures(Field): - name = "rank_features" + name: Optional[str] = "rank_features" class Integer(Field): - name = "integer" - _coerce = True + name: Optional[str] = "integer" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return int(data) class Byte(Integer): - name = "byte" + name: Optional[str] = "byte" class Short(Integer): - name = "short" + name: Optional[str] = "short" class Long(Integer): - name = "long" + name: Optional[str] = "long" class Ip(Field): - name = "ip" - _coerce = True + name: Optional[str] = "ip" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # the ipaddress library for pypy only accepts unicode. 
return ipaddress.ip_address(unicode(data)) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return str(data) class Binary(Field): - name = "binary" - _coerce = True + name: Optional[str] = "binary" + _coerce: bool = True - def clean(self, data): + def clean(self, data: Any) -> Any: # Binary fields are opaque, so there's not much cleaning # that can be done. return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return base64.b64decode(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return base64.b64encode(data).decode() class GeoPoint(Field): - name = "geo_point" + name: Optional[str] = "geo_point" class GeoShape(Field): - name = "geo_shape" + name: Optional[str] = "geo_shape" class Completion(Field): @@ -459,29 +465,29 @@ class Completion(Field): class Percolator(Field): - name = "percolator" - _coerce = True + name: Optional[str] = "percolator" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return Q(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return data.to_dict() class RangeField(Field): - _coerce = True - _core_field = None + _coerce: bool = True + _core_field: Any = None - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Range): return data data = dict((k, self._core_field.deserialize(v)) for k, v in iteritems(data)) return Range(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None if not isinstance(data, collections_abc.Mapping): @@ -490,42 +496,42 @@ def _serialize(self, data): class IntegerRange(RangeField): - name = "integer_range" - _core_field = Integer() + name: Optional[str] = "integer_range" + _core_field: Any = Integer() class FloatRange(RangeField): - name = "float_range" - _core_field = Float() + name: Optional[str] = "float_range" + _core_field: Any = Float() class LongRange(RangeField): - name = "long_range" - _core_field = Long() + name: Optional[str] = "long_range" + _core_field: Any = Long() class DoubleRange(RangeField): - name = "double_range" - _core_field = Double() + name: Optional[str] = "double_range" + _core_field: Any = Double() class DateRange(RangeField): - name = "date_range" - _core_field = Date() + name: Optional[str] = "date_range" + _core_field: Any = Date() class IpRange(Field): # not a RangeField since ip_range supports CIDR ranges - name = "ip_range" + name: Optional[str] = "ip_range" class Join(Field): - name = "join" + name: Optional[str] = "join" class TokenCount(Field): - name = "token_count" + name: Optional[str] = "token_count" class Murmur3(Field): - name = "murmur3" + name: Optional[str] = "murmur3" diff --git a/opensearchpy/helpers/field.pyi b/opensearchpy/helpers/field.pyi deleted file mode 100644 index 3b448641..00000000 --- a/opensearchpy/helpers/field.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Field(DslBase): ... -class CustomField(Field): ... -class Object(Field): ... -class Nested(Object): ... -class Date(Field): ... -class Text(Field): ... -class SearchAsYouType(Field): ... -class Keyword(Field): ... -class ConstantKeyword(Keyword): ... -class Boolean(Field): ... -class Float(Field): ... -class DenseVector(Float): ... -class SparseVector(Field): ... -class HalfFloat(Float): ... -class ScaledFloat(Float): ... -class Double(Float): ... -class RankFeature(Float): ... -class RankFeatures(Field): ... -class Integer(Field): ... -class Byte(Integer): ... -class Short(Integer): ... -class Long(Integer): ... -class Ip(Field): ... -class Binary(Field): ... -class GeoPoint(Field): ... -class GeoShape(Field): ... -class Completion(Field): ... -class Percolator(Field): ... -class RangeField(Field): ... -class IntegerRange(RangeField): ... -class FloatRange(RangeField): ... -class LongRange(RangeField): ... -class DoubleRange(RangeField): ... -class DateRange(RangeField): ... -class IpRange(Field): ... -class Join(Field): ... -class TokenCount(Field): ... -class Murmur3(Field): ... - -def construct_field(name_or_field: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 47b7b148..00452f86 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -25,21 +25,19 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +import collections.abc as collections_abc +from typing import Any, Optional from .utils import DslBase -def SF(name_or_sf, **params): +def SF(name_or_sf: Any, **params: Any) -> Any: # {"script_score": {"script": "_score"}, "filter": {}} if isinstance(name_or_sf, collections_abc.Mapping): if params: raise ValueError("SF() cannot accept parameters when passing in a dict.") kwargs = {} - sf = name_or_sf.copy() + sf = name_or_sf.copy() # type: ignore for k in ScoreFunction._param_defs: if k in name_or_sf: kwargs[k] = sf.pop(k) @@ -74,16 +72,16 @@ def SF(name_or_sf, **params): class ScoreFunction(DslBase): - _type_name = "score_function" + _type_name: str = "score_function" _type_shortcut = staticmethod(SF) _param_defs = { "query": {"type": "query"}, "filter": {"type": "query"}, "weight": {}, } - name = None + name: Optional[str] = None - def to_dict(self): + def to_dict(self) -> Any: d = super(ScoreFunction, self).to_dict() # filter and query dicts should be at the same level as us for k in self._param_defs: @@ -99,7 +97,7 @@ class ScriptScore(ScoreFunction): class BoostFactor(ScoreFunction): name = "boost_factor" - def to_dict(self): + def to_dict(self) -> Any: d = super(BoostFactor, self).to_dict() if "value" in d[self.name]: d[self.name] = d[self.name].pop("value") diff --git a/opensearchpy/helpers/function.pyi b/opensearchpy/helpers/function.pyi deleted file mode 100644 index 72b4b342..00000000 --- a/opensearchpy/helpers/function.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class ScoreFunction(DslBase): ... -class ScriptScore(ScoreFunction): ... -class BoostFactor(ScoreFunction): ... -class RandomScore(ScoreFunction): ... -class FieldValueFactor(ScoreFunction): ... -class Linear(ScoreFunction): ... -class Gauss(ScoreFunction): ... -class Exp(ScoreFunction): ... - -def SF(name_or_sf: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 6bbc23e8..e96136b2 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -25,6 +25,9 @@ # specific language governing permissions and limitations # under the License. 
+from typing import Any, Optional + +from opensearchpy.client import OpenSearch from opensearchpy.connection.connections import get_connection from opensearchpy.helpers import analysis @@ -36,7 +39,14 @@ class IndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = Index(template, **kwargs) else: @@ -50,17 +60,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - def save(self, using=None): + def save(self, using: Any = None) -> Any: opensearch = get_connection(using or self._index._using) return opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -68,25 +78,27 @@ def save(self, using=None): class Index(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: str = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = Mapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # Document._index be that? @@ -94,7 +106,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -103,7 +115,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -112,12 +124,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - def load_mappings(self, using=None): + def load_mappings(self, using: Optional[OpenSearch] = None) -> None: self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. 
Useful for creating multiple indices with shared configuration:: @@ -141,14 +153,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - def _get_connection(self, using=None): + def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> Any: """ Associate a mapping (an instance of :class:`~opensearchpy.Mapping`) with this index. @@ -157,7 +169,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.Document` subclass with an index. This means that, when this index is created, it will contain the @@ -188,7 +200,7 @@ class Post(Document): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> Any: """ Add settings to the index:: @@ -201,7 +213,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> Any: """ Add aliases to the index definition:: @@ -211,7 +223,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. @@ -238,14 +250,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -257,7 +269,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Optional[OpenSearch] = None) -> Search: """ Return a :class:`~opensearchpy.Search` object searching over the index (or all the indices belonging to this template) and its @@ -267,7 +279,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Optional[OpenSearch] = None) -> UpdateByQuery: """ Return a :class:`~opensearchpy.UpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -281,7 +293,7 @@ def updateByQuery(self, using=None): index=self._name, ) - def create(self, using=None, **kwargs): + def create(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. 
@@ -292,13 +304,13 @@ def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - def is_closed(self, using=None): + def is_closed(self, using: Optional[OpenSearch] = None) -> Any: state = self._get_connection(using).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - def save(self, using=None): + def save(self, using: Optional[OpenSearch] = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -352,7 +364,7 @@ def save(self, using=None): if mappings: self.put_mapping(using=using, body=mappings) - def analyze(self, using=None, **kwargs): + def analyze(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -362,7 +374,7 @@ def analyze(self, using=None, **kwargs): """ return self._get_connection(using).indices.analyze(index=self._name, **kwargs) - def refresh(self, using=None, **kwargs): + def refresh(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -371,7 +383,7 @@ def refresh(self, using=None, **kwargs): """ return self._get_connection(using).indices.refresh(index=self._name, **kwargs) - def flush(self, using=None, **kwargs): + def flush(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -380,7 +392,7 @@ def flush(self, using=None, **kwargs): """ return self._get_connection(using).indices.flush(index=self._name, **kwargs) - def get(self, using=None, **kwargs): + def get(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -389,7 +401,7 @@ def get(self, using=None, **kwargs): """ return self._get_connection(using).indices.get(index=self._name, **kwargs) - def open(self, using=None, **kwargs): + def open(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. @@ -398,7 +410,7 @@ def open(self, using=None, **kwargs): """ return self._get_connection(using).indices.open(index=self._name, **kwargs) - def close(self, using=None, **kwargs): + def close(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -407,7 +419,7 @@ def close(self, using=None, **kwargs): """ return self._get_connection(using).indices.close(index=self._name, **kwargs) - def delete(self, using=None, **kwargs): + def delete(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -416,7 +428,7 @@ def delete(self, using=None, **kwargs): """ return self._get_connection(using).indices.delete(index=self._name, **kwargs) - def exists(self, using=None, **kwargs): + def exists(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -425,7 +437,7 @@ def exists(self, using=None, **kwargs): """ return self._get_connection(using).indices.exists(index=self._name, **kwargs) - def put_mapping(self, using=None, **kwargs): + def put_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. 
@@ -436,7 +448,7 @@ def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_mapping(self, using=None, **kwargs): + def get_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -447,7 +459,9 @@ def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_field_mapping(self, using=None, **kwargs): + def get_field_mapping( + self, using: Optional[OpenSearch] = None, **kwargs: Any + ) -> Any: """ Retrieve mapping definition of a specific field. @@ -458,7 +472,7 @@ def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_alias(self, using=None, **kwargs): + def put_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -467,7 +481,7 @@ def put_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.put_alias(index=self._name, **kwargs) - def exists_alias(self, using=None, **kwargs): + def exists_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -478,7 +492,7 @@ def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_alias(self, using=None, **kwargs): + def get_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -487,7 +501,7 @@ def get_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.get_alias(index=self._name, **kwargs) - def delete_alias(self, using=None, **kwargs): + def delete_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -498,7 +512,7 @@ def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_settings(self, using=None, **kwargs): + def get_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -509,7 +523,7 @@ def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_settings(self, using=None, **kwargs): + def put_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -520,7 +534,7 @@ def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def stats(self, using=None, **kwargs): + def stats(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. @@ -529,7 +543,7 @@ def stats(self, using=None, **kwargs): """ return self._get_connection(using).indices.stats(index=self._name, **kwargs) - def segments(self, using=None, **kwargs): + def segments(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -539,7 +553,7 @@ def segments(self, using=None, **kwargs): """ return self._get_connection(using).indices.segments(index=self._name, **kwargs) - def validate_query(self, using=None, **kwargs): + def validate_query(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. 
@@ -550,7 +564,7 @@ def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - def clear_cache(self, using=None, **kwargs): + def clear_cache(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -561,7 +575,7 @@ def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - def recovery(self, using=None, **kwargs): + def recovery(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -571,7 +585,7 @@ def recovery(self, using=None, **kwargs): """ return self._get_connection(using).indices.recovery(index=self._name, **kwargs) - def upgrade(self, using=None, **kwargs): + def upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -580,7 +594,7 @@ def upgrade(self, using=None, **kwargs): """ return self._get_connection(using).indices.upgrade(index=self._name, **kwargs) - def get_upgrade(self, using=None, **kwargs): + def get_upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -591,7 +605,7 @@ def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - def shard_stores(self, using=None, **kwargs): + def shard_stores(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -605,7 +619,7 @@ def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - def forcemerge(self, using=None, **kwargs): + def forcemerge(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -623,7 +637,7 @@ def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - def shrink(self, using=None, **kwargs): + def shrink(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/helpers/index.pyi b/opensearchpy/helpers/index.pyi deleted file mode 100644 index e2f95797..00000000 --- a/opensearchpy/helpers/index.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -class IndexTemplate(object): ... -class Index(object): ... diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index 8fd37348..92879274 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -25,12 +25,11 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from __future__ import annotations +import collections.abc as collections_abc from itertools import chain +from typing import Any, Optional from six import iteritems, itervalues @@ -56,26 +55,26 @@ class Properties(DslBase): name = "properties" _param_defs = {"properties": {"type": "field", "hash": True}} - def __init__(self): + def __init__(self) -> None: super(Properties, self).__init__() - def __repr__(self): + def __repr__(self) -> str: return "Properties()" - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties - def to_dict(self): + def to_dict(self) -> Any: return super(Properties, self).to_dict()["properties"] - def field(self, name, *args, **kwargs): + def field(self, name: Any, *args: Any, **kwargs: Any) -> "Properties": self.properties[name] = construct_field(*args, **kwargs) return self - def _collect_fields(self): + def _collect_fields(self) -> Any: """Iterate over all Field objects within, including multi fields.""" for f in itervalues(self.properties.to_dict()): yield f @@ -88,7 +87,7 @@ def _collect_fields(self): for inner_f in f._collect_fields(): yield inner_f - def update(self, other_object): + def update(self, other_object: Any) -> None: if not hasattr(other_object, "properties"): # not an inner/nested object, no merge possible return @@ -103,25 +102,25 @@ def update(self, other_object): class Mapping(object): - def __init__(self): + def __init__(self) -> None: self.properties = Properties() - self._meta = {} + self._meta: Any = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = Mapping() m.properties._params = self.properties._params.copy() return m @classmethod - def from_opensearch(cls, index, using="default"): + def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: field = self nested = [] parts = field_path.split(".") @@ -134,18 +133,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Optional[Mapping]: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -171,20 +170,20 @@ def _collect_analysis(self): return analysis - def save(self, index, using="default"): + def save(self, index: Any, using: str = "default") -> Any: from opensearchpy.helpers.index import Index index = Index(index, 
using=using) index.mapping(self) return index.save() - def update_from_opensearch(self, index, using="default"): + def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = get_connection(using) raw = opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -196,7 +195,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -213,20 +212,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> Any: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> "Mapping": self.properties.field(*args, **kwargs) return self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "Mapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -236,7 +235,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/helpers/mapping.pyi b/opensearchpy/helpers/mapping.pyi deleted file mode 100644 index 99a82935..00000000 --- a/opensearchpy/helpers/mapping.pyi +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class Properties(DslBase): ... -class Mapping(object): ... diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index 784435d6..dc2db8a7 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -25,12 +25,9 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any, Optional # 'SF' looks unused but the test suite assumes it's available # from this module so others are liable to do so as well. @@ -39,7 +36,7 @@ from .utils import DslBase -def Q(name_or_query="match_all", **params): +def Q(name_or_query: str = "match_all", **params: Any) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: @@ -49,7 +46,7 @@ def Q(name_or_query="match_all", **params): 'Q() can only accept dict with a single query ({"match": {...}}). ' "Instead it got (%r)" % name_or_query ) - name, params = name_or_query.copy().popitem() + name, params = name_or_query.copy().popitem() # type: ignore return Query.get_dsl_class(name)(_expand__to_dot=False, **params) # MatchAll() @@ -69,28 +66,28 @@ def Q(name_or_query="match_all", **params): class Query(DslBase): - _type_name = "query" + _type_name: str = "query" _type_shortcut = staticmethod(Q) - name = None + name: Optional[str] = None - def __add__(self, other): + def __add__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__radd__"): return other.__radd__(self) return Bool(must=[self, other]) - def __invert__(self): + def __invert__(self) -> Any: return Bool(must_not=[self]) - def __or__(self, other): + def __or__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__ror__"): return other.__ror__(self) return Bool(should=[self, other]) - def __and__(self, other): + def __and__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__rand__"): @@ -101,17 +98,17 @@ def __and__(self, other): class MatchAll(Query): name = "match_all" - def __add__(self, other): + def __add__(self, other: Any) -> Any: return other._clone() __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> "MatchAll": return self __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchNone() @@ -121,17 +118,17 @@ def __invert__(self): class MatchNone(Query): name = "match_none" - def __add__(self, other): + def __add__(self, other: Any) -> "MatchNone": return self __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> Any: return other._clone() __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchAll() @@ -144,7 +141,7 @@ class Bool(Query): "filter": {"type": "query", "multi": True}, } - def __add__(self, other): + def __add__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -157,7 +154,7 @@ def __add__(self, other): __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: "Bool") -> Any: for q in (self, other): if isinstance(q, Bool) and not any( (q.must, q.must_not, q.filter, getattr(q, "minimum_should_match", None)) @@ -182,14 +179,14 @@ def __or__(self, other): __ror__ = __or__ @property - def _min_should_match(self): + def _min_should_match(self) -> Any: return getattr( self, "minimum_should_match", 0 if not self.should or (self.must or self.filter) else 1, ) - def __invert__(self): + def __invert__(self) -> Any: # Because an empty Bool query is treated like # 
MatchAll the inverse should be MatchNone if not any(chain(self.must, self.filter, self.should, self.must_not)): @@ -209,7 +206,7 @@ def __invert__(self): return negations[0] return Bool(should=negations) - def __and__(self, other): + def __and__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -256,7 +253,7 @@ class FunctionScore(Query): "functions": {"type": "score_function", "multi": True}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: if "functions" in kwargs: pass else: diff --git a/opensearchpy/helpers/query.pyi b/opensearchpy/helpers/query.pyi deleted file mode 100644 index 673e83f9..00000000 --- a/opensearchpy/helpers/query.pyi +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Query(DslBase): ... -class MatchAll(Query): ... -class MatchNone(Query): ... -class Bool(Query): ... -class FunctionScore(Query): ... -class Boosting(Query): ... -class ConstantScore(Query): ... -class DisMax(Query): ... -class Filtered(Query): ... -class Indices(Query): ... -class Percolate(Query): ... -class Nested(Query): ... -class HasChild(Query): ... -class HasParent(Query): ... -class TopChildren(Query): ... -class SpanFirst(Query): ... -class SpanMulti(Query): ... -class SpanNear(Query): ... -class SpanNot(Query): ... -class SpanOr(Query): ... -class FieldMaskingSpan(Query): ... -class SpanContaining(Query): ... -class SpanWithin(Query): ... -class Common(Query): ... -class Fuzzy(Query): ... -class FuzzyLikeThis(Query): ... -class FuzzyLikeThisField(Query): ... -class RankFeature(Query): ... -class DistanceFeature(Query): ... -class GeoBoundingBox(Query): ... -class GeoDistance(Query): ... -class GeoDistanceRange(Query): ... -class GeoPolygon(Query): ... -class GeoShape(Query): ... -class GeohashCell(Query): ... -class Ids(Query): ... -class Intervals(Query): ... -class Limit(Query): ... -class Match(Query): ... -class MatchPhrase(Query): ... -class MatchPhrasePrefix(Query): ... -class MatchBoolPrefix(Query): ... -class Exists(Query): ... -class MoreLikeThis(Query): ... -class MoreLikeThisField(Query): ... -class MultiMatch(Query): ... -class Prefix(Query): ... -class QueryString(Query): ... -class Range(Query): ... -class Regexp(Query): ... -class Shape(Query): ... -class SimpleQueryString(Query): ... -class SpanTerm(Query): ... -class Template(Query): ... -class Term(Query): ... 
-class Terms(Query): ...
-class TermsSet(Query): ...
-class Wildcard(Query): ...
-class Script(Query): ...
-class ScriptScore(Query): ...
-class Type(Query): ...
-class ParentId(Query): ...
-class Wrapper(Query): ...
-
-def Q(name_or_query: Any, **params: Any) -> Any: ...
diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py
index d4792b11..c6215a6b 100644
--- a/opensearchpy/helpers/response/__init__.py
+++ b/opensearchpy/helpers/response/__init__.py
@@ -25,51 +25,51 @@
 # specific language governing permissions and limitations
 # under the License.

+from typing import Any
+
 from ..utils import AttrDict, AttrList, _wrap
 from .hit import Hit, HitMeta

-__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"]
-

 class Response(AttrDict):
-    def __init__(self, search, response, doc_class=None):
+    def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None:
         super(AttrDict, self).__setattr__("_search", search)
         super(AttrDict, self).__setattr__("_doc_class", doc_class)
         super(Response, self).__init__(response)

-    def __iter__(self):
+    def __iter__(self) -> Any:
         return iter(self.hits)

-    def __getitem__(self, key):
+    def __getitem__(self, key: Any) -> Any:
         if isinstance(key, (slice, int)):
             # for slicing etc
             return self.hits[key]
         return super(Response, self).__getitem__(key)

-    def __nonzero__(self):
+    def __nonzero__(self) -> Any:
         return bool(self.hits)

     __bool__ = __nonzero__

-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<Response: %r>" % (self.hits or self.aggregations)

-    def __len__(self):
+    def __len__(self) -> int:
         return len(self.hits)

-    def __getstate__(self):
+    def __getstate__(self) -> Any:
         return self._d_, self._search, self._doc_class

-    def __setstate__(self, state):
+    def __setstate__(self, state: Any) -> None:
         super(AttrDict, self).__setattr__("_d_", state[0])
         super(AttrDict, self).__setattr__("_search", state[1])
         super(AttrDict, self).__setattr__("_doc_class", state[2])

-    def success(self):
+    def success(self) -> bool:
         return self._shards.total == self._shards.successful and not self.timed_out

     @property
-    def hits(self):
+    def hits(self) -> Any:
         if not hasattr(self, "_hits"):
             h = self._d_["hits"]

@@ -86,11 +86,11 @@ def hits(self):
         return self._hits

     @property
-    def aggregations(self):
+    def aggregations(self) -> Any:
         return self.aggs

     @property
-    def aggs(self):
+    def aggs(self) -> Any:
         if not hasattr(self, "_aggs"):
             aggs = AggResponse(
                 self._search.aggs, self._search, self._d_.get("aggregations", {})
@@ -102,27 +102,30 @@ def aggs(self):


 class AggResponse(AttrDict):
-    def __init__(self, aggs, search, data):
+    def __init__(self, aggs: Any, search: Any, data: Any) -> None:
         super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": aggs})
         super(AggResponse, self).__init__(data)

-    def __getitem__(self, attr_name):
+    def __getitem__(self, attr_name: Any) -> Any:
         if attr_name in self._meta["aggs"]:
             # don't do self._meta['aggs'][attr_name] to avoid copying
             agg = self._meta["aggs"].aggs[attr_name]
             return agg.result(self._meta["search"], self._d_[attr_name])
         return super(AggResponse, self).__getitem__(attr_name)

-    def __iter__(self):
+    def __iter__(self) -> Any:
         for name in self._meta["aggs"]:
             yield self[name]


 class UpdateByQueryResponse(AttrDict):
-    def __init__(self, search, response, doc_class=None):
+    def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None:
         super(AttrDict, self).__setattr__("_search", search)
         super(AttrDict, self).__setattr__("_doc_class", doc_class)
super(UpdateByQueryResponse, self).__init__(response) - def success(self): + def success(self) -> bool: return not self.timed_out and not self.failures + + +__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"] diff --git a/opensearchpy/helpers/response/__init__.pyi b/opensearchpy/helpers/response/__init__.pyi deleted file mode 100644 index f592e46a..00000000 --- a/opensearchpy/helpers/response/__init__.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ..utils import AttrDict - -class Response(AttrDict): ... -class AggResponse(AttrDict): ... -class UpdateByQueryResponse(AttrDict): ... diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py index c8e7d5dd..42015d2d 100644 --- a/opensearchpy/helpers/response/aggs.py +++ b/opensearchpy/helpers/response/aggs.py @@ -25,17 +25,19 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, AttrList from . 
import AggResponse, Response class Bucket(AggResponse): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: super(Bucket, self).__init__(aggs, search, data) class FieldBucket(Bucket): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: if field: data["key"] = field.deserialize(data["key"]) super(FieldBucket, self).__init__(aggs, search, data, field) @@ -44,7 +46,7 @@ def __init__(self, aggs, search, data, field=None): class BucketData(AggResponse): _bucket_class = Bucket - def _wrap_bucket(self, data): + def _wrap_bucket(self, data: Any) -> Any: return self._bucket_class( self._meta["aggs"], self._meta["search"], @@ -52,19 +54,19 @@ def _wrap_bucket(self, data): field=self._meta.get("field"), ) - def __iter__(self): + def __iter__(self) -> Any: return iter(self.buckets) - def __len__(self): + def __len__(self) -> int: return len(self.buckets) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: if isinstance(key, (int, slice)): return self.buckets[key] return super(BucketData, self).__getitem__(key) @property - def buckets(self): + def buckets(self) -> Any: if not hasattr(self, "_buckets"): field = getattr(self._meta["aggs"], "field", None) if field: @@ -83,8 +85,11 @@ class FieldBucketData(BucketData): class TopHitsData(Response): - def __init__(self, agg, search, data): + def __init__(self, agg: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__( "meta", AttrDict({"agg": agg, "search": search}) ) super(TopHitsData, self).__init__(search, data) + + +__all__ = ["AggResponse"] diff --git a/opensearchpy/helpers/response/aggs.pyi b/opensearchpy/helpers/response/aggs.pyi deleted file mode 100644 index d943dbdd..00000000 --- a/opensearchpy/helpers/response/aggs.pyi +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from . import AggResponse as AggResponse -from . import Response as Response - -class Bucket(AggResponse): ... -class FieldBucket(Bucket): ... -class BucketData(AggResponse): ... -class FieldBucketData(BucketData): ... -class TopHitsData(Response): ... 
diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py
index 8f6230aa..c6e8a4a9 100644
--- a/opensearchpy/helpers/response/hit.py
+++ b/opensearchpy/helpers/response/hit.py
@@ -25,11 +25,13 @@
 # specific language governing permissions and limitations
 # under the License.

+from typing import Any
+
 from ..utils import AttrDict, HitMeta


 class Hit(AttrDict):
-    def __init__(self, document):
+    def __init__(self, document: Any) -> None:
         data = {}
         if "_source" in document:
             data = document["_source"]
@@ -40,22 +42,25 @@ def __init__(self, document):
         # assign meta as attribute and not as key in self._d_
         super(AttrDict, self).__setattr__("meta", HitMeta(document))

-    def __getstate__(self):
+    def __getstate__(self) -> Any:
         # add self.meta since it is not in self.__dict__
         return super(Hit, self).__getstate__() + (self.meta,)

-    def __setstate__(self, state):
+    def __setstate__(self, state: Any) -> None:
         super(AttrDict, self).__setattr__("meta", state[-1])
         super(Hit, self).__setstate__(state[:-1])

-    def __dir__(self):
+    def __dir__(self) -> Any:
         # be sure to expose meta in dir(self)
         return super(Hit, self).__dir__() + ["meta"]

-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<Hit({}): {}>".format(
             "/".join(
                 getattr(self.meta, key) for key in ("index", "id") if key in self.meta
             ),
             super(Hit, self).__repr__(),
         )
+
+
+__all__ = ["Hit", "HitMeta"]
diff --git a/opensearchpy/helpers/response/hit.pyi b/opensearchpy/helpers/response/hit.pyi
deleted file mode 100644
index 7597832d..00000000
--- a/opensearchpy/helpers/response/hit.pyi
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from ..utils import AttrDict
-
-class Hit(AttrDict): ...
diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py
index 30c59a92..46ba9da9 100644
--- a/opensearchpy/helpers/search.py
+++ b/opensearchpy/helpers/search.py
@@ -25,12 +25,9 @@
 # specific language governing permissions and limitations
 # under the License.
+import collections.abc as collections_abc import copy - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from typing import Any from six import iteritems, string_types @@ -39,8 +36,8 @@ from opensearchpy.helpers import scan from ..exceptions import IllegalOperation -from ..helpers.aggs import A, AggBase from ..helpers.query import Bool, Q +from .aggs import A, AggBase from .response import Hit, Response from .utils import AttrDict, DslBase, recursive_to_dict @@ -52,17 +49,17 @@ class QueryProxy(object): the wrapped query. """ - def __init__(self, search, attr_name): + def __init__(self, search: Any, attr_name: Any) -> None: self._search = search - self._proxied = None + self._proxied: Any = None self._attr_name = attr_name - def __nonzero__(self): + def __nonzero__(self) -> bool: return self._proxied is not None __bool__ = __nonzero__ - def __call__(self, *args, **kwargs): + def __call__(self, *args: Any, **kwargs: Any) -> Any: s = self._search._clone() # we cannot use self._proxied since we just cloned self._search and @@ -76,19 +73,19 @@ def __call__(self, *args, **kwargs): # always return search to be chainable return s - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._proxied, attr_name) - def __setattr__(self, attr_name, value): + def __setattr__(self, attr_name: Any, value: Any) -> None: if not attr_name.startswith("_"): self._proxied = Q(self._proxied.to_dict()) setattr(self._proxied, attr_name, value) super(QueryProxy, self).__setattr__(attr_name, value) - def __getstate__(self): + def __getstate__(self) -> Any: return self._search, self._proxied, self._attr_name - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._search, self._proxied, self._attr_name = state @@ -101,13 +98,13 @@ class ProxyDescriptor(object): """ - def __init__(self, name): + def __init__(self, name: str) -> None: self._attr_name = "_%s_proxy" % name - def __get__(self, instance, owner): + def __get__(self, instance: Any, owner: Any) -> Any: return getattr(instance, self._attr_name) - def __set__(self, instance, value): + def __set__(self, instance: Any, value: Any) -> None: proxy = getattr(instance, self._attr_name) proxy._proxied = Q(value) @@ -115,17 +112,26 @@ def __set__(self, instance, value): class AggsProxy(AggBase, DslBase): name = "aggs" - def __init__(self, search): + def __init__(self, search: Any) -> None: self._base = self self._search = search self._params = {"aggs": {}} - def to_dict(self): + def to_dict(self) -> Any: return super(AggsProxy, self).to_dict().get("aggs", {}) class Request(object): - def __init__(self, using="default", index=None, doc_type=None, extra=None): + _doc_type: Any + _doc_type_map: Any + + def __init__( + self, + using: str = "default", + index: Any = None, + doc_type: Any = None, + extra: Any = None, + ) -> None: self._using = using self._index = None @@ -144,22 +150,22 @@ def __init__(self, using="default", index=None, doc_type=None, extra=None): elif doc_type: self._doc_type.append(doc_type) - self._params = {} - self._extra = extra or {} + self._params: Any = {} + self._extra: Any = extra or {} - def __eq__(self, other): + def __eq__(self: Any, other: Any) -> bool: return ( isinstance(other, Request) and other._params == self._params and other._index == self._index and other._doc_type == self._doc_type - and other.to_dict() == self.to_dict() + and other.to_dict() == self.to_dict() # type: 
ignore ) - def __copy__(self): + def __copy__(self) -> Any: return self._clone() - def params(self, **kwargs): + def params(self, **kwargs: Any) -> Any: """ Specify query params to be used when executing the search. All the keyword arguments will override the current values. @@ -173,7 +179,7 @@ def params(self, **kwargs): s._params.update(kwargs) return s - def index(self, *index): + def index(self, *index: Any) -> Any: """ Set the index for the search. If called empty it will remove all information. @@ -201,7 +207,7 @@ def index(self, *index): return s - def _resolve_field(self, path): + def _resolve_field(self, path: Any) -> Any: for dt in self._doc_type: if not hasattr(dt, "_index"): continue @@ -209,10 +215,10 @@ def _resolve_field(self, path): if field is not None: return field - def _resolve_nested(self, hit, parent_class=None): + def _resolve_nested(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit - nested_path = [] + nested_path: Any = [] nesting = hit["_nested"] while nesting and "field" in nesting: nested_path.append(nesting["field"]) @@ -229,7 +235,7 @@ def _resolve_nested(self, hit, parent_class=None): return doc_class - def _get_result(self, hit, parent_class=None): + def _get_result(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit dt = hit.get("_type") @@ -253,7 +259,7 @@ def _get_result(self, hit, parent_class=None): callback = getattr(doc_class, "from_opensearch", doc_class) return callback(hit) - def doc_type(self, *doc_type, **kwargs): + def doc_type(self, *doc_type: Any, **kwargs: Any) -> Any: """ Set the type to search through. You can supply a single value or multiple. Values can be strings or subclasses of ``Document``. @@ -279,7 +285,7 @@ def doc_type(self, *doc_type, **kwargs): s._doc_type_map.update(kwargs) return s - def using(self, client): + def using(self, client: Any) -> Any: """ Associate the search request with an opensearch client. A fresh copy will be returned with current instance remaining unchanged. @@ -292,7 +298,7 @@ def using(self, client): s._using = client return s - def extra(self, **kwargs): + def extra(self, **kwargs: Any) -> Any: """ Add extra keys to the request body. Mostly here for backwards compatibility. @@ -303,7 +309,7 @@ def extra(self, **kwargs): s._extra.update(kwargs) return s - def _clone(self): + def _clone(self) -> Any: s = self.__class__( using=self._using, index=self._index, doc_type=self._doc_type ) @@ -317,7 +323,7 @@ class Search(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. 
@@ -331,31 +337,31 @@ def __init__(self, **kwargs): super(Search, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._collapse = {} - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} + self._sort: Any = [] + self._collapse: Any = {} + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} self._response_class = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __iter__(self): + def __iter__(self) -> Any: """ Iterate over the hits. """ return iter(self.execute()) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `Search` instance for pagination. @@ -390,7 +396,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `Search` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -411,7 +417,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -434,7 +440,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -442,7 +448,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "Search": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -477,7 +483,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -503,7 +509,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -548,7 +554,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -581,7 +587,12 @@ def sort(self, *keys): s._sort.append(k) return s - def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=None): + def collapse( + self, + field: Any = None, + inner_hits: Any = None, + max_concurrent_group_searches: Any = None, + ) -> Any: """ Add collapsing information to the search request. 
@@ -604,7 +615,7 @@ def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=No s._collapse["max_concurrent_group_searches"] = max_concurrent_group_searches return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -616,7 +627,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -656,7 +667,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: Any, text: Any, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -673,7 +684,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -720,7 +731,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def count(self): + def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. @@ -734,7 +745,7 @@ def count(self): # TODO: failed shards detection return opensearch.count(index=self._index, body=d, **self._params)["count"] - def execute(self, ignore_cache=False): + def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -753,7 +764,7 @@ def execute(self, ignore_cache=False): ) return self._response - def scan(self): + def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -769,7 +780,7 @@ def scan(self): ): yield self._get_result(hit) - def delete(self): + def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -789,22 +800,22 @@ class MultiSearch(Request): request. """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(MultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(MultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.Search` object to the request:: @@ -816,7 +827,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -829,7 +840,7 @@ def to_dict(self): return out - def execute(self, ignore_cache=False, raise_on_error=True): + def execute(self, ignore_cache: Any = False, raise_on_error: Any = True) -> Any: """ Execute the multi search request and return a list of search results. 
""" diff --git a/opensearchpy/helpers/search.pyi b/opensearchpy/helpers/search.pyi deleted file mode 100644 index 49eecb0d..00000000 --- a/opensearchpy/helpers/search.pyi +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .aggs import AggBase -from .utils import DslBase - -class QueryProxy(object): ... -class ProxyDescriptor(object): ... -class AggsProxy(AggBase, DslBase): ... -class Request(object): ... -class Search(Request): ... -class MultiSearch(Request): ... diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index ad6e9c65..930b8d25 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -8,16 +8,11 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys from typing import Any, Callable, Dict +from urllib.parse import parse_qs, urlencode, urlparse import requests -PY3 = sys.version_info[0] == 3 - -if PY3: - from urllib.parse import parse_qs, urlencode, urlparse - class AWSV4Signer: """ @@ -81,7 +76,7 @@ class RequestsAWSV4SignerAuth(requests.auth.AuthBase): AWS V4 Request Signer for Requests. 
""" - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) def __call__(self, request): # type: ignore @@ -136,7 +131,7 @@ class AWSV4SignerAuth(RequestsAWSV4SignerAuth): class Urllib3AWSV4SignerAuth(Callable): # type: ignore - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) def __call__(self, method: str, url: str, body: Any) -> Dict[str, str]: diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index e2467584..0ee1ccc9 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -30,18 +30,21 @@ import os import time +from typing import Any, Tuple from unittest import SkipTest, TestCase +import opensearchpy.client from opensearchpy import OpenSearch from opensearchpy.exceptions import ConnectionError +OPENSEARCH_URL: str if "OPENSEARCH_URL" in os.environ: OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] else: OPENSEARCH_URL = "https://admin:admin@localhost:9200" -def get_test_client(nowait=False, **kwargs): +def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: # construct kwargs from the environment kw = {"timeout": 30} @@ -69,14 +72,14 @@ def get_test_client(nowait=False, **kwargs): class OpenSearchTestCase(TestCase): @staticmethod - def _get_client(): + def _get_client() -> OpenSearch: return get_test_client() @classmethod - def setup_class(cls): + def setup_class(cls) -> None: cls.client = cls._get_client() - def teardown_method(self, _): + def teardown_method(self, _: Any) -> None: # Hidden indices expanded in wildcards in OpenSearch 7.7 expand_wildcards = ["open", "closed"] if self.opensearch_version() >= (1, 0): @@ -87,20 +90,20 @@ def teardown_method(self, _): ) self.client.indices.delete_template(name="*", ignore=404) - def opensearch_version(self): + def opensearch_version(self) -> Tuple[int, ...]: if not hasattr(self, "_opensearch_version"): self._opensearch_version = opensearch_version(self.client) return self._opensearch_version -def _get_version(version_string): +def _get_version(version_string: str) -> Tuple[int, ...]: if "." not in version_string: return () version = version_string.strip().split(".") return tuple(int(v) if v.isdigit() else 999 for v in version) -def opensearch_version(client): +def opensearch_version(client: opensearchpy.client.OpenSearch) -> Tuple[int, int, int]: return _get_version(client.info()["version"]["number"]) diff --git a/opensearchpy/helpers/test.pyi b/opensearchpy/helpers/test.pyi deleted file mode 100644 index a4d2302a..00000000 --- a/opensearchpy/helpers/test.pyi +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Tuple -from unittest import TestCase - -from ..client import OpenSearch - -OPENSEARCH_URL: str - -def get_test_client(nowait: bool = ..., **kwargs: Any) -> OpenSearch: ... -def _get_version(version_string: str) -> Tuple[int, ...]: ... - -class OpenSearchTestCase(TestCase): - @staticmethod - def _get_client() -> OpenSearch: ... - @classmethod - def setup_class(cls) -> None: ... - def teardown_method(self, _: Any) -> None: ... - def opensearch_version(self) -> Tuple[int, ...]: ... diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py index 32c7b705..7b560216 100644 --- a/opensearchpy/helpers/update_by_query.py +++ b/opensearchpy/helpers/update_by_query.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy.connection.connections import get_connection from ..helpers.query import Bool, Q @@ -36,7 +38,7 @@ class UpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -50,17 +52,17 @@ def __init__(self, **kwargs): """ super(UpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `UpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -81,7 +83,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -94,7 +96,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -102,7 +104,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "UpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -115,7 +117,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -136,7 +138,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request'ubq body. 
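``UpdateByQuery`` mirrors the ``Search`` API but issues an update-by-query request; ``script()`` supplies the update action and ``execute()`` runs it. A minimal sketch, again assuming a local cluster and hypothetical index and fields::

    from opensearchpy import OpenSearch, UpdateByQuery

    client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    ubq = (
        UpdateByQuery(using=client, index="movies")
        .filter("term", director="nolan")              # restrict the documents touched
        .script(
            source="ctx._source.views += params.n",    # painless update script
            lang="painless",
            params={"n": 1},
        )
    )
    response = ubq.execute()
    print(response.to_dict())                          # raw update_by_query response body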
@@ -154,7 +156,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def execute(self): + def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. diff --git a/opensearchpy/helpers/update_by_query.pyi b/opensearchpy/helpers/update_by_query.pyi deleted file mode 100644 index c0baf631..00000000 --- a/opensearchpy/helpers/update_by_query.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .search import Request - -class UpdateByQuery(Request): ... diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 4cd9dad8..513fb672 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -25,21 +25,18 @@ # specific language governing permissions and limitations # under the License. 
-from __future__ import unicode_literals - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from __future__ import annotations, unicode_literals +import collections.abc as collections_abc from copy import copy +from typing import Any, Callable, Dict, Optional, Tuple from six import add_metaclass, iteritems from six.moves import map from opensearchpy.exceptions import UnknownDslObject, ValidationException -SKIP_VALUES = ("", None) +SKIP_VALUES: Tuple[str, None] = ("", None) EXPAND__TO_DOT = True DOC_META_FIELDS = frozenset( @@ -62,7 +59,7 @@ ).union(DOC_META_FIELDS) -def _wrap(val, obj_wrapper=None): +def _wrap(val: Any, obj_wrapper: Optional[Callable[..., Any]] = None) -> Any: if isinstance(val, collections_abc.Mapping): return AttrDict(val) if obj_wrapper is None else obj_wrapper(val) if isinstance(val, list): @@ -71,52 +68,54 @@ def _wrap(val, obj_wrapper=None): class AttrList(object): - def __init__(self, p, obj_wrapper=None): + def __init__( + self, p: Any, obj_wrapper: Optional[Callable[..., Any]] = None + ) -> None: # make iterables into lists if not isinstance(p, list): p = list(p) self._l_ = p self._obj_wrapper = obj_wrapper - def __repr__(self): + def __repr__(self) -> str: return repr(self._l_) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrList): - return other._l_ == self._l_ + return bool(other._l_ == self._l_) # make sure we still equal to a dict with the same data - return other == self._l_ + return bool(other == self._l_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: p = self._l_[k] if isinstance(k, slice): return AttrList(p, obj_wrapper=self._obj_wrapper) return _wrap(p, self._obj_wrapper) - def __setitem__(self, k, value): + def __setitem__(self, k: Any, value: Any) -> None: self._l_[k] = value - def __iter__(self): + def __iter__(self) -> Any: return map(lambda i: _wrap(i, self._obj_wrapper), self._l_) - def __len__(self): + def __len__(self) -> int: return len(self._l_) - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._l_) __bool__ = __nonzero__ - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: return getattr(self._l_, name) - def __getstate__(self): + def __getstate__(self) -> Any: return self._l_, self._obj_wrapper - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._l_, self._obj_wrapper = state @@ -127,44 +126,44 @@ class AttrDict(object): nested dsl dicts. 
""" - def __init__(self, d): + def __init__(self, d: Any) -> None: # assign the inner dict manually to prevent __setattr__ from firing super(AttrDict, self).__setattr__("_d_", d) - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return key in self._d_ - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._d_) __bool__ = __nonzero__ - def __dir__(self): + def __dir__(self) -> Any: # introspection for auto-complete in IPython etc return list(self._d_.keys()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrDict): - return other._d_ == self._d_ + return bool(other._d_ == self._d_) # make sure we still equal to a dict with the same data - return other == self._d_ + return bool(other == self._d_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __repr__(self): + def __repr__(self) -> str: r = repr(self._d_) if len(r) > 60: r = r[:60] + "...}" return r - def __getstate__(self): + def __getstate__(self) -> Any: return (self._d_,) - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("_d_", state[0]) - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: try: return self.__getitem__(attr_name) except KeyError: @@ -174,7 +173,7 @@ def __getattr__(self, attr_name): ) ) - def get(self, key, default=None): + def get(self, key: Any, default: Any = None) -> Any: try: return self.__getattr__(key) except AttributeError: @@ -182,7 +181,7 @@ def get(self, key, default=None): return default raise - def __delattr__(self, attr_name): + def __delattr__(self, attr_name: Any) -> None: try: del self._d_[attr_name] except KeyError: @@ -192,26 +191,26 @@ def __delattr__(self, attr_name): ) ) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return _wrap(self._d_[key]) - def __setitem__(self, key, value): + def __setitem__(self, key: Any, value: Any) -> None: self._d_[key] = value - def __delitem__(self, key): + def __delitem__(self, key: Any) -> None: del self._d_[key] - def __setattr__(self, name, value): + def __setattr__(self, name: Any, value: Any) -> None: if name in self._d_ or not hasattr(self.__class__, name): self._d_[name] = value else: # there is an attribute on the class (could be property, ..) - don't add it as field super(AttrDict, self).__setattr__(name, value) - def __iter__(self): + def __iter__(self) -> Any: return iter(self._d_) - def to_dict(self): + def to_dict(self) -> Any: return self._d_ @@ -228,9 +227,12 @@ class DslMeta(type): For typical use see `QueryMeta` and `Query` in `opensearchpy.query`. """ - _types = {} + _types: Dict[str, Any] = {} - def __init__(cls, name, bases, attrs): + def __init__( + cls: Any, name: str, bases: tuple[type, ...], attrs: Dict[str, Any] + ) -> None: + # TODO: why is it calling itself?! 
super(DslMeta, cls).__init__(name, bases, attrs) # skip for DslBase if not hasattr(cls, "_type_shortcut"): @@ -246,7 +248,7 @@ def __init__(cls, name, bases, attrs): cls._classes[cls.name] = cls @classmethod - def get_dsl_type(cls, name): + def get_dsl_type(cls, name: Any) -> Any: try: return cls._types[name] except KeyError: @@ -270,10 +272,11 @@ class DslBase(object): all values in the `must` attribute into Query objects) """ - _param_defs = {} + _param_defs: Dict[str, Any] = {} + _params: Dict[str, Any] @classmethod - def get_dsl_class(cls, name, default=None): + def get_dsl_class(cls: Any, name: Any, default: Optional[bool] = None) -> Any: try: return cls._classes[name] except KeyError: @@ -283,14 +286,14 @@ def get_dsl_class(cls, name, default=None): "DSL class `{}` does not exist in {}.".format(name, cls._type_name) ) - def __init__(self, _expand__to_dot=EXPAND__TO_DOT, **params): + def __init__(self, _expand__to_dot: bool = EXPAND__TO_DOT, **params: Any) -> None: self._params = {} for pname, pvalue in iteritems(params): if "__" in pname and _expand__to_dot: pname = pname.replace("__", ".") self._setattr(pname, pvalue) - def _repr_params(self): + def _repr_params(self) -> str: """Produce a repr of all our parameters to be used in __repr__.""" return ", ".join( "{}={!r}".format(n.replace(".", "__"), v) @@ -299,21 +302,21 @@ def _repr_params(self): if "type" not in self._param_defs.get(n, {}) or v ) - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format(self.__class__.__name__, self._repr_params()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and other.to_dict() == self.to_dict() - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: return not self == other - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Optional[bool]) -> None: if name.startswith("_"): return super(DslBase, self).__setattr__(name, value) return self._setattr(name, value) - def _setattr(self, name, value): + def _setattr(self, name: Any, value: Any) -> None: # if this attribute has special type assigned to it... 
if name in self._param_defs: pinfo = self._param_defs[name] @@ -343,7 +346,7 @@ def _setattr(self, name, value): value = shortcut(value) self._params[name] = value - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: if name.startswith("_"): raise AttributeError( "{!r} object has no attribute {!r}".format( @@ -375,7 +378,7 @@ def __getattr__(self, name): return AttrDict(value) return value - def to_dict(self): + def to_dict(self) -> Any: """ Serialize the DSL object to plain dict """ @@ -414,7 +417,7 @@ def to_dict(self): d[pname] = value return {self.name: d} - def _clone(self): + def _clone(self) -> Any: c = self.__class__() for attr in self._params: c._params[attr] = copy(self._params[attr]) @@ -422,7 +425,9 @@ def _clone(self): class HitMeta(AttrDict): - def __init__(self, document, exclude=("_source", "_fields")): + def __init__( + self, document: Dict[str, Any], exclude: Any = ("_source", "_fields") + ) -> None: d = { k[1:] if k.startswith("_") else k: v for (k, v) in iteritems(document) @@ -435,7 +440,7 @@ def __init__(self, document, exclude=("_source", "_fields")): class ObjectBase(AttrDict): - def __init__(self, meta=None, **kwargs): + def __init__(self, meta: Any = None, **kwargs: Any) -> None: meta = meta or {} for k in list(kwargs): if k.startswith("_") and k[1:] in META_FIELDS: @@ -446,7 +451,7 @@ def __init__(self, meta=None, **kwargs): super(ObjectBase, self).__init__(kwargs) @classmethod - def __list_fields(cls): + def __list_fields(cls: Any) -> Any: """ Get all the fields defined for our class, if we have an Index, try looking at the index mappings as well, mark the fields from Index as @@ -467,7 +472,7 @@ def __list_fields(cls): yield name, field, True @classmethod - def __get_field(cls, name): + def __get_field(cls: Any, name: Any) -> Any: try: return cls._doc_type.mapping[name] except KeyError: @@ -479,30 +484,30 @@ def __get_field(cls, name): pass @classmethod - def from_opensearch(cls, hit): + def from_opensearch(cls: Any, hit: Any) -> Any: meta = hit.copy() data = meta.pop("_source", {}) doc = cls(meta=meta) doc._from_dict(data) return doc - def _from_dict(self, data): + def _from_dict(self, data: Any) -> None: for k, v in iteritems(data): f = self.__get_field(k) if f and f._coerce: v = f.deserialize(v) setattr(self, k, v) - def __getstate__(self): + def __getstate__(self) -> Any: return self.to_dict(), self.meta._d_ - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: data, meta = state super(AttrDict, self).__setattr__("_d_", {}) super(AttrDict, self).__setattr__("meta", HitMeta(meta)) self._from_dict(data) - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: try: return super(ObjectBase, self).__getattr__(name) except AttributeError: @@ -515,7 +520,7 @@ def __getattr__(self, name): return value raise - def to_dict(self, skip_empty=True): + def to_dict(self, skip_empty: Optional[bool] = True) -> Any: out = {} for k, v in iteritems(self._d_): # if this is a mapped field, @@ -536,8 +541,8 @@ def to_dict(self, skip_empty=True): out[k] = v return out - def clean_fields(self): - errors = {} + def clean_fields(self) -> None: + errors: Dict[str, Any] = {} for name, field, optional in self.__list_fields(): data = self._d_.get(name, None) if data is None and optional: @@ -554,15 +559,15 @@ def clean_fields(self): if errors: raise ValidationException(errors) - def clean(self): + def clean(self) -> None: pass - def full_clean(self): + def full_clean(self) -> None: self.clean_fields() self.clean() -def 
merge(data, new_data, raise_on_conflict=False): +def merge(data: Any, new_data: Any, raise_on_conflict: bool = False) -> None: if not ( isinstance(data, (AttrDict, collections_abc.Mapping)) and isinstance(new_data, (AttrDict, collections_abc.Mapping)) @@ -573,6 +578,13 @@ def merge(data, new_data, raise_on_conflict=False): ) ) + if not isinstance(new_data, Dict): + raise ValueError( + "You can only merge two dicts! Got {!r} and {!r} instead.".format( + data, new_data + ) + ) + for key, value in iteritems(new_data): if ( key in data @@ -583,10 +595,10 @@ def merge(data, new_data, raise_on_conflict=False): elif key in data and data[key] != value and raise_on_conflict: raise ValueError("Incompatible data for key %r, cannot be merged." % key) else: - data[key] = value + data[key] = value # type: ignore -def recursive_to_dict(data): +def recursive_to_dict(data: Any) -> Any: """Recursively transform objects that potentially have .to_dict() into dictionary literals by traversing AttrList, AttrDict, list, tuple, and Mapping types. diff --git a/opensearchpy/helpers/utils.pyi b/opensearchpy/helpers/utils.pyi deleted file mode 100644 index decb7382..00000000 --- a/opensearchpy/helpers/utils.pyi +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -class AttrList(object): ... -class AttrDict(object): ... -class DslMeta(type): ... -class DslBase(object): ... -class HitMeta(AttrDict): ... -class ObjectBase(AttrDict): ... diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py index 968909a6..1583391c 100644 --- a/opensearchpy/helpers/wrappers.py +++ b/opensearchpy/helpers/wrappers.py @@ -26,13 +26,12 @@ # under the License. import operator +from typing import Any from six import iteritems, string_types from .utils import AttrDict -__all__ = ["Range"] - class Range(AttrDict): OPS = { @@ -42,7 +41,7 @@ class Range(AttrDict): "gte": operator.ge, } - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: if args and (len(args) > 1 or kwargs or not isinstance(args[0], dict)): raise ValueError( "Range accepts a single dictionary or a set of keyword arguments." 
@@ -61,10 +60,10 @@ def __init__(self, *args, **kwargs): super(Range, self).__init__(args[0] if args else kwargs) - def __repr__(self): + def __repr__(self) -> str: return "Range(%s)" % ", ".join("%s=%r" % op for op in iteritems(self._d_)) - def __contains__(self, item): + def __contains__(self, item: Any) -> bool: if isinstance(item, string_types): return super(Range, self).__contains__(item) @@ -74,7 +73,7 @@ def __contains__(self, item): return True @property - def upper(self): + def upper(self) -> Any: if "lt" in self._d_: return self._d_["lt"], False if "lte" in self._d_: @@ -82,9 +81,12 @@ def upper(self): return None, False @property - def lower(self): + def lower(self) -> Any: if "gt" in self._d_: return self._d_["gt"], False if "gte" in self._d_: return self._d_["gte"], True return None, False + + +__all__ = ["Range"] diff --git a/opensearchpy/helpers/wrappers.pyi b/opensearchpy/helpers/wrappers.pyi deleted file mode 100644 index 704159dc..00000000 --- a/opensearchpy/helpers/wrappers.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import AttrDict - -class Range(AttrDict): ... diff --git a/opensearchpy/plugins/__init__.pyi b/opensearchpy/plugins/__init__.pyi deleted file mode 100644 index 22c54ac8..00000000 --- a/opensearchpy/plugins/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/plugins/alerting.py b/opensearchpy/plugins/alerting.py index 66b759a4..02c6b1a1 100644 --- a/opensearchpy/plugins/alerting.py +++ b/opensearchpy/plugins/alerting.py @@ -9,12 +9,14 @@ # GitHub history for details. +from typing import Any + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - def search_monitor(self, body, params=None, headers=None): + def search_monitor(self, body: Any, params: Any = None, headers: Any = None) -> Any: """ Returns the search result for a monitor. 
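The ``Range`` wrapper patched above behaves like a small interval object built from ``gt``/``gte``/``lt``/``lte`` bounds; membership tests and the ``upper``/``lower`` properties follow directly from ``OPS``::

    from opensearchpy.helpers.wrappers import Range

    r = Range(gt=10, lte=20)       # equivalent to Range({"gt": 10, "lte": 20})

    print(15 in r)                 # True  - every bound in OPS is satisfied
    print(10 in r)                 # False - "gt" is exclusive
    print(r.lower)                 # (10, False) - bound value and inclusiveness flag
    print(r.upper)                 # (20, True)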
@@ -29,7 +31,9 @@ def search_monitor(self, body, params=None, headers=None): ) @query_params() - def get_monitor(self, monitor_id, params=None, headers=None): + def get_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific monitor. @@ -43,7 +47,9 @@ def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - def run_monitor(self, monitor_id, params=None, headers=None): + def run_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Runs/Executes a specific monitor. @@ -58,7 +64,9 @@ def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def create_monitor(self, body=None, params=None, headers=None): + def create_monitor( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a monitor with inputs, triggers, and actions. @@ -73,7 +81,9 @@ def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - def update_monitor(self, monitor_id, body=None, params=None, headers=None): + def update_monitor( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates a monitor's inputs, triggers, and actions. @@ -89,7 +99,9 @@ def update_monitor(self, monitor_id, body=None, params=None, headers=None): ) @query_params() - def delete_monitor(self, monitor_id, params=None, headers=None): + def delete_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific monitor. @@ -103,7 +115,9 @@ def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def get_destination(self, destination_id=None, params=None, headers=None): + def get_destination( + self, destination_id: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific destination. @@ -119,7 +133,9 @@ def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - def create_destination(self, body=None, params=None, headers=None): + def create_destination( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a destination for slack, mail, or custom-webhook. @@ -134,7 +150,13 @@ def create_destination(self, body=None, params=None, headers=None): ) @query_params() - def update_destination(self, destination_id, body=None, params=None, headers=None): + def update_destination( + self, + destination_id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a destination's inputs, triggers, and actions. @@ -150,7 +172,9 @@ def update_destination(self, destination_id, body=None, params=None, headers=Non ) @query_params() - def delete_destination(self, destination_id, params=None, headers=None): + def delete_destination( + self, destination_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific destination. @@ -164,7 +188,7 @@ def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - def get_alerts(self, params=None, headers=None): + def get_alerts(self, params: Any = None, headers: Any = None) -> Any: """ Returns all alerts. 
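The ``AlertingClient`` methods in this hunk map one-to-one onto the Alerting plugin's REST API. A rough sketch, assuming the plugin is installed and reachable through the client's ``plugins`` namespace, with a trimmed, illustrative monitor body::

    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)
    alerting = client.plugins.alerting

    # illustrative body only - see the Alerting plugin docs for the full monitor schema
    created = alerting.create_monitor(body={
        "type": "monitor",
        "name": "errors-per-minute",
        "enabled": True,
        "schedule": {"period": {"interval": 1, "unit": "MINUTES"}},
        "inputs": [],
        "triggers": [],
    })
    monitor_id = created["_id"]               # the create response carries the monitor id

    alerting.run_monitor(monitor_id)          # also accepts a dryrun query parameter
    print(alerting.get_alerts())              # alerts across all monitors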
@@ -177,7 +201,9 @@ def get_alerts(self, params=None, headers=None): ) @query_params() - def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + def acknowledge_alert( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Acknowledges an alert. diff --git a/opensearchpy/plugins/alerting.pyi b/opensearchpy/plugins/alerting.pyi deleted file mode 100644 index 4454bf3c..00000000 --- a/opensearchpy/plugins/alerting.pyi +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def run_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, destination_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/plugins/index_management.py b/opensearchpy/plugins/index_management.py index 9f4f5a6e..77a31279 100644 --- a/opensearchpy/plugins/index_management.py +++ b/opensearchpy/plugins/index_management.py @@ -9,12 +9,16 @@ # GitHub history for details. +from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - def put_policy(self, policy, body=None, params=None, headers=None): + def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. 
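``IndexManagementClient`` wraps the ISM endpoints the same way. A sketch with a deliberately minimal policy body (states and transitions trimmed), assuming the Index State Management plugin is available::

    from opensearchpy import OpenSearch

    client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)
    ism = client.plugins.index_management

    policy_body = {
        "policy": {
            "description": "illustrative hot-only policy",
            "default_state": "hot",
            "states": [{"name": "hot", "actions": [], "transitions": []}],
        }
    }

    ism.put_policy("hot-only", body=policy_body)                  # create or update by id
    ism.add_policy("logs-*", body={"policy_id": "hot-only"})      # attach to matching indices
    print(ism.explain_index("logs-*", show_policy=True))          # current managed state
    print(ism.get_policy("hot-only"))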
@@ -32,7 +36,9 @@ def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - def add_policy(self, index, body=None, params=None, headers=None): + def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -50,7 +56,7 @@ def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def get_policy(self, policy, params=None, headers=None): + def get_policy(self, policy: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the policy by `policy_id`. @@ -67,7 +73,9 @@ def get_policy(self, policy, params=None, headers=None): ) @query_params() - def remove_policy_from_index(self, index, params=None, headers=None): + def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -84,7 +92,9 @@ def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - def change_policy(self, index, body=None, params=None, headers=None): + def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -102,7 +112,9 @@ def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def retry(self, index, body=None, params=None, headers=None): + def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -120,7 +132,7 @@ def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - def explain_index(self, index, params=None, headers=None): + def explain_index(self, index: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the current state of the index. @@ -137,7 +149,9 @@ def explain_index(self, index, params=None, headers=None): ) @query_params() - def delete_policy(self, policy, params=None, headers=None): + def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/plugins/index_management.pyi b/opensearchpy/plugins/index_management.pyi deleted file mode 100644 index d4a6dbad..00000000 --- a/opensearchpy/plugins/index_management.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py index 7463dbcc..e8c87ba9 100644 --- a/opensearchpy/serializer.py +++ b/opensearchpy/serializer.py @@ -26,10 +26,12 @@ # under the License. +from typing import Any, Dict, Optional + try: import simplejson as json except ImportError: - import json + import json # type: ignore import uuid from datetime import date, datetime @@ -45,22 +47,22 @@ class Serializer(object): - mimetype = "" + mimetype: str = "" - def loads(self, s): + def loads(self, s: str) -> Any: raise NotImplementedError() - def dumps(self, data): + def dumps(self, data: Any) -> Any: raise NotImplementedError() class TextSerializer(Serializer): - mimetype = "text/plain" + mimetype: str = "text/plain" - def loads(self, s): + def loads(self, s: str) -> Any: return s - def dumps(self, data): + def dumps(self, data: Any) -> Any: if isinstance(data, string_types): return data @@ -68,9 +70,9 @@ def dumps(self, data): class JSONSerializer(Serializer): - mimetype = "application/json" + mimetype: str = "application/json" - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, TIME_TYPES): # Little hack to avoid importing pandas but to not # return 'NaT' string for pd.NaT as that's not a valid @@ -142,13 +144,13 @@ def default(self, data): raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data))) - def loads(self, s): + def loads(self, s: str) -> Any: try: return json.loads(s) except (ValueError, TypeError) as e: raise SerializationError(s, e) - def dumps(self, data): + def dumps(self, data: Any) -> Any: # don't serialize strings if isinstance(data, string_types): return data @@ -161,14 +163,18 @@ def dumps(self, data): raise SerializationError(data, e) -DEFAULT_SERIALIZERS = { +DEFAULT_SERIALIZERS: Dict[str, Serializer] = { JSONSerializer.mimetype: JSONSerializer(), TextSerializer.mimetype: TextSerializer(), } class Deserializer(object): - def __init__(self, serializers, default_mimetype="application/json"): + def __init__( + self, + serializers: Dict[str, Serializer], + default_mimetype: str = "application/json", + ) -> None: try: self.default = serializers[default_mimetype] except KeyError: @@ -177,7 +183,7 @@ def __init__(self, serializers, default_mimetype="application/json"): ) self.serializers = serializers - def loads(self, s, mimetype=None): + def loads(self, s: str, mimetype: Optional[str] = None) -> Any: if not mimetype: deserializer = self.default else: @@ -199,7 +205,7 @@ def loads(self, s, mimetype=None): class AttrJSONSerializer(JSONSerializer): - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, AttrList): return data._l_ if hasattr(data, "to_dict"): diff --git a/opensearchpy/serializer.pyi 
b/opensearchpy/serializer.pyi deleted file mode 100644 index 6d798cce..00000000 --- a/opensearchpy/serializer.pyi +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Optional - -class Serializer(object): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class TextSerializer(Serializer): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class JSONSerializer(Serializer): - mimetype: str - def default(self, data: Any) -> Any: ... - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -DEFAULT_SERIALIZERS: Dict[str, Serializer] - -class Deserializer(object): - def __init__( - self, - serializers: Dict[str, Serializer], - default_mimetype: str = ..., - ) -> None: ... - def loads(self, s: str, mimetype: Optional[str] = ...) -> Any: ... - -class AttrJSONSerializer(JSONSerializer): ... diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index c36178b2..583d9ba7 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -28,8 +28,9 @@ import time from itertools import chain +from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union -from .connection import Urllib3HttpConnection +from .connection import Connection, Urllib3HttpConnection from .connection_pool import ConnectionPool, DummyConnectionPool, EmptyConnectionPool from .exceptions import ( ConnectionError, @@ -37,10 +38,12 @@ SerializationError, TransportError, ) -from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer +from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer, Serializer -def get_host_info(node_info, host): +def get_host_info( + node_info: Dict[str, Any], host: Optional[Dict[str, Any]] +) -> Optional[Dict[str, Any]]: """ Simple callback that takes the node info from `/_cluster/nodes` and a parsed connection information and return the connection information. If @@ -68,28 +71,50 @@ class Transport(object): Main interface is the `perform_request` method. 
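The serializer hierarchy typed above is what converts request and response bodies; ``JSONSerializer.default()`` takes care of dates, UUIDs, and similar types, while ``Deserializer`` picks a serializer by mimetype::

    from datetime import datetime

    from opensearchpy.serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer

    serializer = JSONSerializer()
    body = serializer.dumps({"timestamp": datetime(2023, 10, 26, 12, 0), "tags": ["a", "b"]})
    print(body)                                          # plain JSON text, datetime ISO-formatted

    deserializer = Deserializer(DEFAULT_SERIALIZERS)     # default mimetype is application/json
    print(deserializer.loads(body, "application/json"))  # back to a dict
    print(deserializer.loads("raw text", "text/plain"))  # TextSerializer passes strings through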
""" - DEFAULT_CONNECTION_CLASS = Urllib3HttpConnection + DEFAULT_CONNECTION_CLASS: Type[Connection] = Urllib3HttpConnection + + connection_pool: Any + deserializer: Deserializer + + max_retries: int + retry_on_timeout: bool + retry_on_status: Collection[int] + send_get_body_as: str + serializer: Serializer + connection_pool_class: Any + connection_class: Type[Connection] + kwargs: Any + hosts: Any + seed_connections: List[Connection] + sniffer_timeout: Optional[float] + sniff_on_start: bool + sniff_on_connection_fail: bool + last_sniff: float + sniff_timeout: Optional[float] + host_info_callback: Any def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - pool_maxsize=None, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Optional[Type[Connection]] = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Callable[ + [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] + ] = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Optional[float] = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Optional[Mapping[str, Serializer]] = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + pool_maxsize: Optional[int] = None, + retry_on_status: Collection[int] = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -185,7 +210,7 @@ def __init__( if sniff_on_start: self.sniff_hosts(True) - def add_connection(self, host): + def add_connection(self, host: Any) -> None: """ Create a new :class:`~opensearchpy.Connection` instance and add it to the pool. @@ -194,7 +219,7 @@ def add_connection(self, host): self.hosts.append(host) self.set_connections(self.hosts) - def set_connections(self, hosts): + def set_connections(self, hosts: Any) -> None: """ Instantiate all the connections and create new connection pool to hold them. Tries to identify unchanged hosts and re-use existing @@ -204,7 +229,7 @@ def set_connections(self, hosts): """ # construct the connections - def _create_connection(host): + def _create_connection(host: Any) -> Any: # if this is not the initial setup look at the existing connection # options and identify connections that haven't changed and can be # kept around. @@ -220,9 +245,7 @@ def _create_connection(host): kwargs["pool_maxsize"] = self.pool_maxsize return self.connection_class(**kwargs) - connections = map(_create_connection, hosts) - - connections = list(zip(connections, hosts)) + connections = list(zip(map(_create_connection, hosts), hosts)) if len(connections) == 1: self.connection_pool = DummyConnectionPool(connections) else: @@ -231,7 +254,7 @@ def _create_connection(host): connections, **self.kwargs ) - def get_connection(self): + def get_connection(self) -> Any: """ Retrieve a :class:`~opensearchpy.Connection` instance from the :class:`~opensearchpy.ConnectionPool` instance. 
@@ -241,7 +264,7 @@ def get_connection(self): self.sniff_hosts() return self.connection_pool.get_connection() - def _get_sniff_data(self, initial=False): + def _get_sniff_data(self, initial: bool = False) -> Any: """ Perform the request to get sniffing information. Returns a list of dictionaries (one per node) containing all the information from the @@ -289,7 +312,7 @@ def _get_sniff_data(self, initial=False): return list(node_info["nodes"].values()) - def _get_host_info(self, host_info): + def _get_host_info(self, host_info: Any) -> Any: host = {} address = host_info.get("http", {}).get("publish_address") @@ -310,7 +333,7 @@ def _get_host_info(self, host_info): return self.host_info_callback(host_info, host) - def sniff_hosts(self, initial=False): + def sniff_hosts(self, initial: bool = False) -> Any: """ Obtain a list of nodes from the cluster and create a new connection pool using the information retrieved. @@ -322,7 +345,7 @@ def sniff_hosts(self, initial=False): """ node_info = self._get_sniff_data(initial) - hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) + hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes or host_info_callback blocked all - # raise error. @@ -333,7 +356,7 @@ def sniff_hosts(self, initial=False): self.set_connections(hosts) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. @@ -345,7 +368,16 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.sniff_hosts() - def perform_request(self, method, url, headers=None, params=None, body=None): + def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to its perform_request method and @@ -428,13 +460,13 @@ def perform_request(self, method, url, headers=None, params=None, body=None): ) return data - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ - self.connection_pool.close() + return self.connection_pool.close() - def _resolve_request_args(self, method, params, body): + def _resolve_request_args(self, method: str, params: Any, body: Any) -> Any: """Resolves parameters for .perform_request()""" if body is not None: body = self.serializer.dumps(body) @@ -470,3 +502,6 @@ def _resolve_request_args(self, method, params, body): ignore = (ignore,) return method, params, body, ignore, timeout + + +__all__ = ["TransportError"] diff --git a/opensearchpy/transport.pyi b/opensearchpy/transport.pyi deleted file mode 100644 index fe33cfda..00000000 --- a/opensearchpy/transport.pyi +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from .connection import Connection -from .connection_pool import ConnectionPool -from .serializer import Deserializer, Serializer - -def get_host_info( - node_info: Dict[str, Any], host: Optional[Dict[str, Any]] -) -> Optional[Dict[str, Any]]: ... - -class Transport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - def close(self) -> None: ... 
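At the transport level, ``perform_request`` takes the HTTP method and URL plus optional ``params``, ``body``, and ``headers``; per-request options such as ``request_timeout`` are resolved out of ``params`` by ``_resolve_request_args``. A short sketch against an assumed local cluster::

    from opensearchpy.transport import Transport

    transport = Transport([{"host": "localhost", "port": 9200}])

    print(transport.perform_request("GET", "/"))           # deserialized cluster info
    health = transport.perform_request(
        "GET",
        "/_cluster/health",
        params={"wait_for_status": "yellow", "request_timeout": 10},
    )
    print(health["status"])
    transport.close()                                      # closes the underlying pool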
diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index 96d7d742..562f82e2 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -18,7 +18,7 @@ # urllib3.disable_warnings() -def test_opensearch_examples(): +def test_opensearch_examples() -> None: # Set up client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/setup.py b/setup.py index f4163840..b608990e 100644 --- a/setup.py +++ b/setup.py @@ -32,12 +32,13 @@ from setuptools import find_packages, setup package_name = "opensearch-py" +package_version = "" base_dir = abspath(dirname(__file__)) with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: - package_version = re.search( - r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read() - ).group(1) + m = re.search(r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read()) + if m: + package_version = m.group(1) with open(join(base_dir, "README.md")) as f: long_description = f.read().strip() @@ -90,7 +91,7 @@ "Issue Tracker": "https://github.com/opensearch-project/opensearch-py/issues", }, packages=packages, - package_data={"opensearchpy": ["py.typed", "*.pyi"]}, + package_data={"opensearchpy": ["py.typed"]}, include_package_data=True, zip_safe=False, classifiers=[ diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/TestHttpServer.py index d9fb8ede..ba83e041 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/TestHttpServer.py @@ -41,18 +41,18 @@ def do_GET(self): class TestHTTPServer(HTTPServer): __test__ = False - def __init__(self, host="localhost", port=8080): + def __init__(self, host: str = "localhost", port: int = 8080) -> None: super().__init__((host, port), TestHTTPRequestHandler) self._server_thread = None - def start(self): + def start(self) -> None: if self._server_thread is not None: return self._server_thread = threading.Thread(target=self.serve_forever) self._server_thread.start() - def stop(self): + def stop(self) -> None: if self._server_thread is None: return self.socket.close() diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index 55f1e586..de93adc7 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -39,7 +39,7 @@ from os.path import abspath, dirname, exists, join, pardir -def fetch_opensearch_repo(): +def fetch_opensearch_repo() -> None: # user is manually setting YAML dir, don't tamper with it if "TEST_OPENSEARCH_YAML_DIR" in environ: return @@ -88,7 +88,7 @@ def fetch_opensearch_repo(): subprocess.check_call("cd %s && git fetch origin %s" % (repo_path, sha), shell=True) -def run_all(argv=None): +def run_all(argv: None = None) -> None: sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") # fetch yaml tests anywhere that's not GitHub Actions if "GITHUB_ACTION" not in environ: diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 3df51645..e72a2358 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -35,6 +35,7 @@ import aiohttp import pytest +from _pytest.mark.structures import MarkDecorator from mock import patch from multidict import CIMultiDict from pytest import raises @@ -45,15 +46,15 @@ from opensearchpy.exceptions import ConnectionError, TransportError from test_opensearchpy.TestHttpServer import TestHTTPServer 
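``TestHTTPServer`` gives the test suite a throwaway local HTTP endpoint with explicit ``start``/``stop`` lifecycle methods. A rough sketch of bracketing a request with it, assuming the bundled request handler answers with a JSON body as it does in this repository::

    from opensearchpy import OpenSearch
    from test_opensearchpy.TestHttpServer import TestHTTPServer

    server = TestHTTPServer(port=8080)       # defaults to localhost:8080
    server.start()
    try:
        client = OpenSearch(hosts=[{"host": "localhost", "port": 8080}], use_ssl=False)
        print(client.transport.perform_request("GET", "/"))
    finally:
        server.stop()                        # tear down the helper server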
-pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAIOHttpConnection: async def _get_mock_connection( self, connection_params={}, - response_code=200, - response_body=b"{}", + response_code: int = 200, + response_body: bytes = b"{}", response_headers={}, ): con = AIOHttpConnection(**connection_params) @@ -79,7 +80,7 @@ async def text(self): con.session.request = _dummy_request return con - async def test_ssl_context(self): + async def test_ssl_context(self) -> None: try: context = ssl.create_default_context() except AttributeError: @@ -95,11 +96,11 @@ async def test_ssl_context(self): assert con.use_ssl assert con.session.connector._ssl == context - async def test_opaque_id(self): + async def test_opaque_id(self) -> None: con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" - async def test_no_http_compression(self): + async def test_no_http_compression(self) -> None: con = await self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers @@ -112,7 +113,7 @@ async def test_no_http_compression(self): assert "accept-encoding" not in kwargs["headers"] assert "content-encoding" not in kwargs["headers"] - async def test_http_compression(self): + async def test_http_compression(self) -> None: con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" @@ -138,7 +139,7 @@ async def test_http_compression(self): assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert "content-encoding" not in kwargs["headers"] - async def test_url_prefix(self): + async def test_url_prefix(self) -> None: con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) @@ -150,18 +151,18 @@ async def test_url_prefix(self): method, yarl_url = con.session.request.call_args[0] assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" - async def test_default_user_agent(self): + async def test_default_user_agent(self) -> None: con = AIOHttpConnection() assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( __versionstr__, python_version(), ) - async def test_timeout_set(self): + async def test_timeout_set(self) -> None: con = AIOHttpConnection(timeout=42) assert 42 == con.timeout - async def test_keep_alive_is_on_by_default(self): + async def test_keep_alive_is_on_by_default(self) -> None: con = AIOHttpConnection() assert { "connection": "keep-alive", @@ -169,7 +170,7 @@ async def test_keep_alive_is_on_by_default(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth(self): + async def test_http_auth(self) -> None: con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -178,7 +179,7 @@ async def test_http_auth(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_tuple(self): + async def test_http_auth_tuple(self) -> None: con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -187,7 +188,7 @@ async def test_http_auth_tuple(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_list(self): + async def test_http_auth_list(self) -> None: con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -196,7 +197,7 @@ async def test_http_auth_list(self): 
"user-agent": con._get_default_user_agent(), } == con.headers - async def test_uses_https_if_verify_certs_is_off(self): + async def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) @@ -209,7 +210,7 @@ async def test_uses_https_if_verify_certs_is_off(self): assert con.scheme == "https" assert con.host == "https://localhost:9200" - async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): + async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -219,17 +220,17 @@ async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): assert isinstance(con.session, aiohttp.ClientSession) - async def test_doesnt_use_https_if_not_specified(self): + async def test_doesnt_use_https_if_not_specified(self) -> None: con = AIOHttpConnection() assert not con.use_ssl - async def test_no_warning_when_using_ssl_context(self): + async def test_no_warning_when_using_ssl_context(self) -> None: ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) - async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -252,32 +253,32 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path): + async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path) -> None: path = tmp_path / "ca_certs.pem" path.touch() AIOHttpConnection(use_ssl=True, ca_certs=str(path)) load_verify_locations.assert_called_once_with(cafile=str(path)) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_default_ca_certs(self, load_verify_locations): + async def test_uses_default_ca_certs(self, load_verify_locations) -> None: AIOHttpConnection(use_ssl=True) load_verify_locations.assert_called_once_with( cafile=Connection.default_ca_certs() ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_no_ca_certs(self, load_verify_locations): + async def test_uses_no_ca_certs(self, load_verify_locations) -> None: AIOHttpConnection(use_ssl=True, verify_certs=False) load_verify_locations.assert_not_called() - async def test_trust_env(self): + async def test_trust_env(self) -> None: con = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() assert con._trust_env is True assert con.session.trust_env is True - async def test_trust_env_default_value_is_false(self): + async def test_trust_env_default_value_is_false(self) -> None: con = AIOHttpConnection() await con._create_aiohttp_session() @@ -285,7 +286,7 @@ async def test_trust_env_default_value_is_false(self): assert con.session.trust_env is False @patch("opensearchpy.connection.base.logger") - async def test_uncompressed_body_logged(self, logger): + async def test_uncompressed_body_logged(self, logger) -> None: con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -295,14 +296,14 @@ async def test_uncompressed_body_logged(self, logger): 
assert '> {"example": "body"}' == req[0][0] % req[0][1:] assert "< {}" == resp[0][0] % resp[0][1:] - async def test_surrogatepass_into_bytes(self): + async def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") assert u"你好\uda6a" == data # fmt: skip @pytest.mark.parametrize("exception_cls", reraise_exceptions) - async def test_recursion_error_reraised(self, exception_cls): + async def test_recursion_error_reraised(self, exception_cls) -> None: conn = AIOHttpConnection() def request_raise(*_, **__): @@ -315,7 +316,7 @@ def request_raise(*_, **__): await conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" - async def test_json_errors_are_parsed(self): + async def test_json_errors_are_parsed(self) -> None: con = await self._get_mock_connection( response_code=400, response_body=b'{"error": {"type": "snapshot_in_progress_exception"}}', @@ -334,13 +335,13 @@ class TestConnectionHttpServer: """Tests the HTTP connection implementations against a live server E2E""" @classmethod - def setup_class(cls): + def setup_class(cls) -> None: # Start server cls.server = TestHTTPServer(port=8081) cls.server.start() @classmethod - def teardown_class(cls): + def teardown_class(cls) -> None: # Stop server cls.server.stop() @@ -349,7 +350,7 @@ async def httpserver(self, conn, **kwargs): data = json.loads(data) return (status, data) - async def test_aiohttp_connection(self): + async def test_aiohttp_connection(self) -> None: # Defaults conn = AIOHttpConnection("localhost", port=8081, use_ssl=False) user_agent = conn._get_default_user_agent() @@ -409,13 +410,13 @@ async def test_aiohttp_connection(self): "User-Agent": user_agent, } - async def test_aiohttp_connection_error(self): + async def test_aiohttp_connection_error(self) -> None: conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") -async def test_default_connection_is_returned_by_default(): +async def test_default_connection_is_returned_by_default() -> None: c = async_connections.AsyncConnections() con, con2 = object(), object() @@ -426,7 +427,7 @@ async def test_default_connection_is_returned_by_default(): assert await c.get_connection() is con -async def test_get_connection_created_connection_if_needed(): +async def test_get_connection_created_connection_if_needed() -> None: c = async_connections.AsyncConnections() await c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -439,7 +440,7 @@ async def test_get_connection_created_connection_if_needed(): assert [{"host": "localhost"}] == local.transport.hosts -async def test_configure_preserves_unchanged_connections(): +async def test_configure_preserves_unchanged_connections() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -458,7 +459,7 @@ async def test_configure_preserves_unchanged_connections(): assert new_default is not default -async def test_remove_connection_removes_both_conn_and_conf(): +async def test_remove_connection_removes_both_conn_and_conf() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -475,7 +476,7 @@ async def test_remove_connection_removes_both_conn_and_conf(): await c.get_connection("default") -async def test_create_connection_constructs_client(): +async def test_create_connection_constructs_client() -> None: c = async_connections.AsyncConnections() await 
c.create_connection("testing", hosts=["opensearch.com"]) @@ -483,7 +484,7 @@ async def test_create_connection_constructs_client(): assert [{"host": "opensearch.com"}] == con.transport.hosts -async def test_create_connection_adds_our_serializer(): +async def test_create_connection_adds_our_serializer() -> None: c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) result = await c.get_connection("testing") diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index ca0c8d4c..f24b8a48 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -10,12 +10,13 @@ import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock from pytest import fixture from opensearchpy.connection.async_connections import add_connection, async_connections -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio @fixture diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 26d49bf0..d13c7272 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -17,6 +17,7 @@ from hashlib import sha256 import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import InnerDoc, MetaField, Range, analyzer @@ -26,7 +27,7 @@ from opensearchpy.exceptions import IllegalOperation, ValidationException from opensearchpy.helpers import field, utils -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class MyInner(InnerDoc): @@ -118,7 +119,7 @@ class Index: name = "test-host" -async def test_range_serializes_properly(): +async def test_range_serializes_properly() -> None: class D(document.AsyncDocument): lr = field.LongRange() @@ -131,7 +132,7 @@ class D(document.AsyncDocument): assert {"lr": {"lt": 42}} == d.to_dict() -async def test_range_deserializes_properly(): +async def test_range_deserializes_properly() -> None: class D(InnerDoc): lr = field.LongRange() @@ -141,13 +142,13 @@ class D(InnerDoc): assert 47 not in d.lr -async def test_resolve_nested(): +async def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] -async def test_conflicting_mapping_raises_error_in_index_to_dict(): +async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.AsyncDocument): name = field.Text() @@ -162,18 +163,18 @@ class B(document.AsyncDocument): i.to_dict() -async def test_ip_address_serializes_properly(): +async def test_ip_address_serializes_properly() -> None: host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -async def test_matches_uses_index(): +async def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -async def test_matches_with_no_name_always_matches(): +async def test_matches_with_no_name_always_matches() -> None: class D(document.AsyncDocument): pass @@ -181,7 +182,7 @@ class D(document.AsyncDocument): assert D._matches({"_index": "whatever"}) -async def test_matches_accepts_wildcards(): +async def test_matches_accepts_wildcards() -> None: class 
MyDoc(document.AsyncDocument): class Index: name = "my-*" @@ -190,7 +191,7 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -async def test_assigning_attrlist_to_field(): +async def test_assigning_attrlist_to_field() -> None: sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -198,13 +199,13 @@ async def test_assigning_attrlist_to_field(): assert sc.to_dict()["files"] is ls -async def test_optional_inner_objects_are_not_validated_if_missing(): +async def test_optional_inner_objects_are_not_validated_if_missing() -> None: d = OptionalObjectWithRequiredField() assert d.full_clean() is None -async def test_custom_field(): +async def test_custom_field() -> None: s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -215,13 +216,13 @@ async def test_custom_field(): assert isinstance(s.title, Secret) -async def test_custom_field_mapping(): +async def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -async def test_custom_field_in_nested(): +async def test_custom_field_in_nested() -> None: s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -229,7 +230,7 @@ async def test_custom_field_in_nested(): assert s.secrets[0].title == "Hello" -async def test_multi_works_after_doc_has_been_saved(): +async def test_multi_works_after_doc_has_been_saved() -> None: c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -237,7 +238,7 @@ async def test_multi_works_after_doc_has_been_saved(): assert c.to_dict() == {"files": ["setup.py"]} -async def test_multi_works_in_nested_after_doc_has_been_serialized(): +async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -246,7 +247,7 @@ async def test_multi_works_in_nested_after_doc_has_been_serialized(): assert [] == c.comments[0].tags -async def test_null_value_for_object(): +async def test_null_value_for_object() -> None: d = MyDoc(inner=None) assert d.inner is None @@ -302,21 +303,21 @@ async def test_to_dict_with_meta_includes_custom_index(): assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -async def test_to_dict_without_skip_empty_will_include_empty_fields(): +async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -async def test_attribute_can_be_removed(): +async def test_attribute_can_be_removed() -> None: d = MyDoc(title="hello") del d.title assert "title" not in d._d_ -async def test_doc_type_can_be_correctly_pickled(): +async def test_doc_type_can_be_correctly_pickled() -> None: d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -331,7 +332,7 @@ async def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -async def test_meta_is_accessible_even_on_empty_doc(): +async def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() d.meta @@ -358,7 +359,7 @@ class Meta: } == User._doc_type.mapping.to_dict() -async def test_multi_value_fields(): +async def test_multi_value_fields() -> None: class Blog(document.AsyncDocument): tags = field.Keyword(multi=True) @@ -369,7 +370,7 @@ class Blog(document.AsyncDocument): assert ["search", "python"] == b.tags -async def 
test_docs_with_properties(): +async def test_docs_with_properties() -> None: class User(document.AsyncDocument): pwd_hash = field.Text() @@ -397,7 +398,7 @@ def password(self, pwd): u.password -async def test_nested_can_be_assigned_to(): +async def test_nested_can_be_assigned_to() -> None: d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -408,13 +409,13 @@ async def test_nested_can_be_assigned_to(): assert isinstance(d2.comments[0], Comment) -async def test_nested_can_be_none(): +async def test_nested_can_be_none() -> None: d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() -async def test_nested_defaults_to_list_and_can_be_updated(): +async def test_nested_defaults_to_list_and_can_be_updated() -> None: md = DocWithNested() assert [] == md.comments @@ -435,7 +436,7 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values(): } == md.to_dict() -async def test_to_dict_ignores_empty_collections(): +async def test_to_dict_ignores_empty_collections() -> None: md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() @@ -489,7 +490,7 @@ async def test_document_can_be_created_dynamically(): } == md.to_dict() -async def test_invalid_date_will_raise_exception(): +async def test_invalid_date_will_raise_exception() -> None: md = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -528,7 +529,7 @@ class B(A): } == B._doc_type.mapping.to_dict() -async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): +async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: md = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -555,32 +556,32 @@ async def test_index_inheritance(): } == MyMultiSubDoc._doc_type.mapping.to_dict() -async def test_meta_fields_can_be_set_directly_in_init(): +async def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() md = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -async def test_save_no_index(mock_client): +async def test_save_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): await md.save(using="mock") -async def test_delete_no_index(mock_client): +async def test_delete_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): await md.delete(using="mock") -async def test_update_no_fields(): +async def test_update_no_fields() -> None: md = MyDoc() with raises(IllegalOperation): await md.update() -async def test_search_with_custom_alias_and_index(mock_client): +async def test_search_with_custom_alias_and_index(mock_client) -> None: search_object = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 34e18008..58c936c0 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -11,11 +11,12 @@ from datetime import datetime import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.faceted_search import AsyncFacetedSearch from opensearchpy.helpers.faceted_search import DateHistogramFacet, TermsFacet -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class BlogSearch(AsyncFacetedSearch): @@ -31,7 +32,7 @@ class 
BlogSearch(AsyncFacetedSearch): } -async def test_query_is_created_properly(): +async def test_query_is_created_properly() -> None: bs = BlogSearch("python search") s = bs.build_search() @@ -135,7 +136,7 @@ async def test_filters_are_applied_to_search_ant_relevant_facets(): } == d -async def test_date_histogram_facet_with_1970_01_01_date(): +async def test_date_histogram_facet_with_1970_01_01_date() -> None: dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -168,7 +169,7 @@ async def test_date_histogram_facet_with_1970_01_01_date(): ("fixed_interval", "1h"), ], ) -async def test_date_histogram_interval_types(interval_type, interval): +async def test_date_histogram_interval_types(interval_type, interval) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -180,7 +181,7 @@ async def test_date_histogram_interval_types(interval_type, interval): dhf.get_value_filter(datetime.now()) -async def test_date_histogram_no_interval_keyerror(): +async def test_date_histogram_no_interval_keyerror() -> None: dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index 1958f80f..681b9cfe 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -12,13 +12,14 @@ from random import choice import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import Date, Text, analyzer from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -26,7 +27,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_multiple_doc_types_will_combine_mappings(): +async def test_multiple_doc_types_will_combine_mappings() -> None: class User(AsyncDocument): username = Text() @@ -44,14 +45,14 @@ class User(AsyncDocument): } == i.to_dict() -async def test_search_is_limited_to_index_name(): +async def test_search_is_limited_to_index_name() -> None: i = AsyncIndex("my-index") s = i.search() assert s._index == ["my-index"] -async def test_cloned_index_has_copied_settings_and_using(): +async def test_cloned_index_has_copied_settings_and_using() -> None: client = object() i = AsyncIndex("my-index", using=client) i.settings(number_of_shards=1) @@ -64,7 +65,7 @@ async def test_cloned_index_has_copied_settings_and_using(): assert i._settings is not i2._settings -async def test_cloned_index_has_analysis_attribute(): +async def test_cloned_index_has_analysis_attribute() -> None: """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
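# Illustrative aside on the recurring "pytestmark" change in these async test
# modules: pytest.mark.asyncio is a MarkDecorator, and the patch spells that
# type out explicitly, presumably to satisfy the stricter typing configuration
# this change moves toward. A minimal sketch of the pattern (the test name is
# hypothetical):
import pytest
from _pytest.mark.structures import MarkDecorator

pytestmark: MarkDecorator = pytest.mark.asyncio


async def test_example() -> None:  # hypothetical test, marked asyncio via pytestmark
    assert True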
@@ -84,7 +85,7 @@ async def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -async def test_settings_are_saved(): +async def test_settings_are_saved() -> None: i = AsyncIndex("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -92,7 +93,7 @@ async def test_settings_are_saved(): assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -async def test_registered_doc_type_included_in_to_dict(): +async def test_registered_doc_type_included_in_to_dict() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -106,7 +107,7 @@ async def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -async def test_registered_doc_type_included_in_search(): +async def test_registered_doc_type_included_in_search() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -115,7 +116,7 @@ async def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -async def test_aliases_add_to_object(): +async def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -125,7 +126,7 @@ async def test_aliases_add_to_object(): assert index._aliases == alias_dict -async def test_aliases_returned_from_to_dict(): +async def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -164,7 +165,7 @@ async def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -async def test_conflicting_analyzer_raises_error(): +async def test_conflicting_analyzer_raises_error() -> None: i = AsyncIndex("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 7c9e799f..6ae4c0b7 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -15,7 +15,7 @@ from opensearchpy.helpers import analysis -async def test_mapping_can_has_fields(): +async def test_mapping_can_has_fields() -> None: m = mapping.AsyncMapping() m.field("name", "text").field("tags", "keyword") @@ -57,7 +57,7 @@ async def test_mapping_update_is_recursive(): } == m1.to_dict() -async def test_properties_can_iterate_over_all_the_fields(): +async def test_properties_can_iterate_over_all_the_fields() -> None: m = mapping.AsyncMapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -186,7 +186,7 @@ async def test_mapping_can_collect_multiple_analyzers(): } == m._collect_analysis() -async def test_even_non_custom_analyzers_can_have_params(): +async def test_even_non_custom_analyzers_can_have_params() -> None: a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.AsyncMapping() m.field("title", "text", analyzer=a1) @@ -196,14 +196,14 @@ async def test_even_non_custom_analyzers_can_have_params(): } == m._collect_analysis() -async def test_resolve_field_can_resolve_multifields(): +async def test_resolve_field_can_resolve_multifields() -> None: m = mapping.AsyncMapping() m.field("title", "text", fields={"keyword": Keyword()}) assert isinstance(m.resolve_field("title.keyword"), Keyword) -async def 
test_resolve_nested(): +async def test_resolve_nested() -> None: m = mapping.AsyncMapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 784193ee..c32a8c7c 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -11,6 +11,7 @@ from copy import deepcopy import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy._async.helpers import search @@ -19,16 +20,16 @@ from opensearchpy.helpers import query from opensearchpy.helpers.query import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_expand__to_dot_is_respected(): +async def test_expand__to_dot_is_respected() -> None: s = search.AsyncSearch().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() -async def test_execute_uses_cache(): +async def test_execute_uses_cache() -> None: s = search.AsyncSearch() r = object() s._response = r @@ -36,20 +37,20 @@ async def test_execute_uses_cache(): assert r is await s.execute() -async def test_cache_isnt_cloned(): +async def test_cache_isnt_cloned() -> None: s = search.AsyncSearch() s._response = object() assert not hasattr(s._clone(), "_response") -async def test_search_starts_with_no_query(): +async def test_search_starts_with_no_query() -> None: s = search.AsyncSearch() assert s.query._proxied is None -async def test_search_query_combines_query(): +async def test_search_query_combines_query() -> None: s = search.AsyncSearch() s2 = s.query("match", f=42) @@ -61,7 +62,7 @@ async def test_search_query_combines_query(): assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)]) -async def test_query_can_be_assigned_to(): +async def test_query_can_be_assigned_to() -> None: s = search.AsyncSearch() q = Q("match", title="python") @@ -85,7 +86,7 @@ async def test_query_can_be_wrapped(): } == s.to_dict() -async def test_using(): +async def test_using() -> None: o = object() o2 = object() s = search.AsyncSearch(using=o) @@ -95,19 +96,19 @@ async def test_using(): assert s2._using is o2 -async def test_methods_are_proxied_to_the_query(): +async def test_methods_are_proxied_to_the_query() -> None: s = search.AsyncSearch().query("match_all") assert s.query.to_dict() == {"match_all": {}} -async def test_query_always_returns_search(): +async def test_query_always_returns_search() -> None: s = search.AsyncSearch() assert isinstance(s.query("match", f=42), search.AsyncSearch) -async def test_source_copied_on_clone(): +async def test_source_copied_on_clone() -> None: s = search.AsyncSearch().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -121,7 +122,7 @@ async def test_source_copied_on_clone(): assert s3._clone()._source == ["some", "fields"] -async def test_copy_clones(): +async def test_copy_clones() -> None: from copy import copy s1 = search.AsyncSearch().source(["some", "fields"]) @@ -131,7 +132,7 @@ async def test_copy_clones(): assert s1 is not s2 -async def test_aggs_allow_two_metric(): +async def test_aggs_allow_two_metric() -> None: s = search.AsyncSearch() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -173,7 +174,7 @@ async def test_aggs_get_copied_on_change(): assert d == s4.to_dict() -async def 
test_search_index(): +async def test_search_index() -> None: s = search.AsyncSearch(index="i") assert s._index == ["i"] s = s.index("i2") @@ -204,7 +205,7 @@ async def test_search_index(): assert s2._index == ["i", "i2", "i3", "i4", "i5"] -async def test_doc_type_document_class(): +async def test_doc_type_document_class() -> None: class MyDocument(AsyncDocument): pass @@ -229,7 +230,7 @@ async def test_sort(): assert search.AsyncSearch().to_dict() == s.to_dict() -async def test_sort_by_score(): +async def test_sort_by_score() -> None: s = search.AsyncSearch() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -239,7 +240,7 @@ async def test_sort_by_score(): s.sort("-_score") -async def test_slice(): +async def test_slice() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -248,7 +249,7 @@ async def test_slice(): assert {"from": 20, "size": 0} == s[20:0].to_dict() -async def test_index(): +async def test_index() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 1} == s[3].to_dict() @@ -383,13 +384,13 @@ async def test_reverse(): assert d == s.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: s = search.AsyncSearch.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() -async def test_source(): +async def test_source() -> None: assert {} == search.AsyncSearch().source().to_dict() assert { @@ -420,7 +421,7 @@ async def test_source_on_clone(): } == search.AsyncSearch().source(False).filter("term", title="python").to_dict() -async def test_source_on_clear(): +async def test_source_on_clear() -> None: assert ( {} == search.AsyncSearch() @@ -461,7 +462,7 @@ async def test_suggest(): } == s.to_dict() -async def test_exclude(): +async def test_exclude() -> None: s = search.AsyncSearch() s = s.exclude("match", title="python") diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index 340bd1b7..b15983dc 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -11,15 +11,16 @@ from copy import deepcopy import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Q from opensearchpy._async.helpers import update_by_query from opensearchpy.helpers.response import UpdateByQueryResponse -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_ubq_starts_with_no_query(): +async def test_ubq_starts_with_no_query() -> None: ubq = update_by_query.AsyncUpdateByQuery() assert ubq.query._proxied is None @@ -81,7 +82,7 @@ async def test_complex_example(): } == ubq.to_dict() -async def test_exclude(): +async def test_exclude() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -130,7 +131,7 @@ async def test_reverse(): assert d == ubq.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: ubq = update_by_query.AsyncUpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() @@ -152,7 +153,7 @@ async def test_overwrite_script(): assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict() -async def test_update_by_query_response_success(): +async def test_update_by_query_response_success() -> None: ubqr = 
UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 282a61c7..913a944d 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -28,29 +28,30 @@ import mock import pytest +from _pytest.mark.structures import MarkDecorator from multidict import CIMultiDict from opensearchpy._async._extra_imports import aiohttp from opensearchpy._async.compat import get_running_loop from opensearchpy.connection.http_async import AsyncHttpConnection -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncHttpConnection: - def test_auth_as_tuple(self): + def test_auth_as_tuple(self) -> None: c = AsyncHttpConnection(http_auth=("username", "password")) assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_string(self): + def test_auth_as_string(self) -> None: c = AsyncHttpConnection(http_auth="username:password") assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_callable(self): + def test_auth_as_callable(self) -> None: def auth_fn(): pass @@ -58,7 +59,7 @@ def auth_fn(): assert callable(c._http_auth) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_basicauth_in_request_session(self, mock_request): + async def test_basicauth_in_request_session(self, mock_request) -> None: async def do_request(*args, **kwargs): response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() @@ -89,7 +90,7 @@ async def do_request(*args, **kwargs): ) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_callable_in_request_session(self, mock_request): + async def test_callable_in_request_session(self, mock_request) -> None: def auth_fn(*args, **kwargs): return { "Test": "PASSED", diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index c620873c..2364f0fa 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -14,7 +14,7 @@ class TestPluginsClient(TestCase): - async def test_plugins_client(self): + async def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = AsyncOpenSearch() client.plugins.__init__(client) # double-init diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 794aeb53..36571a71 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -35,13 +35,13 @@ class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): - async def asyncSetUp(self): + async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self): + async def asyncTearDown(self) -> None: wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 2c49aca3..908313ee 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ 
b/test_opensearchpy/test_async/test_server/conftest.py @@ -29,13 +29,14 @@ import asyncio import pytest +from _pytest.mark.structures import MarkDecorator import opensearchpy from opensearchpy.helpers.test import OPENSEARCH_URL from ...utils import wipe_cluster -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio @pytest.fixture(scope="function") diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 17104312..41a07012 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -29,24 +29,25 @@ from __future__ import unicode_literals import pytest +from _pytest.mark.structures import MarkDecorator -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestUnicode: - async def test_indices_analyze(self, async_client): + async def test_indices_analyze(self, async_client) -> None: await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: - async def test_bulk_works_with_string_body(self, async_client): + async def test_bulk_works_with_string_body(self, async_client) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 - async def test_bulk_works_with_bytestring_body(self, async_client): + async def test_bulk_works_with_bytestring_body(self, async_client) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -57,7 +58,7 @@ async def test_bulk_works_with_bytestring_body(self, async_client): class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( self, async_client, monkeypatch - ): + ) -> None: # This is a defensive test case for if aiohttp suddenly stops using yarl. 
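# Illustrative aside on the async server test cases annotated earlier in this
# patch (AsyncOpenSearchTestCase and friends): they follow unittest's
# IsolatedAsyncioTestCase lifecycle, and the new annotations land on asyncSetUp,
# asyncTearDown and the test methods, all returning None. A self-contained
# sketch of that shape (ExampleAsyncCase and its attribute are hypothetical):
import unittest


class ExampleAsyncCase(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self) -> None:
        self.client = object()  # stands in for creating a real async client

    async def asyncTearDown(self) -> None:
        self.client = None  # stands in for closing the client

    async def test_client_is_set_up(self) -> None:
        self.assertIsNotNone(self.client)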
from opensearchpy._async import http_aiohttp diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index e6d79c46..36ea7a10 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -96,7 +96,7 @@ async def pull_request(write_client): @fixture -async def setup_ubq_tests(client): +async def setup_ubq_tests(client) -> str: index = "test-git" await create_git_index(client, index) await async_bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 425eb2c7..dee69819 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -27,6 +27,7 @@ import asyncio +from typing import Tuple import pytest from mock import MagicMock, patch @@ -48,8 +49,11 @@ def __await__(self): class FailingBulkClient(object): def __init__( - self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) - ): + self, + client, + fail_at: Tuple[int] = (2,), + fail_with=TransportError(599, "Error!", {}), + ) -> None: self.client = client self._called = 0 self._fail_at = fail_at @@ -64,7 +68,7 @@ async def bulk(self, *args, **kwargs): class TestStreamingBulk(object): - async def test_actions_remain_unchanged(self, async_client): + async def test_actions_remain_unchanged(self, async_client) -> None: actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" @@ -72,7 +76,7 @@ async def test_actions_remain_unchanged(self, async_client): assert ok assert [{"_id": 1}, {"_id": 2}] == actions1 - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -118,7 +122,9 @@ def sync_gen(): "_source" ] - async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client): + async def test_all_errors_from_chunk_are_raised_on_failure( + self, async_client + ) -> None: await async_client.indices.create( "i", { @@ -187,7 +193,7 @@ async def test_transport_error_can_becaught(self, async_client): } } == results[1][1] - async def test_rejected_documents_are_retried(self, async_client): + async def test_rejected_documents_are_retried(self, async_client) -> None: failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -217,7 +223,7 @@ async def test_rejected_documents_are_retried(self, async_client): async def test_rejected_documents_are_retried_at_most_max_retries_times( self, async_client - ): + ) -> None: failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -246,7 +252,9 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( assert {"value": 2, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called - async def test_transport_error_is_raised_with_max_retries(self, async_client): + async def test_transport_error_is_raised_with_max_retries( + self, async_client + ) -> None: failing_client = FailingBulkClient( 
async_client, fail_at=(1, 2, 3, 4), @@ -272,7 +280,7 @@ async def streaming_bulk(): class TestBulk(object): - async def test_bulk_works_with_single_item(self, async_client): + async def test_bulk_works_with_single_item(self, async_client) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -285,7 +293,7 @@ async def test_bulk_works_with_single_item(self, async_client): "_source" ] - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -298,7 +306,7 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] - async def test_stats_only_reports_numbers(self, async_client): + async def test_stats_only_reports_numbers(self, async_client) -> None: docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -402,7 +410,7 @@ async def test_errors_are_collected_properly(self, async_client): class MockScroll: - def __init__(self): + def __init__(self) -> None: self.calls = [] async def __call__(self, *args, **kwargs): @@ -424,7 +432,7 @@ async def __call__(self, *args, **kwargs): class MockResponse: - def __init__(self, resp): + def __init__(self, resp) -> None: self.resp = resp async def __call__(self, *args, **kwargs): @@ -564,7 +572,7 @@ async def test_initial_search_error(self, async_client, scan_teardown): assert data == [{"search_data": 1}] assert mock_scroll.calls == [] - async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): + async def test_no_scroll_id_fast_route(self, async_client, scan_teardown) -> None: with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -776,7 +784,7 @@ async def reindex_setup(async_client): class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client, reindex_setup - ): + ) -> None: await actions.async_reindex( async_client, "test_index", @@ -795,7 +803,7 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( await async_client.get(index="prod_index", id=42) )["_source"] - async def test_reindex_accepts_a_query(self, async_client, reindex_setup): + async def test_reindex_accepts_a_query(self, async_client, reindex_setup) -> None: await actions.async_reindex( async_client, "test_index", @@ -814,7 +822,7 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await async_client.get(index="prod_index", id=42) )["_source"] - async def test_all_documents_get_moved(self, async_client, reindex_setup): + async def test_all_documents_get_moved(self, async_client, reindex_setup) -> None: await actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index bc2df5ba..99f2486d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -10,6 +10,8 @@ from __future__ import unicode_literals +from typing import Any, Dict + async 
def create_flat_git_index(client, index): # we will use user on several places @@ -1076,7 +1078,7 @@ async def create_git_index(client, index): ] -def flatten_doc(d): +def flatten_doc(d) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1085,7 +1087,7 @@ def flatten_doc(d): FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d): +def create_test_git_data(d) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 650c7b39..67982918 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -146,7 +146,7 @@ async def test_serialization(write_client): } -async def test_nested_inner_hits_are_wrapped_properly(pull_request): +async def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: history_query = Q( "nested", path="comments.history", @@ -174,7 +174,7 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request): assert "score" in history.meta -async def test_nested_inner_hits_are_deserialized_properly(pull_request): +async def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -189,7 +189,7 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request): assert isinstance(pr.comments[0].created_at, datetime) -async def test_nested_top_hits_are_wrapped_properly(pull_request): +async def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -201,7 +201,7 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request): assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -async def test_update_object_field(write_client): +async def test_update_object_field(write_client) -> None: await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -221,7 +221,7 @@ async def test_update_object_field(write_client): assert w.ranked == {"test1": 0.1, "topic2": 0.2} -async def test_update_script(write_client): +async def test_update_script(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -231,7 +231,7 @@ async def test_update_script(write_client): assert w.views == 47 -async def test_update_retry_on_conflict(write_client): +async def test_update_retry_on_conflict(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -250,7 +250,7 @@ async def test_update_retry_on_conflict(write_client): @pytest.mark.parametrize("retry_on_conflict", [None, 0]) -async def test_update_conflicting_version(write_client, retry_on_conflict): +async def test_update_conflicting_version(write_client, retry_on_conflict) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -267,7 +267,7 @@ async def test_update_conflicting_version(write_client, retry_on_conflict): ) -async def test_save_and_update_return_doc_meta(write_client): +async def test_save_and_update_return_doc_meta(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), 
_id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -291,31 +291,33 @@ async def test_save_and_update_return_doc_meta(write_client): assert resp.keys().__contains__("_version") -async def test_init(write_client): +async def test_init(write_client) -> None: await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") -async def test_get_raises_404_on_index_missing(data_client): +async def test_get_raises_404_on_index_missing(data_client) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") -async def test_get_raises_404_on_non_existent_id(data_client): +async def test_get_raises_404_on_non_existent_id(data_client) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php") -async def test_get_returns_none_if_404_ignored(data_client): +async def test_get_returns_none_if_404_ignored(data_client) -> None: assert None is await Repository.get("opensearch-dsl-php", ignore=404) -async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client): +async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( + data_client, +) -> None: assert None is await Repository.get("42", index="not-there", ignore=404) -async def test_get(data_client): +async def test_get(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -323,15 +325,15 @@ async def test_get(data_client): assert datetime(2014, 3, 3) == opensearch_repo.created_at -async def test_exists_return_true(data_client): +async def test_exists_return_true(data_client) -> None: assert await Repository.exists("opensearch-py") -async def test_exists_false(data_client): +async def test_exists_false(data_client) -> None: assert not await Repository.exists("opensearch-dsl-php") -async def test_get_with_tz_date(data_client): +async def test_get_with_tz_date(data_client) -> None: first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -343,7 +345,7 @@ async def test_get_with_tz_date(data_client): ) -async def test_save_with_tz_date(data_client): +async def test_save_with_tz_date(data_client) -> None: tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -370,7 +372,7 @@ async def test_save_with_tz_date(data_client): ] -async def test_mget(data_client): +async def test_mget(data_client) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -378,23 +380,25 @@ async def test_mget(data_client): assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_mget_raises_exception_when_missing_param_is_invalid(data_client): +async def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None: with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -async def test_mget_raises_404_when_missing_param_is_raise(data_client): +async def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None: with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -async def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client): +async def test_mget_ignores_missing_docs_when_missing_param_is_skip( + data_client, +) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, 
missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_update_works_from_search_response(data_client): +async def test_update_works_from_search_response(data_client) -> None: opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -405,7 +409,7 @@ async def test_update_works_from_search_response(data_client): assert "opensearch" == new_version.owner.name -async def test_update(data_client): +async def test_update(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -429,7 +433,7 @@ async def test_update(data_client): assert "primary_term" in new_version.meta -async def test_save_updates_existing_doc(data_client): +async def test_save_updates_existing_doc(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -442,7 +446,7 @@ async def test_save_updates_existing_doc(data_client): assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -async def test_save_automatically_uses_seq_no_and_primary_term(data_client): +async def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -450,7 +454,7 @@ async def test_save_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.save() -async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): +async def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -458,7 +462,7 @@ async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.delete() -async def assert_doc_equals(expected, actual): +async def assert_doc_equals(expected, actual) -> None: async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] @@ -479,7 +483,7 @@ async def test_can_save_to_different_index(write_client): ) -async def test_save_without_skip_empty_will_include_empty_fields(write_client): +async def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -494,7 +498,7 @@ async def test_save_without_skip_empty_will_include_empty_fields(write_client): ) -async def test_delete(write_client): +async def test_delete(write_client) -> None: await write_client.create( index="test-document", id="opensearch-py", @@ -515,11 +519,11 @@ async def test_delete(write_client): ) -async def test_search(data_client): +async def test_search(data_client) -> None: assert await Repository.search().count() == 1 -async def test_search_returns_proper_doc_classes(data_client): +async def test_search_returns_proper_doc_classes(data_client) -> None: result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -528,7 +532,7 @@ async def test_search_returns_proper_doc_classes(data_client): assert opensearch_repo.owner.name == "opensearch" -async def test_refresh_mapping(data_client): +async def test_refresh_mapping(data_client) -> None: class Commit(AsyncDocument): class Index: name = "git" @@ -542,7 +546,7 @@ class Index: assert 
isinstance(Commit._index._mapping["committed_date"], Date) -async def test_highlight_in_meta(data_client): +async def test_highlight_in_meta(data_client) -> None: commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 9f2d919b..bc7abbd8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -11,6 +11,7 @@ from datetime import datetime import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import A, Boolean, Date, Keyword from opensearchpy._async.helpers.document import AsyncDocument @@ -25,7 +26,7 @@ PullRequest, ) -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Repos(AsyncDocument): @@ -118,7 +119,7 @@ class PRSearch(AsyncFacetedSearch): return PRSearch -async def test_facet_with_custom_metric(data_client): +async def test_facet_with_custom_metric(data_client) -> None: ms = MetricSearch() r = await ms.execute() @@ -127,7 +128,7 @@ async def test_facet_with_custom_metric(data_client): assert dates[0] == 1399038439000 -async def test_nested_facet(pull_request, pr_search_cls): +async def test_nested_facet(pull_request, pr_search_cls) -> None: prs = pr_search_cls() r = await prs.execute() @@ -135,7 +136,7 @@ async def test_nested_facet(pull_request, pr_search_cls): assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -async def test_nested_facet_with_filter(pull_request, pr_search_cls): +async def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -147,7 +148,7 @@ async def test_nested_facet_with_filter(pull_request, pr_search_cls): assert not r.hits -async def test_datehistogram_facet(data_client, repo_search_cls): +async def test_datehistogram_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = await rs.execute() @@ -155,7 +156,7 @@ async def test_datehistogram_facet(data_client, repo_search_cls): assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -async def test_boolean_facet(data_client, repo_search_cls): +async def test_boolean_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = await rs.execute() @@ -167,7 +168,7 @@ async def test_boolean_facet(data_client, repo_search_cls): async def test_empty_search_finds_everything( data_client, opensearch_version, commit_search_cls -): +) -> None: cs = commit_search_cls() r = await cs.execute() assert r.hits.total.value == 52 @@ -213,7 +214,7 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = await cs.execute() @@ -259,7 +260,7 @@ async def test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"deletions": "better"}) r = await cs.execute() @@ -267,7 +268,7 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -async def test_pagination(data_client, commit_search_cls): +async def 
test_pagination(data_client, commit_search_cls) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index cc489052..f11e6d3f 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -9,13 +9,14 @@ # GitHub history for details. import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Date, Text from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex, AsyncIndexTemplate from opensearchpy.helpers import analysis -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -23,7 +24,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_index_template_works(write_client): +async def test_index_template_works(write_client) -> None: it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -44,7 +45,7 @@ async def test_index_template_works(write_client): } == await write_client.indices.get_mapping(index="test-blog") -async def test_index_can_be_saved_even_with_settings(write_client): +async def test_index_can_be_saved_even_with_settings(write_client) -> None: i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -59,12 +60,12 @@ async def test_index_can_be_saved_even_with_settings(write_client): ) -async def test_index_exists(data_client): +async def test_index_exists(data_client) -> None: assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() -async def test_index_can_be_created_with_settings_and_mappings(write_client): +async def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -89,7 +90,7 @@ async def test_index_can_be_created_with_settings_and_mappings(write_client): } -async def test_delete(write_client): +async def test_delete(write_client) -> None: await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -100,7 +101,7 @@ async def test_delete(write_client): assert not await write_client.indices.exists(index="test-index") -async def test_multiple_indices_with_same_doc_type_work(write_client): +async def test_multiple_indices_with_same_doc_type_work(write_client) -> None: i1 = AsyncIndex("test-index-1", using=write_client) i2 = AsyncIndex("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 1dca7959..6be391b3 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -9,16 +9,17 @@ # GitHub history for details. 
import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import exceptions from opensearchpy._async.helpers import mapping from opensearchpy.helpers import analysis -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_mapping_saved_into_opensearch(write_client): +async def test_mapping_saved_into_opensearch(write_client) -> None: m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -40,7 +41,7 @@ async def test_mapping_saved_into_opensearch(write_client): async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( write_client, -): +) -> None: m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -65,7 +66,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client, -): +) -> None: m = mapping.AsyncMapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 8eb202f7..2b995c54 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -11,6 +11,7 @@ from __future__ import unicode_literals import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import Date, Keyword, Q, Text, TransportError @@ -19,7 +20,7 @@ from opensearchpy.helpers.response import aggs from test_opensearchpy.test_async.test_server.test_helpers.test_data import FLAT_DATA -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Repository(AsyncDocument): @@ -40,7 +41,7 @@ class Index: name = "flat-git" -async def test_filters_aggregation_buckets_are_accessible(data_client): +async def test_filters_aggregation_buckets_are_accessible(data_client) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -61,7 +62,7 @@ async def test_filters_aggregation_buckets_are_accessible(data_client): ) -async def test_top_hits_are_wrapped_in_response(data_client): +async def test_top_hits_are_wrapped_in_response(data_client) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -77,7 +78,7 @@ async def test_top_hits_are_wrapped_in_response(data_client): assert isinstance(hits[0], Commit) -async def test_inner_hits_are_wrapped_in_response(data_client): +async def test_inner_hits_are_wrapped_in_response(data_client) -> None: s = AsyncSearch(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -88,7 +89,7 @@ async def test_inner_hits_are_wrapped_in_response(data_client): assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch-py): ") -async def test_scan_respects_doc_types(data_client): +async def test_scan_respects_doc_types(data_client) -> None: result = Repository.search().scan() repos = await get_result(result) @@ -97,7 +98,7 @@ async def test_scan_respects_doc_types(data_client): assert repos[0].organization == "opensearch" -async def test_scan_iterates_through_all_docs(data_client): +async def test_scan_iterates_through_all_docs(data_client) -> None:
s = AsyncSearch(index="flat-git") result = s.scan() commits = await get_result(result) @@ -113,7 +114,7 @@ async def get_result(b): return a -async def test_multi_search(data_client): +async def test_multi_search(data_client) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") @@ -130,7 +131,7 @@ async def test_multi_search(data_client): assert r2._search is s2 -async def test_multi_missing(data_client): +async def test_multi_missing(data_client) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") s3 = AsyncSearch(index="does_not_exist") @@ -153,7 +154,7 @@ async def test_multi_missing(data_client): assert r3 is None -async def test_raw_subfield_can_be_used_in_aggs(data_client): +async def test_raw_subfield_can_be_used_in_aggs(data_client) -> None: s = AsyncSearch(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) r = await s.execute() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 2db68326..4dcf32b3 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -9,14 +9,15 @@ # GitHub history for details. import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery from opensearchpy.helpers.search import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_update_by_query_no_script(write_client, setup_ubq_tests): +async def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -35,7 +36,7 @@ async def test_update_by_query_no_script(write_client, setup_ubq_tests): assert response.success() -async def test_update_by_query_with_script(write_client, setup_ubq_tests): +async def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -52,7 +53,7 @@ async def test_update_by_query_with_script(write_client, setup_ubq_tests): assert response.version_conflicts == 0 -async def test_delete_by_query_with_script(write_client, setup_ubq_tests): +async def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 2ef87bd3..88b792db 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -14,12 +14,13 @@ import unittest import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy.helpers.test import OPENSEARCH_VERSION from .. 
import AsyncOpenSearchTestCase -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAlertingPlugin(AsyncOpenSearchTestCase): @@ -43,7 +44,7 @@ async def test_create_destination(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_get_destination(self): + async def test_get_destination(self) -> None: # Create a dummy destination await self.test_create_destination() @@ -123,7 +124,7 @@ async def test_create_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_search_monitor(self): + async def test_search_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() @@ -141,7 +142,7 @@ async def test_search_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_get_monitor(self): + async def test_get_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() @@ -165,7 +166,7 @@ async def test_get_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_run_monitor(self): + async def test_run_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index d4379648..4f5fcfa1 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -12,12 +12,13 @@ from __future__ import unicode_literals import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy.exceptions import NotFoundError from .. 
import AsyncOpenSearchTestCase -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestIndexManagementPlugin(AsyncOpenSearchTestCase): @@ -68,7 +69,7 @@ class TestIndexManagementPlugin(AsyncOpenSearchTestCase): } } - async def test_create_policy(self): + async def test_create_policy(self) -> None: # Test to create policy response = await self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -77,7 +78,7 @@ async def test_create_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - async def test_get_policy(self): + async def test_get_policy(self) -> None: # Create a policy await self.test_create_policy() @@ -88,7 +89,7 @@ async def test_get_policy(self): self.assertIn("_id", response) self.assertEqual(response["_id"], self.POLICY_NAME) - async def test_update_policy(self): + async def test_update_policy(self) -> None: # Create a policy await self.test_create_policy() @@ -110,7 +111,7 @@ async def test_update_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - async def test_delete_policy(self): + async def test_delete_policy(self) -> None: # Create a policy await self.test_create_policy() diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index bb8509dc..0efcd25e 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -35,6 +35,7 @@ import warnings import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import OpenSearchWarning from opensearchpy.helpers.test import _get_version @@ -47,7 +48,7 @@ YamlRunner, ) -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio OPENSEARCH_VERSION = None @@ -77,7 +78,7 @@ async def setup(self): if self._setup_code: await self.run_code(self._setup_code) - async def teardown(self): + async def teardown(self) -> None: if self._teardown_code: self.section("teardown") await self.run_code(self._teardown_code) @@ -92,10 +93,10 @@ async def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version) return OPENSEARCH_VERSION - def section(self, name): + def section(self, name) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - async def run(self): + async def run(self) -> None: try: await self.setup() self.section("test") @@ -106,7 +107,7 @@ async def run(self): except Exception: pass - async def run_code(self, test): + async def run_code(self, test) -> None: """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 @@ -118,7 +119,7 @@ async def run_code(self, test): else: raise RuntimeError("Invalid action type %r" % (action_type,)) - async def run_do(self, action): + async def run_do(self, action) -> None: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -184,7 +185,7 @@ async def run_do(self, action): % (warn, caught_warnings) ) - async def run_skip(self, skip): + async def run_skip(self, skip) -> None: if "features" in skip: features = skip["features"] if not isinstance(features, (tuple, list)): @@ -204,7 +205,7 @@ async def run_skip(self, skip): if min_version <= (await self.opensearch_version()) <= max_version: pytest.skip(reason) - async def _feature_enabled(self, name): + async def _feature_enabled(self, name) -> bool: return False @@ -216,7 +217,7 @@ def 
async_runner(async_client): if RUN_ASYNC_REST_API_TESTS: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - async def test_rest_api_spec(test_spec, async_runner): + async def test_rest_api_spec(test_spec, async_runner) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 9b1f7a5f..9fe8d9d1 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -14,12 +14,13 @@ from unittest import IsolatedAsyncioTestCase import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.test import get_test_client from opensearchpy.connection.async_connections import add_connection from opensearchpy.exceptions import NotFoundError -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestSecurityPlugin(IsolatedAsyncioTestCase): @@ -40,17 +41,17 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase): USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - async def asyncSetUp(self): + async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self): + async def asyncTearDown(self) -> None: if self.client: await self.client.close() - async def test_create_role(self): + async def test_create_role(self) -> None: # Test to create role response = await self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -59,7 +60,7 @@ async def test_create_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_create_role_with_body_param_empty(self): + async def test_create_role_with_body_param_empty(self) -> None: try: await self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -67,7 +68,7 @@ async def test_create_role_with_body_param_empty(self): else: assert False - async def test_get_role(self): + async def test_get_role(self) -> None: # Create a role await self.test_create_role() @@ -77,7 +78,7 @@ async def test_get_role(self): self.assertNotIn("errors", response) self.assertIn(self.ROLE_NAME, response) - async def test_update_role(self): + async def test_update_role(self) -> None: # Create a role await self.test_create_role() @@ -92,7 +93,7 @@ async def test_update_role(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_delete_role(self): + async def test_delete_role(self) -> None: # Create a role await self.test_create_role() @@ -105,7 +106,7 @@ async def test_delete_role(self): with self.assertRaises(NotFoundError): response = await self.client.security.get_role(self.ROLE_NAME) - async def test_create_user(self): + async def test_create_user(self) -> None: # Test to create user response = await self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -114,7 +115,7 @@ async def test_create_user(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_create_user_with_body_param_empty(self): + async def test_create_user_with_body_param_empty(self) -> None: try: await 
self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -137,7 +138,7 @@ async def test_create_user_with_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_get_user(self): + async def test_get_user(self) -> None: # Create a user await self.test_create_user() @@ -147,7 +148,7 @@ async def test_get_user(self): self.assertNotIn("errors", response) self.assertIn(self.USER_NAME, response) - async def test_update_user(self): + async def test_update_user(self) -> None: # Create a user await self.test_create_user() @@ -162,7 +163,7 @@ async def test_update_user(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_delete_user(self): + async def test_delete_user(self) -> None: # Create a user await self.test_create_user() @@ -175,12 +176,12 @@ async def test_delete_user(self): with self.assertRaises(NotFoundError): response = await self.client.security.get_user(self.USER_NAME) - async def test_health_check(self): + async def test_health_check(self) -> None: response = await self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) - async def test_health(self): + async def test_health(self) -> None: response = await self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -213,14 +214,14 @@ async def test_health(self): }, } - async def test_update_audit_config(self): + async def test_update_audit_config(self) -> None: response = await self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_update_audit_configuration(self): + async def test_update_audit_configuration(self) -> None: response = await self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 84458c9e..50d734bc 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -11,9 +11,10 @@ import uuid import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncSigner: @@ -30,7 +31,7 @@ def mock_session(self): return dummy_session - async def test_aws_signer_async_as_http_auth(self): + async def test_aws_signer_async_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -41,7 +42,7 @@ async def test_aws_signer_async_as_http_auth(self): assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers - async def test_aws_signer_async_when_region_is_null(self): + async def test_aws_signer_async_when_region_is_null(self) -> None: session = self.mock_session() from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -54,7 +55,7 @@ async def test_aws_signer_async_when_region_is_null(self): AWSV4SignerAsyncAuth(session, "") assert str(e.value) == "Region cannot be empty" - async def test_aws_signer_async_when_credentials_is_null(self): + async def test_aws_signer_async_when_credentials_is_null(self) -> None: region = "us-west-1" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -63,7 +64,7 @@ async def 
test_aws_signer_async_when_credentials_is_null(self): AWSV4SignerAsyncAuth(None, region) assert str(e.value) == "Credentials cannot be empty" - async def test_aws_signer_async_when_service_is_specified(self): + async def test_aws_signer_async_when_service_is_specified(self) -> None: region = "us-west-2" service = "aoss" @@ -78,7 +79,7 @@ async def test_aws_signer_async_when_service_is_specified(self): class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner): - def mock_session(self, disable_get_frozen=True): + def mock_session(self, disable_get_frozen: bool = True): access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -90,7 +91,7 @@ def mock_session(self, disable_get_frozen=True): return dummy_session - async def test_aws_signer_async_frozen_credentials_as_http_auth(self): + async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index fc018e43..4dabee05 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -30,8 +30,10 @@ import asyncio import json +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from mock import patch from opensearchpy import AIOHttpConnection, AsyncTransport @@ -39,11 +41,11 @@ from opensearchpy.connection_pool import DummyConnectionPool from opensearchpy.exceptions import ConnectionError, TransportError -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class DummyConnection(Connection): - def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) @@ -52,7 +54,7 @@ def __init__(self, **kwargs): self.closed = False super(DummyConnection, self).__init__(**kwargs) - async def perform_request(self, *args, **kwargs): + async def perform_request(self, *args, **kwargs) -> Any: if self.closed: raise RuntimeError("This connection is closed") if self.delay: @@ -62,7 +64,7 @@ async def perform_request(self, *args, **kwargs): raise self.exception return self.status, self.headers, self.data - async def close(self): + async def close(self) -> None: if self.closed: raise RuntimeError("This connection is already closed") self.closed = True @@ -120,7 +122,7 @@ async def close(self): class TestTransport: - async def test_single_connection_uses_dummy_connection_pool(self): + async def test_single_connection_uses_dummy_connection_pool(self) -> None: t = AsyncTransport([{}]) await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) @@ -128,7 +130,7 @@ async def test_single_connection_uses_dummy_connection_pool(self): await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) - async def test_request_timeout_extracted_from_params_and_passed(self): + async def test_request_timeout_extracted_from_params_and_passed(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -140,7 +142,7 @@ async def test_request_timeout_extracted_from_params_and_passed(self): "headers": None, } == t.get_connection().calls[0][1] - async def test_timeout_extracted_from_params_and_passed(self): + async def 
test_timeout_extracted_from_params_and_passed(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) @@ -187,7 +189,7 @@ async def test_request_with_custom_user_agent_header(self): "headers": {"user-agent": "my-custom-value/1.2.3"}, } == t.get_connection().calls[0][1] - async def test_send_get_body_as_source(self): + async def test_send_get_body_as_source(self) -> None: t = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -196,7 +198,7 @@ async def test_send_get_body_as_source(self): assert 1 == len(t.get_connection().calls) assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] - async def test_send_get_body_as_post(self): + async def test_send_get_body_as_post(self) -> None: t = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -205,7 +207,7 @@ async def test_send_get_body_as_post(self): assert 1 == len(t.get_connection().calls) assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] - async def test_body_gets_encoded_into_bytes(self): + async def test_body_gets_encoded_into_bytes(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") @@ -217,7 +219,7 @@ async def test_body_gets_encoded_into_bytes(self): b"\xe4\xbd\xa0\xe5\xa5\xbd", ) == t.get_connection().calls[0][0] - async def test_body_bytes_get_passed_untouched(self): + async def test_body_bytes_get_passed_untouched(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -225,7 +227,7 @@ async def test_body_bytes_get_passed_untouched(self): assert 1 == len(t.get_connection().calls) assert ("GET", "/", None, body) == t.get_connection().calls[0][0] - async def test_body_surrogates_replaced_encoded_into_bytes(self): + async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好\uda6a") @@ -237,19 +239,19 @@ async def test_body_surrogates_replaced_encoded_into_bytes(self): b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa", ) == t.get_connection().calls[0][0] - async def test_kwargs_passed_on_to_connections(self): + async def test_kwargs_passed_on_to_connections(self) -> None: t = AsyncTransport([{"host": "google.com"}], port=123) await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host - async def test_kwargs_passed_on_to_connection_pool(self): + async def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() t = AsyncTransport([{}, {}], dead_timeout=dt) await t._async_call() assert dt is t.connection_pool.dead_timeout - async def test_custom_connection_class(self): + async def test_custom_connection_class(self) -> None: class MyConnection(object): def __init__(self, **kwargs): self.kwargs = kwargs @@ -259,14 +261,14 @@ def __init__(self, **kwargs): assert 1 == len(t.connection_pool.connections) assert isinstance(t.connection_pool.connections[0], MyConnection) - async def test_add_connection(self): + async def test_add_connection(self) -> None: t = AsyncTransport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host - async def test_request_will_fail_after_X_retries(self): + async 
def test_request_will_fail_after_X_retries(self) -> None: t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, @@ -281,7 +283,7 @@ async def test_request_will_fail_after_X_retries(self): assert connection_error assert 4 == len(t.get_connection().calls) - async def test_failed_connection_will_be_marked_as_dead(self): + async def test_failed_connection_will_be_marked_as_dead(self) -> None: t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, @@ -296,7 +298,9 @@ async def test_failed_connection_will_be_marked_as_dead(self): assert connection_error assert 0 == len(t.connection_pool.connections) - async def test_resurrected_connection_will_be_marked_as_live_on_success(self): + async def test_resurrected_connection_will_be_marked_as_live_on_success( + self, + ) -> None: for method in ("GET", "HEAD"): t = AsyncTransport([{}, {}], connection_class=DummyConnection) await t._async_call() @@ -309,7 +313,7 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success(self): assert 1 == len(t.connection_pool.connections) assert 1 == len(t.connection_pool.dead_count) - async def test_sniff_will_use_seed_connections(self): + async def test_sniff_will_use_seed_connections(self) -> None: t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) await t._async_call() t.set_connections([{"data": "invalid"}]) @@ -318,7 +322,7 @@ async def test_sniff_will_use_seed_connections(self): assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host - async def test_sniff_on_start_fetches_and_uses_nodes_list(self): + async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -330,7 +334,7 @@ async def test_sniff_on_start_fetches_and_uses_nodes_list(self): assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host - async def test_sniff_on_start_ignores_sniff_timeout(self): + async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -344,7 +348,7 @@ async def test_sniff_on_start_ignores_sniff_timeout(self): 0 ].calls[0] - async def test_sniff_uses_sniff_timeout(self): + async def test_sniff_uses_sniff_timeout(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -412,7 +416,7 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts) assert 1 == len(conn_err.calls) assert 1 == len(conn_data.calls) - async def test_sniff_after_n_seconds(self, event_loop): + async def test_sniff_after_n_seconds(self, event_loop) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -433,7 +437,7 @@ async def test_sniff_after_n_seconds(self, event_loop): assert "http://1.1.1.1:123" == t.get_connection().host assert event_loop.time() - 1 < t.last_sniff < event_loop.time() + 0.01 - async def test_sniff_7x_publish_host(self): + async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t = AsyncTransport( @@ -449,7 +453,7 @@ async def test_sniff_7x_publish_host(self): "port": 123, } - async def test_transport_close_closes_all_pool_connections(self): + async def test_transport_close_closes_all_pool_connections(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t._async_call() @@ -464,7 +468,7 @@ async def test_transport_close_closes_all_pool_connections(self): await t.close() assert all([conn.closed for conn in t.connection_pool.connections]) - async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop): + async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop) -> None: t = AsyncTransport( [ {"data": ""}, @@ -544,7 +548,7 @@ async def test_sniff_on_start_close_unlocks_async_calls(self, event_loop): # A lot quicker than 10 seconds defined in 'delay' assert duration < 1 - async def test_init_connection_pool_with_many_hosts(self): + async def test_init_connection_pool_with_many_hosts(self) -> None: """ Check init of connection pool with multiple connections. @@ -562,7 +566,7 @@ async def test_init_connection_pool_with_many_hosts(self): assert len(t.connection_pool.connections) == amt_hosts await t._async_call() - async def test_init_pool_with_connection_class_to_many_hosts(self): + async def test_init_pool_with_connection_class_to_many_hosts(self) -> None: """ Check init of connection pool with user specified connection_class. diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index 2a5ad5a3..ad795bcf 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -34,7 +34,7 @@ class DummyTransport(object): - def __init__(self, hosts, responses=None, **kwargs): + def __init__(self, hosts, responses=None, **kwargs) -> None: self.hosts = hosts self.responses = responses self.call_count = 0 @@ -50,14 +50,14 @@ def perform_request(self, method, url, params=None, headers=None, body=None): class OpenSearchTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super(OpenSearchTestCase, self).setUp() self.client = OpenSearch(transport_class=DummyTransport) - def assert_call_count_equals(self, count): + def assert_call_count_equals(self, count) -> None: self.assertEqual(count, self.client.transport.call_count) - def assert_url_called(self, method, url, count=1): + def assert_url_called(self, method, url, count: int = 1): self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) @@ -65,13 +65,13 @@ def assert_url_called(self, method, url, count=1): class TestOpenSearchTestCase(OpenSearchTestCase): - def test_our_transport_used(self): + def test_our_transport_used(self) -> None: self.assertIsInstance(self.client.transport, DummyTransport) - def test_start_with_0_call(self): + def test_start_with_0_call(self) -> None: self.assert_call_count_equals(0) - def test_each_call_is_recorded(self): + def test_each_call_is_recorded(self) -> None: self.client.transport.perform_request("GET", "/") self.client.transport.perform_request("DELETE", "/42", params={}, body="body") self.assert_call_count_equals(2) diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index ecbd769a..3174772e 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -30,19 +30,20 @@ import warnings -from opensearchpy.client import OpenSearch, _normalize_hosts +from opensearchpy.client import OpenSearch +from 
opensearchpy.client.utils import _normalize_hosts from ..test_cases import OpenSearchTestCase, TestCase class TestNormalizeHosts(TestCase): - def test_none_uses_defaults(self): + def test_none_uses_defaults(self) -> None: self.assertEqual([{}], _normalize_hosts(None)) - def test_strings_are_used_as_hostnames(self): + def test_strings_are_used_as_hostnames(self) -> None: self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"])) - def test_strings_are_parsed_for_port_and_user(self): + def test_strings_are_parsed_for_port_and_user(self) -> None: self.assertEqual( [ {"host": "elastic.co", "port": 42}, @@ -51,7 +52,7 @@ def test_strings_are_parsed_for_port_and_user(self): _normalize_hosts(["elastic.co:42", "user:secre%5D@elastic.co"]), ) - def test_strings_are_parsed_for_scheme(self): + def test_strings_are_parsed_for_scheme(self) -> None: self.assertEqual( [ {"host": "elastic.co", "port": 42, "use_ssl": True}, @@ -68,23 +69,23 @@ def test_strings_are_parsed_for_scheme(self): ), ) - def test_dicts_are_left_unchanged(self): + def test_dicts_are_left_unchanged(self) -> None: self.assertEqual( [{"host": "local", "extra": 123}], _normalize_hosts([{"host": "local", "extra": 123}]), ) - def test_single_string_is_wrapped_in_list(self): + def test_single_string_is_wrapped_in_list(self) -> None: self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co")) class TestClient(OpenSearchTestCase): - def test_request_timeout_is_passed_through_unescaped(self): + def test_request_timeout_is_passed_through_unescaped(self) -> None: self.client.ping(request_timeout=0.1) calls = self.assert_url_called("HEAD", "/") self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls) - def test_params_is_copied_when(self): + def test_params_is_copied_when(self) -> None: rt = object() params = dict(request_timeout=rt) self.client.ping(params=params) @@ -96,7 +97,7 @@ def test_params_is_copied_when(self): ) self.assertFalse(calls[0][0] is calls[1][0]) - def test_headers_is_copied_when(self): + def test_headers_is_copied_when(self) -> None: hv = "value" headers = dict(Authentication=hv) self.client.ping(headers=headers) @@ -108,40 +109,40 @@ def test_headers_is_copied_when(self): ) self.assertFalse(calls[0][0] is calls[1][0]) - def test_from_in_search(self): + def test_from_in_search(self) -> None: self.client.search(index="i", from_=10) calls = self.assert_url_called("POST", "/i/_search") self.assertEqual([({"from": "10"}, {}, None)], calls) - def test_repr_contains_hosts(self): + def test_repr_contains_hosts(self) -> None: self.assertEqual("<OpenSearch([{}])>", repr(self.client)) - def test_repr_subclass(self): + def test_repr_subclass(self) -> None: class OtherOpenSearch(OpenSearch): pass self.assertEqual("<OtherOpenSearch([{}])>", repr(OtherOpenSearch())) - def test_repr_contains_hosts_passed_in(self): + def test_repr_contains_hosts_passed_in(self) -> None: self.assertIn("opensearchpy.org", repr(OpenSearch(["opensearch.org:123"]))) - def test_repr_truncates_host_to_5(self): + def test_repr_truncates_host_to_5(self) -> None: hosts = [{"host": "opensearch" + str(i)} for i in range(10)] client = OpenSearch(hosts) self.assertNotIn("opensearch5", repr(client)) self.assertIn("...", repr(client)) - def test_index_uses_post_if_id_is_empty(self): + def test_index_uses_post_if_id_is_empty(self) -> None: self.client.index(index="my-index", id="", body={}) self.assert_url_called("POST", "/my-index/_doc") - def test_index_uses_put_if_id_is_not_empty(self): + def test_index_uses_put_if_id_is_not_empty(self) -> None:
self.client.index(index="my-index", id=0, body={}) self.assert_url_called("PUT", "/my-index/_doc/0") - def test_tasks_get_without_task_id_deprecated(self): + def test_tasks_get_without_task_id_deprecated(self) -> None: warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get() @@ -155,7 +156,7 @@ def test_tasks_get_without_task_id_deprecated(self): "and will be removed in v8.0. Use client.tasks.list() instead.", ) - def test_tasks_get_with_task_id_not_deprecated(self): + def test_tasks_get_with_task_id_not_deprecated(self) -> None: warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get("task-1") diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index a66072cd..f170a448 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ b/test_opensearchpy/test_client/test_cluster.py @@ -30,18 +30,18 @@ class TestCluster(OpenSearchTestCase): - def test_stats_without_node_id(self): + def test_stats_without_node_id(self) -> None: self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") - def test_stats_with_node_id(self): + def test_stats_with_node_id(self) -> None: self.client.cluster.stats("node-1") self.assert_url_called("GET", "/_cluster/stats/nodes/node-1") self.client.cluster.stats(node_id="node-2") self.assert_url_called("GET", "/_cluster/stats/nodes/node-2") - def test_state_with_index_without_metric_defaults_to_all(self): + def test_state_with_index_without_metric_defaults_to_all(self) -> None: self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index f3e48f1b..668eebd7 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -30,19 +30,19 @@ class TestIndices(OpenSearchTestCase): - def test_create_one_index(self): + def test_create_one_index(self) -> None: self.client.indices.create("test-index") self.assert_url_called("PUT", "/test-index") - def test_delete_multiple_indices(self): + def test_delete_multiple_indices(self) -> None: self.client.indices.delete(["test-index", "second.index", "third/index"]) self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex") - def test_exists_index(self): + def test_exists_index(self) -> None: self.client.indices.exists("second.index,third/index") self.assert_url_called("HEAD", "/second.index,third%2Findex") - def test_passing_empty_value_for_required_param_raises_exception(self): + def test_passing_empty_value_for_required_param_raises_exception(self) -> None: self.assertRaises(ValueError, self.client.indices.exists, index=None) self.assertRaises(ValueError, self.client.indices.exists, index=[]) self.assertRaises(ValueError, self.client.indices.exists, index="") diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py index 4ce0931e..16cb3ab4 100644 --- a/test_opensearchpy/test_client/test_overrides.py +++ b/test_opensearchpy/test_client/test_overrides.py @@ -32,57 +32,57 @@ class TestOverriddenUrlTargets(OpenSearchTestCase): - def test_create(self): + def test_create(self) -> None: self.client.create(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_create/test-id") - def test_delete(self): + def test_delete(self) -> None: 
self.client.delete(index="test-index", id="test-id") self.assert_url_called("DELETE", "/test-index/_doc/test-id") - def test_exists(self): + def test_exists(self) -> None: self.client.exists(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_doc/test-id") - def test_explain(self): + def test_explain(self) -> None: self.client.explain(index="test-index", id="test-id") self.assert_url_called("POST", "/test-index/_explain/test-id") - def test_get(self): + def test_get(self) -> None: self.client.get(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_doc/test-id") - def test_get_source(self): + def test_get_source(self) -> None: self.client.get_source(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_source/test-id") - def test_exists_source(self): + def test_exists_source(self) -> None: self.client.exists_source(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_source/test-id") - def test_index(self): + def test_index(self) -> None: self.client.index(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_doc") self.client.index(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_doc/test-id") - def test_termvectors(self): + def test_termvectors(self) -> None: self.client.termvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_termvectors") self.client.termvectors(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_termvectors/test-id") - def test_mtermvectors(self): + def test_mtermvectors(self) -> None: self.client.mtermvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_mtermvectors") - def test_update(self): + def test_update(self) -> None: self.client.update(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_update/test-id") - def test_cluster_state(self): + def test_cluster_state(self) -> None: self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") @@ -92,20 +92,20 @@ def test_cluster_state(self): self.client.cluster.state(index="test-index", metric="test-metric") self.assert_url_called("GET", "/_cluster/state/test-metric/test-index") - def test_cluster_stats(self): + def test_cluster_stats(self) -> None: self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") self.client.cluster.stats(node_id="test-node") self.assert_url_called("GET", "/_cluster/stats/nodes/test-node") - def test_indices_put_mapping(self): + def test_indices_put_mapping(self) -> None: self.client.indices.put_mapping(body={}) self.assert_url_called("PUT", "/_all/_mapping") self.client.indices.put_mapping(index="test-index", body={}) self.assert_url_called("PUT", "/test-index/_mapping") - def test_tasks_get(self): + def test_tasks_get(self) -> None: with pytest.warns(DeprecationWarning): self.client.tasks.get() diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index a59ad04e..482a4224 100644 --- a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -12,41 +12,41 @@ class TestAlerting(OpenSearchTestCase): - def test_create_monitor(self): + def test_create_monitor(self) -> None: # Test Post Method self.client.alerting.create_monitor({}) self.assert_url_called("POST", "/_plugins/_alerting/monitors") - def test_run_monitor(self): + 
def test_run_monitor(self) -> None: self.client.alerting.run_monitor("...") self.assert_url_called("POST", "/_plugins/_alerting/monitors/.../_execute") - def test_get_monitor(self): + def test_get_monitor(self) -> None: # Test Get Method self.client.alerting.get_monitor("...") self.assert_url_called("GET", "/_plugins/_alerting/monitors/...") - def test_search_monitor(self): + def test_search_monitor(self) -> None: # Test Search Method self.client.alerting.search_monitor({}) self.assert_url_called("GET", "/_plugins/_alerting/monitors/_search") - def test_update_monitor(self): + def test_update_monitor(self) -> None: # Test Update Method self.client.alerting.update_monitor("...") self.assert_url_called("PUT", "/_plugins/_alerting/monitors/...") - def test_delete_monitor(self): + def test_delete_monitor(self) -> None: # Test Delete Method self.client.alerting.delete_monitor("...") self.assert_url_called("DELETE", "/_plugins/_alerting/monitors/...") - def test_create_destination(self): + def test_create_destination(self) -> None: # Test Post Method self.client.alerting.create_destination({}) self.assert_url_called("POST", "/_plugins/_alerting/destinations") - def test_get_destination(self): + def test_get_destination(self) -> None: # Test Get Method # Get a specific destination @@ -57,21 +57,21 @@ def test_get_destination(self): self.client.alerting.get_destination() self.assert_url_called("GET", "/_plugins/_alerting/destinations") - def test_update_destination(self): + def test_update_destination(self) -> None: # Test Update Method self.client.alerting.update_destination("...") self.assert_url_called("PUT", "/_plugins/_alerting/destinations/...") - def test_delete_destination(self): + def test_delete_destination(self) -> None: # Test Delete Method self.client.alerting.delete_destination("...") self.assert_url_called("DELETE", "/_plugins/_alerting/destinations/...") - def test_get_alerts(self): + def test_get_alerts(self) -> None: self.client.alerting.get_alerts() self.assert_url_called("GET", "/_plugins/_alerting/monitors/alerts") - def test_acknowledge_alerts(self): + def test_acknowledge_alerts(self) -> None: self.client.alerting.acknowledge_alert("...") self.assert_url_called( "POST", "/_plugins/_alerting/monitors/.../_acknowledge/alerts" diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index 2c744e19..891d6f02 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -12,11 +12,11 @@ class TestIndexManagement(OpenSearchTestCase): - def test_create_policy(self): + def test_create_policy(self) -> None: self.client.index_management.put_policy("...") self.assert_url_called("PUT", "/_plugins/_ism/policies/...") - def test_update_policy(self): + def test_update_policy(self) -> None: self.client.index_management.put_policy( "...", params={"if_seq_no": 7, "if_primary_term": 1} ) @@ -25,33 +25,33 @@ def test_update_policy(self): self.assert_url_called("PUT", "/_plugins/_ism/policies/..."), ) - def test_add_policy(self): + def test_add_policy(self) -> None: self.client.index_management.add_policy("...") self.assert_url_called("POST", "/_plugins/_ism/add/...") - def test_get_policy(self): + def test_get_policy(self) -> None: self.client.index_management.get_policy("...") self.assert_url_called("GET", "/_plugins/_ism/policies/...") - def test_remove_policy_from_index(self): + def 
test_remove_policy_from_index(self) -> None: self.client.index_management.remove_policy_from_index("...") self.assert_url_called("POST", "/_plugins/_ism/remove/...") - def test_change_policy(self): + def test_change_policy(self) -> None: self.client.index_management.change_policy("...") self.assert_url_called("POST", "/_plugins/_ism/change_policy/...") - def test_retry(self): + def test_retry(self) -> None: self.client.index_management.retry("...") self.assert_url_called("POST", "/_plugins/_ism/retry/...") - def test_explain_index(self): + def test_explain_index(self) -> None: self.client.index_management.explain_index("...", show_policy=True) self.assertEqual( [({"show_policy": b"true"}, {}, None)], self.assert_url_called("GET", "/_plugins/_ism/explain/..."), ) - def test_delete_policy(self): + def test_delete_policy(self) -> None: self.client.index_management.delete_policy("...") self.assert_url_called("DELETE", "/_plugins/_ism/policies/...") diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index e717d9cb..d09731bf 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -14,7 +14,7 @@ class TestPluginsClient(TestCase): - def test_plugins_client(self): + def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = OpenSearch() client.plugins.__init__(client) # double-init diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 6ce12a46..30940ce4 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -12,36 +12,36 @@ class TestPointInTime(OpenSearchTestCase): - def test_create_one_point_in_time(self): + def test_create_one_point_in_time(self) -> None: index_name = "test-index" self.client.create_point_in_time(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") - def test_delete_one_point_in_time(self): + def test_delete_one_point_in_time(self) -> None: self.client.delete_point_in_time(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") - def test_delete_all_point_in_time(self): + def test_delete_all_point_in_time(self) -> None: self.client.delete_point_in_time(all=True) self.assert_url_called("DELETE", "/_search/point_in_time/_all") - def test_list_all_point_in_time(self): + def test_list_all_point_in_time(self) -> None: self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") - def test_create_pit(self): + def test_create_pit(self) -> None: index_name = "test-index" self.client.create_pit(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") - def test_delete_pit(self): + def test_delete_pit(self) -> None: self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") - def test_delete_all_pits(self): + def test_delete_all_pits(self) -> None: self.client.delete_all_pits() self.assert_url_called("DELETE", "/_search/point_in_time/_all") - def test_get_all_pits(self): + def test_get_all_pits(self) -> None: self.client.get_all_pits() self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index 92265733..a9bfc894 
100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -11,6 +11,6 @@ class TestRemoteStore(OpenSearchTestCase): - def test_remote_store_restore(self): + def test_remote_store_restore(self) -> None: self.client.remote_store.restore(body=["index-1"]) self.assert_url_called("POST", "/_remotestore/_restore") diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index 3caf8d5f..66ec8cbc 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -14,7 +14,7 @@ class TestRequests(TestCase): - def test_connection_class(self): + def test_connection_class(self) -> None: client = OpenSearch(connection_class=RequestsHttpConnection) self.assertEqual(client.transport.pool_maxsize, None) self.assertEqual(client.transport.connection_class, RequestsHttpConnection) @@ -22,7 +22,7 @@ def test_connection_class(self): client.transport.connection_pool.connections[0], RequestsHttpConnection ) - def test_pool_maxsize(self): + def test_pool_maxsize(self) -> None: client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index fa63133b..064c49cc 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -16,12 +16,12 @@ class TestUrlLib3(TestCase): - def test_default(self): + def test_default(self) -> None: client = OpenSearch() self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertEqual(client.transport.pool_maxsize, None) - def test_connection_class(self): + def test_connection_class(self) -> None: client = OpenSearch(connection_class=Urllib3HttpConnection) self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertIsInstance( @@ -31,7 +31,7 @@ def test_connection_class(self): client.transport.connection_pool.connections[0].pool, HTTPConnectionPool ) - def test_pool_maxsize(self): + def test_pool_maxsize(self) -> None: client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 888e988d..b6a034eb 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -34,14 +34,14 @@ class TestQueryParams(TestCase): - def setup_method(self, _): + def setup_method(self, _) -> None: self.calls = [] @query_params("simple_param") - def func_to_wrap(self, *args, **kwargs): + def func_to_wrap(self, *args, **kwargs) -> None: self.calls.append((args, kwargs)) - def test_handles_params(self): + def test_handles_params(self) -> None: self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, @@ -56,19 +56,19 @@ def test_handles_params(self): ], ) - def test_handles_headers(self): + def test_handles_headers(self) -> None: self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) - def test_handles_opaque_id(self): + def test_handles_opaque_id(self) -> None: self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": 
{}, "headers": {"x-opaque-id": "request-id"}})] ) - def test_handles_empty_none_and_normalization(self): + def test_handles_empty_none_and_normalization(self) -> None: self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) @@ -84,7 +84,7 @@ def test_handles_empty_none_and_normalization(self): self.func_to_wrap(headers={"X": "y"}) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) - def test_non_escaping_params(self): + def test_non_escaping_params(self) -> None: # the query_params decorator doesn't validate "timeout" it simply avoids escaping as it did self.func_to_wrap(simple_param="x", timeout="4s") self.assertEqual( @@ -109,7 +109,7 @@ def test_non_escaping_params(self): ), ) - def test_per_call_authentication(self): + def test_per_call_authentication(self) -> None: self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], @@ -154,7 +154,7 @@ def test_per_call_authentication(self): class TestMakePath(TestCase): - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) @@ -162,36 +162,36 @@ def test_handles_unicode(self): class TestEscape(TestCase): - def test_handles_ascii(self): + def test_handles_ascii(self) -> None: string = "abc123" self.assertEqual(b"abc123", _escape(string)) - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) - def test_handles_bytestring(self): + def test_handles_bytestring(self) -> None: string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): - def test_proper_bulk_body_as_string_is_not_modified(self): + def test_proper_bulk_body_as_string_is_not_modified(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, _bulk_body(None, string_body)) - def test_proper_bulk_body_as_bytestring_is_not_modified(self): + def test_proper_bulk_body_as_bytestring_is_not_modified(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) - def test_bulk_body_as_string_adds_trailing_newline(self): + def test_bulk_body_as_string_adds_trailing_newline(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', _bulk_body(None, string_body), ) - def test_bulk_body_as_bytestring_adds_trailing_newline(self): + def test_bulk_body_as_bytestring_adds_trailing_newline(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 2c0a3fef..6ba12d0d 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -46,7 +46,7 @@ class TestBaseConnection(TestCase): - def test_empty_warnings(self): + def test_empty_warnings(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as w: con._raise_warnings(()) @@ -54,7 +54,7 @@ def test_empty_warnings(self): self.assertEqual(w, []) - def 
test_raises_warnings(self): + def test_raises_warnings(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as warn: @@ -76,7 +76,7 @@ def test_raises_warnings(self): ["this is also deprecated", "guess what? deprecated"], ) - def test_raises_warnings_when_folded(self): + def test_raises_warnings_when_folded(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings( @@ -99,7 +99,7 @@ def test_ipv6_host_and_port(self): conn = Connection(**kwargs) assert conn.host == expected_host - def test_compatibility_accept_header(self): + def test_compatibility_accept_header(self) -> None: try: conn = Connection() assert "accept" not in conn.headers @@ -119,29 +119,29 @@ def test_compatibility_accept_header(self): finally: os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") - def test_ca_certs_ssl_cert_file(self): + def test_ca_certs_ssl_cert_file(self) -> None: cert = "/path/to/clientcert.pem" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_FILE", cert) assert Connection.default_ca_certs() == cert - def test_ca_certs_ssl_cert_dir(self): + def test_ca_certs_ssl_cert_dir(self) -> None: cert = "/path/to/clientcert/dir" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_DIR", cert) assert Connection.default_ca_certs() == cert - def test_ca_certs_certifi(self): + def test_ca_certs_certifi(self) -> None: import certifi assert Connection.default_ca_certs() == certifi.where() - def test_no_ca_certs(self): + def test_no_ca_certs(self) -> None: with MonkeyPatch().context() as monkeypatch: monkeypatch.setitem(sys.modules, "certifi", None) assert Connection.default_ca_certs() is None - def test_default_connection_is_returned_by_default(self): + def test_default_connection_is_returned_by_default(self) -> None: c = connections.Connections() con, con2 = object(), object() @@ -151,7 +151,7 @@ def test_default_connection_is_returned_by_default(self): assert c.get_connection() is con - def test_get_connection_created_connection_if_needed(self): + def test_get_connection_created_connection_if_needed(self) -> None: c = connections.Connections() c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -166,7 +166,7 @@ def test_get_connection_created_connection_if_needed(self): assert [{"host": "opensearch.com"}] == default.transport.hosts assert [{"host": "localhost"}] == local.transport.hosts - def test_configure_preserves_unchanged_connections(self): + def test_configure_preserves_unchanged_connections(self) -> None: c = connections.Connections() c.configure( @@ -184,7 +184,7 @@ def test_configure_preserves_unchanged_connections(self): assert new_local is local assert new_default is not default - def test_remove_connection_removes_both_conn_and_conf(self): + def test_remove_connection_removes_both_conn_and_conf(self) -> None: c = connections.Connections() c.configure( @@ -200,14 +200,14 @@ def test_remove_connection_removes_both_conn_and_conf(self): c.get_connection("local2") c.get_connection("default") - def test_create_connection_constructs_client(self): + def test_create_connection_constructs_client(self) -> None: c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) con = c.get_connection("testing") assert [{"host": "opensearch.com"}] == con.transport.hosts - def test_create_connection_adds_our_serializer(self): + def test_create_connection_adds_our_serializer(self) -> None: c = connections.Connections() c.create_connection("testing", 
hosts=["opensearch.com"]) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index c85d2efd..409981f0 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -49,7 +49,7 @@ class TestRequestsHttpConnection(TestCase): def _get_mock_connection( - self, connection_params={}, status_code=200, response_body=b"{}" + self, connection_params={}, status_code: int = 200, response_body: bytes = b"{}" ): con = RequestsHttpConnection(**connection_params) @@ -80,21 +80,21 @@ def _get_request(self, connection, *args, **kwargs): self.assertEqual(1, len(args)) return args[0] - def test_custom_http_auth_is_allowed(self): + def test_custom_http_auth_is_allowed(self) -> None: auth = AuthBase() c = RequestsHttpConnection(http_auth=auth) self.assertEqual(auth, c.session.auth) - def test_timeout_set(self): + def test_timeout_set(self) -> None: con = RequestsHttpConnection(timeout=42) self.assertEqual(42, con.timeout) - def test_opaque_id(self): + def test_opaque_id(self) -> None: con = RequestsHttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") - def test_no_http_compression(self): + def test_no_http_compression(self) -> None: con = self._get_mock_connection() self.assertFalse(con.http_compress) @@ -106,7 +106,7 @@ def test_no_http_compression(self): self.assertNotIn("content-encoding", req.headers) self.assertNotIn("accept-encoding", req.headers) - def test_http_compression(self): + def test_http_compression(self) -> None: con = self._get_mock_connection( {"http_compress": True}, ) @@ -129,7 +129,7 @@ def test_http_compression(self): self.assertNotIn("content-encoding", req.headers) self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - def test_uses_https_if_verify_certs_is_off(self): + def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( {"use_ssl": True, "url_prefix": "url", "verify_certs": False} @@ -146,20 +146,20 @@ def test_uses_https_if_verify_certs_is_off(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_uses_given_ca_certs(self): + def test_uses_given_ca_certs(self) -> None: path = "/path/to/my/ca_certs.pem" c = RequestsHttpConnection(ca_certs=path) self.assertEqual(path, c.session.verify) - def test_uses_default_ca_certs(self): + def test_uses_default_ca_certs(self) -> None: c = RequestsHttpConnection() self.assertEqual(Connection.default_ca_certs(), c.session.verify) - def test_uses_no_ca_certs(self): + def test_uses_no_ca_certs(self) -> None: c = RequestsHttpConnection(verify_certs=False) self.assertFalse(c.session.verify) - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( { @@ -177,7 +177,7 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_merge_headers(self): + def test_merge_headers(self) -> None: con = self._get_mock_connection( connection_params={"headers": {"h1": "v1", "h2": "v2"}} ) @@ -186,13 +186,13 @@ def test_merge_headers(self): self.assertEqual(req.headers["h2"], "v2p") self.assertEqual(req.headers["h3"], "v3") - def test_default_headers(self): + def 
test_default_headers(self) -> None: con = self._get_mock_connection() req = self._get_request(con, "GET", "/") self.assertEqual(req.headers["content-type"], "application/json") self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) - def test_custom_headers(self): + def test_custom_headers(self) -> None: con = self._get_mock_connection() req = self._get_request( con, @@ -206,45 +206,45 @@ def test_custom_headers(self): self.assertEqual(req.headers["content-type"], "application/x-ndjson") self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") - def test_http_auth(self): + def test_http_auth(self) -> None: con = RequestsHttpConnection(http_auth="username:secret") self.assertEqual(("username", "secret"), con.session.auth) - def test_http_auth_tuple(self): + def test_http_auth_tuple(self) -> None: con = RequestsHttpConnection(http_auth=("username", "secret")) self.assertEqual(("username", "secret"), con.session.auth) - def test_http_auth_list(self): + def test_http_auth_list(self) -> None: con = RequestsHttpConnection(http_auth=["username", "secret"]) self.assertEqual(("username", "secret"), con.session.auth) - def test_repr(self): + def test_repr(self) -> None: con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) self.assertEqual( "", repr(con) ) - def test_conflict_error_is_returned_on_409(self): + def test_conflict_error_is_returned_on_409(self) -> None: con = self._get_mock_connection(status_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") - def test_not_found_error_is_returned_on_404(self): + def test_not_found_error_is_returned_on_404(self) -> None: con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") - def test_request_error_is_returned_on_400(self): + def test_request_error_is_returned_on_400(self) -> None: con = self._get_mock_connection(status_code=400) self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") - def test_head_with_404_doesnt_get_logged(self, logger): + def test_head_with_404_doesnt_get_logged(self, logger) -> None: con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_failed_request_logs_and_traces(self, logger, tracer): + def test_failed_request_logs_and_traces(self, logger, tracer) -> None: con = self._get_mock_connection( response_body=b'{"answer": 42}', status_code=500 ) @@ -272,7 +272,7 @@ def test_failed_request_logs_and_traces(self, logger, tracer): @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_success_logs_and_traces(self, logger, tracer): + def test_success_logs_and_traces(self, logger, tracer) -> None: con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", @@ -311,7 +311,7 @@ def test_success_logs_and_traces(self, logger, tracer): self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): + def test_uncompressed_body_logged(self, logger) -> None: con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ 
-333,7 +333,7 @@ def test_uncompressed_body_logged(self, logger): self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) - def test_defaults(self): + def test_defaults(self) -> None: con = self._get_mock_connection() request = self._get_request(con, "GET", "/") @@ -341,7 +341,7 @@ def test_defaults(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_params_properly_encoded(self): + def test_params_properly_encoded(self) -> None: con = self._get_mock_connection() request = self._get_request( con, "GET", "/", params={"param": "value with spaces"} @@ -351,7 +351,7 @@ def test_params_properly_encoded(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_body_attached(self): + def test_body_attached(self) -> None: con = self._get_mock_connection() request = self._get_request(con, "GET", "/", body='{"answer": 42}') @@ -359,14 +359,14 @@ def test_body_attached(self): self.assertEqual("GET", request.method) self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - def test_http_auth_attached(self): + def test_http_auth_attached(self) -> None: con = self._get_mock_connection({"http_auth": "username:secret"}) request = self._get_request(con, "GET", "/") self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") @patch("opensearchpy.connection.base.tracer") - def test_url_prefix(self, tracer): + def test_url_prefix(self, tracer) -> None: con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 @@ -383,13 +383,13 @@ def test_url_prefix(self, tracer): tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) - def test_surrogatepass_into_bytes(self): + def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - def test_recursion_error_reraised(self): + def test_recursion_error_reraised(self) -> None: conn = RequestsHttpConnection() def send_raise(*_, **__): @@ -413,7 +413,7 @@ def mock_session(self): return dummy_session - def test_aws_signer_as_http_auth(self): + def test_aws_signer_as_http_auth(self) -> None: region = "us-west-2" import requests @@ -430,7 +430,7 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", prepared_request.headers) self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - def test_aws_signer_when_service_is_specified(self): + def test_aws_signer_when_service_is_specified(self) -> None: region = "us-west-1" service = "aoss" @@ -448,7 +448,7 @@ def test_aws_signer_when_service_is_specified(self): self.assertIn("X-Amz-Security-Token", prepared_request.headers) @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") - def test_aws_signer_signs_with_query_string(self, mock_sign): + def test_aws_signer_signs_with_query_string(self, mock_sign) -> None: region = "us-west-1" service = "aoss" @@ -470,7 +470,7 @@ def test_aws_signer_signs_with_query_string(self, mock_sign): class TestRequestsConnectionRedirect: @classmethod - def setup_class(cls): + def setup_class(cls) -> None: # Start servers cls.server1 = TestHTTPServer(port=8080) cls.server1.start() @@ -478,20 +478,20 @@ def setup_class(cls): cls.server2.start() @classmethod - def teardown_class(cls): + def 
teardown_class(cls) -> None: # Stop servers cls.server2.stop() cls.server1.stop() # allow_redirects = False - def test_redirect_failure_when_allow_redirect_false(self): + def test_redirect_failure_when_allow_redirect_false(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) with pytest.raises(TransportError) as e: conn.perform_request("GET", "/redirect", allow_redirects=False) assert e.value.status_code == 302 # allow_redirects = True (Default) - def test_redirect_success_when_allow_redirect_true(self): + def test_redirect_success_when_allow_redirect_true(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) user_agent = conn._get_default_user_agent() status, headers, data = conn.perform_request("GET", "/redirect") @@ -517,7 +517,9 @@ def mock_session(self): return dummy_session - def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: region = "us-west-2" import requests diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 929258fd..c87d8ac0 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -45,7 +45,7 @@ class TestUrllib3HttpConnection(TestCase): - def _get_mock_connection(self, connection_params={}, response_body=b"{}"): + def _get_mock_connection(self, connection_params={}, response_body: bytes = b"{}"): con = Urllib3HttpConnection(**connection_params) def _dummy_urlopen(*args, **kwargs): @@ -59,7 +59,7 @@ def _dummy_urlopen(*args, **kwargs): con.pool.urlopen = _dummy_urlopen return con - def test_ssl_context(self): + def test_ssl_context(self) -> None: try: context = ssl.create_default_context() except AttributeError: @@ -75,11 +75,11 @@ def test_ssl_context(self): self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) self.assertTrue(con.use_ssl) - def test_opaque_id(self): + def test_opaque_id(self) -> None: con = Urllib3HttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") - def test_no_http_compression(self): + def test_no_http_compression(self) -> None: con = self._get_mock_connection() self.assertFalse(con.http_compress) self.assertNotIn("accept-encoding", con.headers) @@ -92,7 +92,7 @@ def test_no_http_compression(self): self.assertNotIn("accept-encoding", kwargs["headers"]) self.assertNotIn("content-encoding", kwargs["headers"]) - def test_http_compression(self): + def test_http_compression(self) -> None: con = self._get_mock_connection({"http_compress": True}) self.assertTrue(con.http_compress) self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") @@ -119,18 +119,18 @@ def test_http_compression(self): self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") self.assertNotIn("content-encoding", kwargs["headers"]) - def test_default_user_agent(self): + def test_default_user_agent(self) -> None: con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), ) - def test_timeout_set(self): + def test_timeout_set(self) -> None: con = Urllib3HttpConnection(timeout=42) self.assertEqual(42, con.timeout) - def test_keep_alive_is_on_by_default(self): + def test_keep_alive_is_on_by_default(self) -> None: con = 
Urllib3HttpConnection() self.assertEqual( { @@ -141,7 +141,7 @@ def test_keep_alive_is_on_by_default(self): con.headers, ) - def test_http_auth(self): + def test_http_auth(self) -> None: con = Urllib3HttpConnection(http_auth="username:secret") self.assertEqual( { @@ -153,7 +153,7 @@ def test_http_auth(self): con.headers, ) - def test_http_auth_tuple(self): + def test_http_auth_tuple(self) -> None: con = Urllib3HttpConnection(http_auth=("username", "secret")) self.assertEqual( { @@ -165,7 +165,7 @@ def test_http_auth_tuple(self): con.headers, ) - def test_http_auth_list(self): + def test_http_auth_list(self) -> None: con = Urllib3HttpConnection(http_auth=["username", "secret"]) self.assertEqual( { @@ -181,7 +181,7 @@ def test_http_auth_list(self): "urllib3.HTTPConnectionPool.urlopen", return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), ) - def test_aws_signer_as_http_auth_adds_headers(self, mock_open): + def test_aws_signer_as_http_auth_adds_headers(self, mock_open) -> None: from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") @@ -197,7 +197,7 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - def test_aws_signer_as_http_auth(self): + def test_aws_signer_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -209,7 +209,7 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - def test_aws_signer_when_region_is_null(self): + def test_aws_signer_when_region_is_null(self) -> None: session = self.mock_session() from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -222,7 +222,7 @@ def test_aws_signer_when_region_is_null(self): Urllib3AWSV4SignerAuth(session, "") assert str(e.value) == "Region cannot be empty" - def test_aws_signer_when_credentials_is_null(self): + def test_aws_signer_when_credentials_is_null(self) -> None: region = "us-west-1" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -235,7 +235,7 @@ def test_aws_signer_when_credentials_is_null(self): Urllib3AWSV4SignerAuth("", region) assert str(e.value) == "Credentials cannot be empty" - def test_aws_signer_when_service_is_specified(self): + def test_aws_signer_when_service_is_specified(self) -> None: region = "us-west-1" service = "aoss" @@ -259,7 +259,7 @@ def mock_session(self): return dummy_session - def test_uses_https_if_verify_certs_is_off(self): + def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) @@ -270,7 +270,7 @@ def test_uses_https_if_verify_certs_is_off(self): self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -279,17 +279,17 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self): self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - def test_doesnt_use_https_if_not_specified(self): + def test_doesnt_use_https_if_not_specified(self) -> None: con = Urllib3HttpConnection() self.assertIsInstance(con.pool, 
urllib3.HTTPConnectionPool) - def test_no_warning_when_using_ssl_context(self): + def test_no_warning_when_using_ssl_context(self) -> None: ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: Urllib3HttpConnection(ssl_context=ctx) self.assertEqual(0, len(w)) - def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -311,21 +311,21 @@ def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): str(w[0].message), ) - def test_uses_given_ca_certs(self): + def test_uses_given_ca_certs(self) -> None: path = "/path/to/my/ca_certs.pem" c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) self.assertEqual(path, c.pool.ca_certs) - def test_uses_default_ca_certs(self): + def test_uses_default_ca_certs(self) -> None: c = Urllib3HttpConnection(use_ssl=True) self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) - def test_uses_no_ca_certs(self): + def test_uses_no_ca_certs(self) -> None: c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertIsNone(c.pool.ca_certs) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): + def test_uncompressed_body_logged(self, logger) -> None: con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -335,13 +335,13 @@ def test_uncompressed_body_logged(self, logger): self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - def test_surrogatepass_into_bytes(self): + def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - def test_recursion_error_reraised(self): + def test_recursion_error_reraised(self) -> None: conn = Urllib3HttpConnection() def urlopen_raise(*_, **__): @@ -367,7 +367,9 @@ def mock_session(self): return dummy_session - def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index f08b6f24..5630030e 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -40,16 +40,16 @@ class TestConnectionPool(TestCase): - def test_dummy_cp_raises_exception_on_more_connections(self): + def test_dummy_cp_raises_exception_on_more_connections(self) -> None: self.assertRaises(ImproperlyConfigured, DummyConnectionPool, []) self.assertRaises( ImproperlyConfigured, DummyConnectionPool, [object(), object()] ) - def test_raises_exception_when_no_connections_defined(self): + def test_raises_exception_when_no_connections_defined(self) -> None: self.assertRaises(ImproperlyConfigured, ConnectionPool, []) - def test_default_round_robin(self): + def test_default_round_robin(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) connections = set() @@ -83,7 +83,7 @@ def select(self, connections): connections.append(pool.get_connection()) self.assertEqual(connections, [x * x for x in range(100)]) - def 
test_dead_nodes_are_removed_from_active_connections(self): + def test_dead_nodes_are_removed_from_active_connections(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -92,7 +92,7 @@ def test_dead_nodes_are_removed_from_active_connections(self): self.assertEqual(1, pool.dead.qsize()) self.assertEqual((now + 60, 42), pool.dead.get()) - def test_connection_is_skipped_when_dead(self): + def test_connection_is_skipped_when_dead(self) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.mark_dead(0) @@ -101,7 +101,7 @@ def test_connection_is_skipped_when_dead(self): [pool.get_connection(), pool.get_connection(), pool.get_connection()], ) - def test_new_connection_is_not_marked_dead(self): + def test_new_connection_is_not_marked_dead(self) -> None: # Create 10 connections pool = ConnectionPool([(Connection(), {}) for _ in range(10)]) @@ -112,7 +112,9 @@ def test_new_connection_is_not_marked_dead(self): # Nothing should be marked dead self.assertEqual(0, len(pool.dead_count)) - def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self): + def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible( + self, + ) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.dead_count[0] = 1 pool.mark_dead(0) # failed twice, longer timeout @@ -122,7 +124,7 @@ def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self self.assertEqual(1, pool.get_connection()) self.assertEqual([1], pool.connections) - def test_connection_is_resurrected_after_its_timeout(self): + def test_connection_is_resurrected_after_its_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -131,7 +133,7 @@ def test_connection_is_resurrected_after_its_timeout(self): self.assertEqual(42, pool.connections[-1]) self.assertEqual(100, len(pool.connections)) - def test_force_resurrect_always_returns_a_connection(self): + def test_force_resurrect_always_returns_a_connection(self) -> None: pool = ConnectionPool([(0, {})]) pool.connections = [] @@ -139,7 +141,7 @@ def test_force_resurrect_always_returns_a_connection(self): self.assertEqual([], pool.connections) self.assertTrue(pool.dead.empty()) - def test_already_failed_connection_has_longer_timeout(self): + def test_already_failed_connection_has_longer_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 @@ -148,7 +150,7 @@ def test_already_failed_connection_has_longer_timeout(self): self.assertEqual(3, pool.dead_count[42]) self.assertEqual((now + 4 * 60, 42), pool.dead.get()) - def test_timeout_for_failed_connections_is_limitted(self): + def test_timeout_for_failed_connections_is_limitted(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 245 @@ -157,7 +159,7 @@ def test_timeout_for_failed_connections_is_limitted(self): self.assertEqual(246, pool.dead_count[42]) self.assertEqual((now + 32 * 60, 42), pool.dead.get()) - def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self): + def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 0b4150fb..26e9e044 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -32,7 +32,7 @@ class 
TestTransformError(TestCase): - def test_transform_error_parse_with_error_reason(self): + def test_transform_error_parse_with_error_reason(self) -> None: e = TransportError( 500, "InternalServerError", @@ -43,7 +43,7 @@ def test_transform_error_parse_with_error_reason(self): str(e), "TransportError(500, 'InternalServerError', 'error reason')" ) - def test_transform_error_parse_with_error_string(self): + def test_transform_error_parse_with_error_string(self) -> None: e = TransportError( 500, "InternalServerError", {"error": "something error message"} ) diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 3538ae28..35b78d9a 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -60,7 +60,7 @@ class TestParallelBulk(TestCase): "opensearchpy.helpers.actions._process_bulk_chunk", side_effect=mock_process_bulk_chunk, ) - def test_all_chunks_sent(self, _process_bulk_chunk): + def test_all_chunks_sent(self, _process_bulk_chunk) -> None: actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) @@ -74,7 +74,7 @@ def test_all_chunks_sent(self, _process_bulk_chunk): (True, time.sleep(0.001) or threading.current_thread().ident) ], ) - def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): + def test_chunk_sent_from_different_threads(self, _process_bulk_chunk) -> None: actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -83,10 +83,10 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): class TestChunkActions(TestCase): - def setup_method(self, _): + def setup_method(self, _) -> None: self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip - def test_expand_action(self): + def test_expand_action(self) -> None: self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) @@ -123,7 +123,7 @@ def test_expand_action_actions(self): ({"create": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) - def test_expand_action_options(self): + def test_expand_action_options(self) -> None: for option in ( "_id", "_index", @@ -182,7 +182,7 @@ def test__source_metadata_or_source(self): ({"update": {}}, {"key2": "val2"}), ) - def test_chunks_are_chopped_by_byte_size(self): + def test_chunks_are_chopped_by_byte_size(self) -> None: self.assertEqual( 100, len( @@ -190,7 +190,7 @@ def test_chunks_are_chopped_by_byte_size(self): ), ) - def test_chunks_are_chopped_by_chunk_size(self): + def test_chunks_are_chopped_by_chunk_size(self) -> None: self.assertEqual( 10, len( @@ -200,7 +200,7 @@ def test_chunks_are_chopped_by_chunk_size(self): ), ) - def test_chunks_are_chopped_by_byte_size_properly(self): + def test_chunks_are_chopped_by_byte_size_properly(self) -> None: max_byte_size = 170 chunks = list( helpers._chunk_actions( @@ -215,7 +215,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self): class TestExpandActions(TestCase): - def test_string_actions_are_marked_as_simple_inserts(self): + def test_string_actions_are_marked_as_simple_inserts(self) -> None: self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 057e7f16..f46dd132 100644 --- 
a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -30,7 +30,7 @@ from opensearchpy.helpers import aggs, query -def test_repr(): +def test_repr() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -50,7 +50,7 @@ def test_meta(): } == a.to_dict() -def test_meta_from_dict(): +def test_meta_from_dict() -> None: max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -59,7 +59,7 @@ def test_meta_from_dict(): assert aggs.A(a.to_dict()) == a -def test_A_creates_proper_agg(): +def test_A_creates_proper_agg() -> None: a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) @@ -74,7 +74,7 @@ def test_A_handles_nested_aggs_properly(): assert a._params == {"field": "tags", "aggs": {"max_score": max_score}} -def test_A_passes_aggs_through(): +def test_A_passes_aggs_through() -> None: a = aggs.A("terms", field="tags") assert aggs.A(a) is a @@ -115,14 +115,14 @@ def test_A_fails_with_incorrect_dict(): aggs.A(d) -def test_A_fails_with_agg_and_params(): +def test_A_fails_with_agg_and_params() -> None: a = aggs.A("terms", field="tags") with raises(Exception): aggs.A(a, field="score") -def test_buckets_are_nestable(): +def test_buckets_are_nestable() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -131,7 +131,7 @@ def test_buckets_are_nestable(): assert a.aggs == {"per_author": b} -def test_metric_inside_buckets(): +def test_metric_inside_buckets() -> None: a = aggs.Terms(field="tags") b = a.metric("max_score", "max", field="score") @@ -140,7 +140,7 @@ def test_metric_inside_buckets(): assert a.aggs["max_score"] == aggs.Max(field="score") -def test_buckets_equals_counts_subaggs(): +def test_buckets_equals_counts_subaggs() -> None: a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") b = aggs.Terms(field="tags") @@ -166,7 +166,7 @@ def test_buckets_to_dict(): } == a.to_dict() -def test_nested_buckets_are_reachable_as_getitem(): +def test_nested_buckets_are_reachable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -174,14 +174,14 @@ def test_nested_buckets_are_reachable_as_getitem(): assert a["per_author"] == b -def test_nested_buckets_are_settable_as_getitem(): +def test_nested_buckets_are_settable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a["per_author"] = aggs.A("terms", field="author.raw") assert a.aggs["per_author"] is b -def test_filter_can_be_instantiated_using_positional_args(): +def test_filter_can_be_instantiated_using_positional_args() -> None: a = aggs.Filter(query.Q("term", f=42)) assert {"filter": {"term": {"f": 42}}} == a.to_dict() @@ -209,7 +209,7 @@ def test_filter_aggregation_with_nested_aggs(): } == a.to_dict() -def test_filters_correctly_identifies_the_hash(): +def test_filters_correctly_identifies_the_hash() -> None: a = aggs.A( "filters", filters={ @@ -266,19 +266,19 @@ def test_bucket_sort_agg_only_trnunc(): } == a.to_dict() -def test_geohash_grid_aggregation(): +def test_geohash_grid_aggregation() -> None: a = aggs.GeohashGrid(**{"field": "centroid", "precision": 3}) assert {"geohash_grid": {"field": "centroid", "precision": 3}} == a.to_dict() -def test_geotile_grid_aggregation(): +def test_geotile_grid_aggregation() -> None: a = aggs.GeotileGrid(**{"field": "centroid", "precision": 3}) assert {"geotile_grid": {"field": "centroid", "precision": 
3}} == a.to_dict() -def test_boxplot_aggregation(): +def test_boxplot_aggregation() -> None: a = aggs.Boxplot(field="load_time") assert {"boxplot": {"field": "load_time"}} == a.to_dict() @@ -305,12 +305,12 @@ def test_rare_terms_aggregation(): } == a.to_dict() -def test_variable_width_histogram_aggregation(): +def test_variable_width_histogram_aggregation() -> None: a = aggs.VariableWidthHistogram(field="price", buckets=2) assert {"variable_width_histogram": {"buckets": 2, "field": "price"}} == a.to_dict() -def test_median_absolute_deviation_aggregation(): +def test_median_absolute_deviation_aggregation() -> None: a = aggs.MedianAbsoluteDeviation(field="rating") assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() @@ -359,7 +359,7 @@ def test_moving_percentiles_aggregation(): } == a.to_dict() -def test_normalize_aggregation(): +def test_normalize_aggregation() -> None: a = aggs.Normalize(buckets_path="normalized", method="percent_of_sum") assert { "normalize": {"buckets_path": "normalized", "method": "percent_of_sum"} diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 49a1d1fd..7b8f6b04 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -30,7 +30,7 @@ from opensearchpy.helpers import analysis -def test_analyzer_serializes_as_name(): +def test_analyzer_serializes_as_name() -> None: a = analysis.analyzer("my_analyzer") assert "my_analyzer" == a.to_dict() @@ -144,7 +144,7 @@ def test_conditional_token_filter(): } == a.get_analysis_definition() -def test_conflicting_nested_filters_cause_error(): +def test_conflicting_nested_filters_cause_error() -> None: a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -166,7 +166,7 @@ def test_conflicting_nested_filters_cause_error(): a.get_analysis_definition() -def test_normalizer_serializes_as_name(): +def test_normalizer_serializes_as_name() -> None: n = analysis.normalizer("my_normalizer") assert "my_normalizer" == n.to_dict() @@ -184,7 +184,7 @@ def test_normalizer_has_definition(): } == n.get_definition() -def test_tokenizer(): +def test_tokenizer() -> None: t = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) assert t.to_dict() == "trigram" @@ -218,7 +218,7 @@ def test_custom_analyzer_can_collect_custom_items(): } == a.get_analysis_definition() -def test_stemmer_analyzer_can_pass_name(): +def test_stemmer_analyzer_can_pass_name() -> None: t = analysis.token_filter( "my_english_filter", name="minimal_english", type="stemmer" ) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index d2da16e0..ed78b4c0 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -129,7 +129,7 @@ class Index: name = "test-host" -def test_range_serializes_properly(): +def test_range_serializes_properly() -> None: class D(document.Document): lr = field.LongRange() @@ -142,7 +142,7 @@ class D(document.Document): assert {"lr": {"lt": 42}} == d.to_dict() -def test_range_deserializes_properly(): +def test_range_deserializes_properly() -> None: class D(document.InnerDoc): lr = field.LongRange() @@ -152,13 +152,13 @@ class D(document.InnerDoc): assert 47 not in d.lr -def test_resolve_nested(): +def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is 
NestedSecret._doc_type.mapping["secrets"]["title"] -def test_conflicting_mapping_raises_error_in_index_to_dict(): +def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.Document): name = field.Text() @@ -173,18 +173,18 @@ class B(document.Document): i.to_dict() -def test_ip_address_serializes_properly(): +def test_ip_address_serializes_properly() -> None: host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -def test_matches_uses_index(): +def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -def test_matches_with_no_name_always_matches(): +def test_matches_with_no_name_always_matches() -> None: class D(document.Document): pass @@ -192,7 +192,7 @@ class D(document.Document): assert D._matches({"_index": "whatever"}) -def test_matches_accepts_wildcards(): +def test_matches_accepts_wildcards() -> None: class MyDoc(document.Document): class Index: name = "my-*" @@ -201,7 +201,7 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -def test_assigning_attrlist_to_field(): +def test_assigning_attrlist_to_field() -> None: sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -209,13 +209,13 @@ def test_assigning_attrlist_to_field(): assert sc.to_dict()["files"] is ls -def test_optional_inner_objects_are_not_validated_if_missing(): +def test_optional_inner_objects_are_not_validated_if_missing() -> None: d = OptionalObjectWithRequiredField() assert d.full_clean() is None -def test_custom_field(): +def test_custom_field() -> None: s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -226,13 +226,13 @@ def test_custom_field(): assert isinstance(s.title, Secret) -def test_custom_field_mapping(): +def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -def test_custom_field_in_nested(): +def test_custom_field_in_nested() -> None: s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -240,7 +240,7 @@ def test_custom_field_in_nested(): assert s.secrets[0].title == "Hello" -def test_multi_works_after_doc_has_been_saved(): +def test_multi_works_after_doc_has_been_saved() -> None: c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -248,7 +248,7 @@ def test_multi_works_after_doc_has_been_saved(): assert c.to_dict() == {"files": ["setup.py"]} -def test_multi_works_in_nested_after_doc_has_been_serialized(): +def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -257,7 +257,7 @@ def test_multi_works_in_nested_after_doc_has_been_serialized(): assert [] == c.comments[0].tags -def test_null_value_for_object(): +def test_null_value_for_object() -> None: d = MyDoc(inner=None) assert d.inner is None @@ -313,21 +313,21 @@ def test_to_dict_with_meta_includes_custom_index(): assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -def test_to_dict_without_skip_empty_will_include_empty_fields(): +def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -def test_attribute_can_be_removed(): +def test_attribute_can_be_removed() -> None: d = MyDoc(title="hello") del d.title assert 
"title" not in d._d_ -def test_doc_type_can_be_correctly_pickled(): +def test_doc_type_can_be_correctly_pickled() -> None: d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -342,7 +342,7 @@ def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -def test_meta_is_accessible_even_on_empty_doc(): +def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() d.meta @@ -369,7 +369,7 @@ class Meta: } == User._doc_type.mapping.to_dict() -def test_multi_value_fields(): +def test_multi_value_fields() -> None: class Blog(document.Document): tags = field.Keyword(multi=True) @@ -380,7 +380,7 @@ class Blog(document.Document): assert ["search", "python"] == b.tags -def test_docs_with_properties(): +def test_docs_with_properties() -> None: class User(document.Document): pwd_hash = field.Text() @@ -408,7 +408,7 @@ def password(self, pwd): u.password -def test_nested_can_be_assigned_to(): +def test_nested_can_be_assigned_to() -> None: d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -419,13 +419,13 @@ def test_nested_can_be_assigned_to(): assert isinstance(d2.comments[0], Comment) -def test_nested_can_be_none(): +def test_nested_can_be_none() -> None: d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() -def test_nested_defaults_to_list_and_can_be_updated(): +def test_nested_defaults_to_list_and_can_be_updated() -> None: md = DocWithNested() assert [] == md.comments @@ -446,7 +446,7 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values(): } == md.to_dict() -def test_to_dict_ignores_empty_collections(): +def test_to_dict_ignores_empty_collections() -> None: md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() @@ -500,7 +500,7 @@ def test_document_can_be_created_dynamically(): } == md.to_dict() -def test_invalid_date_will_raise_exception(): +def test_invalid_date_will_raise_exception() -> None: md = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -539,7 +539,7 @@ class B(A): } == B._doc_type.mapping.to_dict() -def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): +def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: md = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -566,32 +566,32 @@ def test_index_inheritance(): } == MyMultiSubDoc._doc_type.mapping.to_dict() -def test_meta_fields_can_be_set_directly_in_init(): +def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() md = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -def test_save_no_index(mock_client): +def test_save_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): md.save(using="mock") -def test_delete_no_index(mock_client): +def test_delete_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): md.delete(using="mock") -def test_update_no_fields(): +def test_update_no_fields() -> None: md = MyDoc() with raises(IllegalOperation): md.update() -def test_search_with_custom_alias_and_index(mock_client): +def test_search_with_custom_alias_and_index(mock_client) -> None: search_object = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index 9fcc68d1..e663bca1 100644 --- 
a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -49,7 +49,7 @@ class BlogSearch(FacetedSearch): } -def test_query_is_created_properly(): +def test_query_is_created_properly() -> None: bs = BlogSearch("python search") s = bs.build_search() @@ -153,7 +153,7 @@ def test_filters_are_applied_to_search_ant_relevant_facets(): } == d -def test_date_histogram_facet_with_1970_01_01_date(): +def test_date_histogram_facet_with_1970_01_01_date() -> None: dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -186,7 +186,7 @@ def test_date_histogram_facet_with_1970_01_01_date(): ("fixed_interval", "1h"), ], ) -def test_date_histogram_interval_types(interval_type, interval): +def test_date_histogram_interval_types(interval_type, interval) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -198,7 +198,7 @@ def test_date_histogram_interval_types(interval_type, interval): dhf.get_value_filter(datetime.now()) -def test_date_histogram_no_interval_keyerror(): +def test_date_histogram_no_interval_keyerror() -> None: dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index df30ad69..19582730 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -36,7 +36,7 @@ from opensearchpy.helpers import field -def test_date_range_deserialization(): +def test_date_range_deserialization() -> None: data = {"lt": "2018-01-01T00:30:10"} r = field.DateRange().deserialize(data) @@ -45,7 +45,7 @@ def test_date_range_deserialization(): assert r.lt == datetime(2018, 1, 1, 0, 30, 10) -def test_boolean_deserialization(): +def test_boolean_deserialization() -> None: bf = field.Boolean() assert not bf.deserialize("false") @@ -58,7 +58,7 @@ def test_boolean_deserialization(): assert bf.deserialize(1) -def test_date_field_can_have_default_tz(): +def test_date_field_can_have_default_tz() -> None: f = field.Date(default_timezone="UTC") now = datetime.now() @@ -73,7 +73,7 @@ def test_date_field_can_have_default_tz(): assert now.isoformat() + "+00:00" == now_with_tz.isoformat() -def test_custom_field_car_wrap_other_field(): +def test_custom_field_car_wrap_other_field() -> None: class MyField(field.CustomField): @property def builtin_type(self): @@ -84,7 +84,7 @@ def builtin_type(self): ).to_dict() -def test_field_from_dict(): +def test_field_from_dict() -> None: f = field.construct_field({"type": "text", "index": "not_analyzed"}) assert isinstance(f, field.Text) @@ -107,14 +107,14 @@ def test_multi_fields_are_accepted_and_parsed(): } == f.to_dict() -def test_nested_provides_direct_access_to_its_fields(): +def test_nested_provides_direct_access_to_its_fields() -> None: f = field.Nested(properties={"name": {"type": "text", "index": "not_analyzed"}}) assert "name" in f assert f["name"] == field.Text(index="not_analyzed") -def test_field_supports_multiple_analyzers(): +def test_field_supports_multiple_analyzers() -> None: f = field.Text(analyzer="snowball", search_analyzer="keyword") assert { "analyzer": "snowball", @@ -143,14 +143,14 @@ def test_multifield_supports_multiple_analyzers(): } == f.to_dict() -def test_scaled_float(): +def test_scaled_float() 
-> None: with pytest.raises(TypeError): field.ScaledFloat() f = field.ScaledFloat(123) assert f.to_dict() == {"scaling_factor": 123, "type": "scaled_float"} -def test_ipaddress(): +def test_ipaddress() -> None: f = field.Ip() assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1") assert f.deserialize("::1") == ip_address("::1") @@ -160,7 +160,7 @@ def test_ipaddress(): assert f.deserialize("not_an_ipaddress") -def test_float(): +def test_float() -> None: f = field.Float() assert f.deserialize("42") == 42.0 assert f.deserialize(None) is None @@ -168,7 +168,7 @@ def test_float(): assert f.deserialize("not_a_float") -def test_integer(): +def test_integer() -> None: f = field.Integer() assert f.deserialize("42") == 42 assert f.deserialize(None) is None @@ -176,30 +176,30 @@ def test_integer(): assert f.deserialize("not_an_integer") -def test_binary(): +def test_binary() -> None: f = field.Binary() assert f.deserialize(base64.b64encode(b"42")) == b"42" assert f.deserialize(f.serialize(b"42")) == b"42" assert f.deserialize(None) is None -def test_constant_keyword(): +def test_constant_keyword() -> None: f = field.ConstantKeyword() assert f.to_dict() == {"type": "constant_keyword"} -def test_rank_features(): +def test_rank_features() -> None: f = field.RankFeatures() assert f.to_dict() == {"type": "rank_features"} -def test_object_dynamic_values(): +def test_object_dynamic_values() -> None: for dynamic in True, False, "strict": f = field.Object(dynamic=dynamic) assert f.to_dict()["dynamic"] == dynamic -def test_object_disabled(): +def test_object_disabled() -> None: f = field.Object(enabled=False) assert f.to_dict() == {"type": "object", "enabled": False} diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index 7163c09e..bb8aa578 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -38,7 +38,7 @@ class Post(Document): published_from = Date() -def test_multiple_doc_types_will_combine_mappings(): +def test_multiple_doc_types_will_combine_mappings() -> None: class User(Document): username = Text() @@ -56,14 +56,14 @@ class User(Document): } == i.to_dict() -def test_search_is_limited_to_index_name(): +def test_search_is_limited_to_index_name() -> None: i = Index("my-index") s = i.search() assert s._index == ["my-index"] -def test_cloned_index_has_copied_settings_and_using(): +def test_cloned_index_has_copied_settings_and_using() -> None: client = object() i = Index("my-index", using=client) i.settings(number_of_shards=1) @@ -76,7 +76,7 @@ def test_cloned_index_has_copied_settings_and_using(): assert i._settings is not i2._settings -def test_cloned_index_has_analysis_attribute(): +def test_cloned_index_has_analysis_attribute() -> None: """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
@@ -96,7 +96,7 @@ def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -def test_settings_are_saved(): +def test_settings_are_saved() -> None: i = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -104,7 +104,7 @@ def test_settings_are_saved(): assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -def test_registered_doc_type_included_in_to_dict(): +def test_registered_doc_type_included_in_to_dict() -> None: i = Index("i", using="alias") i.document(Post) @@ -118,7 +118,7 @@ def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -def test_registered_doc_type_included_in_search(): +def test_registered_doc_type_included_in_search() -> None: i = Index("i", using="alias") i.document(Post) @@ -127,7 +127,7 @@ def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -def test_aliases_add_to_object(): +def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -137,7 +137,7 @@ def test_aliases_add_to_object(): assert index._aliases == alias_dict -def test_aliases_returned_from_to_dict(): +def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -176,7 +176,7 @@ def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -def test_conflicting_analyzer_raises_error(): +def test_conflicting_analyzer_raises_error() -> None: i = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -191,7 +191,7 @@ def test_index_template_can_have_order(): assert {"index_patterns": ["i-*"], "order": 2} == it.to_dict() -def test_index_template_save_result(mock_client): +def test_index_template_save_result(mock_client) -> None: it = IndexTemplate("test-template", "test-*") assert it.save(using="mock") == mock_client.indices.put_template() diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index ad042c58..5e4e49ce 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -31,7 +31,7 @@ from opensearchpy.helpers import analysis, mapping -def test_mapping_can_has_fields(): +def test_mapping_can_has_fields() -> None: m = mapping.Mapping() m.field("name", "text").field("tags", "keyword") @@ -73,7 +73,7 @@ def test_mapping_update_is_recursive(): } == m1.to_dict() -def test_properties_can_iterate_over_all_the_fields(): +def test_properties_can_iterate_over_all_the_fields() -> None: m = mapping.Mapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -202,7 +202,7 @@ def test_mapping_can_collect_multiple_analyzers(): } == m._collect_analysis() -def test_even_non_custom_analyzers_can_have_params(): +def test_even_non_custom_analyzers_can_have_params() -> None: a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.Mapping() m.field("title", "text", analyzer=a1) @@ -212,14 +212,14 @@ def test_even_non_custom_analyzers_can_have_params(): } == m._collect_analysis() -def test_resolve_field_can_resolve_multifields(): +def test_resolve_field_can_resolve_multifields() -> None: m = mapping.Mapping() m.field("title", "text", 
fields={"keyword": Keyword()}) assert isinstance(m.resolve_field("title.keyword"), Keyword) -def test_resolve_nested(): +def test_resolve_nested() -> None: m = mapping.Mapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 95acfbe5..142b865c 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -30,61 +30,61 @@ from opensearchpy.helpers import function, query -def test_empty_Q_is_match_all(): +def test_empty_Q_is_match_all() -> None: q = query.Q() assert isinstance(q, query.MatchAll) assert query.MatchAll() == q -def test_match_to_dict(): +def test_match_to_dict() -> None: assert {"match": {"f": "value"}} == query.Match(f="value").to_dict() -def test_match_to_dict_extra(): +def test_match_to_dict_extra() -> None: assert {"match": {"f": "value", "boost": 2}} == query.Match( f="value", boost=2 ).to_dict() -def test_fuzzy_to_dict(): +def test_fuzzy_to_dict() -> None: assert {"fuzzy": {"f": "value"}} == query.Fuzzy(f="value").to_dict() -def test_prefix_to_dict(): +def test_prefix_to_dict() -> None: assert {"prefix": {"f": "value"}} == query.Prefix(f="value").to_dict() -def test_term_to_dict(): +def test_term_to_dict() -> None: assert {"term": {"_type": "article"}} == query.Term(_type="article").to_dict() -def test_bool_to_dict(): +def test_bool_to_dict() -> None: bool = query.Bool(must=[query.Match(f="value")], should=[]) assert {"bool": {"must": [{"match": {"f": "value"}}]}} == bool.to_dict() -def test_dismax_to_dict(): +def test_dismax_to_dict() -> None: assert {"dis_max": {"queries": [{"term": {"_type": "article"}}]}} == query.DisMax( queries=[query.Term(_type="article")] ).to_dict() -def test_bool_from_dict_issue_318(): +def test_bool_from_dict_issue_318() -> None: d = {"bool": {"must_not": {"match": {"field": "value"}}}} q = query.Q(d) assert q == ~query.Match(field="value") -def test_repr(): +def test_repr() -> None: bool = query.Bool(must=[query.Match(f="value")], should=[]) assert "Bool(must=[Match(f='value')])" == repr(bool) -def test_query_clone(): +def test_query_clone() -> None: bool = query.Bool( must=[query.Match(x=42)], should=[query.Match(g="v2")], @@ -96,14 +96,14 @@ def test_query_clone(): assert bool is not bool_clone -def test_bool_converts_its_init_args_to_queries(): +def test_bool_converts_its_init_args_to_queries() -> None: q = query.Bool(must=[{"match": {"f": "value"}}]) assert len(q.must) == 1 assert q.must[0] == query.Match(f="value") -def test_two_queries_make_a_bool(): +def test_two_queries_make_a_bool() -> None: q1 = query.Match(f="value1") q2 = query.Match(message={"query": "this is a test", "opeartor": "and"}) q = q1 & q2 @@ -112,7 +112,7 @@ def test_two_queries_make_a_bool(): assert [q1, q2] == q.must -def test_other_and_bool_appends_other_to_must(): +def test_other_and_bool_appends_other_to_must() -> None: q1 = query.Match(f="value1") qb = query.Bool() @@ -121,7 +121,7 @@ def test_other_and_bool_appends_other_to_must(): assert q.must[0] == q1 -def test_bool_and_other_appends_other_to_must(): +def test_bool_and_other_appends_other_to_must() -> None: q1 = query.Match(f="value1") qb = query.Bool() @@ -130,7 +130,7 @@ def test_bool_and_other_appends_other_to_must(): assert q.must[0] == q1 -def test_bool_and_other_sets_min_should_match_if_needed(): +def test_bool_and_other_sets_min_should_match_if_needed() -> None: q1 = query.Q("term", 
category=1) q2 = query.Q( "bool", should=[query.Q("term", name="aaa"), query.Q("term", name="bbb")] @@ -144,7 +144,7 @@ def test_bool_and_other_sets_min_should_match_if_needed(): ) -def test_bool_with_different_minimum_should_match_should_not_be_combined(): +def test_bool_with_different_minimum_should_match_should_not_be_combined() -> None: q1 = query.Q( "bool", minimum_should_match=2, @@ -183,11 +183,11 @@ def test_bool_with_different_minimum_should_match_should_not_be_combined(): assert q5 == query.Bool(should=[q1, q2, q3]) -def test_empty_bool_has_min_should_match_0(): +def test_empty_bool_has_min_should_match_0() -> None: assert 0 == query.Bool()._min_should_match -def test_query_and_query_creates_bool(): +def test_query_and_query_creates_bool() -> None: q1 = query.Match(f=42) q2 = query.Match(g=47) @@ -196,7 +196,7 @@ def test_query_and_query_creates_bool(): assert q.must == [q1, q2] -def test_match_all_and_query_equals_other(): +def test_match_all_and_query_equals_other() -> None: q1 = query.Match(f=42) q2 = query.MatchAll() @@ -204,39 +204,39 @@ def test_match_all_and_query_equals_other(): assert q1 == q -def test_not_match_all_is_match_none(): +def test_not_match_all_is_match_none() -> None: q = query.MatchAll() assert ~q == query.MatchNone() -def test_not_match_none_is_match_all(): +def test_not_match_none_is_match_all() -> None: q = query.MatchNone() assert ~q == query.MatchAll() -def test_invert_empty_bool_is_match_none(): +def test_invert_empty_bool_is_match_none() -> None: q = query.Bool() assert ~q == query.MatchNone() -def test_match_none_or_query_equals_query(): +def test_match_none_or_query_equals_query() -> None: q1 = query.Match(f=42) q2 = query.MatchNone() assert q1 | q2 == query.Match(f=42) -def test_match_none_and_query_equals_match_none(): +def test_match_none_and_query_equals_match_none() -> None: q1 = query.Match(f=42) q2 = query.MatchNone() assert q1 & q2 == query.MatchNone() -def test_bool_and_bool(): +def test_bool_and_bool() -> None: qt1, qt2, qt3 = query.Match(f=1), query.Match(f=2), query.Match(f=3) q1 = query.Bool(must=[qt1], should=[qt2]) @@ -252,7 +252,7 @@ def test_bool_and_bool(): ) -def test_bool_and_bool_with_min_should_match(): +def test_bool_and_bool_with_min_should_match() -> None: qt1, qt2 = query.Match(f=1), query.Match(f=2) q1 = query.Q("bool", minimum_should_match=1, should=[qt1]) q2 = query.Q("bool", minimum_should_match=1, should=[qt2]) @@ -260,19 +260,19 @@ def test_bool_and_bool_with_min_should_match(): assert query.Q("bool", must=[qt1, qt2]) == q1 & q2 -def test_inverted_query_becomes_bool_with_must_not(): +def test_inverted_query_becomes_bool_with_must_not() -> None: q = query.Match(f=42) assert ~q == query.Bool(must_not=[query.Match(f=42)]) -def test_inverted_query_with_must_not_become_should(): +def test_inverted_query_with_must_not_become_should() -> None: q = query.Q("bool", must_not=[query.Q("match", f=1), query.Q("match", f=2)]) assert ~q == query.Q("bool", should=[query.Q("match", f=1), query.Q("match", f=2)]) -def test_inverted_query_with_must_and_must_not(): +def test_inverted_query_with_must_and_must_not() -> None: q = query.Q( "bool", must=[query.Q("match", f=3), query.Q("match", f=4)], @@ -292,13 +292,13 @@ def test_inverted_query_with_must_and_must_not(): ) -def test_double_invert_returns_original_query(): +def test_double_invert_returns_original_query() -> None: q = query.Match(f=42) assert q == ~~q -def test_bool_query_gets_inverted_internally(): +def test_bool_query_gets_inverted_internally() -> None: q = 
query.Bool(must_not=[query.Match(f=42)], must=[query.Match(g="v")]) assert ~q == query.Bool( @@ -311,7 +311,7 @@ def test_bool_query_gets_inverted_internally(): ) -def test_match_all_or_something_is_match_all(): +def test_match_all_or_something_is_match_all() -> None: q1 = query.MatchAll() q2 = query.Match(f=42) @@ -319,7 +319,7 @@ def test_match_all_or_something_is_match_all(): assert (q2 | q1) == query.MatchAll() -def test_or_produces_bool_with_should(): +def test_or_produces_bool_with_should() -> None: q1 = query.Match(f=42) q2 = query.Match(g="v") @@ -327,7 +327,7 @@ def test_or_produces_bool_with_should(): assert q == query.Bool(should=[q1, q2]) -def test_or_bool_doesnt_loop_infinitely_issue_37(): +def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: q = query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -335,7 +335,7 @@ def test_or_bool_doesnt_loop_infinitely_issue_37(): ) -def test_or_bool_doesnt_loop_infinitely_issue_96(): +def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: q = ~query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -346,14 +346,14 @@ def test_or_bool_doesnt_loop_infinitely_issue_96(): ) -def test_bool_will_append_another_query_with_or(): +def test_bool_will_append_another_query_with_or() -> None: qb = query.Bool(should=[query.Match(f="v"), query.Match(f="v2")]) q = query.Match(g=42) assert (q | qb) == query.Bool(should=[query.Match(f="v"), query.Match(f="v2"), q]) -def test_bool_queries_with_only_should_get_concatenated(): +def test_bool_queries_with_only_should_get_concatenated() -> None: q1 = query.Bool(should=[query.Match(f=1), query.Match(f=2)]) q2 = query.Bool(should=[query.Match(f=3), query.Match(f=4)]) @@ -362,7 +362,7 @@ def test_bool_queries_with_only_should_get_concatenated(): ) -def test_two_bool_queries_append_one_to_should_if_possible(): +def test_two_bool_queries_append_one_to_should_if_possible() -> None: q1 = query.Bool(should=[query.Match(f="v")]) q2 = query.Bool(must=[query.Match(f="v")]) @@ -374,12 +374,12 @@ def test_two_bool_queries_append_one_to_should_if_possible(): ) -def test_queries_are_registered(): +def test_queries_are_registered() -> None: assert "match" in query.Query._classes assert query.Query._classes["match"] is query.Match -def test_defining_query_registers_it(): +def test_defining_query_registers_it() -> None: class MyQuery(query.Query): name = "my_query" @@ -387,62 +387,62 @@ class MyQuery(query.Query): assert query.Query._classes["my_query"] is MyQuery -def test_Q_passes_query_through(): +def test_Q_passes_query_through() -> None: q = query.Match(f="value1") assert query.Q(q) is q -def test_Q_constructs_query_by_name(): +def test_Q_constructs_query_by_name() -> None: q = query.Q("match", f="value") assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_translates_double_underscore_to_dots_in_param_names(): +def test_Q_translates_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params -def test_Q_doesn_translate_double_underscore_to_dots_in_param_names(): +def test_Q_doesn_translate_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params -def test_Q_constructs_simple_query_from_dict(): +def test_Q_constructs_simple_query_from_dict() -> None: q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def 
test_Q_constructs_compound_query_from_dict(): +def test_Q_constructs_compound_query_from_dict() -> None: q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) -def test_Q_raises_error_when_passed_in_dict_and_params(): +def test_Q_raises_error_when_passed_in_dict_and_params() -> None: with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") -def test_Q_raises_error_when_passed_in_query_and_params(): +def test_Q_raises_error_when_passed_in_query_and_params() -> None: q = query.Match(f="value1") with raises(Exception): query.Q(q, f="value") -def test_Q_raises_error_on_unknown_query(): +def test_Q_raises_error_on_unknown_query() -> None: with raises(Exception): query.Q("not a query", f="value") -def test_match_all_and_anything_is_anything(): +def test_match_all_and_anything_is_anything() -> None: q = query.MatchAll() s = query.Match(f=42) @@ -450,7 +450,7 @@ def test_match_all_and_anything_is_anything(): assert s & q == s -def test_function_score_with_functions(): +def test_function_score_with_functions() -> None: q = query.Q( "function_score", functions=[query.SF("script_score", script="doc['comment_count'] * _score")], @@ -550,7 +550,7 @@ def test_function_score_from_dict(): assert {"boost_factor": 6} == sf.to_dict() -def test_script_score(): +def test_script_score() -> None: d = { "script_score": { "query": {"match_all": {}}, diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 83fe8a08..657beb05 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -41,7 +41,7 @@ def agg_response(aggs_search, aggs_data): return response.Response(aggs_search, aggs_data) -def test_agg_response_is_pickleable(agg_response): +def test_agg_response_is_pickleable(agg_response) -> None: agg_response.hits r = pickle.loads(pickle.dumps(agg_response)) @@ -50,7 +50,7 @@ def test_agg_response_is_pickleable(agg_response): assert r.hits == agg_response.hits -def test_response_is_pickleable(dummy_response): +def test_response_is_pickleable(dummy_response) -> None: res = response.Response(Search(), dummy_response) res.hits r = pickle.loads(pickle.dumps(res)) @@ -60,7 +60,7 @@ def test_response_is_pickleable(dummy_response): assert r.hits == res.hits -def test_hit_is_pickleable(dummy_response): +def test_hit_is_pickleable(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = pickle.loads(pickle.dumps(res.hits)) @@ -68,14 +68,14 @@ def test_hit_is_pickleable(dummy_response): assert hits[0].meta == res.hits[0].meta -def test_response_stores_search(dummy_response): +def test_response_stores_search(dummy_response) -> None: s = Search() r = response.Response(s, dummy_response) assert r._search is s -def test_interactive_helpers(dummy_response): +def test_interactive_helpers(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = res.hits h = hits[0] @@ -98,19 +98,19 @@ def test_interactive_helpers(dummy_response): ] == repr(h) -def test_empty_response_is_false(dummy_response): +def test_empty_response_is_false(dummy_response) -> None: dummy_response["hits"]["hits"] = [] res = response.Response(Search(), dummy_response) assert not res -def test_len_response(dummy_response): +def test_len_response(dummy_response) -> None: res = response.Response(Search(), dummy_response) assert len(res) == 4 -def test_iterating_over_response_gives_you_hits(dummy_response): +def 
test_iterating_over_response_gives_you_hits(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = list(h for h in res) @@ -127,7 +127,7 @@ def test_iterating_over_response_gives_you_hits(dummy_response): assert hits[1].meta.routing == "opensearch" -def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response): +def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = res.hits @@ -135,7 +135,7 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response): assert 12.0 == hits.max_score -def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response): +def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response) -> None: res = response.Response(Search(), dummy_response) h = res.hits[0] @@ -151,30 +151,30 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response): h.not_there -def test_slicing_on_response_slices_on_hits(dummy_response): +def test_slicing_on_response_slices_on_hits(dummy_response) -> None: res = response.Response(Search(), dummy_response) assert res[0] is res.hits[0] assert res[::-1] == res.hits[::-1] -def test_aggregation_base(agg_response): +def test_aggregation_base(agg_response) -> None: assert agg_response.aggs is agg_response.aggregations assert isinstance(agg_response.aggs, response.AggResponse) -def test_metric_agg_works(agg_response): +def test_metric_agg_works(agg_response) -> None: assert 25052.0 == agg_response.aggs.sum_lines.value -def test_aggregations_can_be_iterated_over(agg_response): +def test_aggregations_can_be_iterated_over(agg_response) -> None: aggs = [a for a in agg_response.aggs] assert len(aggs) == 3 assert all(map(lambda a: isinstance(a, AggResponse), aggs)) -def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search): +def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search) -> None: a = agg_response.aggs["popular_files"] assert isinstance(a, BucketData) @@ -182,7 +182,7 @@ def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search): assert a._meta["aggs"] is aggs_search.aggs.aggs["popular_files"] -def test_bucket_response_can_be_iterated_over(agg_response): +def test_bucket_response_can_be_iterated_over(agg_response) -> None: popular_files = agg_response.aggregations.popular_files buckets = [b for b in popular_files] @@ -190,7 +190,7 @@ def test_bucket_response_can_be_iterated_over(agg_response): assert buckets == popular_files.buckets -def test_bucket_keys_get_deserialized(aggs_data, aggs_search): +def test_bucket_keys_get_deserialized(aggs_data, aggs_search) -> None: class Commit(Document): info = Object(properties={"committed_date": Date()}) diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index dae61a00..73d078a9 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -34,13 +34,13 @@ from opensearchpy.helpers import query, search -def test_expand__to_dot_is_respected(): +def test_expand__to_dot_is_respected() -> None: s = search.Search().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() -def test_execute_uses_cache(): +def test_execute_uses_cache() -> None: s = search.Search() r = object() s._response = r @@ -48,7 +48,7 @@ def test_execute_uses_cache(): assert r is s.execute() -def test_cache_can_be_ignored(mock_client): +def test_cache_can_be_ignored(mock_client) -> None: s = 
search.Search(using="mock") r = object() s._response = r @@ -57,27 +57,27 @@ def test_cache_can_be_ignored(mock_client): mock_client.search.assert_called_once_with(index=None, body={}) -def test_iter_iterates_over_hits(): +def test_iter_iterates_over_hits() -> None: s = search.Search() s._response = [1, 2, 3] assert [1, 2, 3] == list(s) -def test_cache_isnt_cloned(): +def test_cache_isnt_cloned() -> None: s = search.Search() s._response = object() assert not hasattr(s._clone(), "_response") -def test_search_starts_with_no_query(): +def test_search_starts_with_no_query() -> None: s = search.Search() assert s.query._proxied is None -def test_search_query_combines_query(): +def test_search_query_combines_query() -> None: s = search.Search() s2 = s.query("match", f=42) @@ -89,7 +89,7 @@ def test_search_query_combines_query(): assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)]) -def test_query_can_be_assigned_to(): +def test_query_can_be_assigned_to() -> None: s = search.Search() q = Q("match", title="python") @@ -113,7 +113,7 @@ def test_query_can_be_wrapped(): } == s.to_dict() -def test_using(): +def test_using() -> None: o = object() o2 = object() s = search.Search(using=o) @@ -123,19 +123,19 @@ def test_using(): assert s2._using is o2 -def test_methods_are_proxied_to_the_query(): +def test_methods_are_proxied_to_the_query() -> None: s = search.Search().query("match_all") assert s.query.to_dict() == {"match_all": {}} -def test_query_always_returns_search(): +def test_query_always_returns_search() -> None: s = search.Search() assert isinstance(s.query("match", f=42), search.Search) -def test_source_copied_on_clone(): +def test_source_copied_on_clone() -> None: s = search.Search().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -149,7 +149,7 @@ def test_source_copied_on_clone(): assert s3._clone()._source == ["some", "fields"] -def test_copy_clones(): +def test_copy_clones() -> None: from copy import copy s1 = search.Search().source(["some", "fields"]) @@ -159,7 +159,7 @@ def test_copy_clones(): assert s1 is not s2 -def test_aggs_allow_two_metric(): +def test_aggs_allow_two_metric() -> None: s = search.Search() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -201,7 +201,7 @@ def test_aggs_get_copied_on_change(): assert d == s4.to_dict() -def test_search_index(): +def test_search_index() -> None: s = search.Search(index="i") assert s._index == ["i"] s = s.index("i2") @@ -232,7 +232,7 @@ def test_search_index(): assert s2._index == ["i", "i2", "i3", "i4", "i5"] -def test_doc_type_document_class(): +def test_doc_type_document_class() -> None: class MyDocument(Document): pass @@ -257,7 +257,7 @@ def test_sort(): assert search.Search().to_dict() == s.to_dict() -def test_sort_by_score(): +def test_sort_by_score() -> None: s = search.Search() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -301,7 +301,7 @@ def test_collapse(): assert search.Search().to_dict() == s.to_dict() -def test_slice(): +def test_slice() -> None: s = search.Search() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -310,7 +310,7 @@ def test_slice(): assert {"from": 20, "size": 0} == s[20:0].to_dict() -def test_index(): +def test_index() -> None: s = search.Search() assert {"from": 3, "size": 1} == s[3].to_dict() @@ -445,13 +445,13 @@ def test_reverse(): assert d == s.to_dict() -def test_from_dict_doesnt_need_query(): +def test_from_dict_doesnt_need_query() -> 
None: s = search.Search.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() -def test_params_being_passed_to_search(mock_client): +def test_params_being_passed_to_search(mock_client) -> None: s = search.Search(using="mock") s = s.params(routing="42") s.execute() @@ -459,7 +459,7 @@ def test_params_being_passed_to_search(mock_client): mock_client.search.assert_called_once_with(index=None, body={}, routing="42") -def test_source(): +def test_source() -> None: assert {} == search.Search().source().to_dict() assert { @@ -488,7 +488,7 @@ def test_source_on_clone(): } == search.Search().source(False).filter("term", title="python").to_dict() -def test_source_on_clear(): +def test_source_on_clear() -> None: assert ( {} == search.Search() @@ -529,7 +529,7 @@ def test_suggest(): } == s.to_dict() -def test_exclude(): +def test_exclude() -> None: s = search.Search() s = s.exclude("match", title="python") @@ -542,7 +542,7 @@ def test_exclude(): } == s.to_dict() -def test_delete_by_query(mock_client): +def test_delete_by_query(mock_client) -> None: s = search.Search(using="mock").query("match", lang="java") s.delete() diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index 336f8fda..74030874 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -31,7 +31,7 @@ from opensearchpy.helpers.response import UpdateByQueryResponse -def test_ubq_starts_with_no_query(): +def test_ubq_starts_with_no_query() -> None: ubq = UpdateByQuery() assert ubq.query._proxied is None @@ -91,7 +91,7 @@ def test_complex_example(): } == ubq.to_dict() -def test_exclude(): +def test_exclude() -> None: ubq = UpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -140,13 +140,13 @@ def test_reverse(): assert d == ubq.to_dict() -def test_from_dict_doesnt_need_query(): +def test_from_dict_doesnt_need_query() -> None: ubq = UpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() -def test_params_being_passed_to_search(mock_client): +def test_params_being_passed_to_search(mock_client) -> None: ubq = UpdateByQuery(using="mock") ubq = ubq.params(routing="42") ubq.execute() @@ -172,7 +172,7 @@ def test_overwrite_script(): assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict() -def test_update_by_query_response_success(): +def test_update_by_query_response_success() -> None: ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index c651fe2f..358b9184 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -33,21 +33,21 @@ from opensearchpy.helpers import utils -def test_attrdict_pickle(): +def test_attrdict_pickle() -> None: ad = utils.AttrDict({}) pickled_ad = pickle.dumps(ad) assert ad == pickle.loads(pickled_ad) -def test_attrlist_pickle(): +def test_attrlist_pickle() -> None: al = utils.AttrList([]) pickled_al = pickle.dumps(al) assert al == pickle.loads(pickled_al) -def test_attrlist_slice(): +def test_attrlist_slice() -> None: class MyAttrDict(utils.AttrDict): pass @@ -64,7 +64,7 @@ def test_merge(): assert a == {"a": {"b": 123, "c": 47, "d": -12}, "e": [1, 2, 3]} -def test_merge_conflict(): +def test_merge_conflict() -> None: for d in ( {"a": 42}, {"a": {"b": 47}}, @@ -74,7 +74,7 @@ def 
test_merge_conflict(): utils.merge({"a": {"b": 42}}, d, True) -def test_attrdict_bool(): +def test_attrdict_bool() -> None: d = utils.AttrDict({}) assert not d @@ -82,7 +82,7 @@ def test_attrdict_bool(): assert d -def test_attrlist_items_get_wrapped_during_iteration(): +def test_attrlist_items_get_wrapped_during_iteration() -> None: al = utils.AttrList([1, object(), [1], {}]) ls = list(iter(al)) @@ -91,7 +91,7 @@ def test_attrlist_items_get_wrapped_during_iteration(): assert isinstance(ls[3], utils.AttrDict) -def test_serializer_deals_with_Attr_versions(): +def test_serializer_deals_with_Attr_versions() -> None: d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( @@ -99,7 +99,7 @@ def test_serializer_deals_with_Attr_versions(): ) -def test_serializer_deals_with_objects_with_to_dict(): +def test_serializer_deals_with_objects_with_to_dict() -> None: class MyClass(object): def to_dict(self): return 42 @@ -107,13 +107,13 @@ def to_dict(self): assert serializer.serializer.dumps(MyClass()) == "42" -def test_recursive_to_dict(): +def test_recursive_to_dict() -> None: assert utils.recursive_to_dict({"k": [1, (1.0, {"v": Q("match", key="val")})]}) == { "k": [1, (1.0, {"v": {"match": {"key": "val"}}})] } -def test_attrdict_get(): +def test_attrdict_get() -> None: a = utils.AttrDict({"a": {"b": 42, "c": 47}}) assert a.get("a", {}).get("b", 0) == 42 assert a.get("a", {}).get("e", 0) == 0 diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index e8d9f5aa..1565b352 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -46,7 +46,7 @@ class Author(InnerDoc): name = Text(required=True) email = Text(required=True) - def clean(self): + def clean(self) -> None: print(self, type(self), self.name) if self.name.lower() not in self.email: raise ValidationException("Invalid email!") @@ -74,7 +74,7 @@ class Log(Document): data = Text() -def test_required_int_can_be_0(): +def test_required_int_can_be_0() -> None: class DT(Document): i = Integer(required=True) @@ -82,7 +82,7 @@ class DT(Document): assert dt.full_clean() is None -def test_required_field_cannot_be_empty_list(): +def test_required_field_cannot_be_empty_list() -> None: class DT(Document): i = Integer(required=True) @@ -91,7 +91,7 @@ class DT(Document): dt.full_clean() -def test_validation_works_for_lists_of_values(): +def test_validation_works_for_lists_of_values() -> None: class DT(Document): i = Date(required=True) @@ -103,21 +103,21 @@ class DT(Document): assert None is dt.full_clean() -def test_field_with_custom_clean(): +def test_field_with_custom_clean() -> None: ls = Log() ls.full_clean() assert isinstance(ls.timestamp, datetime) -def test_empty_object(): +def test_empty_object() -> None: d = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) d.inner = {} d.full_clean() -def test_missing_required_field_raises_validation_exception(): +def test_missing_required_field_raises_validation_exception() -> None: d = BlogPost() with raises(ValidationException): d.full_clean() @@ -132,7 +132,7 @@ def test_missing_required_field_raises_validation_exception(): d.full_clean() -def test_boolean_doesnt_treat_false_as_empty(): +def test_boolean_doesnt_treat_false_as_empty() -> None: d = BlogPostWithStatus() with raises(ValidationException): d.full_clean() @@ -142,7 +142,7 @@ def test_boolean_doesnt_treat_false_as_empty(): d.full_clean() -def 
test_custom_validation_on_nested_gets_run(): +def test_custom_validation_on_nested_gets_run() -> None: d = BlogPost(authors=[Author(name="Guian", email="king@example.com")], created=None) assert isinstance(d.authors[0], Author) @@ -151,7 +151,7 @@ def test_custom_validation_on_nested_gets_run(): d.full_clean() -def test_accessing_known_fields_returns_empty_value(): +def test_accessing_known_fields_returns_empty_value() -> None: d = BlogPost() assert [] == d.authors @@ -161,7 +161,7 @@ def test_accessing_known_fields_returns_empty_value(): assert None is d.authors[0].email -def test_empty_values_are_not_serialized(): +def test_empty_values_are_not_serialized() -> None: d = BlogPost( authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None ) diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index c49353c5..2212b070 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -44,7 +44,7 @@ ({"gt": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_contains(kwargs, item): +def test_range_contains(kwargs, item) -> None: assert item in Range(**kwargs) @@ -72,7 +72,7 @@ def test_range_not_contains(kwargs, item): ((), {"gt": 1, "gte": 1}), ], ) -def test_range_raises_value_error_on_wrong_params(args, kwargs): +def test_range_raises_value_error_on_wrong_params(args, kwargs) -> None: with pytest.raises(ValueError): Range(*args, **kwargs) @@ -86,7 +86,7 @@ def test_range_raises_value_error_on_wrong_params(args, kwargs): (Range(lt=42), None, False), ], ) -def test_range_lower(range, lower, inclusive): +def test_range_lower(range, lower, inclusive) -> None: assert (lower, inclusive) == range.lower @@ -99,5 +99,5 @@ def test_range_lower(range, lower, inclusive): (Range(gt=42), None, False), ], ) -def test_range_upper(range, upper, inclusive): +def test_range_upper(range, upper, inclusive) -> None: assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index b324b53c..d7fef3e8 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -48,26 +48,26 @@ from .test_cases import SkipTest, TestCase -def requires_numpy_and_pandas(): +def requires_numpy_and_pandas() -> None: if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): - def test_datetime_serialization(self): + def test_datetime_serialization(self) -> None: self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) - def test_decimal_serialization(self): + def test_decimal_serialization(self) -> None: requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): raise SkipTest("Float rounding is broken in 2.6.") self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) - def test_uuid_serialization(self): + def test_uuid_serialization(self) -> None: self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( @@ -75,12 +75,12 @@ def test_uuid_serialization(self): ), ) - def test_serializes_numpy_bool(self): + def test_serializes_numpy_bool(self) -> None: requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) - def test_serializes_numpy_integers(self): + def test_serializes_numpy_integers(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() 
@@ -101,7 +101,7 @@ def test_serializes_numpy_integers(self): ): self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') - def test_serializes_numpy_floats(self): + def test_serializes_numpy_floats(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() @@ -114,7 +114,7 @@ def test_serializes_numpy_floats(self): ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$' ) - def test_serializes_numpy_datetime(self): + def test_serializes_numpy_datetime(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -122,7 +122,7 @@ def test_serializes_numpy_datetime(self): JSONSerializer().dumps({"d": np.datetime64("2010-10-01T02:30:00")}), ) - def test_serializes_numpy_ndarray(self): + def test_serializes_numpy_ndarray(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -135,7 +135,7 @@ def test_serializes_numpy_ndarray(self): JSONSerializer().dumps({"d": np.zeros((2, 2), dtype=np.uint8)}), ) - def test_serializes_numpy_nan_to_nan(self): + def test_serializes_numpy_nan_to_nan(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -143,7 +143,7 @@ def test_serializes_numpy_nan_to_nan(self): JSONSerializer().dumps({"d": np.nan}), ) - def test_serializes_pandas_timestamp(self): + def test_serializes_pandas_timestamp(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -151,7 +151,7 @@ def test_serializes_pandas_timestamp(self): JSONSerializer().dumps({"d": pd.Timestamp("2010-10-01T02:30:00")}), ) - def test_serializes_pandas_series(self): + def test_serializes_pandas_series(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -159,7 +159,7 @@ def test_serializes_pandas_series(self): JSONSerializer().dumps({"d": pd.Series(["a", "b", "c", "d"])}), ) - def test_serializes_pandas_na(self): + def test_serializes_pandas_na(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 @@ -169,14 +169,14 @@ def test_serializes_pandas_na(self): JSONSerializer().dumps({"d": pd.NA}), ) - def test_raises_serialization_error_pandas_nat(self): + def test_raises_serialization_error_pandas_nat(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NaT"): raise SkipTest("pandas.NaT required") self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) - def test_serializes_pandas_category(self): + def test_serializes_pandas_category(self) -> None: requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) @@ -191,34 +191,34 @@ def test_serializes_pandas_category(self): JSONSerializer().dumps({"d": cat}), ) - def test_raises_serialization_error_on_dump_error(self): + def test_raises_serialization_error_on_dump_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().dumps, object()) - def test_raises_serialization_error_on_load_error(self): + def test_raises_serialization_error_on_load_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", TextSerializer().dumps("你好")) - def test_raises_serialization_error_on_dump_error(self): + def 
test_raises_serialization_error_on_dump_error(self) -> None: self.assertRaises(SerializationError, TextSerializer().dumps, {}) class TestDeserializer(TestCase): - def setup_method(self, _): + def setup_method(self, _) -> None: self.de = Deserializer(DEFAULT_SERIALIZERS) - def test_deserializes_json_by_default(self): + def test_deserializes_json_by_default(self) -> None: self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}')) - def test_deserializes_text_with_correct_ct(self): + def test_deserializes_text_with_correct_ct(self) -> None: self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain") ) @@ -227,10 +227,10 @@ def test_deserializes_text_with_correct_ct(self): self.de.loads('{"some":"data"}', "text/plain; charset=whatever"), ) - def test_raises_serialization_error_on_unknown_mimetype(self): + def test_raises_serialization_error_on_unknown_mimetype(self) -> None: self.assertRaises(SerializationError, self.de.loads, "{}", "text/html") def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized( self, - ): + ) -> None: self.assertRaises(ImproperlyConfigured, Deserializer, {}) diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index 164e6a5d..d3965fed 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -60,7 +60,7 @@ def get_client(**kwargs): return new_client -def setup_module(): +def setup_module() -> None: get_client() diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index 2d5c4155..32550a03 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -32,19 +32,19 @@ class TestUnicode(OpenSearchTestCase): - def test_indices_analyze(self): + def test_indices_analyze(self) -> None: self.client.indices.analyze(body='{"text": "привет"}') class TestBulk(OpenSearchTestCase): - def test_bulk_works_with_string_body(self): + def test_bulk_works_with_string_body(self) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = self.client.bulk(body=docs) self.assertFalse(response["errors"]) self.assertEqual(1, len(response["items"])) - def test_bulk_works_with_bytestring_body(self): + def test_bulk_works_with_bytestring_body(self) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = self.client.bulk(body=docs) diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 4e167d34..8be79616 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -30,6 +30,7 @@ from pytest import fixture +from opensearchpy.client import OpenSearch from opensearchpy.connection.connections import add_connection from opensearchpy.helpers import bulk from opensearchpy.helpers.test import get_test_client @@ -45,7 +46,7 @@ @fixture(scope="session") -def client(): +def client() -> OpenSearch: client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) add_connection("default", client) return client @@ -106,7 +107,7 @@ def pull_request(write_client): @fixture -def setup_ubq_tests(client): +def setup_ubq_tests(client) -> str: index = "test-git" create_git_index(client, index) bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git 
a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index fcb65fde..7fb8f234 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -26,6 +26,8 @@ # under the License. +from typing import Tuple + from mock import patch from opensearchpy import TransportError, helpers @@ -37,8 +39,11 @@ class FailingBulkClient(object): def __init__( - self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) - ): + self, + client, + fail_at: Tuple[int] = (2,), + fail_with=TransportError(599, "Error!", {}), + ) -> None: self.client = client self._called = 0 self._fail_at = fail_at @@ -53,7 +58,7 @@ def bulk(self, *args, **kwargs): class TestStreamingBulk(OpenSearchTestCase): - def test_actions_remain_unchanged(self): + def test_actions_remain_unchanged(self) -> None: actions = [{"_id": 1}, {"_id": 2}] for ok, item in helpers.streaming_bulk( self.client, actions, index="test-index" @@ -61,7 +66,7 @@ def test_actions_remain_unchanged(self): self.assertTrue(ok) self.assertEqual([{"_id": 1}, {"_id": 2}], actions) - def test_all_documents_get_inserted(self): + def test_all_documents_get_inserted(self) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( self.client, docs, index="test-index", refresh=True @@ -73,7 +78,7 @@ def test_all_documents_get_inserted(self): {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) - def test_all_errors_from_chunk_are_raised_on_failure(self): + def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: self.client.indices.create( "i", { @@ -115,7 +120,7 @@ def test_different_op_types(self): self.assertEqual({"answer": 42}, self.client.get(index="i", id=42)["_source"]) self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"]) - def test_transport_error_can_becaught(self): + def test_transport_error_can_becaught(self) -> None: failing_client = FailingBulkClient(self.client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -151,7 +156,7 @@ def test_transport_error_can_becaught(self): results[1][1], ) - def test_rejected_documents_are_retried(self): + def test_rejected_documents_are_retried(self) -> None: failing_client = FailingBulkClient( self.client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -178,7 +183,7 @@ def test_rejected_documents_are_retried(self): self.assertEqual({"value": 3, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) - def test_rejected_documents_are_retried_at_most_max_retries_times(self): + def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: failing_client = FailingBulkClient( self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -206,7 +211,7 @@ def test_rejected_documents_are_retried_at_most_max_retries_times(self): self.assertEqual({"value": 2, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) - def test_transport_error_is_raised_with_max_retries(self): + def test_transport_error_is_raised_with_max_retries(self) -> None: failing_client = FailingBulkClient( self.client, fail_at=(1, 2, 3, 4), @@ -230,7 +235,7 @@ def streaming_bulk(): class TestBulk(OpenSearchTestCase): - def test_bulk_works_with_single_item(self): + def test_bulk_works_with_single_item(self) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = helpers.bulk( self.client, docs, 
index="test-index", refresh=True @@ -243,7 +248,7 @@ def test_bulk_works_with_single_item(self): {"answer": 42}, self.client.get(index="test-index", id=1)["_source"] ) - def test_all_documents_get_inserted(self): + def test_all_documents_get_inserted(self) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -256,7 +261,7 @@ def test_all_documents_get_inserted(self): {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) - def test_stats_only_reports_numbers(self): + def test_stats_only_reports_numbers(self) -> None: docs = [{"answer": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True, stats_only=True @@ -293,7 +298,7 @@ def test_errors_are_reported_correctly(self): or "mapper_parsing_exception" in repr(error["index"]["error"]) ) - def test_error_is_raised(self): + def test_error_is_raised(self) -> None: self.client.indices.create( "i", { @@ -379,7 +384,7 @@ class TestScan(OpenSearchTestCase): }, ] - def teardown_method(self, m): + def teardown_method(self, m) -> None: self.client.transport.perform_request("DELETE", "/_search/scroll/_all") super(TestScan, self).teardown_method(m) @@ -477,7 +482,7 @@ def test_initial_search_error(self): self.assertEqual(data, [{"search_data": 1}]) client_mock.scroll.assert_not_called() - def test_no_scroll_id_fast_route(self): + def test_no_scroll_id_fast_route(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = {"no": "_scroll_id"} data = list(helpers.scan(self.client, index="test_index")) @@ -654,7 +659,7 @@ def setup_method(self, _): ) self.client.bulk(bulk, refresh=True) - def test_reindex_passes_kwargs_to_scan_and_bulk(self): + def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: helpers.reindex( self.client, "test_index", @@ -673,7 +678,7 @@ def test_reindex_passes_kwargs_to_scan_and_bulk(self): self.client.get(index="prod_index", id=42)["_source"], ) - def test_reindex_accepts_a_query(self): + def test_reindex_accepts_a_query(self) -> None: helpers.reindex( self.client, "test_index", @@ -692,7 +697,7 @@ def test_reindex_accepts_a_query(self): self.client.get(index="prod_index", id=42)["_source"], ) - def test_all_documents_get_moved(self): + def test_all_documents_get_moved(self) -> None: helpers.reindex(self.client, "test_index", "prod_index") self.client.indices.refresh() @@ -737,7 +742,7 @@ def setup_method(self, _): ) self.client.indices.refresh(index="test-index") - def test_children_are_reindexed_correctly(self): + def test_children_are_reindexed_correctly(self) -> None: helpers.reindex(self.client, "test-index", "real-index") self.assertEqual( diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index d0073c53..2da9388a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -28,7 +28,7 @@ from opensearchpy import analyzer, token_filter, tokenizer -def test_simulate_with_just__builtin_tokenizer(client): +def test_simulate_with_just__builtin_tokenizer(client) -> None: a = analyzer("my-analyzer", tokenizer="keyword") tokens = a.simulate("Hello World!", using=client).tokens @@ -36,7 +36,7 @@ def test_simulate_with_just__builtin_tokenizer(client): assert tokens[0].token == "Hello World!" 
-def test_simulate_complex(client): +def test_simulate_complex(client) -> None: a = analyzer( "my-analyzer", tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"), @@ -49,7 +49,7 @@ def test_simulate_complex(client): assert ["this", "works"] == [t.token for t in tokens] -def test_simulate_builtin(client): +def test_simulate_builtin(client) -> None: a = analyzer("my-analyzer", "english") tokens = a.simulate("fixes running").tokens diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index 6a507a9f..7bf9c27e 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -28,12 +28,12 @@ from opensearchpy.helpers.search import Q, Search -def test_count_all(data_client): +def test_count_all(data_client) -> None: s = Search(using=data_client).index("git") assert 53 == s.count() -def test_count_prefetch(data_client, mocker): +def test_count_prefetch(data_client, mocker) -> None: mocker.spy(data_client, "count") search = Search(using=data_client).index("git") @@ -46,7 +46,7 @@ def test_count_prefetch(data_client, mocker): assert data_client.count.call_count == 1 -def test_count_filter(data_client): +def test_count_filter(data_client) -> None: s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas")) # initial commit + repo document assert 2 == s.count() diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 91e816b4..63302b7a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -27,6 +27,8 @@ from __future__ import unicode_literals +from typing import Any, Dict + def create_flat_git_index(client, index): # we will use user on several places @@ -1093,7 +1095,7 @@ def create_git_index(client, index): ] -def flatten_doc(d): +def flatten_doc(d) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1102,7 +1104,7 @@ def flatten_doc(d): FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d): +def create_test_git_data(d) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index f459afb2..0da4b856 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -161,7 +161,7 @@ def test_serialization(write_client): } -def test_nested_inner_hits_are_wrapped_properly(pull_request): +def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: history_query = Q( "nested", path="comments.history", @@ -189,7 +189,7 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request): assert "score" in history.meta -def test_nested_inner_hits_are_deserialized_properly(pull_request): +def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -204,7 +204,7 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request): assert isinstance(pr.comments[0].created_at, datetime) -def test_nested_top_hits_are_wrapped_properly(pull_request): +def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: s = PullRequest.search() 
s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -216,7 +216,7 @@ def test_nested_top_hits_are_wrapped_properly(pull_request): assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -def test_update_object_field(write_client): +def test_update_object_field(write_client) -> None: Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -236,7 +236,7 @@ def test_update_object_field(write_client): assert w.ranked == {"test1": 0.1, "topic2": 0.2} -def test_update_script(write_client): +def test_update_script(write_client) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -246,7 +246,7 @@ def test_update_script(write_client): assert w.views == 47 -def test_update_retry_on_conflict(write_client): +def test_update_retry_on_conflict(write_client) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -261,7 +261,7 @@ def test_update_retry_on_conflict(write_client): @pytest.mark.parametrize("retry_on_conflict", [None, 0]) -def test_update_conflicting_version(write_client, retry_on_conflict): +def test_update_conflicting_version(write_client, retry_on_conflict) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -278,7 +278,7 @@ def test_update_conflicting_version(write_client, retry_on_conflict): ) -def test_save_and_update_return_doc_meta(write_client): +def test_save_and_update_return_doc_meta(write_client) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = w.save(return_doc_meta=True) @@ -302,31 +302,31 @@ def test_save_and_update_return_doc_meta(write_client): assert resp.keys().__contains__("_version") -def test_init(write_client): +def test_init(write_client) -> None: Repository.init(index="test-git") assert write_client.indices.exists(index="test-git") -def test_get_raises_404_on_index_missing(data_client): +def test_get_raises_404_on_index_missing(data_client) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php", index="not-there") -def test_get_raises_404_on_non_existent_id(data_client): +def test_get_raises_404_on_non_existent_id(data_client) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php") -def test_get_returns_none_if_404_ignored(data_client): +def test_get_returns_none_if_404_ignored(data_client) -> None: assert None is Repository.get("opensearch-dsl-php", ignore=404) -def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client): +def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client) -> None: assert None is Repository.get("42", index="not-there", ignore=404) -def test_get(data_client): +def test_get(data_client) -> None: opensearch_repo = Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -334,15 +334,15 @@ def test_get(data_client): assert datetime(2014, 3, 3) == opensearch_repo.created_at -def test_exists_return_true(data_client): +def test_exists_return_true(data_client) -> None: assert Repository.exists("opensearch-py") -def test_exists_false(data_client): +def test_exists_false(data_client) -> None: assert not Repository.exists("opensearch-dsl-php") -def test_get_with_tz_date(data_client): +def test_get_with_tz_date(data_client) -> None: first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -354,7 +354,7 @@ def test_get_with_tz_date(data_client): ) -def 
test_save_with_tz_date(data_client): +def test_save_with_tz_date(data_client) -> None: tzinfo = timezone("Europe/Prague") first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -381,7 +381,7 @@ def test_save_with_tz_date(data_client): ] -def test_mget(data_client): +def test_mget(data_client) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -389,23 +389,23 @@ def test_mget(data_client): assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_mget_raises_exception_when_missing_param_is_invalid(data_client): +def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None: with raises(ValueError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -def test_mget_raises_404_when_missing_param_is_raise(data_client): +def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None: with raises(NotFoundError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client): +def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_update_works_from_search_response(data_client): +def test_update_works_from_search_response(data_client) -> None: opensearch_repo = Repository.search().execute()[0] opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -416,7 +416,7 @@ def test_update_works_from_search_response(data_client): assert "opensearch" == new_version.owner.name -def test_update(data_client): +def test_update(data_client) -> None: opensearch_repo = Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -440,7 +440,7 @@ def test_update(data_client): assert "primary_term" in new_version.meta -def test_save_updates_existing_doc(data_client): +def test_save_updates_existing_doc(data_client) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -453,7 +453,7 @@ def test_save_updates_existing_doc(data_client): assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -def test_save_automatically_uses_seq_no_and_primary_term(data_client): +def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -461,7 +461,7 @@ def test_save_automatically_uses_seq_no_and_primary_term(data_client): opensearch_repo.save() -def test_delete_automatically_uses_seq_no_and_primary_term(data_client): +def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -469,7 +469,7 @@ def test_delete_automatically_uses_seq_no_and_primary_term(data_client): opensearch_repo.delete() -def assert_doc_equals(expected, actual): +def assert_doc_equals(expected, actual) -> None: for f in expected: assert f in actual assert actual[f] == expected[f] @@ -490,7 +490,7 @@ def test_can_save_to_different_index(write_client): ) -def test_save_without_skip_empty_will_include_empty_fields(write_client): +def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, 
meta={"id": 42}) assert test_repo.save(index="test-document", skip_empty=False) @@ -505,7 +505,7 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client): ) -def test_delete(write_client): +def test_delete(write_client) -> None: write_client.create( index="test-document", id="opensearch-py", @@ -526,11 +526,11 @@ def test_delete(write_client): ) -def test_search(data_client): +def test_search(data_client) -> None: assert Repository.search().count() == 1 -def test_search_returns_proper_doc_classes(data_client): +def test_search_returns_proper_doc_classes(data_client) -> None: result = Repository.search().execute() opensearch_repo = result.hits[0] @@ -539,7 +539,7 @@ def test_search_returns_proper_doc_classes(data_client): assert opensearch_repo.owner.name == "opensearch" -def test_refresh_mapping(data_client): +def test_refresh_mapping(data_client) -> None: class Commit(Document): class Index: name = "git" @@ -553,7 +553,7 @@ class Index: assert isinstance(Commit._index._mapping["committed_date"], Date) -def test_highlight_in_meta(data_client): +def test_highlight_in_meta(data_client) -> None: commit = ( Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index f7469d18..4656d4b2 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -131,7 +131,7 @@ class PRSearch(FacetedSearch): return PRSearch -def test_facet_with_custom_metric(data_client): +def test_facet_with_custom_metric(data_client) -> None: ms = MetricSearch() r = ms.execute() @@ -140,7 +140,7 @@ def test_facet_with_custom_metric(data_client): assert dates[0] == 1399038439000 -def test_nested_facet(pull_request, pr_search_cls): +def test_nested_facet(pull_request, pr_search_cls) -> None: prs = pr_search_cls() r = prs.execute() @@ -148,7 +148,7 @@ def test_nested_facet(pull_request, pr_search_cls): assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -def test_nested_facet_with_filter(pull_request, pr_search_cls): +def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = prs.execute() @@ -160,7 +160,7 @@ def test_nested_facet_with_filter(pull_request, pr_search_cls): assert not r.hits -def test_datehistogram_facet(data_client, repo_search_cls): +def test_datehistogram_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = rs.execute() @@ -168,7 +168,7 @@ def test_datehistogram_facet(data_client, repo_search_cls): assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -def test_boolean_facet(data_client, repo_search_cls): +def test_boolean_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = rs.execute() @@ -180,7 +180,7 @@ def test_boolean_facet(data_client, repo_search_cls): def test_empty_search_finds_everything( data_client, opensearch_version, commit_search_cls -): +) -> None: cs = commit_search_cls() r = cs.execute() assert r.hits.total.value == 52 @@ -226,7 +226,7 @@ def test_empty_search_finds_everything( def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = cs.execute() @@ -272,7 +272,7 @@ def test_term_filters_are_shown_as_selected_and_data_is_filtered( def 
test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"deletions": "better"}) r = cs.execute() @@ -280,7 +280,7 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -def test_pagination(data_client, commit_search_cls): +def test_pagination(data_client, commit_search_cls) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 84525b01..8593459c 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -34,7 +34,7 @@ class Post(Document): published_from = Date() -def test_index_template_works(write_client): +def test_index_template_works(write_client) -> None: it = IndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -55,7 +55,7 @@ def test_index_template_works(write_client): } == write_client.indices.get_mapping(index="test-blog") -def test_index_can_be_saved_even_with_settings(write_client): +def test_index_can_be_saved_even_with_settings(write_client) -> None: i = Index("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) i.save() @@ -67,12 +67,12 @@ def test_index_can_be_saved_even_with_settings(write_client): ) -def test_index_exists(data_client): +def test_index_exists(data_client) -> None: assert Index("git").exists() assert not Index("not-there").exists() -def test_index_can_be_created_with_settings_and_mappings(write_client): +def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: i = Index("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -97,7 +97,7 @@ def test_index_can_be_created_with_settings_and_mappings(write_client): } -def test_delete(write_client): +def test_delete(write_client) -> None: write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -108,7 +108,7 @@ def test_delete(write_client): assert not write_client.indices.exists(index="test-index") -def test_multiple_indices_with_same_doc_type_work(write_client): +def test_multiple_indices_with_same_doc_type_work(write_client) -> None: i1 = Index("test-index-1", using=write_client) i2 = Index("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index a9278159..50a80dea 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -31,7 +31,7 @@ from opensearchpy.helpers import analysis, mapping -def test_mapping_saved_into_opensearch(write_client): +def test_mapping_saved_into_opensearch(write_client) -> None: m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -51,7 +51,9 @@ def test_mapping_saved_into_opensearch(write_client): } == write_client.indices.get_mapping(index="test-mapping") -def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_client): +def test_mapping_saved_into_opensearch_when_index_already_exists_closed( + write_client, +) -> None: m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -76,7 +78,7 @@ 
def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_cl def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client, -): +) -> None: m = mapping.Mapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index 90aabbc0..5e45645a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -62,7 +62,7 @@ class Index: name = "flat-git" -def test_filters_aggregation_buckets_are_accessible(data_client): +def test_filters_aggregation_buckets_are_accessible(data_client) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -83,7 +83,7 @@ def test_filters_aggregation_buckets_are_accessible(data_client): ) -def test_top_hits_are_wrapped_in_response(data_client): +def test_top_hits_are_wrapped_in_response(data_client) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -99,7 +99,7 @@ def test_top_hits_are_wrapped_in_response(data_client): assert isinstance(hits[0], Commit) -def test_inner_hits_are_wrapped_in_response(data_client): +def test_inner_hits_are_wrapped_in_response(data_client) -> None: s = Search(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -110,7 +110,7 @@ def test_inner_hits_are_wrapped_in_response(data_client): assert repr(commit.meta.inner_hits.repo[0]).startswith(" None: repos = list(Repository.search().scan()) assert 1 == len(repos) @@ -118,7 +118,7 @@ def test_scan_respects_doc_types(data_client): assert repos[0].organization == "opensearch" -def test_scan_iterates_through_all_docs(data_client): +def test_scan_iterates_through_all_docs(data_client) -> None: s = Search(index="flat-git") commits = list(s.scan()) @@ -127,7 +127,7 @@ def test_scan_iterates_through_all_docs(data_client): assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits} -def test_response_is_cached(data_client): +def test_response_is_cached(data_client) -> None: s = Repository.search() repos = list(s) @@ -135,7 +135,7 @@ def test_response_is_cached(data_client): assert s._response.hits == repos -def test_multi_search(data_client): +def test_multi_search(data_client) -> None: s1 = Repository.search() s2 = Search(index="flat-git") @@ -152,7 +152,7 @@ def test_multi_search(data_client): assert r2._search is s2 -def test_multi_missing(data_client): +def test_multi_missing(data_client) -> None: s1 = Repository.search() s2 = Search(index="flat-git") s3 = Search(index="does_not_exist") @@ -175,7 +175,7 @@ def test_multi_missing(data_client): assert r3 is None -def test_raw_subfield_can_be_used_in_aggs(data_client): +def test_raw_subfield_can_be_used_in_aggs(data_client) -> None: s = Search(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index 81a75802..fb46e956 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -29,7 +29,7 @@ from 
opensearchpy.helpers.update_by_query import UpdateByQuery -def test_update_by_query_no_script(write_client, setup_ubq_tests): +def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -48,7 +48,7 @@ def test_update_by_query_no_script(write_client, setup_ubq_tests): assert response.success() -def test_update_by_query_with_script(write_client, setup_ubq_tests): +def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -65,7 +65,7 @@ def test_update_by_query_with_script(write_client, setup_ubq_tests): assert response.version_conflicts == 0 -def test_delete_by_query_with_script(write_client, setup_ubq_tests): +def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index 3a503e43..d127edb1 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -39,7 +39,7 @@ def test_create_destination(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_get_destination(self): + def test_get_destination(self) -> None: # Create a dummy destination self.test_create_destination() @@ -119,7 +119,7 @@ def test_create_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_search_monitor(self): + def test_search_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() @@ -137,7 +137,7 @@ def test_search_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_get_monitor(self): + def test_get_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() @@ -161,7 +161,7 @@ def test_get_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_run_monitor(self): + def test_run_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index 68f61c7b..ed8c0b57 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -64,7 +64,7 @@ class TestIndexManagementPlugin(OpenSearchTestCase): } } - def test_create_policy(self): + def test_create_policy(self) -> None: # Test to create policy response = self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -73,7 +73,7 @@ def test_create_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - def test_get_policy(self): + def test_get_policy(self) -> None: # Create a policy self.test_create_policy() @@ -84,7 +84,7 @@ def test_get_policy(self): self.assertIn("_id", response) self.assertEqual(response["_id"], self.POLICY_NAME) - def test_update_policy(self): + def test_update_policy(self) -> None: # Create a policy self.test_create_policy() @@ -106,7 +106,7 @@ def test_update_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - def test_delete_policy(self): + def test_delete_policy(self) -> None: # Create a policy 
self.test_create_policy() diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index e4c5cb3f..ba16d044 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -142,7 +142,7 @@ class YamlRunner: - def __init__(self, client): + def __init__(self, client) -> None: self.client = client self.last_response = None @@ -151,7 +151,7 @@ def __init__(self, client): self._teardown_code = None self._state = {} - def use_spec(self, test_spec): + def use_spec(self, test_spec) -> None: self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) @@ -174,7 +174,7 @@ def setup(self): if self._setup_code: self.run_code(self._setup_code) - def teardown(self): + def teardown(self) -> None: if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) @@ -189,10 +189,10 @@ def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 99 for v in version) return OPENSEARCH_VERSION - def section(self, name): + def section(self, name) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - def run(self): + def run(self) -> None: try: self.setup() self.section("test") @@ -203,7 +203,7 @@ def run(self): except Exception: pass - def run_code(self, test): + def run_code(self, test) -> None: """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 @@ -215,7 +215,7 @@ def run_code(self, test): else: raise RuntimeError("Invalid action type %r" % (action_type,)) - def run_do(self, action): + def run_do(self, action) -> None: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -281,7 +281,7 @@ def run_do(self, action): % (warn, caught_warnings) ) - def run_catch(self, catch, exception): + def run_catch(self, catch, exception) -> None: if catch == "param": assert isinstance(exception, TypeError) return @@ -296,7 +296,7 @@ def run_catch(self, catch, exception): ) is not None self.last_response = exception.info - def run_skip(self, skip): + def run_skip(self, skip) -> None: global IMPLEMENTED_FEATURES if "features" in skip: @@ -318,32 +318,32 @@ def run_skip(self, skip): if min_version <= (self.opensearch_version()) <= max_version: pytest.skip(reason) - def run_gt(self, action): + def run_gt(self, action) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value - def run_gte(self, action): + def run_gte(self, action) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value - def run_lt(self, action): + def run_lt(self, action) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value - def run_lte(self, action): + def run_lte(self, action) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value - def run_set(self, action): + def run_set(self, action) -> None: for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) - def run_is_false(self, action): + def run_is_false(self, action) -> None: try: value = self._lookup(action) except AssertionError: @@ -351,17 +351,17 @@ def run_is_false(self, action): else: assert value in FALSEY_VALUES - def run_is_true(self, action): + def run_is_true(self, action) -> None: value = self._lookup(action) assert value not 
in FALSEY_VALUES - def run_length(self, action): + def run_length(self, action) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) - def run_match(self, action): + def run_match(self, action) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) @@ -379,7 +379,7 @@ def run_match(self, action): else: self._assert_match_equals(value, expected) - def run_contains(self, action): + def run_contains(self, action) -> None: for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] @@ -387,7 +387,7 @@ def run_contains(self, action): if expected not in value: raise AssertionError("%s is not contained by %s" % (expected, value)) - def run_transform_and_set(self, action): + def run_transform_and_set(self, action) -> None: for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: @@ -449,10 +449,10 @@ def _lookup(self, path): value = value[step] return value - def _feature_enabled(self, name): + def _feature_enabled(self, name) -> bool: return False - def _assert_match_equals(self, a, b): + def _assert_match_equals(self, a, b) -> None: # Handle for large floating points with 'E' if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a): a = repr(a).replace("e+", "E") @@ -533,7 +533,7 @@ def sync_runner(sync_client): if not RUN_ASYNC_REST_API_TESTS: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - def test_rest_api_spec(test_spec, sync_runner): + def test_rest_api_spec(test_spec, sync_runner) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py index e597c6ac..94684ffb 100644 --- a/test_opensearchpy/test_server_secured/test_clients.py +++ b/test_opensearchpy/test_server_secured/test_clients.py @@ -15,7 +15,7 @@ class TestSecurity(TestCase): - def test_security(self): + def test_security(self) -> None: client = OpenSearch( OPENSEARCH_URL, http_auth=("admin", "admin"), diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 90283af8..5c719953 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -36,15 +36,15 @@ class TestSecurityPlugin(TestCase): USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - def setUp(self): + def setUp(self) -> None: self.client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) add_connection("default", self.client) - def tearDown(self): + def tearDown(self) -> None: if self.client: self.client.close() - def test_create_role(self): + def test_create_role(self) -> None: # Test to create role response = self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -53,7 +53,7 @@ def test_create_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_create_role_with_body_param_empty(self): + def test_create_role_with_body_param_empty(self) -> None: try: self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as 
error: @@ -61,7 +61,7 @@ def test_create_role_with_body_param_empty(self): else: assert False - def test_get_role(self): + def test_get_role(self) -> None: # Create a role self.test_create_role() @@ -71,7 +71,7 @@ def test_get_role(self): self.assertNotIn("errors", response) self.assertIn(self.ROLE_NAME, response) - def test_update_role(self): + def test_update_role(self) -> None: # Create a role self.test_create_role() @@ -84,7 +84,7 @@ def test_update_role(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - def test_delete_role(self): + def test_delete_role(self) -> None: # Create a role self.test_create_role() @@ -97,7 +97,7 @@ def test_delete_role(self): with self.assertRaises(NotFoundError): response = self.client.security.get_role(self.ROLE_NAME) - def test_create_user(self): + def test_create_user(self) -> None: # Test to create user response = self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -106,7 +106,7 @@ def test_create_user(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_create_user_with_body_param_empty(self): + def test_create_user_with_body_param_empty(self) -> None: try: self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -129,7 +129,7 @@ def test_create_user_with_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_get_user(self): + def test_get_user(self) -> None: # Create a user self.test_create_user() @@ -139,7 +139,7 @@ def test_get_user(self): self.assertNotIn("errors", response) self.assertIn(self.USER_NAME, response) - def test_update_user(self): + def test_update_user(self) -> None: # Create a user self.test_create_user() @@ -152,7 +152,7 @@ def test_update_user(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - def test_delete_user(self): + def test_delete_user(self) -> None: # Create a user self.test_create_user() @@ -165,12 +165,12 @@ def test_delete_user(self): with self.assertRaises(NotFoundError): response = self.client.security.get_user(self.USER_NAME) - def test_health_check(self): + def test_health_check(self) -> None: response = self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) - def test_health(self): + def test_health(self) -> None: response = self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -203,14 +203,14 @@ def test_health(self): }, } - def test_update_audit_config(self): + def test_update_audit_config(self) -> None: response = self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - def test_update_audit_configuration(self): + def test_update_audit_configuration(self) -> None: response = self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 2c0892cf..a69a7cf0 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -42,7 +42,7 @@ class DummyConnection(Connection): - def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = 
kwargs.pop("headers", {}) @@ -108,7 +108,7 @@ def perform_request(self, *args, **kwargs): class TestHostsInfoCallback(TestCase): - def test_cluster_manager_only_nodes_are_ignored(self): + def test_cluster_manager_only_nodes_are_ignored(self) -> None: nodes = [ {"roles": ["cluster_manager"]}, {"roles": ["cluster_manager", "data", "ingest"]}, @@ -125,13 +125,13 @@ def test_cluster_manager_only_nodes_are_ignored(self): class TestTransport(TestCase): - def test_single_connection_uses_dummy_connection_pool(self): + def test_single_connection_uses_dummy_connection_pool(self) -> None: t = Transport([{}]) self.assertIsInstance(t.connection_pool, DummyConnectionPool) t = Transport([{"host": "localhost"}]) self.assertIsInstance(t.connection_pool, DummyConnectionPool) - def test_request_timeout_extracted_from_params_and_passed(self): + def test_request_timeout_extracted_from_params_and_passed(self) -> None: t = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -142,7 +142,7 @@ def test_request_timeout_extracted_from_params_and_passed(self): t.get_connection().calls[0][1], ) - def test_timeout_extracted_from_params_and_passed(self): + def test_timeout_extracted_from_params_and_passed(self) -> None: t = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"timeout": 84}) @@ -153,7 +153,7 @@ def test_timeout_extracted_from_params_and_passed(self): t.get_connection().calls[0][1], ) - def test_opaque_id(self): + def test_opaque_id(self) -> None: t = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) t.perform_request("GET", "/") @@ -173,7 +173,7 @@ def test_opaque_id(self): t.get_connection().calls[1][1], ) - def test_request_with_custom_user_agent_header(self): + def test_request_with_custom_user_agent_header(self) -> None: t = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) @@ -187,7 +187,7 @@ def test_request_with_custom_user_agent_header(self): t.get_connection().calls[0][1], ) - def test_send_get_body_as_source(self): + def test_send_get_body_as_source(self) -> None: t = Transport([{}], send_get_body_as="source", connection_class=DummyConnection) t.perform_request("GET", "/", body={}) @@ -196,14 +196,14 @@ def test_send_get_body_as_source(self): ("GET", "/", {"source": "{}"}, None), t.get_connection().calls[0][0] ) - def test_send_get_body_as_post(self): + def test_send_get_body_as_post(self) -> None: t = Transport([{}], send_get_body_as="POST", connection_class=DummyConnection) t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) - def test_body_gets_encoded_into_bytes(self): + def test_body_gets_encoded_into_bytes(self) -> None: t = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好") @@ -213,7 +213,7 @@ def test_body_gets_encoded_into_bytes(self): t.get_connection().calls[0][0], ) - def test_body_bytes_get_passed_untouched(self): + def test_body_bytes_get_passed_untouched(self) -> None: t = Transport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -221,7 +221,7 @@ def test_body_bytes_get_passed_untouched(self): self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) - def test_body_surrogates_replaced_encoded_into_bytes(self): + def 
test_body_surrogates_replaced_encoded_into_bytes(self) -> None: t = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好\uda6a") @@ -231,17 +231,17 @@ def test_body_surrogates_replaced_encoded_into_bytes(self): t.get_connection().calls[0][0], ) - def test_kwargs_passed_on_to_connections(self): + def test_kwargs_passed_on_to_connections(self) -> None: t = Transport([{"host": "google.com"}], port=123) self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) - def test_kwargs_passed_on_to_connection_pool(self): + def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() t = Transport([{}, {}], dead_timeout=dt) self.assertIs(dt, t.connection_pool.dead_timeout) - def test_custom_connection_class(self): + def test_custom_connection_class(self) -> None: class MyConnection(object): def __init__(self, **kwargs): self.kwargs = kwargs @@ -250,7 +250,7 @@ def __init__(self, **kwargs): self.assertEqual(1, len(t.connection_pool.connections)) self.assertIsInstance(t.connection_pool.connections[0], MyConnection) - def test_add_connection(self): + def test_add_connection(self) -> None: t = Transport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) @@ -259,7 +259,7 @@ def test_add_connection(self): "http://google.com:1234", t.connection_pool.connections[1].host ) - def test_request_will_fail_after_X_retries(self): + def test_request_will_fail_after_X_retries(self) -> None: t = Transport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, @@ -268,7 +268,7 @@ def test_request_will_fail_after_X_retries(self): self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(4, len(t.get_connection().calls)) - def test_failed_connection_will_be_marked_as_dead(self): + def test_failed_connection_will_be_marked_as_dead(self) -> None: t = Transport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, @@ -277,7 +277,7 @@ def test_failed_connection_will_be_marked_as_dead(self): self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(0, len(t.connection_pool.connections)) - def test_resurrected_connection_will_be_marked_as_live_on_success(self): + def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: for method in ("GET", "HEAD"): t = Transport([{}, {}], connection_class=DummyConnection) con1 = t.connection_pool.get_connection() @@ -289,7 +289,7 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual(1, len(t.connection_pool.dead_count)) - def test_sniff_will_use_seed_connections(self): + def test_sniff_will_use_seed_connections(self) -> None: t = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t.set_connections([{"data": "invalid"}]) @@ -297,7 +297,7 @@ def test_sniff_will_use_seed_connections(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) - def test_sniff_on_start_fetches_and_uses_nodes_list(self): + def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -306,7 +306,7 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", 
t.get_connection().host) - def test_sniff_on_start_ignores_sniff_timeout(self): + def test_sniff_on_start_ignores_sniff_timeout(self) -> None: t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -318,7 +318,7 @@ def test_sniff_on_start_ignores_sniff_timeout(self): t.seed_connections[0].calls[0], ) - def test_sniff_uses_sniff_timeout(self): + def test_sniff_uses_sniff_timeout(self) -> None: t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -373,7 +373,7 @@ def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): self.assertEqual(1, len(conn_err.calls)) self.assertEqual(1, len(conn_data.calls)) - def test_sniff_after_n_seconds(self): + def test_sniff_after_n_seconds(self) -> None: t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -391,7 +391,7 @@ def test_sniff_after_n_seconds(self): self.assertEqual("http://1.1.1.1:123", t.get_connection().host) self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01) - def test_sniff_7x_publish_host(self): + def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. t = Transport( diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 41497808..5aa4983b 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -31,7 +31,7 @@ from opensearchpy import OpenSearch -def wipe_cluster(client): +def wipe_cluster(client) -> None: """Wipes a cluster clean between test cases""" close_after_wipe = False try: @@ -59,7 +59,7 @@ def wipe_cluster(client): client.close() -def wipe_cluster_settings(client): +def wipe_cluster_settings(client) -> None: settings = client.cluster.get_settings() new_settings = {} for name, value in settings.items(): @@ -96,14 +96,14 @@ def wipe_snapshots(client): assert in_progress_snapshots == [] -def wipe_data_streams(client): +def wipe_data_streams(client) -> None: try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: client.indices.delete_data_stream(name="*") -def wipe_indices(client): +def wipe_indices(client) -> None: client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", @@ -111,7 +111,7 @@ def wipe_indices(client): ) -def wipe_searchable_snapshot_indices(client): +def wipe_searchable_snapshot_indices(client) -> None: cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", @@ -121,17 +121,17 @@ def wipe_searchable_snapshot_indices(client): client.indices.delete(index=index) -def wipe_slm_policies(client): +def wipe_slm_policies(client) -> None: for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) -def wipe_auto_follow_patterns(client): +def wipe_auto_follow_patterns(client) -> None: for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: client.ccr.delete_auto_follow_pattern(name=pattern["name"]) -def wipe_node_shutdown_metadata(client): +def wipe_node_shutdown_metadata(client) -> None: shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. 
@@ -143,14 +143,14 @@ def wipe_node_shutdown_metadata(client): client.shutdown.delete_node(node_id=node_id) -def wipe_tasks(client): +def wipe_tasks(client) -> None: tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): client.tasks.cancel(task_id=task_id, wait_for_completion=True) -def wait_for_pending_tasks(client, filter, timeout=30): +def wait_for_pending_tasks(client, filter, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True).split("\n") @@ -158,7 +158,7 @@ def wait_for_pending_tasks(client, filter, timeout=30): break -def wait_for_pending_datafeeds_and_jobs(client, timeout=30): +def wait_for_pending_datafeeds_and_jobs(client, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if ( @@ -171,7 +171,7 @@ def wait_for_pending_datafeeds_and_jobs(client, timeout=30): break -def wait_for_cluster_state_updates_to_finish(client, timeout=30): +def wait_for_cluster_state_updates_to_finish(client, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): diff --git a/utils/build-dists.py b/utils/build-dists.py index c52421e7..e886c71e 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -52,7 +52,7 @@ def set_tmp_dir(): tmp_dir = None -def run(*argv, expect_exit_code=0): +def run(*argv, expect_exit_code: int = 0) -> None: global tmp_dir if tmp_dir is None: os.chdir(base_dir) @@ -70,7 +70,7 @@ def run(*argv, expect_exit_code=0): exit(exit_code or 1) -def test_dist(dist): +def test_dist(dist) -> None: with set_tmp_dir() as tmp_dir: dist_name = re.match( r"^(opensearchpy\d*)-", @@ -180,7 +180,7 @@ def test_dist(dist): ) -def main(): +def main() -> None: run("git", "checkout", "--", "setup.py", "opensearchpy/") run("rm", "-rf", "build/", "dist/*", "*.egg-info", ".eggs") run("python", "setup.py", "sdist", "bdist_wheel") diff --git a/utils/generate-api.py b/utils/generate-api.py index 7e241236..c9bec008 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -78,7 +78,7 @@ ) -def blacken(filename): +def blacken(filename) -> None: runner = CliRunner() result = runner.invoke(black.main, [str(filename)]) assert result.exit_code == 0, result.output @@ -90,29 +90,20 @@ def is_valid_url(url): class Module: - def __init__(self, namespace, is_pyi=False): + def __init__(self, namespace) -> None: self.namespace = namespace - self.is_pyi = is_pyi self._apis = [] self.parse_orig() - if not is_pyi: - self.pyi = Module(namespace, is_pyi=True) - self.pyi.orders = self.orders[:] - - def add(self, api): + def add(self, api) -> None: self._apis.append(api) def parse_orig(self): self.orders = [] - self.header = "" - if self.is_pyi is True: - self.header = "from typing import Any, Collection, MutableMapping, Optional, Tuple, Union\n\n" + self.header = "from typing import Any, Collection, MutableMapping, Optional, Tuple, Union\n\n" namespace_new = "".join(word.capitalize() for word in self.namespace.split("_")) - self.header = ( - self.header + "class " + namespace_new + "Client(NamespacedClient):" - ) + self.header += "class " + namespace_new + "Client(NamespacedClient):" if os.path.exists(self.filepath): with open(self.filepath) as f: content = f.read() @@ -127,10 +118,7 @@ def parse_orig(self): for line in content.split("\n"): header_lines.append(line) if line.startswith("class"): - if ( - "security.py" in str(self.filepath) - 
and not self.filepath.suffix == ".pyi" - ): + if "security.py" in str(self.filepath): header_lines.append( " from ._patch import health_check, update_audit_config" ) @@ -146,10 +134,10 @@ def _position(self, api): except ValueError: return len(self.orders) - def sort(self): + def sort(self) -> None: self._apis.sort(key=self._position) - def dump(self): + def dump(self) -> None: self.sort() # This code snippet adds headers to each generated module indicating that the code is generated. @@ -244,22 +232,15 @@ def dump(self): with open(self.filepath, "w") as f: f.write(file_content) - if not self.is_pyi: - self.pyi.dump() - @property def filepath(self): - return ( - CODE_ROOT - / f"opensearchpy/_async/client/{self.namespace}.py{'i' if self.is_pyi else ''}" - ) + return CODE_ROOT / f"opensearchpy/_async/client/{self.namespace}.py" class API: - def __init__(self, namespace, name, definition, is_pyi=False): + def __init__(self, namespace, name, definition) -> None: self.namespace = namespace self.name = name - self.is_pyi = is_pyi # overwrite the dict to maintain key order definition["params"] = { @@ -429,13 +410,10 @@ def required_parts(self): return required def to_python(self): - if self.is_pyi: - t = jinja_env.get_template("base_pyi") - else: - try: - t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") - except TemplateNotFound: - t = jinja_env.get_template("base") + try: + t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") + except TemplateNotFound: + t = jinja_env.get_template("base") return t.render( api=self, @@ -658,7 +636,6 @@ def read_modules(): modules[namespace] = Module(namespace) modules[namespace].add(API(namespace, name, api)) - modules[namespace].pyi.add(API(namespace, name, api, is_pyi=True)) return modules @@ -697,10 +674,9 @@ def dump_modules(modules): filepaths = [] for root, _, filenames in os.walk(CODE_ROOT / "opensearchpy/_async"): for filename in filenames: - if filename.rpartition(".")[-1] in ( - "py", - "pyi", - ) and not filename.startswith("utils.py"): + if filename.rpartition(".")[-1] in ("py",) and not filename.startswith( + "utils.py" + ): filepaths.append(os.path.join(root, filename)) unasync.unasync_files(filepaths, rules) diff --git a/utils/license-headers.py b/utils/license-headers.py index 67b0ef4a..e0f31b59 100644 --- a/utils/license-headers.py +++ b/utils/license-headers.py @@ -48,7 +48,7 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]: def does_file_need_fix(filepath: str) -> bool: - if not re.search(r"\.pyi?$", filepath): + if not re.search(r"\.py$", filepath): return False existing_header = "" with open(filepath, mode="r") as f: @@ -78,7 +78,7 @@ def add_header_to_file(filepath: str) -> None: print(f"Fixed {os.path.relpath(filepath, os.getcwd())}") -def main(): +def main() -> None: mode = sys.argv[1] assert mode in ("fix", "check") sources = [os.path.abspath(x) for x in sys.argv[2:]] diff --git a/utils/templates/base b/utils/templates/base index bf270aee..1d00e646 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -1,6 +1,6 @@ - @query_params({{ api.query_params|map("tojson")|join(", ")}}) - async def {{ api.name }}(self, {% include "func_params" %}): + @query_params({{ api.query_params|map("tojson")|join(", ")}}) + async def {{ api.name }}(self, {% include "func_params" %}) -> {% if api.method == 'HEAD' %}bool{% else %}Any{% endif %}: """ {% if api.description %} {{ api.description|replace("\n", " ")|wordwrap(wrapstring="\n ") }} diff --git a/utils/templates/base_pyi 
b/utils/templates/base_pyi deleted file mode 100644 index c4dbde15..00000000 --- a/utils/templates/base_pyi +++ /dev/null @@ -1,2 +0,0 @@ - - async def {{ api.name }}(self, {% include "func_params_pyi" %}) -> {% if api.method == 'HEAD' %}bool{% else %}Any{% endif %}: ... diff --git a/utils/templates/func_params b/utils/templates/func_params index 067e8f12..9d891ad2 100644 --- a/utils/templates/func_params +++ b/utils/templates/func_params @@ -1,14 +1,14 @@ {% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}, {% endif %} + {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} {% endfor %} {% if api.body %} - body{% if not api.body.required %}=None{% endif %}, + body{% if not api.body.required %}: Any=None{% else %}: Any{% endif %}, {% endif %} {% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}=None, {% endif %} + {% if not info.required %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %} {% endfor %} -params=None, -headers=None +params: Optional[MutableMapping[str, Any]]=None, +headers: Optional[MutableMapping[str, str]]=None, \ No newline at end of file diff --git a/utils/templates/func_params_pyi b/utils/templates/func_params_pyi deleted file mode 100644 index cd48f9a6..00000000 --- a/utils/templates/func_params_pyi +++ /dev/null @@ -1,26 +0,0 @@ -{% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} -{% endfor %} - -*, - -{% if api.body %} - body{% if not api.body.required %}: Optional[Any]=...{% else %}: Any{% endif %}, -{% endif %} - -{% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}: Optional[{{ info.type }}]=..., {% endif %} -{% endfor %} - -{% for p in api.query_params %} - {{ p }}: Optional[Any]=..., -{% endfor %} - -{% for p, p_type in global_query_params.items() %} - {% if p not in api.all_func_params %} - {{ p }}: {{ p_type }}=..., - {% endif %} -{% endfor %} - -params: Optional[MutableMapping[str, Any]]=..., -headers: Optional[MutableMapping[str, str]]=..., From 9a57c78cb44250ccdd044e6dc59175357b55a8c9 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 13:02:50 -0400 Subject: [PATCH 02/17] Fix: nox -rs generate. 
Signed-off-by: dblock --- DEVELOPER_GUIDE.md | 4 +- opensearchpy/_async/client/__init__.py | 36 +-- opensearchpy/_async/client/cat.py | 122 ++++----- opensearchpy/_async/client/cluster.py | 96 +++---- .../_async/client/dangling_indices.py | 14 +- opensearchpy/_async/client/indices.py | 250 +++++++++--------- opensearchpy/_async/client/ingest.py | 26 +- opensearchpy/_async/client/nodes.py | 20 +- opensearchpy/_async/client/remote_store.py | 6 +- opensearchpy/_async/client/security.py | 192 +++++++------- opensearchpy/_async/client/snapshot.py | 52 ++-- opensearchpy/_async/client/tasks.py | 18 +- opensearchpy/_async/helpers/document.py | 14 +- opensearchpy/client/__init__.py | 39 +-- opensearchpy/client/cat.py | 122 ++++----- opensearchpy/client/cluster.py | 96 +++---- opensearchpy/client/dangling_indices.py | 14 +- opensearchpy/client/indices.py | 250 +++++++++--------- opensearchpy/client/ingest.py | 26 +- opensearchpy/client/nodes.py | 20 +- opensearchpy/client/remote_store.py | 6 +- opensearchpy/client/security.py | 192 +++++++------- opensearchpy/client/snapshot.py | 52 ++-- opensearchpy/client/tasks.py | 18 +- opensearchpy/client/utils.py | 2 +- opensearchpy/helpers/document.py | 2 +- utils/generate-api.py | 5 +- utils/templates/base | 2 +- utils/templates/func_params | 6 +- 29 files changed, 853 insertions(+), 849 deletions(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index dd1fad26..f6cb568c 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -66,9 +66,11 @@ If you want to auto-start one, the following will start a new instance and run t If your OpenSearch docker instance is running, you can execute the test suite directly. ``` -$ nox -rs test +$ nox -rs test-3.9 ``` +Substitute `3.9` with your Python version above, or use `nox -rs test` to run with multiple. 
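(A hedged aside on the nox commands above: session names such as `test-3.9` are defined in this repository's `noxfile.py`, so if a session is missing for your interpreter, list the available ones first. `-l`/`--list` and `-r`/`--reuse-existing-virtualenvs` are standard nox flags; the concrete session names are assumptions to be replaced with whatever `nox -l` reports.)

```
$ nox -l             # list the sessions defined in noxfile.py
$ nox -rs test-3.9   # reuse the existing virtualenv and run only the Python 3.9 test session
```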
+ To run tests against different versions of OpenSearch, use `run-tests [with/without security] [version]`: ``` diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 279fda37..4143b4a1 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -39,7 +39,7 @@ from __future__ import unicode_literals import logging -from typing import Any, Type +from typing import Any, Optional, Type from ..transport import AsyncTransport, TransportError from .cat import CatClient @@ -357,7 +357,7 @@ async def index( self, index: Any, body: Any, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -420,7 +420,7 @@ async def index( async def bulk( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -470,7 +470,7 @@ async def bulk( async def clear_scroll( self, body: Any = None, - scroll_id: Any = None, + scroll_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -512,7 +512,7 @@ async def clear_scroll( async def count( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -970,7 +970,7 @@ async def explain( async def field_caps( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1143,7 +1143,7 @@ async def get_source( async def mget( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1194,7 +1194,7 @@ async def mget( async def msearch( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1252,7 +1252,7 @@ async def msearch( async def msearch_template( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1306,7 +1306,7 @@ async def msearch_template( async def mtermvectors( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1361,7 +1361,7 @@ async def put_script( self, id: Any, body: Any, - context: Any = None, + context: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1397,7 +1397,7 @@ async def put_script( async def rank_eval( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1512,7 +1512,7 @@ async def reindex_rethrottle( async def render_search_template( self, body: Any = None, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1556,7 +1556,7 @@ async def scripts_painless_execute( async def scroll( self, body: Any = None, - scroll_id: Any = None, + scroll_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1631,7 +1631,7 @@ async def scroll( async def search( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1756,7 +1756,7 @@ async def search( ) async def search_shards( self, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1803,7 +1803,7 @@ async def search_shards( async def search_template( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1872,7 
+1872,7 @@ async def termvectors( self, index: Any, body: Any = None, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index bd2c8b6a..dfee84b6 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -35,7 +35,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import NamespacedClient, _make_path, query_params @@ -44,9 +44,9 @@ class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") async def aliases( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Shows information about currently configured aliases to indices including @@ -74,8 +74,8 @@ async def aliases( @query_params() async def all_pit_segments( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Lists all active point-in-time segments. @@ -98,9 +98,9 @@ async def all_pit_segments( ) async def allocation( self, - node_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + node_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how @@ -145,8 +145,8 @@ async def allocation( ) async def cluster_manager( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the cluster-manager node. @@ -174,9 +174,9 @@ async def cluster_manager( @query_params("format", "h", "help", "s", "v") async def count( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides quick access to the document count of the entire cluster, or @@ -200,9 +200,9 @@ async def count( @query_params("bytes", "format", "h", "help", "s", "v") async def fielddata( self, - fields: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + fields: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Shows how much heap memory is currently being used by fielddata on every data @@ -231,8 +231,8 @@ async def fielddata( @query_params("format", "h", "help", "s", "time", "ts", "v") async def health( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a concise representation of the cluster health. @@ -256,8 +256,8 @@ async def health( @query_params("help", "s") async def help( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns help for the Cat APIs. 
@@ -289,9 +289,9 @@ async def help( ) async def indices( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about indices: number of primaries and replicas, document @@ -346,8 +346,8 @@ async def indices( ) async def master( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the cluster-manager node. @@ -389,8 +389,8 @@ async def master( ) async def nodeattrs( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about custom node attributes. @@ -430,8 +430,8 @@ async def nodeattrs( ) async def nodes( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns basic statistics about performance of cluster nodes. @@ -476,8 +476,8 @@ async def nodes( ) async def pending_tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a concise representation of the cluster pending tasks. @@ -508,8 +508,8 @@ async def pending_tasks( async def pit_segments( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ List segments for one or several PITs. @@ -532,8 +532,8 @@ async def pit_segments( ) async def plugins( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about installed plugins across nodes node. @@ -563,9 +563,9 @@ async def plugins( ) async def recovery( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about index shard recoveries, both on-going completed. @@ -605,8 +605,8 @@ async def recovery( ) async def repositories( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about snapshot repositories registered in the cluster. @@ -646,9 +646,9 @@ async def repositories( ) async def segment_replication( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about both on-going and latest completed Segment @@ -695,9 +695,9 @@ async def segment_replication( ) async def segments( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides low-level information about the segments in the shards of an index. 
@@ -738,9 +738,9 @@ async def segments( ) async def shards( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides a detailed view of shard allocation on nodes. @@ -784,9 +784,9 @@ async def shards( ) async def thread_pool( self, - thread_pool_patterns: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + thread_pool_patterns: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns cluster-wide thread pool statistics per node. By default the active, @@ -831,9 +831,9 @@ async def thread_pool( ) async def snapshots( self, - repository: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns all snapshots in a specific repository. @@ -879,8 +879,8 @@ async def snapshots( ) async def tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in @@ -923,9 +923,9 @@ async def tasks( ) async def templates( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about existing templates. diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index a76a05fb..ac4b1493 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -59,9 +59,9 @@ class ClusterClient(NamespacedClient): ) async def health( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns basic information about the health of the cluster. @@ -108,8 +108,8 @@ async def health( @query_params("cluster_manager_timeout", "local", "master_timeout") async def pending_tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, @@ -141,10 +141,10 @@ async def pending_tasks( ) async def state( self, - metric: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + metric: Optional[Any] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a comprehensive information about the state of the cluster. 
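For readers unfamiliar with the generated clients, the strings passed to `@query_params(...)` are the optional keyword arguments each method accepts at runtime; the decorator collects them into the `params` mapping before the request is issued, which is why they never appear in the signature itself. A small usage sketch against the regenerated async client (client construction is elided, and the flags shown are the usual cat parameters, assumed to be declared on this method):

```
# assumes an existing AsyncOpenSearch instance named `client`
rows = await client.cat.indices(
    index="logs-*",   # typed Optional[Any] = None after regeneration
    format="json",    # declared via @query_params, forwarded as a query-string parameter
    v=True,
)
```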
@@ -190,9 +190,9 @@ async def state( @query_params("flat_settings", "timeout") async def stats( self, - node_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + node_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns high-level overview of cluster statistics. @@ -227,8 +227,8 @@ async def stats( async def reroute( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -264,8 +264,8 @@ async def reroute( ) async def get_settings( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns cluster settings. @@ -292,8 +292,8 @@ async def get_settings( async def put_settings( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the cluster settings. @@ -320,8 +320,8 @@ async def put_settings( @query_params() async def remote_info( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns the information about configured remote clusters. @@ -335,8 +335,8 @@ async def remote_info( async def allocation_explain( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides explanations for shard allocations in the cluster. @@ -361,8 +361,8 @@ async def allocation_explain( async def delete_component_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a component template. @@ -389,9 +389,9 @@ async def delete_component_template( @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_component_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns one or more component templates. @@ -418,8 +418,8 @@ async def put_component_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a component template. @@ -452,8 +452,8 @@ async def put_component_template( async def exists_component_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular component template exist. 
@@ -481,8 +481,8 @@ async def exists_component_template( @query_params("wait_for_removal") async def delete_voting_config_exclusions( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clears cluster voting config exclusions. @@ -502,8 +502,8 @@ async def delete_voting_config_exclusions( @query_params("node_ids", "node_names", "timeout") async def post_voting_config_exclusions( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. @@ -524,8 +524,8 @@ async def post_voting_config_exclusions( @query_params() async def delete_decommission_awareness( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete any existing decommission. @@ -541,8 +541,8 @@ async def delete_decommission_awareness( @query_params() async def delete_weighted_routing( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete weighted shard routing weights. @@ -559,8 +559,8 @@ async def delete_weighted_routing( async def get_decommission_awareness( self, awareness_attribute_name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Get details and status of decommissioned attribute. @@ -590,8 +590,8 @@ async def get_decommission_awareness( async def get_weighted_routing( self, attribute: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Fetches weighted shard routing weights. @@ -614,8 +614,8 @@ async def put_decommission_awareness( self, awareness_attribute_name: Any, awareness_attribute_value: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Decommissions an awareness attribute. @@ -645,8 +645,8 @@ async def put_decommission_awareness( async def put_weighted_routing( self, attribute: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index 19b0e5c4..6bc9a343 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -48,8 +48,8 @@ class DanglingIndicesClient(NamespacedClient): async def delete_dangling_index( self, index_uuid: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes the specified dangling index. 
@@ -81,8 +81,8 @@ async def delete_dangling_index( async def import_dangling_index( self, index_uuid: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Imports the specified dangling index. @@ -108,8 +108,8 @@ async def import_dangling_index( @query_params() async def list_dangling_indices( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index 09d3b802..e1874deb 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -46,9 +46,9 @@ class IndicesClient(NamespacedClient): async def analyze( self, body: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the @@ -70,9 +70,9 @@ async def analyze( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") async def refresh( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the refresh operation in one or more indices. @@ -102,9 +102,9 @@ async def refresh( ) async def flush( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the flush operation on one or more indices. @@ -141,8 +141,8 @@ async def create( self, index: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates an index with optional settings and mappings. @@ -175,8 +175,8 @@ async def clone( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clones an index. @@ -220,8 +220,8 @@ async def clone( async def get( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about one or more indices. @@ -268,8 +268,8 @@ async def get( async def open( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Opens an index. @@ -312,8 +312,8 @@ async def open( async def close( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Closes an index. 
@@ -355,8 +355,8 @@ async def close( async def delete( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index. @@ -398,8 +398,8 @@ async def delete( async def exists( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index exists. @@ -441,9 +441,9 @@ async def exists( async def put_mapping( self, body: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the index mappings. @@ -493,9 +493,9 @@ async def put_mapping( ) async def get_mapping( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns mappings for one or more indices. @@ -533,9 +533,9 @@ async def get_mapping( async def get_field_mapping( self, fields: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns mapping for one or more fields. @@ -572,8 +572,8 @@ async def put_alias( index: Any, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an alias. @@ -607,9 +607,9 @@ async def put_alias( async def exists_alias( self, name: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular alias exists. @@ -638,10 +638,10 @@ async def exists_alias( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") async def get_alias( self, - index: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an alias. @@ -668,8 +668,8 @@ async def get_alias( async def update_aliases( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates index aliases. @@ -695,8 +695,8 @@ async def delete_alias( self, index: Any, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an alias. @@ -726,8 +726,8 @@ async def put_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an index template. 
@@ -762,8 +762,8 @@ async def put_template( async def exists_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index template exists. @@ -790,9 +790,9 @@ async def exists_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an index template. @@ -817,8 +817,8 @@ async def get_template( async def delete_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index template. @@ -851,10 +851,10 @@ async def delete_template( ) async def get_settings( self, - index: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns settings for one or more indices. @@ -900,9 +900,9 @@ async def get_settings( async def put_settings( self, body: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the index settings. @@ -955,10 +955,10 @@ async def put_settings( ) async def stats( self, - index: Any = None, - metric: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + metric: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides statistics on operations happening in an index. @@ -1002,9 +1002,9 @@ async def stats( ) async def segments( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides low-level information about segments in a Lucene index. @@ -1044,9 +1044,9 @@ async def segments( async def validate_query( self, body: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. @@ -1098,9 +1098,9 @@ async def validate_query( ) async def clear_cache( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clears all or specific caches for one or more indices. 
@@ -1129,9 +1129,9 @@ async def clear_cache( @query_params("active_only", "detailed") async def recovery( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -1157,9 +1157,9 @@ async def recovery( ) async def upgrade( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1187,9 +1187,9 @@ async def upgrade( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") async def get_upgrade( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1215,9 +1215,9 @@ async def get_upgrade( ) async def shard_stores( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides store information for shard copies of indices. @@ -1250,9 +1250,9 @@ async def shard_stores( ) async def forcemerge( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1291,8 +1291,8 @@ async def shrink( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. @@ -1337,8 +1337,8 @@ async def split( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows you to split an existing index into a new index with more primary @@ -1383,9 +1383,9 @@ async def rollover( self, alias: Any, body: Any = None, - new_index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + new_index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates an alias to point to a new index when the existing index is considered @@ -1425,8 +1425,8 @@ async def create_data_stream( self, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a data stream. @@ -1450,8 +1450,8 @@ async def create_data_stream( async def delete_data_stream( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a data stream. 
@@ -1471,8 +1471,8 @@ async def delete_data_stream( async def delete_index_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index template. @@ -1500,8 +1500,8 @@ async def delete_index_template( async def exists_index_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index template exists. @@ -1528,9 +1528,9 @@ async def exists_index_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_index_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an index template. @@ -1556,8 +1556,8 @@ async def put_index_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an index template. @@ -1592,8 +1592,8 @@ async def simulate_index_template( self, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Simulate matching the given index name against the index templates in the @@ -1629,9 +1629,9 @@ async def simulate_index_template( @query_params() async def get_data_stream( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns data streams. @@ -1648,9 +1648,9 @@ async def get_data_stream( async def simulate_template( self, body: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Simulate resolving the given template name or body. @@ -1682,8 +1682,8 @@ async def simulate_template( async def resolve_index( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1714,8 +1714,8 @@ async def add_block( self, index: Any, block: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds a block to an index. @@ -1750,9 +1750,9 @@ async def add_block( @query_params() async def data_streams_stats( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides statistics on operations happening in a data stream. 
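The hunks above only loosen the annotations on the async IndicesClient; runtime behavior and call sites are unchanged. A minimal sketch of how the updated signatures are used, assuming a reachable cluster on https://localhost:9200 with basic auth (the host, credentials, and "movies" index are illustrative only, not part of this patch):

import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://localhost:9200"],
        http_auth=("admin", "admin"),
        verify_certs=False,
    )
    # index-style selectors are Optional[Any]; params/headers simply default to None
    await client.indices.create(index="movies", body={"settings": {"number_of_shards": 1}})
    print(await client.indices.get_mapping(index="movies"))
    await client.indices.refresh(index="movies")
    await client.indices.delete(index="movies")
    await client.close()


asyncio.run(main())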
diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 64cbdbb6..587cab9d 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,9 +45,9 @@ class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") async def get_pipeline( self, - id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a pipeline. @@ -70,8 +70,8 @@ async def put_pipeline( self, id: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a pipeline. @@ -102,8 +102,8 @@ async def put_pipeline( async def delete_pipeline( self, id: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a pipeline. @@ -131,9 +131,9 @@ async def delete_pipeline( async def simulate( self, body: Any, - id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows to simulate a pipeline with example documents. @@ -158,8 +158,8 @@ async def simulate( @query_params() async def processor_grok( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of the built-in patterns. 
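For the IngestClient changes just above, a small usage sketch under the same assumptions (the pipeline id and lowercase processor are made-up examples; only the annotations change in this file):

from typing import Any

from opensearchpy import AsyncOpenSearch


async def ensure_pipeline(client: AsyncOpenSearch) -> Any:
    # id and body are required for put_pipeline; params/headers stay None by default
    await client.ingest.put_pipeline(
        id="lowercase-title",
        body={"processors": [{"lowercase": {"field": "title"}}]},
    )
    # id is Optional[Any] on get_pipeline; omitting it would return every pipeline
    return await client.ingest.get_pipeline(id="lowercase-title")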
diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index 36146fad..3021524a 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any +from typing import Any, Optional from .utils import NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class NodesClient(NamespacedClient): async def reload_secure_settings( self, body: Any = None, - node_id: Any = None, + node_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -72,8 +72,8 @@ async def reload_secure_settings( @query_params("flat_settings", "timeout") async def info( self, - node_id: Any = None, - metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -108,9 +108,9 @@ async def info( ) async def stats( self, - node_id: Any = None, - metric: Any = None, - index_metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, + index_metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -159,7 +159,7 @@ async def stats( ) async def hot_threads( self, - node_id: Any = None, + node_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -197,8 +197,8 @@ async def hot_threads( @query_params("timeout") async def usage( self, - node_id: Any = None, - metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index ed06763b..0319ea1d 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, query_params @@ -46,8 +46,8 @@ class RemoteStoreClient(NamespacedClient): async def restore( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Restores from remote store. diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 1dce2dfa..72a1629a 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -56,8 +56,8 @@ class SecurityClient(NamespacedClient): @query_params() async def get_account_details( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns account details for the current user. @@ -71,8 +71,8 @@ async def get_account_details( async def change_password( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Changes the password for the current user. 
@@ -94,8 +94,8 @@ async def change_password( async def get_action_group( self, action_group: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one action group. @@ -118,8 +118,8 @@ async def get_action_group( @query_params() async def get_action_groups( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all action groups. @@ -136,8 +136,8 @@ async def get_action_groups( async def delete_action_group( self, action_group: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete a specified action group. @@ -162,8 +162,8 @@ async def create_action_group( self, action_group: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified action group. @@ -189,8 +189,8 @@ async def patch_action_group( self, action_group: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of an action group. @@ -213,8 +213,8 @@ async def patch_action_group( async def patch_action_groups( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. @@ -236,8 +236,8 @@ async def patch_action_groups( async def get_user( self, username: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieve one internal user. @@ -257,8 +257,8 @@ async def get_user( @query_params() async def get_users( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieve all internal users. @@ -275,8 +275,8 @@ async def get_users( async def delete_user( self, username: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified user. @@ -298,8 +298,8 @@ async def create_user( self, username: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified user. @@ -323,8 +323,8 @@ async def patch_user( self, username: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of an internal user. 
@@ -347,8 +347,8 @@ async def patch_user( async def patch_users( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -370,8 +370,8 @@ async def patch_users( async def get_role( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one role. @@ -391,8 +391,8 @@ async def get_role( @query_params() async def get_roles( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all roles. @@ -406,8 +406,8 @@ async def get_roles( async def delete_role( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified role. @@ -429,8 +429,8 @@ async def create_role( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified role. @@ -454,8 +454,8 @@ async def patch_role( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of a role. @@ -478,8 +478,8 @@ async def patch_role( async def patch_roles( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -501,8 +501,8 @@ async def patch_roles( async def get_role_mapping( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one role mapping. @@ -522,8 +522,8 @@ async def get_role_mapping( @query_params() async def get_role_mappings( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all role mappings. @@ -540,8 +540,8 @@ async def get_role_mappings( async def delete_role_mapping( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes the specified role mapping. @@ -563,8 +563,8 @@ async def create_role_mapping( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified role mapping. @@ -588,8 +588,8 @@ async def patch_role_mapping( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of a role mapping. 
@@ -612,8 +612,8 @@ async def patch_role_mapping( async def patch_role_mappings( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -635,8 +635,8 @@ async def patch_role_mappings( async def get_tenant( self, tenant: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one tenant. @@ -656,8 +656,8 @@ async def get_tenant( @query_params() async def get_tenants( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all tenants. @@ -671,8 +671,8 @@ async def get_tenants( async def delete_tenant( self, tenant: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified tenant. @@ -694,8 +694,8 @@ async def create_tenant( self, tenant: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified tenant. @@ -719,8 +719,8 @@ async def patch_tenant( self, tenant: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Add, delete, or modify a single tenant. @@ -743,8 +743,8 @@ async def patch_tenant( async def patch_tenants( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -765,8 +765,8 @@ async def patch_tenants( @query_params() async def get_configuration( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns the current Security plugin configuration in JSON format. @@ -783,8 +783,8 @@ async def get_configuration( async def update_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds or updates the existing configuration using the REST API. @@ -806,8 +806,8 @@ async def update_configuration( async def patch_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -828,9 +828,9 @@ async def patch_configuration( @query_params() async def get_distinguished_names( self, - cluster_name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + cluster_name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all distinguished names in the allow list. 
@@ -849,8 +849,8 @@ async def update_distinguished_names( self, cluster_name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s @@ -875,8 +875,8 @@ async def update_distinguished_names( async def delete_distinguished_names( self, cluster_name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow @@ -899,8 +899,8 @@ async def delete_distinguished_names( @query_params() async def get_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves the cluster’s security certificates. @@ -913,8 +913,8 @@ async def get_certificates( @query_params() async def reload_transport_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Reload transport layer communication certificates. @@ -930,8 +930,8 @@ async def reload_transport_certificates( @query_params() async def reload_http_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Reload HTTP layer communication certificates. @@ -947,8 +947,8 @@ async def reload_http_certificates( @query_params() async def flush_cache( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. @@ -961,8 +961,8 @@ async def flush_cache( @query_params() async def health( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Checks to see if the Security plugin is up and running. @@ -975,8 +975,8 @@ async def health( @query_params() async def get_audit_configuration( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves the audit configuration. @@ -990,8 +990,8 @@ async def get_audit_configuration( async def update_audit_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the audit configuration. @@ -1013,8 +1013,8 @@ async def update_audit_configuration( async def patch_audit_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. 
@@ -1036,8 +1036,8 @@ async def patch_audit_configuration( async def patch_distinguished_names( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Bulk update of distinguished names. diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 212b778d..1a544ddb 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -48,8 +48,8 @@ async def create( repository: Any, snapshot: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates a snapshot in a repository. @@ -83,8 +83,8 @@ async def delete( self, repository: Any, snapshot: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a snapshot. @@ -116,8 +116,8 @@ async def get( self, repository: Any, snapshot: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a snapshot. @@ -151,8 +151,8 @@ async def get( async def delete_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a repository. @@ -180,9 +180,9 @@ async def delete_repository( @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_repository( self, - repository: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a repository. @@ -206,8 +206,8 @@ async def create_repository( self, repository: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates a repository. @@ -241,8 +241,8 @@ async def restore( repository: Any, snapshot: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Restores a snapshot. @@ -274,10 +274,10 @@ async def restore( @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") async def status( self, - repository: Any = None, - snapshot: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + snapshot: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the status of a snapshot. 
@@ -305,8 +305,8 @@ async def status( async def verify_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Verifies a repository. @@ -334,8 +334,8 @@ async def verify_repository( async def cleanup_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Removes stale data from repository. @@ -366,8 +366,8 @@ async def clone( snapshot: Any, target_snapshot: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index f6c8701f..78deb4a9 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -37,7 +37,7 @@ import warnings -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -54,8 +54,8 @@ class TasksClient(NamespacedClient): ) async def list( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of tasks. @@ -84,9 +84,9 @@ async def list( @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") async def cancel( self, - task_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + task_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -115,9 +115,9 @@ async def cancel( @query_params("timeout", "wait_for_completion") async def get( self, - task_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + task_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 18612187..ef326c7a 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -147,7 +147,7 @@ async def get( # type: ignore id: str, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Retrieve a single document from opensearch using its ``id``. @@ -172,7 +172,7 @@ async def exists( id: str, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -196,7 +196,7 @@ async def mget( index: Optional[str] = None, raise_on_error: Optional[bool] = True, missing: Optional[str] = "none", - **kwargs: Any + **kwargs: Any, ) -> Any: """ Retrieve multiple document by their ``id``'s. Returns a list of instances @@ -265,7 +265,7 @@ async def delete( self, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Delete the instance in opensearch. 
@@ -329,7 +329,7 @@ async def update( scripted_upsert: Optional[bool] = False, upsert: Optional[bool] = None, return_doc_meta: Optional[bool] = False, - **fields: Any + **fields: Any, ) -> Any: """ Partial update of the document, specify fields you wish to update and @@ -428,7 +428,7 @@ async def save( validate: Optional[bool] = True, skip_empty: Optional[bool] = True, return_doc_meta: Optional[bool] = False, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Save the document into opensearch. If the document doesn't exist it @@ -466,7 +466,7 @@ async def save( meta = await opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 142f3af2..6f0c54b0 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -39,7 +39,7 @@ from __future__ import unicode_literals import logging -from typing import Any, Type +from typing import Any, Optional, Type from ..transport import Transport, TransportError from .cat import CatClient @@ -186,6 +186,7 @@ def default(self, obj): """ + # include PIT functions inside _patch.py from ._patch import ( # type: ignore create_point_in_time, delete_point_in_time, @@ -243,7 +244,7 @@ def __repr__(self) -> Any: # probably operating on custom transport and connection_pool, ignore return super(OpenSearch, self).__repr__() - def __enter__(self) -> "OpenSearch": + def __enter__(self) -> Any: if hasattr(self.transport, "_async_call"): self.transport._async_call() return self @@ -356,7 +357,7 @@ def index( self, index: Any, body: Any, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -419,7 +420,7 @@ def index( def bulk( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -469,7 +470,7 @@ def bulk( def clear_scroll( self, body: Any = None, - scroll_id: Any = None, + scroll_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -511,7 +512,7 @@ def clear_scroll( def count( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -969,7 +970,7 @@ def explain( def field_caps( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1142,7 +1143,7 @@ def get_source( def mget( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1193,7 +1194,7 @@ def mget( def msearch( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1251,7 +1252,7 @@ def msearch( def msearch_template( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1305,7 +1306,7 @@ def msearch_template( def mtermvectors( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1360,7 +1361,7 @@ def put_script( self, id: Any, body: Any, - context: Any = None, + context: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1396,7 +1397,7 @@ def put_script( def rank_eval( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ 
-1511,7 +1512,7 @@ def reindex_rethrottle( def render_search_template( self, body: Any = None, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1555,7 +1556,7 @@ def scripts_painless_execute( def scroll( self, body: Any = None, - scroll_id: Any = None, + scroll_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1630,7 +1631,7 @@ def scroll( def search( self, body: Any = None, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1755,7 +1756,7 @@ def search( ) def search_shards( self, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1802,7 +1803,7 @@ def search_shards( def search_template( self, body: Any, - index: Any = None, + index: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1871,7 +1872,7 @@ def termvectors( self, index: Any, body: Any = None, - id: Any = None, + id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index aac5d8a6..5fb55053 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -35,7 +35,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import NamespacedClient, _make_path, query_params @@ -44,9 +44,9 @@ class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") def aliases( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Shows information about currently configured aliases to indices including @@ -74,8 +74,8 @@ def aliases( @query_params() def all_pit_segments( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Lists all active point-in-time segments. @@ -98,9 +98,9 @@ def all_pit_segments( ) def allocation( self, - node_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + node_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how @@ -145,8 +145,8 @@ def allocation( ) def cluster_manager( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the cluster-manager node. 
@@ -174,9 +174,9 @@ def cluster_manager( @query_params("format", "h", "help", "s", "v") def count( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides quick access to the document count of the entire cluster, or @@ -200,9 +200,9 @@ def count( @query_params("bytes", "format", "h", "help", "s", "v") def fielddata( self, - fields: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + fields: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Shows how much heap memory is currently being used by fielddata on every data @@ -231,8 +231,8 @@ def fielddata( @query_params("format", "h", "help", "s", "time", "ts", "v") def health( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a concise representation of the cluster health. @@ -256,8 +256,8 @@ def health( @query_params("help", "s") def help( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns help for the Cat APIs. @@ -289,9 +289,9 @@ def help( ) def indices( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about indices: number of primaries and replicas, document @@ -346,8 +346,8 @@ def indices( ) def master( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the cluster-manager node. @@ -389,8 +389,8 @@ def master( ) def nodeattrs( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about custom node attributes. @@ -430,8 +430,8 @@ def nodeattrs( ) def nodes( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns basic statistics about performance of cluster nodes. @@ -476,8 +476,8 @@ def nodes( ) def pending_tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a concise representation of the cluster pending tasks. @@ -508,8 +508,8 @@ def pending_tasks( def pit_segments( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ List segments for one or several PITs. @@ -532,8 +532,8 @@ def pit_segments( ) def plugins( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about installed plugins across nodes node. 
@@ -563,9 +563,9 @@ def plugins( ) def recovery( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about index shard recoveries, both on-going completed. @@ -605,8 +605,8 @@ def recovery( ) def repositories( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about snapshot repositories registered in the cluster. @@ -646,9 +646,9 @@ def repositories( ) def segment_replication( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about both on-going and latest completed Segment @@ -695,9 +695,9 @@ def segment_replication( ) def segments( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides low-level information about the segments in the shards of an index. @@ -738,9 +738,9 @@ def segments( ) def shards( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides a detailed view of shard allocation on nodes. @@ -784,9 +784,9 @@ def shards( ) def thread_pool( self, - thread_pool_patterns: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + thread_pool_patterns: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns cluster-wide thread pool statistics per node. By default the active, @@ -831,9 +831,9 @@ def thread_pool( ) def snapshots( self, - repository: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns all snapshots in a specific repository. @@ -879,8 +879,8 @@ def snapshots( ) def tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in @@ -923,9 +923,9 @@ def tasks( ) def templates( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about existing templates. 
diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index 0354e42c..a3bc1596 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -59,9 +59,9 @@ class ClusterClient(NamespacedClient): ) def health( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns basic information about the health of the cluster. @@ -108,8 +108,8 @@ def health( @query_params("cluster_manager_timeout", "local", "master_timeout") def pending_tasks( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, @@ -141,10 +141,10 @@ def pending_tasks( ) def state( self, - metric: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + metric: Optional[Any] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a comprehensive information about the state of the cluster. @@ -190,9 +190,9 @@ def state( @query_params("flat_settings", "timeout") def stats( self, - node_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + node_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns high-level overview of cluster statistics. @@ -227,8 +227,8 @@ def stats( def reroute( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -264,8 +264,8 @@ def reroute( ) def get_settings( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns cluster settings. @@ -292,8 +292,8 @@ def get_settings( def put_settings( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the cluster settings. @@ -320,8 +320,8 @@ def put_settings( @query_params() def remote_info( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns the information about configured remote clusters. @@ -335,8 +335,8 @@ def remote_info( def allocation_explain( self, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides explanations for shard allocations in the cluster. 
@@ -361,8 +361,8 @@ def allocation_explain( def delete_component_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a component template. @@ -389,9 +389,9 @@ def delete_component_template( @query_params("cluster_manager_timeout", "local", "master_timeout") def get_component_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns one or more component templates. @@ -418,8 +418,8 @@ def put_component_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a component template. @@ -452,8 +452,8 @@ def put_component_template( def exists_component_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular component template exist. @@ -481,8 +481,8 @@ def exists_component_template( @query_params("wait_for_removal") def delete_voting_config_exclusions( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clears cluster voting config exclusions. @@ -502,8 +502,8 @@ def delete_voting_config_exclusions( @query_params("node_ids", "node_names", "timeout") def post_voting_config_exclusions( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. @@ -524,8 +524,8 @@ def post_voting_config_exclusions( @query_params() def delete_decommission_awareness( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete any existing decommission. @@ -541,8 +541,8 @@ def delete_decommission_awareness( @query_params() def delete_weighted_routing( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete weighted shard routing weights. @@ -559,8 +559,8 @@ def delete_weighted_routing( def get_decommission_awareness( self, awareness_attribute_name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Get details and status of decommissioned attribute. @@ -590,8 +590,8 @@ def get_decommission_awareness( def get_weighted_routing( self, attribute: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Fetches weighted shard routing weights. 
@@ -614,8 +614,8 @@ def put_decommission_awareness( self, awareness_attribute_name: Any, awareness_attribute_value: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Decommissions an awareness attribute. @@ -645,8 +645,8 @@ def put_decommission_awareness( def put_weighted_routing( self, attribute: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index 8734a3ac..8617708e 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -48,8 +48,8 @@ class DanglingIndicesClient(NamespacedClient): def delete_dangling_index( self, index_uuid: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes the specified dangling index. @@ -81,8 +81,8 @@ def delete_dangling_index( def import_dangling_index( self, index_uuid: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Imports the specified dangling index. @@ -108,8 +108,8 @@ def import_dangling_index( @query_params() def list_dangling_indices( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index b826e99f..52573409 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -46,9 +46,9 @@ class IndicesClient(NamespacedClient): def analyze( self, body: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the @@ -70,9 +70,9 @@ def analyze( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") def refresh( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the refresh operation in one or more indices. @@ -102,9 +102,9 @@ def refresh( ) def flush( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the flush operation on one or more indices. 
@@ -141,8 +141,8 @@ def create( self, index: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates an index with optional settings and mappings. @@ -175,8 +175,8 @@ def clone( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clones an index. @@ -220,8 +220,8 @@ def clone( def get( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about one or more indices. @@ -268,8 +268,8 @@ def get( def open( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Opens an index. @@ -312,8 +312,8 @@ def open( def close( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Closes an index. @@ -355,8 +355,8 @@ def close( def delete( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index. @@ -398,8 +398,8 @@ def delete( def exists( self, index: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index exists. @@ -441,9 +441,9 @@ def exists( def put_mapping( self, body: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the index mappings. @@ -493,9 +493,9 @@ def put_mapping( ) def get_mapping( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns mappings for one or more indices. @@ -533,9 +533,9 @@ def get_mapping( def get_field_mapping( self, fields: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns mapping for one or more fields. @@ -572,8 +572,8 @@ def put_alias( index: Any, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an alias. @@ -607,9 +607,9 @@ def put_alias( def exists_alias( self, name: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular alias exists. 
@@ -638,10 +638,10 @@ def exists_alias( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") def get_alias( self, - index: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an alias. @@ -668,8 +668,8 @@ def get_alias( def update_aliases( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates index aliases. @@ -695,8 +695,8 @@ def delete_alias( self, index: Any, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an alias. @@ -726,8 +726,8 @@ def put_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an index template. @@ -762,8 +762,8 @@ def put_template( def exists_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index template exists. @@ -790,9 +790,9 @@ def exists_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an index template. @@ -817,8 +817,8 @@ def get_template( def delete_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index template. @@ -851,10 +851,10 @@ def delete_template( ) def get_settings( self, - index: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns settings for one or more indices. @@ -900,9 +900,9 @@ def get_settings( def put_settings( self, body: Any, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the index settings. @@ -955,10 +955,10 @@ def put_settings( ) def stats( self, - index: Any = None, - metric: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + metric: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides statistics on operations happening in an index. 
@@ -1002,9 +1002,9 @@ def stats( ) def segments( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides low-level information about segments in a Lucene index. @@ -1044,9 +1044,9 @@ def segments( def validate_query( self, body: Any = None, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. @@ -1098,9 +1098,9 @@ def validate_query( ) def clear_cache( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clears all or specific caches for one or more indices. @@ -1129,9 +1129,9 @@ def clear_cache( @query_params("active_only", "detailed") def recovery( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -1157,9 +1157,9 @@ def recovery( ) def upgrade( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1187,9 +1187,9 @@ def upgrade( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") def get_upgrade( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1215,9 +1215,9 @@ def get_upgrade( ) def shard_stores( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides store information for shard copies of indices. @@ -1250,9 +1250,9 @@ def shard_stores( ) def forcemerge( self, - index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1291,8 +1291,8 @@ def shrink( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. 
@@ -1337,8 +1337,8 @@ def split( index: Any, target: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows you to split an existing index into a new index with more primary @@ -1383,9 +1383,9 @@ def rollover( self, alias: Any, body: Any = None, - new_index: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + new_index: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates an alias to point to a new index when the existing index is considered @@ -1425,8 +1425,8 @@ def create_data_stream( self, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a data stream. @@ -1450,8 +1450,8 @@ def create_data_stream( def delete_data_stream( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a data stream. @@ -1471,8 +1471,8 @@ def delete_data_stream( def delete_index_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes an index template. @@ -1500,8 +1500,8 @@ def delete_index_template( def exists_index_template( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about whether a particular index template exists. @@ -1528,9 +1528,9 @@ def exists_index_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_index_template( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns an index template. @@ -1556,8 +1556,8 @@ def put_index_template( self, name: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates an index template. @@ -1592,8 +1592,8 @@ def simulate_index_template( self, name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Simulate matching the given index name against the index templates in the @@ -1629,9 +1629,9 @@ def simulate_index_template( @query_params() def get_data_stream( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns data streams. 
@@ -1648,9 +1648,9 @@ def get_data_stream( def simulate_template( self, body: Any = None, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Simulate resolving the given template name or body. @@ -1682,8 +1682,8 @@ def simulate_template( def resolve_index( self, name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1714,8 +1714,8 @@ def add_block( self, index: Any, block: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds a block to an index. @@ -1750,9 +1750,9 @@ def add_block( @query_params() def data_streams_stats( self, - name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Provides statistics on operations happening in a data stream. diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index a2d0fa0d..527467bb 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,9 +45,9 @@ class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") def get_pipeline( self, - id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a pipeline. @@ -70,8 +70,8 @@ def put_pipeline( self, id: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates a pipeline. @@ -102,8 +102,8 @@ def put_pipeline( def delete_pipeline( self, id: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a pipeline. @@ -131,9 +131,9 @@ def delete_pipeline( def simulate( self, body: Any, - id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Allows to simulate a pipeline with example documents. @@ -158,8 +158,8 @@ def simulate( @query_params() def processor_grok( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of the built-in patterns. 
diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 6a7b5db1..728e8f35 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any +from typing import Any, Optional from .utils import NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class NodesClient(NamespacedClient): def reload_secure_settings( self, body: Any = None, - node_id: Any = None, + node_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -72,8 +72,8 @@ def reload_secure_settings( @query_params("flat_settings", "timeout") def info( self, - node_id: Any = None, - metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -108,9 +108,9 @@ def info( ) def stats( self, - node_id: Any = None, - metric: Any = None, - index_metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, + index_metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -159,7 +159,7 @@ def stats( ) def hot_threads( self, - node_id: Any = None, + node_id: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: @@ -197,8 +197,8 @@ def hot_threads( @query_params("timeout") def usage( self, - node_id: Any = None, - metric: Any = None, + node_id: Optional[Any] = None, + metric: Optional[Any] = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index fc45c1e8..8d93bc23 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, query_params @@ -46,8 +46,8 @@ class RemoteStoreClient(NamespacedClient): def restore( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Restores from remote store. diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index f074574e..3dd921e0 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -56,8 +56,8 @@ class SecurityClient(NamespacedClient): @query_params() def get_account_details( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns account details for the current user. @@ -71,8 +71,8 @@ def get_account_details( def change_password( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Changes the password for the current user. @@ -94,8 +94,8 @@ def change_password( def get_action_group( self, action_group: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one action group. 
@@ -118,8 +118,8 @@ def get_action_group( @query_params() def get_action_groups( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all action groups. @@ -136,8 +136,8 @@ def get_action_groups( def delete_action_group( self, action_group: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete a specified action group. @@ -162,8 +162,8 @@ def create_action_group( self, action_group: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified action group. @@ -189,8 +189,8 @@ def patch_action_group( self, action_group: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of an action group. @@ -213,8 +213,8 @@ def patch_action_group( def patch_action_groups( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. @@ -236,8 +236,8 @@ def patch_action_groups( def get_user( self, username: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieve one internal user. @@ -257,8 +257,8 @@ def get_user( @query_params() def get_users( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieve all internal users. @@ -275,8 +275,8 @@ def get_users( def delete_user( self, username: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified user. @@ -298,8 +298,8 @@ def create_user( self, username: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified user. @@ -323,8 +323,8 @@ def patch_user( self, username: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of an internal user. @@ -347,8 +347,8 @@ def patch_user( def patch_users( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -370,8 +370,8 @@ def patch_users( def get_role( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one role. 
@@ -391,8 +391,8 @@ def get_role( @query_params() def get_roles( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all roles. @@ -406,8 +406,8 @@ def get_roles( def delete_role( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified role. @@ -429,8 +429,8 @@ def create_role( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified role. @@ -454,8 +454,8 @@ def patch_role( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of a role. @@ -478,8 +478,8 @@ def patch_role( def patch_roles( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -501,8 +501,8 @@ def patch_roles( def get_role_mapping( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one role mapping. @@ -522,8 +522,8 @@ def get_role_mapping( @query_params() def get_role_mappings( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all role mappings. @@ -540,8 +540,8 @@ def get_role_mappings( def delete_role_mapping( self, role: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes the specified role mapping. @@ -563,8 +563,8 @@ def create_role_mapping( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified role mapping. @@ -588,8 +588,8 @@ def patch_role_mapping( self, role: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates individual attributes of a role mapping. @@ -612,8 +612,8 @@ def patch_role_mapping( def patch_role_mappings( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -635,8 +635,8 @@ def patch_role_mappings( def get_tenant( self, tenant: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves one tenant. 
@@ -656,8 +656,8 @@ def get_tenant( @query_params() def get_tenants( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all tenants. @@ -671,8 +671,8 @@ def get_tenants( def delete_tenant( self, tenant: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Delete the specified tenant. @@ -694,8 +694,8 @@ def create_tenant( self, tenant: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates or replaces the specified tenant. @@ -719,8 +719,8 @@ def patch_tenant( self, tenant: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Add, delete, or modify a single tenant. @@ -743,8 +743,8 @@ def patch_tenant( def patch_tenants( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -765,8 +765,8 @@ def patch_tenants( @query_params() def get_configuration( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns the current Security plugin configuration in JSON format. @@ -783,8 +783,8 @@ def get_configuration( def update_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds or updates the existing configuration using the REST API. @@ -806,8 +806,8 @@ def update_configuration( def patch_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -828,9 +828,9 @@ def patch_configuration( @query_params() def get_distinguished_names( self, - cluster_name: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + cluster_name: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves all distinguished names in the allow list. 
@@ -849,8 +849,8 @@ def update_distinguished_names( self, cluster_name: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s @@ -875,8 +875,8 @@ def update_distinguished_names( def delete_distinguished_names( self, cluster_name: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow @@ -899,8 +899,8 @@ def delete_distinguished_names( @query_params() def get_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves the cluster’s security certificates. @@ -913,8 +913,8 @@ def get_certificates( @query_params() def reload_transport_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Reload transport layer communication certificates. @@ -930,8 +930,8 @@ def reload_transport_certificates( @query_params() def reload_http_certificates( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Reload HTTP layer communication certificates. @@ -947,8 +947,8 @@ def reload_http_certificates( @query_params() def flush_cache( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. @@ -961,8 +961,8 @@ def flush_cache( @query_params() def health( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Checks to see if the Security plugin is up and running. @@ -975,8 +975,8 @@ def health( @query_params() def get_audit_configuration( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Retrieves the audit configuration. @@ -990,8 +990,8 @@ def get_audit_configuration( def update_audit_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Updates the audit configuration. @@ -1013,8 +1013,8 @@ def update_audit_configuration( def patch_audit_configuration( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. @@ -1036,8 +1036,8 @@ def patch_audit_configuration( def patch_distinguished_names( self, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Bulk update of distinguished names. 
diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index a931a231..5fbcf255 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -48,8 +48,8 @@ def create( repository: Any, snapshot: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates a snapshot in a repository. @@ -83,8 +83,8 @@ def delete( self, repository: Any, snapshot: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a snapshot. @@ -116,8 +116,8 @@ def get( self, repository: Any, snapshot: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a snapshot. @@ -151,8 +151,8 @@ def get( def delete_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Deletes a repository. @@ -180,9 +180,9 @@ def delete_repository( @query_params("cluster_manager_timeout", "local", "master_timeout") def get_repository( self, - repository: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a repository. @@ -206,8 +206,8 @@ def create_repository( self, repository: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Creates a repository. @@ -241,8 +241,8 @@ def restore( repository: Any, snapshot: Any, body: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Restores a snapshot. @@ -274,10 +274,10 @@ def restore( @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") def status( self, - repository: Any = None, - snapshot: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + repository: Optional[Any] = None, + snapshot: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about the status of a snapshot. @@ -305,8 +305,8 @@ def status( def verify_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Verifies a repository. @@ -334,8 +334,8 @@ def verify_repository( def cleanup_repository( self, repository: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Removes stale data from repository. 
@@ -366,8 +366,8 @@ def clone( snapshot: Any, target_snapshot: Any, body: Any, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 908a790e..76ff6924 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -37,7 +37,7 @@ import warnings -from typing import Any, MutableMapping, Optional +from typing import Any, Optional from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -54,8 +54,8 @@ class TasksClient(NamespacedClient): ) def list( self, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns a list of tasks. @@ -84,9 +84,9 @@ def list( @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") def cancel( self, - task_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + task_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -115,9 +115,9 @@ def cancel( @query_params("timeout", "wait_for_completion") def get( self, - task_id: Any = None, - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, + task_id: Optional[Any] = None, + params: Any = None, + headers: Any = None, ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index 8e2c3510..3ae204e6 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -60,7 +60,7 @@ def _normalize_hosts(hosts: Any) -> Any: for host in hosts: if isinstance(host, string_types): if "://" not in host: - host = "//%r" % host + host = "//%s" % host # type: ignore parsed_url = urlparse(host) h = {"host": parsed_url.hostname} diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index 19fda41e..a4151b81 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -256,7 +256,7 @@ def mget( index: Any = None, raise_on_error: bool = True, missing: str = "none", - **kwargs: Any + **kwargs: Any, ) -> Any: """ Retrieve multiple document by their ``id``'s. 
Returns a list of instances diff --git a/utils/generate-api.py b/utils/generate-api.py index c9bec008..f53e212c 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -100,7 +100,7 @@ def add(self, api) -> None: def parse_orig(self): self.orders = [] - self.header = "from typing import Any, Collection, MutableMapping, Optional, Tuple, Union\n\n" + self.header = "from typing import Any, Collection, Optional, Tuple, Union\n\n" namespace_new = "".join(word.capitalize() for word in self.namespace.split("_")) self.header += "class " + namespace_new + "Client(NamespacedClient):" @@ -119,8 +119,9 @@ def parse_orig(self): header_lines.append(line) if line.startswith("class"): if "security.py" in str(self.filepath): + # TODO: FIXME, import code header_lines.append( - " from ._patch import health_check, update_audit_config" + " from ._patch import health_check, update_audit_config # type: ignore" ) break self.header = "\n".join(header_lines) diff --git a/utils/templates/base b/utils/templates/base index 1d00e646..54db3451 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -1,6 +1,6 @@ @query_params({{ api.query_params|map("tojson")|join(", ")}}) - async def {{ api.name }}(self, {% include "func_params" %}) -> {% if api.method == 'HEAD' %}bool{% else %}Any{% endif %}: + async def {{ api.name }}(self, {% include "func_params" %}) -> Any: """ {% if api.description %} {{ api.description|replace("\n", " ")|wordwrap(wrapstring="\n ") }} diff --git a/utils/templates/func_params b/utils/templates/func_params index 9d891ad2..7a66cb36 100644 --- a/utils/templates/func_params +++ b/utils/templates/func_params @@ -7,8 +7,8 @@ {% endif %} {% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %} + {% if not info.required and info.type == 'Any' %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %} {% endfor %} -params: Optional[MutableMapping[str, Any]]=None, -headers: Optional[MutableMapping[str, str]]=None, \ No newline at end of file +params: Any=None, +headers: Any=None, \ No newline at end of file From dc3b013e822eb7fd2b224baeec069923a3bb870d Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 13:07:48 -0400 Subject: [PATCH 03/17] Updated CHANGELOG. Signed-off-by: dblock --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16a4e6e2..d9d0c49f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) - Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) +- Merge `.pyi` Python types inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed From 491e00f3b6225d4052d27b979e438f9aa692f376 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 13:20:08 -0400 Subject: [PATCH 04/17] Use lowest common python version for lint. 
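Running lint on the lowest interpreter we still support keeps annotations honest: built-in
generics such as dict[str, Any] are evaluated when a function is defined, so they fail at
import time on Python 3.7/3.8 unless evaluation is postponed. A minimal sketch of the failure
mode (hypothetical function, not code from this repository):

    from typing import Any

    def example() -> dict[str, Any]:  # TypeError on 3.7: 'type' object is not subscriptable
        ...

which is presumably why the async scan helper below now returns Any rather than
AsyncGenerator[dict[str, Any], None].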
Signed-off-by: dblock --- noxfile.py | 2 +- opensearchpy/_async/helpers/actions.py | 6 +++--- opensearchpy/_async/transport.py | 6 +++--- opensearchpy/helpers/actions.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/noxfile.py b/noxfile.py index d503b261..3b97ad03 100644 --- a/noxfile.py +++ b/noxfile.py @@ -59,7 +59,7 @@ def format(session) -> None: lint(session) -@nox.session() +@nox.session(python=["3.7"]) def lint(session) -> None: session.install( "flake8", diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 94eade34..92338c14 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -303,8 +303,8 @@ async def async_bulk( # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - async for ok, item in async_streaming_bulk( - client, actions, ignore_status=ignore_status, *args, **kwargs # type: ignore + async for ok, item in async_streaming_bulk( # type: ignore + client, actions, ignore_status=ignore_status, *args, **kwargs ): # go through request-response pairs and detect failures if not ok: @@ -328,7 +328,7 @@ async def async_scan( clear_scroll: bool = True, scroll_kwargs: Any = None, **kwargs: Any -) -> AsyncGenerator[dict[str, Any], None]: +) -> Any: """ Simple abstraction on top of the :meth:`~opensearchpy.Any.scroll` api - a simple iterator that diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index 417c6805..854f0a06 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -30,7 +30,7 @@ import logging import sys from itertools import chain -from typing import Any, Collection, Mapping, Optional, Tuple, Type, Union +from typing import Any, Collection, Mapping, Optional, Type, Union from opensearchpy.connection.base import Connection from opensearchpy.serializer import Serializer @@ -76,7 +76,7 @@ def __init__( serializers: Any = None, default_mimetype: str = "application/json", max_retries: int = 3, - retry_on_status: Tuple[int, ...] = (502, 503, 504), + retry_on_status: Any = (502, 503, 504), retry_on_timeout: bool = False, send_get_body_as: str = "GET", **kwargs: Any @@ -440,7 +440,7 @@ async def perform_request( ) return data - async def close(self) -> None: # type: ignore + async def close(self) -> None: """ Explicitly closes connections """ diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index a02e13a5..a49e8210 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -507,7 +507,7 @@ def scan( raise_on_error: bool = True, preserve_order: bool = False, size: int = 1000, - request_timeout: Optional[int] = None, + request_timeout: Optional[float] = None, clear_scroll: bool = True, scroll_kwargs: Any = None, **kwargs: Any From b03ae323f5240bd3a60f989faf692e49210f4508 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 13:23:49 -0400 Subject: [PATCH 05/17] Fix: don't typeshed. 
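The test helper imported Incomplete from _typeshed at module scope, but _typeshed is a
stub-only package that exists for type checkers and is never installed at runtime, so the
import fails with ModuleNotFoundError outside of mypy. If a stub-only type is ever needed
again, the usual guard looks like this (illustrative sketch, not part of this change):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from _typeshed import Incomplete  # resolved by the type checker only, never executed

Here the simpler fix is to drop the import and read OPENSEARCH_URL with os.environ.get().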
Signed-off-by: dblock --- opensearchpy/_async/helpers/test.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py index c0a261c8..9516857c 100644 --- a/opensearchpy/_async/helpers/test.py +++ b/opensearchpy/_async/helpers/test.py @@ -13,16 +13,10 @@ from typing import Any from unittest import SkipTest -from _typeshed import Incomplete - from opensearchpy import AsyncOpenSearch from opensearchpy.exceptions import ConnectionError -OPENSEARCH_URL: Incomplete -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") async def get_test_client(nowait: bool = False, **kwargs: Any) -> Any: From 7df36cd1a28c3f37e1be4ff6bddfedd17036ee26 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 15:40:59 -0400 Subject: [PATCH 06/17] Removed unneeded comment. Signed-off-by: dblock --- opensearchpy/_async/helpers/actions.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 92338c14..9b98fb7f 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -55,10 +55,6 @@ ) from ...helpers.errors import ScanError -# from opensearchpy._async.client import Any -# from opensearchpy.serializer import Serializer - - logger: logging.Logger = logging.getLogger("opensearchpy.helpers") From a98d40819c50cdec47f4dd8e165befe38ce582ae Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 15:41:31 -0400 Subject: [PATCH 07/17] Simplify OPENSEARCH_URL. Signed-off-by: dblock --- opensearchpy/helpers/test.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index 0ee1ccc9..96282c49 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -37,11 +37,7 @@ from opensearchpy import OpenSearch from opensearchpy.exceptions import ConnectionError -OPENSEARCH_URL: str -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: From df6133e4ce80c1b3f6d20cd369ba6f2696eb4565 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 15:41:49 -0400 Subject: [PATCH 08/17] Fix: positional ignore_status used as chunk_size. Signed-off-by: dblock --- opensearchpy/helpers/actions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index a49e8210..39e3cdaf 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -422,7 +422,7 @@ def bulk( # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - for ok, item in streaming_bulk(client, actions, ignore_status, *args, **kwargs): + for ok, item in streaming_bulk(client, actions, ignore_status=ignore_status, *args, **kwargs): # type: ignore # go through request-response pairs and detect failures if not ok: if not stats_only: From 64251f0b08d7084a4ee74251efdb13229f9f05f7 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 15:51:31 -0400 Subject: [PATCH 09/17] Fix: parse version string. 
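The stub merge gave __versionstr__ an inline annotation, so the old pattern (^__versionstr__\s+=) no longer matches _version.py; the regex now expects the annotated assignment, and the build fails loudly instead of dying on .group(1) of a None match. Sketch of the line being parsed (the version number here is only an example):

    import re

    line = '__versionstr__: str = "2.4.0"'
    match = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", line, re.M)
    assert match and match.group(1) == "2.4.0"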
Signed-off-by: dblock --- utils/build-dists.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/utils/build-dists.py b/utils/build-dists.py index e886c71e..b45da98e 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -188,9 +188,13 @@ def main() -> None: # Grab the major version to be used as a suffix. version_path = os.path.join(base_dir, "opensearchpy/_version.py") with open(version_path) as f: - version = re.search( - r"^__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read(), re.M - ).group(1) + data = f.read() + m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) + if m: + version = m.group(1) + else: + raise Exception(f"Invalid version {data}") + major_version = version.split(".")[0] # If we're handed a version from the build manager we From 05b919a6eddbbc3ba5319efae1861e451a3ad3ae Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 15:56:26 -0400 Subject: [PATCH 10/17] Remove future annotations for Python 3.6. Signed-off-by: dblock --- opensearchpy/_async/helpers/document.py | 2 -- opensearchpy/_async/helpers/mapping.py | 6 ++---- opensearchpy/helpers/document.py | 10 ++++------ opensearchpy/helpers/mapping.py | 6 ++---- opensearchpy/helpers/utils.py | 6 ++---- 5 files changed, 10 insertions(+), 20 deletions(-) diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index ef326c7a..25196e01 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -8,8 +8,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from __future__ import annotations - import collections.abc as collections_abc from fnmatch import fnmatch from typing import Any, Optional, Sequence, Tuple, Type diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index c89bf70a..dd560564 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -8,11 +8,9 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from __future__ import annotations - import collections.abc as collections_abc from itertools import chain -from typing import Any, Optional +from typing import Any from six import iteritems @@ -56,7 +54,7 @@ def resolve_nested(self, field_path: str) -> Any: nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path: Any) -> Optional[AsyncMapping]: + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index a4151b81..f1673ce7 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -25,11 +25,9 @@ # specific language governing permissions and limitations # under the License. 
-from __future__ import annotations - import collections.abc as collections_abc from fnmatch import fnmatch -from typing import Any, Tuple, Type, Union +from typing import Any, Tuple, Type from six import add_metaclass, iteritems @@ -51,14 +49,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class DocumentMeta(type): def __new__( - cls: Union[Type[DocumentMeta], Type[IndexMeta]], + cls: Any, name: str, bases: Tuple[Type[ObjectBase]], attrs: Any, ) -> Any: # DocumentMeta filters attrs in place attrs["_doc_type"] = DocumentOptions(name, bases, attrs) - return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) # type: ignore + return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) class IndexMeta(DocumentMeta): @@ -67,7 +65,7 @@ class IndexMeta(DocumentMeta): _document_initialized = False def __new__( - cls: Type[IndexMeta], + cls: Any, name: str, bases: Tuple[Type[ObjectBase]], attrs: Any, diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index 92879274..eaa13e3f 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -25,11 +25,9 @@ # specific language governing permissions and limitations # under the License. -from __future__ import annotations - import collections.abc as collections_abc from itertools import chain -from typing import Any, Optional +from typing import Any from six import iteritems, itervalues @@ -133,7 +131,7 @@ def resolve_nested(self, field_path: Any) -> Any: nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path: Any) -> Optional[Mapping]: + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 513fb672..e17b89a6 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -25,7 +25,7 @@ # specific language governing permissions and limitations # under the License. -from __future__ import annotations, unicode_literals +from __future__ import unicode_literals import collections.abc as collections_abc from copy import copy @@ -229,9 +229,7 @@ class DslMeta(type): _types: Dict[str, Any] = {} - def __init__( - cls: Any, name: str, bases: tuple[type, ...], attrs: Dict[str, Any] - ) -> None: + def __init__(cls: Any, name: str, bases: Any, attrs: Any) -> None: # TODO: why is it calling itself?! super(DslMeta, cls).__init__(name, bases, attrs) # skip for DslBase From 122d0ad608658d1e10521ec71c091b7735f629f6 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 16:40:24 -0400 Subject: [PATCH 11/17] Fix: types in documentation. Signed-off-by: dblock --- opensearchpy/_async/helpers/actions.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 9b98fb7f..cc26bdd9 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -175,7 +175,7 @@ async def async_streaming_bulk( every subsequent rejection for the same chunk, for double the time every time up to ``max_backoff`` seconds. 
- :arg client: instance of :class:`~opensearchpy.Any` to use + :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use :arg actions: iterable or async iterable containing the actions to be executed :arg chunk_size: number of docs in one chunk sent to client (default: 500) :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB) @@ -281,7 +281,7 @@ async def async_bulk( just return the errors and not store them in memory. - :arg client: instance of :class:`~opensearchpy.Any` to use + :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use :arg actions: iterator containing the actions :arg stats_only: if `True` only report number of successful/failed operations instead of just number of successful and a list of error responses @@ -336,8 +336,8 @@ async def async_scan( may be an expensive operation and will negate the performance benefits of using ``scan``. - :arg client: instance of :class:`~opensearchpy.Any` to use - :arg query: body for the :meth:`~opensearchpy.Any.search` api + :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use + :arg query: body for the :meth:`~opensearchpy.AsyncOpenSearch.search` api :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg raise_on_error: raises an exception (``ScanError``) if an error is @@ -459,7 +459,7 @@ async def async_reindex( This helper doesn't transfer mappings, just the data. - :arg client: instance of :class:`~opensearchpy.Any` to use (for + :arg client: instance of :class:`~opensearchpy.AsyncOpenSearch` to use (for read if `target_client` is specified as well) :arg source_index: index (or list of indices) to read documents from :arg target_index: name of the index in the target cluster to populate From 151c026e355809b18e469e91fb5175b4eec4434b Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 16:41:15 -0400 Subject: [PATCH 12/17] Improve CHANGELOG text. Signed-off-by: dblock --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d9d0c49f..f4dd77a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,7 +23,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) - Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) -- Merge `.pyi` Python types inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) +- Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed From 92db92260e361d6782cc1bd8054a76564aa977fe Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 16:44:38 -0400 Subject: [PATCH 13/17] Re-added missing separator. 
Signed-off-by: dblock --- opensearchpy/_async/client/cat.py | 11 ++++++----- opensearchpy/client/cat.py | 11 ++++++----- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index dfee84b6..e0e84b69 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -26,6 +26,12 @@ # under the License. +# ----------------------------------------------------- + +from typing import Any, Optional + +from .utils import NamespacedClient, _make_path, query_params + # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -35,11 +41,6 @@ # ----------------------------------------------------- -from typing import Any, Optional - -from .utils import NamespacedClient, _make_path, query_params - - class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") async def aliases( diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 5fb55053..1a16f836 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -26,6 +26,12 @@ # under the License. +# ----------------------------------------------------- + +from typing import Any, Optional + +from .utils import NamespacedClient, _make_path, query_params + # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -35,11 +41,6 @@ # ----------------------------------------------------- -from typing import Any, Optional - -from .utils import NamespacedClient, _make_path, query_params - - class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") def aliases( From 2e8819410779a4c269c61a8906576bb616af7d90 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 16:50:24 -0400 Subject: [PATCH 14/17] Remove duplicate licenses. Signed-off-by: dblock --- opensearchpy/_async/client/cat.py | 12 +++++----- opensearchpy/_async/client/remote_store.py | 18 --------------- opensearchpy/_async/client/security.py | 27 ---------------------- opensearchpy/_async/http_aiohttp.py | 4 ---- opensearchpy/client/cat.py | 12 +++++----- opensearchpy/client/remote_store.py | 18 --------------- opensearchpy/client/security.py | 27 ---------------------- 7 files changed, 12 insertions(+), 106 deletions(-) diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index e0e84b69..8371c2c9 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -26,12 +26,7 @@ # under the License. -# ----------------------------------------------------- - -from typing import Any, Optional - -from .utils import NamespacedClient, _make_path, query_params - +# ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -41,6 +36,11 @@ # ----------------------------------------------------- +from typing import Any, Optional + +from .utils import NamespacedClient, _make_path, query_params + + class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") async def aliases( diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 0319ea1d..8a72f41c 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -7,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 72a1629a..dd63abfb 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -7,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. @@ -40,15 +22,6 @@ from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - class SecurityClient(NamespacedClient): from ._patch import health_check, update_audit_config # type: ignore diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index 9d12066d..34819970 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -30,10 +30,6 @@ import os import ssl import warnings - -# sentinel value for `verify_certs`. -# This is used to detect if a user is passing in a value -# for SSL kwargs if also using an SSLContext. from typing import Any, Collection, Mapping, Optional, Union import urllib3 diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 1a16f836..01455e9c 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -26,12 +26,7 @@ # under the License. -# ----------------------------------------------------- - -from typing import Any, Optional - -from .utils import NamespacedClient, _make_path, query_params - +# ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": @@ -41,6 +36,11 @@ # ----------------------------------------------------- +from typing import Any, Optional + +from .utils import NamespacedClient, _make_path, query_params + + class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") def aliases( diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index 8d93bc23..a019a99c 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -7,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 3dd921e0..0174532e 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -7,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. @@ -40,15 +22,6 @@ from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - class SecurityClient(NamespacedClient): from ._patch import health_check, update_audit_config # type: ignore From 96fcc3e856ae392275bc83b96a77cc20181799b9 Mon Sep 17 00:00:00 2001 From: dblock Date: Wed, 1 Nov 2023 16:59:51 -0400 Subject: [PATCH 15/17] Get rid of Optional[Any]. Signed-off-by: dblock --- opensearchpy/_async/client/__init__.py | 36 ++++++++-------- opensearchpy/_async/client/cat.py | 26 ++++++------ opensearchpy/_async/client/cluster.py | 12 +++--- opensearchpy/_async/client/indices.py | 58 +++++++++++++------------- opensearchpy/_async/client/ingest.py | 6 +-- opensearchpy/_async/client/nodes.py | 20 ++++----- opensearchpy/_async/client/security.py | 4 +- opensearchpy/_async/client/snapshot.py | 8 ++-- opensearchpy/_async/client/tasks.py | 6 +-- opensearchpy/client/__init__.py | 36 ++++++++-------- opensearchpy/client/cat.py | 26 ++++++------ opensearchpy/client/cluster.py | 12 +++--- opensearchpy/client/indices.py | 58 +++++++++++++------------- opensearchpy/client/ingest.py | 6 +-- opensearchpy/client/nodes.py | 20 ++++----- opensearchpy/client/security.py | 4 +- opensearchpy/client/snapshot.py | 8 ++-- opensearchpy/client/tasks.py | 6 +-- utils/templates/func_params | 3 +- 19 files changed, 178 insertions(+), 177 deletions(-) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 4143b4a1..279fda37 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -39,7 +39,7 @@ from __future__ import unicode_literals import logging -from typing import Any, Optional, Type +from typing import Any, Type from ..transport import AsyncTransport, TransportError from .cat import CatClient @@ -357,7 +357,7 @@ async def index( self, index: Any, body: Any, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -420,7 +420,7 @@ async def index( async def bulk( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -470,7 +470,7 @@ async def bulk( async def clear_scroll( self, body: Any 
= None, - scroll_id: Optional[Any] = None, + scroll_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -512,7 +512,7 @@ async def clear_scroll( async def count( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -970,7 +970,7 @@ async def explain( async def field_caps( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1143,7 +1143,7 @@ async def get_source( async def mget( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1194,7 +1194,7 @@ async def mget( async def msearch( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1252,7 +1252,7 @@ async def msearch( async def msearch_template( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1306,7 +1306,7 @@ async def msearch_template( async def mtermvectors( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1361,7 +1361,7 @@ async def put_script( self, id: Any, body: Any, - context: Optional[Any] = None, + context: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1397,7 +1397,7 @@ async def put_script( async def rank_eval( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1512,7 +1512,7 @@ async def reindex_rethrottle( async def render_search_template( self, body: Any = None, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1556,7 +1556,7 @@ async def scripts_painless_execute( async def scroll( self, body: Any = None, - scroll_id: Optional[Any] = None, + scroll_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1631,7 +1631,7 @@ async def scroll( async def search( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1756,7 +1756,7 @@ async def search( ) async def search_shards( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1803,7 +1803,7 @@ async def search_shards( async def search_template( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1872,7 +1872,7 @@ async def termvectors( self, index: Any, body: Any = None, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index 8371c2c9..4310511c 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import NamespacedClient, _make_path, query_params @@ -45,7 +45,7 @@ class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") async def aliases( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -99,7 +99,7 @@ async def all_pit_segments( ) async def allocation( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, 
headers: Any = None, ) -> Any: @@ -175,7 +175,7 @@ async def cluster_manager( @query_params("format", "h", "help", "s", "v") async def count( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -201,7 +201,7 @@ async def count( @query_params("bytes", "format", "h", "help", "s", "v") async def fielddata( self, - fields: Optional[Any] = None, + fields: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -290,7 +290,7 @@ async def help( ) async def indices( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -564,7 +564,7 @@ async def plugins( ) async def recovery( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -647,7 +647,7 @@ async def repositories( ) async def segment_replication( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -696,7 +696,7 @@ async def segment_replication( ) async def segments( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -739,7 +739,7 @@ async def segments( ) async def shards( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -785,7 +785,7 @@ async def shards( ) async def thread_pool( self, - thread_pool_patterns: Optional[Any] = None, + thread_pool_patterns: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -832,7 +832,7 @@ async def thread_pool( ) async def snapshots( self, - repository: Optional[Any] = None, + repository: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -924,7 +924,7 @@ async def tasks( ) async def templates( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index ac4b1493..905853e9 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -59,7 +59,7 @@ class ClusterClient(NamespacedClient): ) async def health( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -141,8 +141,8 @@ async def pending_tasks( ) async def state( self, - metric: Optional[Any] = None, - index: Optional[Any] = None, + metric: Any = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -190,7 +190,7 @@ async def state( @query_params("flat_settings", "timeout") async def stats( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -389,7 +389,7 @@ async def delete_component_template( @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_component_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index e1874deb..a4ef8b5b 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any 
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class IndicesClient(NamespacedClient): async def analyze( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -70,7 +70,7 @@ async def analyze( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") async def refresh( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -102,7 +102,7 @@ async def refresh( ) async def flush( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -441,7 +441,7 @@ async def exists( async def put_mapping( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -493,7 +493,7 @@ async def put_mapping( ) async def get_mapping( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -533,7 +533,7 @@ async def get_mapping( async def get_field_mapping( self, fields: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -607,7 +607,7 @@ async def put_alias( async def exists_alias( self, name: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -638,8 +638,8 @@ async def exists_alias( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") async def get_alias( self, - index: Optional[Any] = None, - name: Optional[Any] = None, + index: Any = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -790,7 +790,7 @@ async def exists_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -851,8 +851,8 @@ async def delete_template( ) async def get_settings( self, - index: Optional[Any] = None, - name: Optional[Any] = None, + index: Any = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -900,7 +900,7 @@ async def get_settings( async def put_settings( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -955,8 +955,8 @@ async def put_settings( ) async def stats( self, - index: Optional[Any] = None, - metric: Optional[Any] = None, + index: Any = None, + metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1002,7 +1002,7 @@ async def stats( ) async def segments( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1044,7 +1044,7 @@ async def segments( async def validate_query( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1098,7 +1098,7 @@ async def validate_query( ) async def clear_cache( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1129,7 +1129,7 @@ async def clear_cache( @query_params("active_only", "detailed") async def recovery( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1157,7 +1157,7 @@ async def recovery( ) async def upgrade( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: 
Any = None, ) -> Any: @@ -1187,7 +1187,7 @@ async def upgrade( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") async def get_upgrade( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1215,7 +1215,7 @@ async def get_upgrade( ) async def shard_stores( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1250,7 +1250,7 @@ async def shard_stores( ) async def forcemerge( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1383,7 +1383,7 @@ async def rollover( self, alias: Any, body: Any = None, - new_index: Optional[Any] = None, + new_index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1528,7 +1528,7 @@ async def exists_index_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_index_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1629,7 +1629,7 @@ async def simulate_index_template( @query_params() async def get_data_stream( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1648,7 +1648,7 @@ async def get_data_stream( async def simulate_template( self, body: Any = None, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1750,7 +1750,7 @@ async def add_block( @query_params() async def data_streams_stats( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 587cab9d..2f8cff27 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,7 +45,7 @@ class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") async def get_pipeline( self, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -131,7 +131,7 @@ async def delete_pipeline( async def simulate( self, body: Any, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index 3021524a..36146fad 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class NodesClient(NamespacedClient): async def reload_secure_settings( self, body: Any = None, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -72,8 +72,8 @@ async def reload_secure_settings( @query_params("flat_settings", "timeout") async def info( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = None, + node_id: Any = None, + metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -108,9 +108,9 @@ async def info( ) async def stats( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = 
None, - index_metric: Optional[Any] = None, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -159,7 +159,7 @@ async def stats( ) async def hot_threads( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -197,8 +197,8 @@ async def hot_threads( @query_params("timeout") async def usage( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = None, + node_id: Any = None, + metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index dd63abfb..dc893f86 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -18,7 +18,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -801,7 +801,7 @@ async def patch_configuration( @query_params() async def get_distinguished_names( self, - cluster_name: Optional[Any] = None, + cluster_name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 1a544ddb..97ffec72 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -180,7 +180,7 @@ async def delete_repository( @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_repository( self, - repository: Optional[Any] = None, + repository: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -274,8 +274,8 @@ async def restore( @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") async def status( self, - repository: Optional[Any] = None, - snapshot: Optional[Any] = None, + repository: Any = None, + snapshot: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 78deb4a9..39aefe93 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -37,7 +37,7 @@ import warnings -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -84,7 +84,7 @@ async def list( @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") async def cancel( self, - task_id: Optional[Any] = None, + task_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -115,7 +115,7 @@ async def cancel( @query_params("timeout", "wait_for_completion") async def get( self, - task_id: Optional[Any] = None, + task_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 6f0c54b0..05af6764 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -39,7 +39,7 @@ from __future__ import unicode_literals import logging -from typing import Any, Optional, Type +from typing import Any, Type from ..transport import Transport, TransportError from .cat import CatClient @@ -357,7 +357,7 @@ def index( self, index: Any, body: Any, - id: Optional[Any] 
= None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -420,7 +420,7 @@ def index( def bulk( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -470,7 +470,7 @@ def bulk( def clear_scroll( self, body: Any = None, - scroll_id: Optional[Any] = None, + scroll_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -512,7 +512,7 @@ def clear_scroll( def count( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -970,7 +970,7 @@ def explain( def field_caps( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1143,7 +1143,7 @@ def get_source( def mget( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1194,7 +1194,7 @@ def mget( def msearch( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1252,7 +1252,7 @@ def msearch( def msearch_template( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1306,7 +1306,7 @@ def msearch_template( def mtermvectors( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1361,7 +1361,7 @@ def put_script( self, id: Any, body: Any, - context: Optional[Any] = None, + context: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1397,7 +1397,7 @@ def put_script( def rank_eval( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1512,7 +1512,7 @@ def reindex_rethrottle( def render_search_template( self, body: Any = None, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1556,7 +1556,7 @@ def scripts_painless_execute( def scroll( self, body: Any = None, - scroll_id: Optional[Any] = None, + scroll_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1631,7 +1631,7 @@ def scroll( def search( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1756,7 +1756,7 @@ def search( ) def search_shards( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1803,7 +1803,7 @@ def search_shards( def search_template( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1872,7 +1872,7 @@ def termvectors( self, index: Any, body: Any = None, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 01455e9c..91adbf35 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import NamespacedClient, _make_path, query_params @@ -45,7 +45,7 @@ class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") def aliases( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -99,7 +99,7 @@ def all_pit_segments( ) 
def allocation( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -175,7 +175,7 @@ def cluster_manager( @query_params("format", "h", "help", "s", "v") def count( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -201,7 +201,7 @@ def count( @query_params("bytes", "format", "h", "help", "s", "v") def fielddata( self, - fields: Optional[Any] = None, + fields: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -290,7 +290,7 @@ def help( ) def indices( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -564,7 +564,7 @@ def plugins( ) def recovery( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -647,7 +647,7 @@ def repositories( ) def segment_replication( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -696,7 +696,7 @@ def segment_replication( ) def segments( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -739,7 +739,7 @@ def segments( ) def shards( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -785,7 +785,7 @@ def shards( ) def thread_pool( self, - thread_pool_patterns: Optional[Any] = None, + thread_pool_patterns: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -832,7 +832,7 @@ def thread_pool( ) def snapshots( self, - repository: Optional[Any] = None, + repository: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -924,7 +924,7 @@ def tasks( ) def templates( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index a3bc1596..f2770f2d 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -59,7 +59,7 @@ class ClusterClient(NamespacedClient): ) def health( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -141,8 +141,8 @@ def pending_tasks( ) def state( self, - metric: Optional[Any] = None, - index: Optional[Any] = None, + metric: Any = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -190,7 +190,7 @@ def state( @query_params("flat_settings", "timeout") def stats( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -389,7 +389,7 @@ def delete_component_template( @query_params("cluster_manager_timeout", "local", "master_timeout") def get_component_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 52573409..7cdc7e57 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class 
IndicesClient(NamespacedClient): def analyze( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -70,7 +70,7 @@ def analyze( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") def refresh( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -102,7 +102,7 @@ def refresh( ) def flush( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -441,7 +441,7 @@ def exists( def put_mapping( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -493,7 +493,7 @@ def put_mapping( ) def get_mapping( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -533,7 +533,7 @@ def get_mapping( def get_field_mapping( self, fields: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -607,7 +607,7 @@ def put_alias( def exists_alias( self, name: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -638,8 +638,8 @@ def exists_alias( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") def get_alias( self, - index: Optional[Any] = None, - name: Optional[Any] = None, + index: Any = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -790,7 +790,7 @@ def exists_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -851,8 +851,8 @@ def delete_template( ) def get_settings( self, - index: Optional[Any] = None, - name: Optional[Any] = None, + index: Any = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -900,7 +900,7 @@ def get_settings( def put_settings( self, body: Any, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -955,8 +955,8 @@ def put_settings( ) def stats( self, - index: Optional[Any] = None, - metric: Optional[Any] = None, + index: Any = None, + metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1002,7 +1002,7 @@ def stats( ) def segments( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1044,7 +1044,7 @@ def segments( def validate_query( self, body: Any = None, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1098,7 +1098,7 @@ def validate_query( ) def clear_cache( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1129,7 +1129,7 @@ def clear_cache( @query_params("active_only", "detailed") def recovery( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1157,7 +1157,7 @@ def recovery( ) def upgrade( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1187,7 +1187,7 @@ def upgrade( @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") def get_upgrade( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1215,7 +1215,7 @@ def get_upgrade( 
) def shard_stores( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1250,7 +1250,7 @@ def shard_stores( ) def forcemerge( self, - index: Optional[Any] = None, + index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1383,7 +1383,7 @@ def rollover( self, alias: Any, body: Any = None, - new_index: Optional[Any] = None, + new_index: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1528,7 +1528,7 @@ def exists_index_template( @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_index_template( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1629,7 +1629,7 @@ def simulate_index_template( @query_params() def get_data_stream( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1648,7 +1648,7 @@ def get_data_stream( def simulate_template( self, body: Any = None, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -1750,7 +1750,7 @@ def add_block( @query_params() def data_streams_stats( self, - name: Optional[Any] = None, + name: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 527467bb..4bf558b9 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,7 +45,7 @@ class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") def get_pipeline( self, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -131,7 +131,7 @@ def delete_pipeline( def simulate( self, body: Any, - id: Optional[Any] = None, + id: Any = None, params: Any = None, headers: Any = None, ) -> Any: diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 728e8f35..6a7b5db1 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -36,7 +36,7 @@ # ----------------------------------------------------- -from typing import Any, Optional +from typing import Any from .utils import NamespacedClient, _make_path, query_params @@ -46,7 +46,7 @@ class NodesClient(NamespacedClient): def reload_secure_settings( self, body: Any = None, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -72,8 +72,8 @@ def reload_secure_settings( @query_params("flat_settings", "timeout") def info( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = None, + node_id: Any = None, + metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -108,9 +108,9 @@ def info( ) def stats( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = None, - index_metric: Optional[Any] = None, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -159,7 +159,7 @@ def stats( ) def hot_threads( self, - node_id: Optional[Any] = None, + node_id: Any = None, params: Any = None, headers: Any = None, ) -> Any: @@ -197,8 +197,8 @@ def hot_threads( @query_params("timeout") def usage( self, - node_id: Optional[Any] = None, - metric: Optional[Any] = None, + node_id: Any = 
diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py
index 728e8f35..6a7b5db1 100644
--- a/opensearchpy/client/nodes.py
+++ b/opensearchpy/client/nodes.py
@@ -36,7 +36,7 @@
 # -----------------------------------------------------


-from typing import Any, Optional
+from typing import Any

 from .utils import NamespacedClient, _make_path, query_params

@@ -46,7 +46,7 @@ class NodesClient(NamespacedClient):
     def reload_secure_settings(
         self,
         body: Any = None,
-        node_id: Optional[Any] = None,
+        node_id: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -72,8 +72,8 @@ def reload_secure_settings(
     @query_params("flat_settings", "timeout")
     def info(
         self,
-        node_id: Optional[Any] = None,
-        metric: Optional[Any] = None,
+        node_id: Any = None,
+        metric: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -108,9 +108,9 @@ def info(
     )
     def stats(
         self,
-        node_id: Optional[Any] = None,
-        metric: Optional[Any] = None,
-        index_metric: Optional[Any] = None,
+        node_id: Any = None,
+        metric: Any = None,
+        index_metric: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -159,7 +159,7 @@ def stats(
     )
     def hot_threads(
         self,
-        node_id: Optional[Any] = None,
+        node_id: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -197,8 +197,8 @@ def hot_threads(
     @query_params("timeout")
     def usage(
         self,
-        node_id: Optional[Any] = None,
-        metric: Optional[Any] = None,
+        node_id: Any = None,
+        metric: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py
index 0174532e..6d1574ea 100644
--- a/opensearchpy/client/security.py
+++ b/opensearchpy/client/security.py
@@ -18,7 +18,7 @@
 # -----------------------------------------------------


-from typing import Any, Optional
+from typing import Any

 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params

@@ -801,7 +801,7 @@ def patch_configuration(
     @query_params()
     def get_distinguished_names(
         self,
-        cluster_name: Optional[Any] = None,
+        cluster_name: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py
index 5fbcf255..fe6536fa 100644
--- a/opensearchpy/client/snapshot.py
+++ b/opensearchpy/client/snapshot.py
@@ -36,7 +36,7 @@
 # -----------------------------------------------------


-from typing import Any, Optional
+from typing import Any

 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params

@@ -180,7 +180,7 @@ def delete_repository(
     @query_params("cluster_manager_timeout", "local", "master_timeout")
     def get_repository(
         self,
-        repository: Optional[Any] = None,
+        repository: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -274,8 +274,8 @@ def restore(
     @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout")
     def status(
         self,
-        repository: Optional[Any] = None,
-        snapshot: Optional[Any] = None,
+        repository: Any = None,
+        snapshot: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py
index 76ff6924..7e675233 100644
--- a/opensearchpy/client/tasks.py
+++ b/opensearchpy/client/tasks.py
@@ -37,7 +37,7 @@


 import warnings
-from typing import Any, Optional
+from typing import Any

 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params

@@ -84,7 +84,7 @@ def list(
     @query_params("actions", "nodes", "parent_task_id", "wait_for_completion")
     def cancel(
         self,
-        task_id: Optional[Any] = None,
+        task_id: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
@@ -115,7 +115,7 @@ def cancel(
     @query_params("timeout", "wait_for_completion")
     def get(
         self,
-        task_id: Optional[Any] = None,
+        task_id: Any = None,
         params: Any = None,
         headers: Any = None,
     ) -> Any:
diff --git a/utils/templates/func_params b/utils/templates/func_params
index 7a66cb36..cbb976ed 100644
--- a/utils/templates/func_params
+++ b/utils/templates/func_params
@@ -7,7 +7,8 @@
     {% endif %}

    {% for p, info in api.all_parts.items() %}
-        {% if not info.required and info.type == 'Any' %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %}
+        {% if not info.required and not info.type == 'Any' %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %}
+        {% if not info.required and info.type == 'Any' %}{{ p }}: {{ info.type }}=None, {% endif %}
    {% endfor %}

    params: Any=None,
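The template change that closes out this patch mirrors the hand edits above: a URL part whose type is `Any` is now rendered without the `Optional[...]` wrapper, while any other type keeps it. A rough rendering sketch, assuming `jinja2` is available; the `parts` dict and its `str`-typed entry are made up for illustration (the real generator passes richer API metadata through `api.all_parts`)::

    from jinja2 import Template

    # Condensed copy of the two branches added to utils/templates/func_params.
    TEMPLATE = Template(
        "{% for p, info in parts.items() %}"
        "{% if not info.required and not info.type == 'Any' %}"
        "{{ p }}: Optional[{{ info.type }}]=None, "
        "{% endif %}"
        "{% if not info.required and info.type == 'Any' %}"
        "{{ p }}: {{ info.type }}=None, "
        "{% endif %}"
        "{% endfor %}"
    )

    parts = {
        "index": {"required": False, "type": "Any"},  # rendered without Optional
        "metric": {"required": False, "type": "str"},  # hypothetical non-Any part
    }

    # Prints: index: Any=None, metric: Optional[str]=None,
    print(TEMPLATE.render(parts=parts))
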
From 0e13cc95608902c13186453a6e632c9d70f411fc Mon Sep 17 00:00:00 2001
From: dblock
Date: Wed, 1 Nov 2023 17:02:39 -0400
Subject: [PATCH 16/17] Fix docs with AsyncOpenSearch.

Signed-off-by: dblock
---
 opensearchpy/_async/helpers/actions.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py
index cc26bdd9..c85b2ac8 100644
--- a/opensearchpy/_async/helpers/actions.py
+++ b/opensearchpy/_async/helpers/actions.py
@@ -265,7 +265,7 @@ async def async_bulk(
     **kwargs: Any
 ) -> Tuple[int, Union[int, List[Any]]]:
     """
-    Helper for the :meth:`~opensearchpy.Any.bulk` api that provides
+    Helper for the :meth:`~opensearchpy.AsyncOpenSearch.bulk` api that provides
     a more human friendly interface - it consumes an iterator of actions and
     sends them to opensearch in chunks. It returns a tuple with summary
     information - number of successfully executed actions and either list of
@@ -327,7 +327,7 @@ async def async_scan(
 ) -> Any:
     """
     Simple abstraction on top of the
-    :meth:`~opensearchpy.Any.scroll` api - a simple iterator that
+    :meth:`~opensearchpy.AsyncOpenSearch.scroll` api - a simple iterator that
     yields all hits as returned by underlining scroll requests.

     By default scan does not return results in any pre-determined order. To
@@ -352,10 +352,10 @@ async def async_scan(
         scroll API at the end of the method on completion or error, defaults
         to true.
     :arg scroll_kwargs: additional kwargs to be passed to
-        :meth:`~opensearchpy.Any.scroll`
+        :meth:`~opensearchpy.AsyncOpenSearch.scroll`

     Any additional keyword arguments will be passed to the initial
-    :meth:`~opensearchpy.Any.search` call::
+    :meth:`~opensearchpy.AsyncOpenSearch.search` call::

         async_scan(client,
             query={"query": {"match": {"title": "python"}}},
@@ -449,7 +449,7 @@ async def async_reindex(
     to another, potentially (if `target_client` is specified) on a different cluster.
     If you don't specify the query you will reindex all the documents.

-    Since ``2.3`` a :meth:`~opensearchpy.Any.reindex` api is
+    Since ``2.3`` a :meth:`~opensearchpy.AsyncOpenSearch.reindex` api is
     available as part of opensearch itself. It is recommended to use the api
     instead of this helper wherever possible. The helper is here mostly for
     backwards compatibility and for situations where more flexibility is
@@ -463,7 +463,7 @@ async def async_reindex(
         read if `target_client` is specified as well)
     :arg source_index: index (or list of indices) to read documents from
     :arg target_index: name of the index in the target cluster to populate
-    :arg query: body for the :meth:`~opensearchpy.Any.search` api
+    :arg query: body for the :meth:`~opensearchpy.AsyncOpenSearch.search` api
     :arg target_client: optional, is specified will be used for writing (thus
         enabling reindex between clusters)
     :arg chunk_size: number of docs in one chunk sent to client (default: 500)

From a97845f7115f61ca78a6dd7c7f9357964f709634 Mon Sep 17 00:00:00 2001
From: dblock
Date: Mon, 6 Nov 2023 09:59:30 -0500
Subject: [PATCH 17/17] Fix: undo comment.

Signed-off-by: dblock
---
 opensearchpy/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py
index 1a5fe5cd..3dcd7389 100644
--- a/opensearchpy/__init__.py
+++ b/opensearchpy/__init__.py
@@ -146,7 +146,7 @@
 from .serializer import JSONSerializer
 from .transport import Transport

-# Only raise one warning per deprecation message so
+# Only raise one warning per deprecation message so as not
 # to spam up the user if the same action is done multiple times.
 warnings.simplefilter("default", category=OpenSearchDeprecationWarning, append=True)
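The docstring fixes in PATCH 16 point the cross-references at `AsyncOpenSearch`, which is also how these helpers are driven in practice. A short usage sketch, assuming the async extras (`aiohttp`) are installed; the host, index name, and documents are placeholders::

    import asyncio

    from opensearchpy import AsyncOpenSearch
    # Imported from the module patched above; typical installs also re-export
    # these helpers via opensearchpy.helpers.
    from opensearchpy._async.helpers.actions import async_bulk, async_scan


    async def main() -> None:
        client = AsyncOpenSearch(hosts=["https://localhost:9200"])
        try:
            # async_bulk wraps AsyncOpenSearch.bulk: it consumes an iterable of
            # actions, sends them in chunks, and returns (successes, errors).
            actions = ({"_index": "books", "_id": i, "title": f"book-{i}"} for i in range(10))
            ok, errors = await async_bulk(client, actions)

            # async_scan drives AsyncOpenSearch.search/scroll as one async iterator.
            async for hit in async_scan(
                client, index="books", query={"query": {"match_all": {}}}
            ):
                print(hit["_source"])
        finally:
            await client.close()


    asyncio.run(main())
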