From 10a2b4fd30e28a8ca6a907033de85ff611180a42 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Mon, 18 Dec 2023 20:57:56 -0500 Subject: [PATCH 01/14] updated files with docstrings to pass pylint Signed-off-by: Mark Cohen --- .github/workflows/update_api.yml | 0 benchmarks/bench_async.py | 12 ++++++++++++ benchmarks/bench_info_sync.py | 7 +++++++ benchmarks/bench_sync.py | 7 +++++++ setup.cfg | 2 +- 5 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/update_api.yml diff --git a/.github/workflows/update_api.yml b/.github/workflows/update_api.yml new file mode 100644 index 00000000..e69de29b diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index 7e9d1b3d..3df4e0ed 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -17,6 +17,7 @@ async def index_records(client: Any, index_name: str, item_count: int) -> None: + """asynchronously bulk index item_count records into the index (index_name)""" await asyncio.gather( *[ client.index( @@ -34,6 +35,10 @@ async def index_records(client: Any, index_name: str, item_count: int) -> None: async def test_async(client_count: int = 1, item_count: int = 1) -> None: + """ + asynchronously index with item_count records and run client_count clients. This function can be used to + test balancing the number of items indexed with the number of documents. + """ host = "localhost" port = 9200 auth = ("admin", "admin") @@ -74,6 +79,7 @@ async def test_async(client_count: int = 1, item_count: int = 1) -> None: def test(item_count: int = 1, client_count: int = 1) -> None: + """sets up and executes the asynchronous tests""" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(test_async(item_count, client_count)) @@ -84,26 +90,32 @@ def test(item_count: int = 1, client_count: int = 1) -> None: def test_1() -> None: + """run a test for one item and 32*ITEM_COUNT clients""" test(1, 32 * ITEM_COUNT) def test_2() -> None: + """run a test for two items and 16*ITEM_COUNT clients""" test(2, 16 * ITEM_COUNT) def test_4() -> None: + """run a test for two items and 8*ITEM_COUNT clients""" test(4, 8 * ITEM_COUNT) def test_8() -> None: + """run a test for four items and 4*ITEM_COUNT clients""" test(8, 4 * ITEM_COUNT) def test_16() -> None: + """run a test for 16 items and 2*ITEM_COUNT clients""" test(16, 2 * ITEM_COUNT) def test_32() -> None: + """run a test for 32 items and ITEM_COUNT clients""" test(32, ITEM_COUNT) diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index aba6d024..36e59814 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -21,6 +21,7 @@ def get_info(client: Any, request_count: int) -> float: + """get info from client""" tt: float = 0 for n in range(request_count): start = time.time() * 1000 @@ -31,6 +32,7 @@ def get_info(client: Any, request_count: int) -> float: def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: + """test to index with thread_count threads, item_count records and run client_count clients""" host = "localhost" port = 9200 auth = ("admin", "admin") @@ -79,22 +81,27 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) - def test_1() -> None: + """testing 1 threads""" test(1, 32 * REQUEST_COUNT, 1) def test_2() -> None: + """testing 2 threads""" test(2, 16 * REQUEST_COUNT, 2) def test_4() -> None: + """testing 4 threads""" test(4, 8 * REQUEST_COUNT, 3) def test_8() -> None: + """testing 8 threads""" test(8, 4 * REQUEST_COUNT, 8) 
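As a companion to the benchmarks above, the following is a minimal sketch of the concurrent-indexing pattern they exercise, assuming a local cluster with the same throwaway admin/admin credentials, the async extras installed (pip install opensearch-py[async]), and a made-up index name:

import asyncio

from opensearchpy import AsyncOpenSearch


async def run() -> None:
    # same connection settings the benchmarks use: local cluster, test credentials
    client = AsyncOpenSearch(
        hosts=[{"host": "localhost", "port": 9200}],
        http_auth=("admin", "admin"),
        use_ssl=True,
        verify_certs=False,
        ssl_show_warn=False,
    )
    try:
        # issue the index requests concurrently, mirroring index_records()
        await asyncio.gather(
            *[
                client.index(index="bench-demo", id=str(i), body={"value": i})
                for i in range(10)
            ]
        )
        await client.indices.refresh(index="bench-demo")
        print(await client.count(index="bench-demo"))
    finally:
        await client.indices.delete(index="bench-demo", ignore_unavailable=True)
        await client.close()


if __name__ == "__main__":
    asyncio.run(run())

Varying how many of these coroutines run against how many client instances is exactly the balance the test_* wrappers sweep.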
def test_32() -> None: + """testing 32 threads""" test(32, REQUEST_COUNT, 32) diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 0f3c5286..d86085d2 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -22,6 +22,7 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any: + """bulk index item_count records into index_name""" tt = 0 for n in range(10): data: Any = [] @@ -48,6 +49,7 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any: def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: + """test to index with thread_count threads, item_count records and run client_count clients""" host = "localhost" port = 9200 auth = ("admin", "admin") @@ -118,22 +120,27 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N def test_1() -> None: + """testing 1 threads""" test(1, 32 * ITEM_COUNT, 1) def test_2() -> None: + """testing 2 threads""" test(2, 16 * ITEM_COUNT, 2) def test_4() -> None: + """testing 4 threads""" test(4, 8 * ITEM_COUNT, 3) def test_8() -> None: + """testing 8 threads""" test(8, 4 * ITEM_COUNT, 8) def test_32() -> None: + """testing 32 threads""" test(32, ITEM_COUNT, 32) diff --git a/setup.cfg b/setup.cfg index 301efb34..d65bae6a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,4 +28,4 @@ good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long, invalid-name, pointless-statement +enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring From 9b8b830c05042d78185ec5090d50e486497aa891 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 22 Dec 2023 12:55:17 -0500 Subject: [PATCH 02/14] updated samples to prepare for enabling missing-docstring linter; will continue to work on this before committing setup.cfg Signed-off-by: Mark Cohen --- .github/workflows/update_api.yml | 0 .../advanced_index_actions_sample.py | 4 +++ samples/aws/search_requests.py | 6 +++++ samples/aws/search_urllib3.py | 10 +++++++- samples/bulk/bulk_array.py | 1 + samples/bulk/bulk_helpers.py | 4 +++ samples/bulk/bulk_ld.py | 4 +++ .../document_lifecycle_sample.py | 4 +++ samples/hello/hello.py | 5 ++++ samples/hello/hello_async.py | 5 ++++ .../index_template/index_template_sample.py | 25 +++++++++++++------ samples/json/json_hello.py | 4 +++ samples/json/json_hello_async.py | 5 ++++ samples/knn/knn_async_basics.py | 4 +++ samples/knn/knn_basics.py | 4 +++ samples/knn/knn_boolean_filter.py | 4 +++ samples/knn/knn_efficient_filter.py | 4 +++ samples/logging/log_collection_sample.py | 4 +++ samples/security/roles.py | 6 ++--- samples/security/users.py | 7 +++--- 20 files changed, 95 insertions(+), 15 deletions(-) delete mode 100644 .github/workflows/update_api.yml diff --git a/.github/workflows/update_api.yml b/.github/workflows/update_api.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index b06d82c3..b5af6be4 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -18,6 +18,10 @@ def main() -> None: + """ + demonstrates various functions to operate on the index (e.g. 
clear different levels of cache, refreshing the + index) + """ # Set up client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py index 544285ac..743d3d96 100644 --- a/samples/aws/search_requests.py +++ b/samples/aws/search_requests.py @@ -20,6 +20,12 @@ def main() -> None: + """ + connects to a cluster specified in environment variables, creates an index, inserts documents, + searches the index, deletes the document, deletes the index. + the environment variables are "ENDPOINT" for the cluster endpoint, AWS_REGION for the region in which the cluster + is hosted, and SERVICE to indicate if this is an ES 7.10.2 compatible cluster + """ # verbose logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py index 5ac438d9..c7382d0d 100644 --- a/samples/aws/search_urllib3.py +++ b/samples/aws/search_urllib3.py @@ -20,10 +20,18 @@ def main() -> None: + """ + 1. connects to an OpenSearch cluster on AWS defined by environment variables (i.e. ENDPOINT - cluster endpoint like + my-test-domain.us-east-1.es.amazonaws.com; AWS_REGION like us-east-1, us-west-2; and SERVICE like es which + differentiates beteween serverless and the managed service. + 2. creates an index called "movies" and adds a single document + 3. queries for that document + 4. deletes the document + 5. deletes the index + """ # verbose logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) - # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com url = urlparse(environ["ENDPOINT"]) region = environ.get("AWS_REGION", "us-east-1") service = environ.get("SERVICE", "es") diff --git a/samples/bulk/bulk_array.py b/samples/bulk/bulk_array.py index cb6dc8b1..a7814ddb 100755 --- a/samples/bulk/bulk_array.py +++ b/samples/bulk/bulk_array.py @@ -17,6 +17,7 @@ def main() -> None: + """demonstrates how to bulk load data into an index""" # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py index 7371d6b1..02150e25 100755 --- a/samples/bulk/bulk_helpers.py +++ b/samples/bulk/bulk_helpers.py @@ -17,6 +17,10 @@ def main() -> None: + """ + demonstrates how to bulk load data using opensearchpy.helpers including examples of serial, parallel, and streaming + bulk load + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py index 89e6f661..daa4d16b 100755 --- a/samples/bulk/bulk_ld.py +++ b/samples/bulk/bulk_ld.py @@ -17,6 +17,10 @@ def main() -> None: + """ + bulk index 100 items and then delete the index + :return: + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index 53c7e718..51d1284d 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -17,6 +17,10 @@ def main() -> None: + """ + provides samples for different ways to handle documents including indexing, searching, updating, and deleting + :return: + """ # Connect to OpenSearch client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/samples/hello/hello.py b/samples/hello/hello.py index acecf890..b7d9ba97 100755 --- 
a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -16,6 +16,11 @@ def main() -> None: + """ + an example showing how to create an synchronous connection to OpenSearch, create an index, index a document + and search to return the document + :return: + """ host = "localhost" port = 9200 auth = ("admin", "admin") # For testing only. Don't store credentials in code. diff --git a/samples/hello/hello_async.py b/samples/hello/hello_async.py index c6a04e02..89558ce1 100755 --- a/samples/hello/hello_async.py +++ b/samples/hello/hello_async.py @@ -16,6 +16,11 @@ async def main() -> None: + """ + an example showing how to create an asynchronous connection to OpenSearch, create an index, index a document + and search to return the document + :return: + """ # connect to OpenSearch host = "localhost" port = 9200 diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index 25cfdddd..00978aba 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -12,6 +12,20 @@ def main() -> None: + """ + 1. connects to an OpenSearch instance running on localhost + 2. Create an index template named `books` with default settings and mappings for indices of + the `books-*` pattern. You can create an index template to define default settings and mappings for indices + of certain patterns. + 3. When creating an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's + settings and mappings to the index. Create an index named books-nonfiction and verify that its settings and mappings + match those of the template + 4. If multiple index templates match the index's name, OpenSearch will apply the template with the highest + `priority`. In the example, two templates are created with different priorities. + 5. Composable index templates are a new type of index template that allow you to define multiple component templates + and compose them into a final template. The last part of the example before cleaning up creates a component + template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns. + """ # Create a client instance client = OpenSearch( hosts=["https://localhost:9200"], @@ -20,8 +34,7 @@ def main() -> None: http_auth=("admin", "admin"), ) - # You can create an index template to define default settings and mappings for indices of certain patterns. - # The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + # create an index template client.indices.put_index_template( name="books", body={ @@ -41,13 +54,10 @@ def main() -> None: }, ) - # Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. - # Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: + # create the index which applies the index template settings matched by pattern client.indices.create(index="books-nonfiction") print(client.indices.get(index="books-nonfiction")) - # If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. 
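Condensing the priority behaviour described above into a runnable sketch (connection settings, template names, and shard counts are illustrative, mirroring the sample's own placeholders):

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# two templates match "books-fiction-*"; the higher priority value wins
client.indices.put_index_template(
    name="books",
    body={
        "index_patterns": ["books-*"],
        "priority": 0,
        "template": {"settings": {"index": {"number_of_shards": 3}}},
    },
)
client.indices.put_index_template(
    name="books-fiction",
    body={
        "index_patterns": ["books-fiction-*"],
        "priority": 1,
        "template": {"settings": {"index": {"number_of_shards": 1}}},
    },
)

# the new index inherits number_of_shards=1 from the higher-priority template
client.indices.create(index="books-fiction-romance")
print(client.indices.get(index="books-fiction-romance"))
client.indices.delete(index="books-fiction-romance")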
- # The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: client.indices.put_index_template( name="books", body={ @@ -74,8 +84,6 @@ def main() -> None: client.indices.create(index="books-fiction-romance") print(client.indices.get(index="books-fiction-romance")) - # Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. - # The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: client.cluster.put_component_template( name="books_mappings", body={ @@ -92,6 +100,7 @@ def main() -> None: }, ) + # composable index templates client.indices.put_index_template( name="books", body={ diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py index c0e537ec..90a1e82b 100755 --- a/samples/json/json_hello.py +++ b/samples/json/json_hello.py @@ -14,6 +14,10 @@ def main() -> None: + """ + demonstrates how to index a document using a dict + :return: + """ # connect to OpenSearch host = "localhost" diff --git a/samples/json/json_hello_async.py b/samples/json/json_hello_async.py index afe5065f..c8bdbc03 100755 --- a/samples/json/json_hello_async.py +++ b/samples/json/json_hello_async.py @@ -16,6 +16,11 @@ async def main() -> None: + """ + this sample uses asyncio and AsyncOpenSearch to asynchronously connect to local OpenSearch cluster, create an index, + index data, search the index, delete the document, delete the index + :return: + """ # connect to OpenSearch host = "localhost" port = 9200 diff --git a/samples/knn/knn_async_basics.py b/samples/knn/knn_async_basics.py index 273015c2..d86cc278 100755 --- a/samples/knn/knn_async_basics.py +++ b/samples/knn/knn_async_basics.py @@ -18,6 +18,10 @@ async def main() -> None: + """ + asynchronously create, bulk index, and query kNN. then delete the index + :return: + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") port = int(os.getenv("PORT", 9200)) diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py index 4ea49a21..1a6e8ba0 100755 --- a/samples/knn/knn_basics.py +++ b/samples/knn/knn_basics.py @@ -17,6 +17,10 @@ def main() -> None: + """ + create, bulk index, and query kNN. 
then delete the index + :return: + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py index 156fcf86..93982a13 100755 --- a/samples/knn/knn_boolean_filter.py +++ b/samples/knn/knn_boolean_filter.py @@ -17,6 +17,10 @@ def main() -> None: + """ + create and query a kNN index + :return: + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py index 7777173d..3e7ba710 100755 --- a/samples/knn/knn_efficient_filter.py +++ b/samples/knn/knn_efficient_filter.py @@ -16,6 +16,10 @@ def main() -> None: + """ + create a kNN index using Lucene and query it using filters + :return: + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py index fbf25b60..6bf824ae 100644 --- a/samples/logging/log_collection_sample.py +++ b/samples/logging/log_collection_sample.py @@ -23,6 +23,10 @@ def main() -> None: + """ + sample for custom logging; this shows how to create a console handler, connect to OpenSearch, define a custom + logger and log to an OpenSearch index + """ print("Collecting logs.") # Create a console handler diff --git a/samples/security/roles.py b/samples/security/roles.py index 7628a9f4..53508634 100644 --- a/samples/security/roles.py +++ b/samples/security/roles.py @@ -9,13 +9,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - -# A basic OpenSearch sample that create and manage roles. - from opensearchpy import OpenSearch def main() -> None: + """ + A basic OpenSearch sample that create and manage roles. + """ # connect to OpenSearch host = "localhost" diff --git a/samples/security/users.py b/samples/security/users.py index 7b89a37f..d0db3f5a 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -9,13 +9,14 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - -# A basic OpenSearch sample that create and manage users. - from opensearchpy import OpenSearch def main() -> None: + """ + A basic OpenSearch sample that create and manage users. 
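For readers skimming the kNN samples touched above, this is a minimal sketch of the pattern they share; the index name, dimension, and vectors are invented for illustration, and the connection settings are the usual local test defaults:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# a k-NN enabled index with a 2-dimensional knn_vector field
client.indices.create(
    index="knn-demo",
    body={
        "settings": {"index": {"knn": True}},
        "mappings": {"properties": {"values": {"type": "knn_vector", "dimension": 2}}},
    },
)

# index a few vectors, then ask for the 2 nearest neighbours of [0.1, 0.2]
for i, vector in enumerate([[0.1, 0.1], [0.9, 0.9], [0.2, 0.3]]):
    client.index(index="knn-demo", id=str(i), body={"values": vector}, refresh=True)

results = client.search(
    index="knn-demo",
    body={"size": 2, "query": {"knn": {"values": {"vector": [0.1, 0.2], "k": 2}}}},
)
print([hit["_id"] for hit in results["hits"]["hits"]])

client.indices.delete(index="knn-demo")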
+ :return: + """ # connect to OpenSearch host = "localhost" From bc5aac1ef20cd7f184082686f79fe9236dda111e Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 22 Dec 2023 12:57:09 -0500 Subject: [PATCH 03/14] removed missing-function-docstring from setup.cfg so the linter doesn't fail while work on docstrings continues Signed-off-by: Mark Cohen --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index d65bae6a..301efb34 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,4 +28,4 @@ good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring +enable = line-too-long, invalid-name, pointless-statement From 78887f6e9b37756557e619a287c602b04c791d51 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 22 Dec 2023 14:21:11 -0500 Subject: [PATCH 04/14] corrected unnecessary return docstring values Signed-off-by: Mark Cohen --- samples/bulk/bulk_ld.py | 1 - .../document_lifecycle/document_lifecycle_sample.py | 1 - samples/hello/hello.py | 1 - samples/hello/hello_async.py | 1 - samples/json/json_hello.py | 1 - samples/json/json_hello_async.py | 1 - samples/knn/knn_async_basics.py | 1 - samples/knn/knn_basics.py | 1 - samples/knn/knn_boolean_filter.py | 3 +-- samples/knn/knn_efficient_filter.py | 3 +-- samples/logging/log_collection_sample.py | 10 ++++++++-- samples/security/users.py | 1 - 12 files changed, 10 insertions(+), 15 deletions(-) diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py index daa4d16b..a6c3a585 100755 --- a/samples/bulk/bulk_ld.py +++ b/samples/bulk/bulk_ld.py @@ -19,7 +19,6 @@ def main() -> None: """ bulk index 100 items and then delete the index - :return: """ # connect to an instance of OpenSearch diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index 51d1284d..b195262b 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -19,7 +19,6 @@ def main() -> None: """ provides samples for different ways to handle documents including indexing, searching, updating, and deleting - :return: """ # Connect to OpenSearch client = OpenSearch( diff --git a/samples/hello/hello.py b/samples/hello/hello.py index b7d9ba97..39744752 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -19,7 +19,6 @@ def main() -> None: """ an example showing how to create an synchronous connection to OpenSearch, create an index, index a document and search to return the document - :return: """ host = "localhost" port = 9200 diff --git a/samples/hello/hello_async.py b/samples/hello/hello_async.py index 89558ce1..a3620dba 100755 --- a/samples/hello/hello_async.py +++ b/samples/hello/hello_async.py @@ -19,7 +19,6 @@ async def main() -> None: """ an example showing how to create an asynchronous connection to OpenSearch, create an index, index a document and search to return the document - :return: """ # connect to OpenSearch host = "localhost" diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py index 90a1e82b..9a8c213f 100755 --- a/samples/json/json_hello.py +++ b/samples/json/json_hello.py @@ -16,7 +16,6 @@ def main() -> None: """ demonstrates how to index a document using a dict - :return: """ # connect to OpenSearch diff --git a/samples/json/json_hello_async.py b/samples/json/json_hello_async.py index c8bdbc03..499def22 100755 --- 
a/samples/json/json_hello_async.py +++ b/samples/json/json_hello_async.py @@ -19,7 +19,6 @@ async def main() -> None: """ this sample uses asyncio and AsyncOpenSearch to asynchronously connect to local OpenSearch cluster, create an index, index data, search the index, delete the document, delete the index - :return: """ # connect to OpenSearch host = "localhost" diff --git a/samples/knn/knn_async_basics.py b/samples/knn/knn_async_basics.py index d86cc278..8847f924 100755 --- a/samples/knn/knn_async_basics.py +++ b/samples/knn/knn_async_basics.py @@ -20,7 +20,6 @@ async def main() -> None: """ asynchronously create, bulk index, and query kNN. then delete the index - :return: """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py index 1a6e8ba0..b3cdfca4 100755 --- a/samples/knn/knn_basics.py +++ b/samples/knn/knn_basics.py @@ -19,7 +19,6 @@ def main() -> None: """ create, bulk index, and query kNN. then delete the index - :return: """ # connect to an instance of OpenSearch diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py index 93982a13..40b5434b 100755 --- a/samples/knn/knn_boolean_filter.py +++ b/samples/knn/knn_boolean_filter.py @@ -18,8 +18,7 @@ def main() -> None: """ - create and query a kNN index - :return: + create, query, and delete a kNN index """ # connect to an instance of OpenSearch diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py index 3e7ba710..4c23a43e 100755 --- a/samples/knn/knn_efficient_filter.py +++ b/samples/knn/knn_efficient_filter.py @@ -17,8 +17,7 @@ def main() -> None: """ - create a kNN index using Lucene and query it using filters - :return: + create a kNN index using Lucene kNN and query it using filters """ # connect to an instance of OpenSearch diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py index 6bf824ae..ad36fb73 100644 --- a/samples/logging/log_collection_sample.py +++ b/samples/logging/log_collection_sample.py @@ -51,15 +51,21 @@ def main() -> None: # Add console handler to the logger os_logger.addHandler(console_handler) - # Define a custom handler that logs to OpenSearch + class OpenSearchHandler(logging.Handler): + """ + define a custom handler that logs to opensearch + """ # Initializer / Instance attributes def __init__(self, opensearch_client: Any) -> None: super().__init__() self.os_client = opensearch_client - # Build index name (e.g., "logs-YYYY-MM-DD") def _build_index_name(self) -> str: + """ + Build index name (e.g., "logs-YYYY-MM-DD") + :return: a str with date formatted as 'logs-YYYY-MM-DD' + """ return f"logs-{datetime.date(datetime.now())}" # Emit logs to the OpenSearch cluster diff --git a/samples/security/users.py b/samples/security/users.py index d0db3f5a..54f4d36e 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -15,7 +15,6 @@ def main() -> None: """ A basic OpenSearch sample that create and manage users. 
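The custom handler refactored in the log collection sample above boils down to the following self-contained sketch; the class name, index naming scheme, and document fields here are illustrative rather than the sample's exact code:

import logging
from datetime import datetime
from typing import Any

from opensearchpy import OpenSearch


class OpenSearchLogHandler(logging.Handler):
    """index every log record into a dated index such as logs-YYYY-MM-DD"""

    def __init__(self, client: OpenSearch) -> None:
        super().__init__()
        self.client = client

    def emit(self, record: logging.LogRecord) -> None:
        # one small JSON document per log record
        document: Any = {
            "timestamp": datetime.now().isoformat(),
            "level": record.levelname,
            "message": record.getMessage(),
        }
        self.client.index(index=f"logs-{datetime.now().date()}", body=document)

Attaching it is one line: logging.getLogger("my_app").addHandler(OpenSearchLogHandler(client)).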
- :return: """ # connect to OpenSearch From 8752a16823ea68d358f2566793d6aa7385009efd Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 22 Dec 2023 14:30:39 -0500 Subject: [PATCH 05/14] fixing failure in 'black' on reformatting Signed-off-by: Mark Cohen --- samples/logging/log_collection_sample.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py index ad36fb73..84ff0194 100644 --- a/samples/logging/log_collection_sample.py +++ b/samples/logging/log_collection_sample.py @@ -51,11 +51,11 @@ def main() -> None: # Add console handler to the logger os_logger.addHandler(console_handler) - class OpenSearchHandler(logging.Handler): """ define a custom handler that logs to opensearch """ + # Initializer / Instance attributes def __init__(self, opensearch_client: Any) -> None: super().__init__() @@ -64,6 +64,7 @@ def __init__(self, opensearch_client: Any) -> None: def _build_index_name(self) -> str: """ Build index name (e.g., "logs-YYYY-MM-DD") + :rtype: bool :return: a str with date formatted as 'logs-YYYY-MM-DD' """ return f"logs-{datetime.date(datetime.now())}" From 435948b44e4fe079e33292551c24e33c363ed91d Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 29 Dec 2023 16:09:50 -0500 Subject: [PATCH 06/14] updated utils to pass missing-function-docstring tests Signed-off-by: Mark Cohen --- opensearchpy/_async/client/__init__.py | 2008 +---------------- opensearchpy/_async/helpers/document.py | 3 +- opensearchpy/_async/helpers/faceted_search.py | 3 +- opensearchpy/_async/helpers/mapping.py | 3 +- opensearchpy/_async/helpers/search.py | 3 +- opensearchpy/connection/async_connections.py | 3 +- opensearchpy/connection/connections.py | 3 +- opensearchpy/helpers/analysis.py | 1 - opensearchpy/helpers/document.py | 3 +- opensearchpy/helpers/faceted_search.py | 3 +- opensearchpy/helpers/mapping.py | 3 +- opensearchpy/helpers/search.py | 3 +- opensearchpy/helpers/utils.py | 3 +- utils/build_dists.py | 18 + utils/generate_api.py | 79 +- utils/license_headers.py | 15 + 16 files changed, 123 insertions(+), 2031 deletions(-) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index f88ee0b9..bf3532da 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -34,6 +34,9 @@ # -----------------------------------------------------------------------------------------+ +#replace_token# + + from __future__ import unicode_literals import logging @@ -55,7 +58,6 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") @@ -255,2006 +257,4 @@ async def close(self) -> None: """Closes the Transport and all internal connections""" await self.transport.close() - # AUTO-GENERATED-API-DEFINITIONS # - @query_params() - async def ping( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns whether the cluster is running. - - """ - try: - return await self.transport.perform_request( - "HEAD", "/", params=params, headers=headers - ) - except TransportError: - return False - - @query_params() - async def info( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns basic information about the cluster. 
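The ping and info calls whose docstrings appear above stay trivial to use from application code; a minimal sketch against an assumed local cluster with test credentials:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# ping() swallows transport errors and returns a bool; info() returns cluster metadata
if client.ping():
    info = client.info()
    print(info["cluster_name"], info["version"]["number"])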
- - """ - return await self.transport.perform_request( - "GET", "/", params=params, headers=headers - ) - - @query_params( - "pipeline", - "refresh", - "routing", - "timeout", - "version", - "version_type", - "wait_for_active_shards", - ) - async def create( - self, - index: Any, - id: Any, - body: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Creates a new document in the index. Returns a 409 response when a document - with a same ID already exists in the index. - - - :arg index: Index name. - :arg id: Document ID. - :arg body: The document - :arg pipeline: The pipeline id to preprocess incoming documents - with. - :arg refresh: If `true` then refresh the affected shards to make - this operation visible to search, if `wait_for` then wait for a refresh - to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices are true, false, wait_for. - :arg routing: Routing value. - :arg timeout: Operation timeout. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - """ - for param in (index, id, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - path = _make_path(index, "_create", id) - - return await self.transport.perform_request( - "PUT", path, params=params, headers=headers, body=body - ) - - @query_params( - "if_primary_term", - "if_seq_no", - "op_type", - "pipeline", - "refresh", - "require_alias", - "routing", - "timeout", - "version", - "version_type", - "wait_for_active_shards", - ) - async def index( - self, - index: Any, - body: Any, - id: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Creates or updates a document in an index. - - - :arg index: Index name. - :arg body: The document - :arg id: Document ID. - :arg if_primary_term: only perform the operation if the last - operation that has changed the document has the specified primary term. - :arg if_seq_no: only perform the operation if the last operation - that has changed the document has the specified sequence number. - :arg op_type: Explicit operation type. Defaults to `index` for - requests with an explicit document ID, and to `create` for requests - without an explicit document ID. Valid choices are index, create. - :arg pipeline: The pipeline id to preprocess incoming documents - with. - :arg refresh: If `true` then refresh the affected shards to make - this operation visible to search, if `wait_for` then wait for a refresh - to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices are true, false, wait_for. - :arg require_alias: When true, requires destination to be an - alias. Default is false. - :arg routing: Routing value. - :arg timeout: Operation timeout. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. 
Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - """ - for param in (index, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "POST" if id in SKIP_IN_PATH else "PUT", - _make_path(index, "_doc", id), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "pipeline", - "refresh", - "require_alias", - "routing", - "timeout", - "wait_for_active_shards", - ) - async def bulk( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to perform multiple index/update/delete operations in a single request. - - - :arg body: The operation definition and data (action-data - pairs), separated by newlines - :arg index: Default index for items which don't provide one. - :arg _source: True or false to return the _source field or not, - or default list of fields to return, can be overridden on each sub- - request. - :arg _source_excludes: Default list of fields to exclude from - the returned _source field, can be overridden on each sub-request. - :arg _source_includes: Default list of fields to extract and - return from the _source field, can be overridden on each sub-request. - :arg pipeline: The pipeline id to preprocess incoming documents - with. - :arg refresh: If `true` then refresh the affected shards to make - this operation visible to search, if `wait_for` then wait for a refresh - to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices are true, false, wait_for. - :arg require_alias: Sets require_alias for all incoming - documents. Default is false. - :arg routing: Routing value. - :arg timeout: Operation timeout. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - body = _bulk_body(self.transport.serializer, body) - return await self.transport.perform_request( - "POST", - _make_path(index, "_bulk"), - params=params, - headers=headers, - body=body, - ) - - @query_params() - async def clear_scroll( - self, - body: Any = None, - scroll_id: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Explicitly clears the search context for a scroll. - - - :arg body: Comma-separated list of scroll IDs to clear if none - was specified via the scroll_id parameter - :arg scroll_id: Comma-separated list of scroll IDs to clear. 
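A sketch of the scroll lifecycle that clear_scroll terminates, assuming a local cluster and an existing "movies" index (both placeholders):

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# open a scroll context, page through every hit, then release the context
response = client.search(
    index="movies", scroll="2m", size=100, body={"query": {"match_all": {}}}
)
scroll_id = response["_scroll_id"]
try:
    while response["hits"]["hits"]:
        for hit in response["hits"]["hits"]:
            print(hit["_id"])
        response = client.scroll(scroll_id=scroll_id, scroll="2m")
        scroll_id = response["_scroll_id"]
finally:
    client.clear_scroll(scroll_id=scroll_id)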
- """ - if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: - raise ValueError("You need to supply scroll_id or body.") - elif scroll_id and not body: - body = {"scroll_id": [scroll_id]} - elif scroll_id: - params["scroll_id"] = scroll_id - - return await self.transport.perform_request( - "DELETE", "/_search/scroll", params=params, headers=headers, body=body - ) - - @query_params( - "allow_no_indices", - "analyze_wildcard", - "analyzer", - "default_operator", - "df", - "expand_wildcards", - "ignore_throttled", - "ignore_unavailable", - "lenient", - "min_score", - "preference", - "q", - "routing", - "terminate_after", - ) - async def count( - self, - body: Any = None, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns number of documents matching a query. - - - :arg body: Query to restrict the results specified with the - Query DSL (optional) - :arg index: Comma-separated list of indices to restrict the - results. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed. Default is false. - :arg analyzer: The analyzer to use for the query string. - :arg default_operator: The default operator for query string - query (AND or OR). Valid choices are AND, OR. - :arg df: The field to use as default where no field prefix is - given in the query string. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored. - :arg min_score: Include only documents with a specific `_score` - value in the result. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg q: Query in the Lucene query string syntax. - :arg routing: Comma-separated list of specific routing values. - :arg terminate_after: The maximum number of documents to collect - for each shard, upon reaching which the query execution will terminate - early. - """ - return await self.transport.perform_request( - "POST", - _make_path(index, "_count"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "if_primary_term", - "if_seq_no", - "refresh", - "routing", - "timeout", - "version", - "version_type", - "wait_for_active_shards", - ) - async def delete( - self, - index: Any, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Removes a document from the index. - - - :arg index: Index name. - :arg id: Document ID. - :arg if_primary_term: only perform the operation if the last - operation that has changed the document has the specified primary term. - :arg if_seq_no: only perform the operation if the last operation - that has changed the document has the specified sequence number. - :arg refresh: If `true` then refresh the affected shards to make - this operation visible to search, if `wait_for` then wait for a refresh - to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. 
Valid choices are true, false, wait_for. - :arg routing: Routing value. - :arg timeout: Operation timeout. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "DELETE", _make_path(index, "_doc", id), params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "allow_no_indices", - "analyze_wildcard", - "analyzer", - "conflicts", - "default_operator", - "df", - "expand_wildcards", - "from_", - "ignore_unavailable", - "lenient", - "max_docs", - "preference", - "q", - "refresh", - "request_cache", - "requests_per_second", - "routing", - "scroll", - "scroll_size", - "search_timeout", - "search_type", - "size", - "slices", - "sort", - "stats", - "terminate_after", - "timeout", - "version", - "wait_for_active_shards", - "wait_for_completion", - ) - async def delete_by_query( - self, - index: Any, - body: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Deletes documents matching the provided query. - - - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg body: The search definition using the Query DSL - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed. Default is false. - :arg analyzer: The analyzer to use for the query string. - :arg conflicts: What to do when the operation encounters version - conflicts?. Valid choices are abort, proceed. - :arg default_operator: The default operator for query string - query (AND or OR). Valid choices are AND, OR. - :arg df: The field to use as default where no field prefix is - given in the query string. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg from_: Starting offset. Default is 0. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored. - :arg max_docs: Maximum number of documents to process (default: - all documents). - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg q: Query in the Lucene query string syntax. - :arg refresh: Refresh the shard containing the document before - performing the operation. 
- :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. Default is 0. - :arg routing: Comma-separated list of specific routing values. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - :arg scroll_size: Size on the scroll request powering the - operation. Default is 100. - :arg search_timeout: Explicit timeout for each search request. - Defaults to no timeout. - :arg search_type: Search operation type. Valid choices are - query_then_fetch, dfs_query_then_fetch. - :arg size: Deprecated, please use `max_docs` instead. - :arg slices: The number of slices this task should be divided - into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default is 1. - :arg sort: Comma-separated list of : pairs. - :arg stats: Specific 'tag' of the request for logging and - statistical purposes. - :arg terminate_after: The maximum number of documents to collect - for each shard, upon reaching which the query execution will terminate - early. - :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default is 1m. - :arg version: Whether to return document version as part of a - hit. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - :arg wait_for_completion: Should this request wait until the - operation has completed before returning. Default is True. - """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - - for param in (index, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_delete_by_query"), - params=params, - headers=headers, - body=body, - ) - - @query_params("requests_per_second") - async def delete_by_query_rethrottle( - self, - task_id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Changes the number of requests per second for a particular Delete By Query - operation. - - - :arg task_id: The task id to rethrottle. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. - """ - if task_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'task_id'.") - - return await self.transport.perform_request( - "POST", - _make_path("_delete_by_query", task_id, "_rethrottle"), - params=params, - headers=headers, - ) - - @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_script( - self, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Deletes a script. - - - :arg id: Script ID. - :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. - :arg timeout: Operation timeout. 
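Tying the delete_by_query parameters above together, a brief sketch (the index name and query are placeholders; conflicts="proceed" keeps the operation going past version conflicts):

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# delete every matching document and report how many were removed
result = client.delete_by_query(
    index="movies",
    body={"query": {"match": {"director": "Bennett Miller"}}},
    conflicts="proceed",
    refresh=True,
)
print(result["deleted"])

For long-running jobs the same call can be made asynchronous with wait_for_completion=False, and the returned task id can later be passed to delete_by_query_rethrottle.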
- """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'id'.") - - return await self.transport.perform_request( - "DELETE", _make_path("_scripts", id), params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "preference", - "realtime", - "refresh", - "routing", - "stored_fields", - "version", - "version_type", - ) - async def exists( - self, - index: Any, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns information about whether a document exists in an index. - - - :arg index: Index name. - :arg id: Document ID. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specify whether to perform the operation in - realtime or search mode. - :arg refresh: Refresh the shard containing the document before - performing the operation. - :arg routing: Routing value. - :arg stored_fields: Comma-separated list of stored fields to - return. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "HEAD", _make_path(index, "_doc", id), params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "preference", - "realtime", - "refresh", - "routing", - "version", - "version_type", - ) - async def exists_source( - self, - index: Any, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns information about whether a document source exists in an index. - - - :arg index: Index name. - :arg id: Document ID. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specify whether to perform the operation in - realtime or search mode. - :arg refresh: Refresh the shard containing the document before - performing the operation. - :arg routing: Routing value. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. 
- """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - path = _make_path(index, "_source", id) - - return await self.transport.perform_request( - "HEAD", path, params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "analyze_wildcard", - "analyzer", - "default_operator", - "df", - "lenient", - "preference", - "q", - "routing", - "stored_fields", - ) - async def explain( - self, - index: Any, - id: Any, - body: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns information about why a specific matches (or doesn't match) a query. - - - :arg index: Index name. - :arg id: Document ID. - :arg body: The query definition using the Query DSL - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg analyze_wildcard: Specify whether wildcards and prefix - queries in the query string query should be analyzed. Default is false. - :arg analyzer: The analyzer to use for the query string. - :arg default_operator: The default operator for query string - query (AND or OR). Valid choices are AND, OR. - :arg df: The default field for query string query. Default is - _all. - :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg q: Query in the Lucene query string syntax. - :arg routing: Routing value. - :arg stored_fields: Comma-separated list of stored fields to - return. - """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - path = _make_path(index, "_explain", id) - - return await self.transport.perform_request( - "POST", path, params=params, headers=headers, body=body - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "fields", - "ignore_unavailable", - "include_unmapped", - ) - async def field_caps( - self, - body: Any = None, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns the information about the capabilities of fields among multiple - indices. - - - :arg body: An index filter specified with the Query DSL - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg fields: Comma-separated list of field names. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg include_unmapped: Indicates whether unmapped fields should - be included in the response. Default is false. 
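As a quick illustration of the field capabilities call documented above (the index pattern and field names are placeholders):

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# ask which of the matching indices map "year" and "title", and as which field types
capabilities = client.field_caps(index="movies*", fields="year,title")
for field_name, mapped_types in capabilities["fields"].items():
    print(field_name, list(mapped_types))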
- """ - return await self.transport.perform_request( - "POST", - _make_path(index, "_field_caps"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "preference", - "realtime", - "refresh", - "routing", - "stored_fields", - "version", - "version_type", - ) - async def get( - self, - index: Any, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns a document. - - - :arg index: Index name. - :arg id: Document ID. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specify whether to perform the operation in - realtime or search mode. - :arg refresh: Refresh the shard containing the document before - performing the operation. - :arg routing: Routing value. - :arg stored_fields: Comma-separated list of stored fields to - return. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "GET", _make_path(index, "_doc", id), params=params, headers=headers - ) - - @query_params("cluster_manager_timeout", "master_timeout") - async def get_script( - self, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns a script. - - - :arg id: Script ID. - :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. - """ - if id in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'id'.") - - return await self.transport.perform_request( - "GET", _make_path("_scripts", id), params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "preference", - "realtime", - "refresh", - "routing", - "version", - "version_type", - ) - async def get_source( - self, - index: Any, - id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns the source of a document. - - - :arg index: Index name. - :arg id: Document ID. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specify whether to perform the operation in - realtime or search mode. - :arg refresh: Refresh the shard containing the document before - performing the operation. - :arg routing: Routing value. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. 
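The document read APIs above differ mainly in what they wrap around the stored source; a small sketch against an assumed "movies" index:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# exists() and exists_source() return booleans; get() wraps the source in metadata;
# get_source() returns only the stored document body
if client.exists(index="movies", id="1"):
    document = client.get(index="movies", id="1")
    print(document["_index"], document["_id"], document["_source"])
    print(client.get_source(index="movies", id="1"))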
- """ - for param in (index, id): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - path = _make_path(index, "_source", id) - - return await self.transport.perform_request( - "GET", path, params=params, headers=headers - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "preference", - "realtime", - "refresh", - "routing", - "stored_fields", - ) - async def mget( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to get multiple documents in one request. - - - :arg body: Document identifiers; can be either `docs` - (containing full document information) or `ids` (when index is provided - in the URL. - :arg index: Index name. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specify whether to perform the operation in - realtime or search mode. - :arg refresh: Refresh the shard containing the document before - performing the operation. - :arg routing: Routing value. - :arg stored_fields: Comma-separated list of stored fields to - return. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_mget"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "ccs_minimize_roundtrips", - "max_concurrent_searches", - "max_concurrent_shard_requests", - "pre_filter_shard_size", - "rest_total_hits_as_int", - "search_type", - "typed_keys", - ) - async def msearch( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to execute several search operations in one request. - - - :arg body: The request definitions (metadata-search request - definition pairs), separated by newlines - :arg index: Comma-separated list of indices to use as default. - :arg ccs_minimize_roundtrips: Indicates whether network round- - trips should be minimized as part of cross-cluster search requests - execution. Default is True. - :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute. - :arg max_concurrent_shard_requests: The number of concurrent - shard requests each sub search executes concurrently per node. This - value should be used to limit the impact of the search on the cluster in - order to limit the number of concurrent shard requests. Default is 5. - :arg pre_filter_shard_size: Threshold that enforces a pre-filter - round-trip to prefilter search shards based on query rewriting if the - number of shards the search request expands to exceeds the threshold. - This filter round-trip can limit the number of shards significantly if - for instance a shard can not match any documents based on its rewrite - method ie. if date filters are mandatory to match but the shard bounds - and the query are disjoint. - :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response. - Default is false. - :arg search_type: Search operation type. 
Valid choices are - query_then_fetch, query_and_fetch, dfs_query_then_fetch, - dfs_query_and_fetch. - :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - body = _bulk_body(self.transport.serializer, body) - return await self.transport.perform_request( - "POST", - _make_path(index, "_msearch"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "ccs_minimize_roundtrips", - "max_concurrent_searches", - "rest_total_hits_as_int", - "search_type", - "typed_keys", - ) - async def msearch_template( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to execute several search template operations in one request. - - - :arg body: The request definitions (metadata-search request - definition pairs), separated by newlines - :arg index: Comma-separated list of indices to use as default. - :arg ccs_minimize_roundtrips: Indicates whether network round- - trips should be minimized as part of cross-cluster search requests - execution. Default is True. - :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute. - :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response. - Default is false. - :arg search_type: Search operation type. Valid choices are - query_then_fetch, query_and_fetch, dfs_query_then_fetch, - dfs_query_and_fetch. - :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - body = _bulk_body(self.transport.serializer, body) - return await self.transport.perform_request( - "POST", - _make_path(index, "_msearch", "template"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "field_statistics", - "fields", - "ids", - "offsets", - "payloads", - "positions", - "preference", - "realtime", - "routing", - "term_statistics", - "version", - "version_type", - ) - async def mtermvectors( - self, - body: Any = None, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns multiple termvectors in one request. - - - :arg body: Define ids, documents, parameters or a list of - parameters per document here. You must at least provide a list of - document ids. See documentation. - :arg index: The index in which the document resides. - :arg field_statistics: Specifies if document count, sum of - document frequencies and sum of total term frequencies should be - returned. Applies to all returned documents unless otherwise specified - in body 'params' or 'docs'. Default is True. - :arg fields: Comma-separated list of fields to return. Applies - to all returned documents unless otherwise specified in body 'params' or - 'docs'. - :arg ids: Comma-separated list of documents ids. You must define - ids as parameter or set 'ids' or 'docs' in the request body. - :arg offsets: Specifies if term offsets should be returned. - Applies to all returned documents unless otherwise specified in body - 'params' or 'docs'. Default is True. - :arg payloads: Specifies if term payloads should be returned. - Applies to all returned documents unless otherwise specified in body - 'params' or 'docs'. 
Default is True. - :arg positions: Specifies if term positions should be returned. - Applies to all returned documents unless otherwise specified in body - 'params' or 'docs'. Default is True. - :arg preference: Specify the node or shard the operation should - be performed on. Applies to all returned documents unless otherwise - specified in body 'params' or 'docs'. Default is random. - :arg realtime: Specifies if requests are real-time as opposed to - near-real-time. Default is True. - :arg routing: Routing value. Applies to all returned documents - unless otherwise specified in body 'params' or 'docs'. - :arg term_statistics: Specifies if total term frequency and - document frequency should be returned. Applies to all returned documents - unless otherwise specified in body 'params' or 'docs'. Default is false. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. - """ - path = _make_path(index, "_mtermvectors") - - return await self.transport.perform_request( - "POST", path, params=params, headers=headers, body=body - ) - - @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_script( - self, - id: Any, - body: Any, - context: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Creates or updates a script. - - - :arg id: Script ID. - :arg body: The document - :arg context: Script context. - :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. - :arg timeout: Operation timeout. - """ - for param in (id, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - return await self.transport.perform_request( - "PUT", - _make_path("_scripts", id, context), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" - ) - async def rank_eval( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to evaluate the quality of ranked search results over a set of typical - search queries. - - - :arg body: The ranking evaluation search definition, including - search requests, document ratings and ranking metric definition. - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg search_type: Search operation type. Valid choices are - query_then_fetch, dfs_query_then_fetch. 
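A hedged sketch of how this `rank_eval` signature is typically invoked may help here; the body below follows the usual ranking-evaluation layout (rated documents plus a metric), and every index, id, query, and rating value is an illustrative assumption:

    from opensearchpy._async.client import AsyncOpenSearch

    async def evaluate_ranking(client: AsyncOpenSearch) -> None:
        body = {
            "requests": [
                {
                    "id": "rings_query",
                    "request": {"query": {"match": {"title": "rings"}}},
                    "ratings": [{"_index": "movies", "_id": "1", "rating": 1}],
                }
            ],
            "metric": {"precision": {"k": 10}},
        }
        result = await client.rank_eval(body=body, index="movies")
        print(result)
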
- """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_rank_eval"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "max_docs", - "refresh", - "requests_per_second", - "scroll", - "slices", - "timeout", - "wait_for_active_shards", - "wait_for_completion", - ) - async def reindex( - self, - body: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to copy documents from one index to another, optionally filtering the - source documents by a query, changing the destination index settings, or - fetching the documents from a remote cluster. - - - :arg body: The search definition using the Query DSL and the - prototype for the index request. - :arg max_docs: Maximum number of documents to process (default: - all documents). - :arg refresh: Should the affected indexes be refreshed?. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. Default is 0. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - :arg slices: The number of slices this task should be divided - into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default is 1. - :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default is 1m. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - :arg wait_for_completion: Should this request wait until the - operation has completed before returning. Default is True. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - return await self.transport.perform_request( - "POST", "/_reindex", params=params, headers=headers, body=body - ) - - @query_params("requests_per_second") - async def reindex_rethrottle( - self, - task_id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Changes the number of requests per second for a particular Reindex operation. - - - :arg task_id: The task id to rethrottle. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. - """ - if task_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'task_id'.") - - return await self.transport.perform_request( - "POST", - _make_path("_reindex", task_id, "_rethrottle"), - params=params, - headers=headers, - ) - - @query_params() - async def render_search_template( - self, - body: Any = None, - id: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to use the Mustache language to pre-render a search definition. - - - :arg body: The search definition template and its params - :arg id: The id of the stored search template. 
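A small sketch of how the `render_search_template` call above is commonly used to preview a Mustache template; the inline template source and its parameters are illustrative assumptions, not part of this patch:

    from opensearchpy._async.client import AsyncOpenSearch

    async def preview_template(client: AsyncOpenSearch) -> None:
        body = {
            # Inline template plus the params to substitute into it.
            "source": {"query": {"match": {"title": "{{query_string}}"}}},
            "params": {"query_string": "moneyball"},
        }
        rendered = await client.render_search_template(body=body)
        print(rendered)  # the fully rendered search definition
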
- """ - return await self.transport.perform_request( - "POST", - _make_path("_render", "template", id), - params=params, - headers=headers, - body=body, - ) - - @query_params() - async def scripts_painless_execute( - self, - body: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows an arbitrary script to be executed and a result to be returned. - - - :arg body: The script to execute - """ - return await self.transport.perform_request( - "POST", - "/_scripts/painless/_execute", - params=params, - headers=headers, - body=body, - ) - - @query_params("rest_total_hits_as_int", "scroll") - async def scroll( - self, - body: Any = None, - scroll_id: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to retrieve a large numbers of results from a single search request. - - - :arg body: The scroll ID if not passed by URL or query - parameter. - :arg scroll_id: Scroll ID. - :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response. - Default is false. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - """ - if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: - raise ValueError("You need to supply scroll_id or body.") - elif scroll_id and not body: - body = {"scroll_id": scroll_id} - elif scroll_id: - params["scroll_id"] = scroll_id - - return await self.transport.perform_request( - "POST", "/_search/scroll", params=params, headers=headers, body=body - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "allow_no_indices", - "allow_partial_search_results", - "analyze_wildcard", - "analyzer", - "batched_reduce_size", - "ccs_minimize_roundtrips", - "default_operator", - "df", - "docvalue_fields", - "expand_wildcards", - "explain", - "from_", - "ignore_throttled", - "ignore_unavailable", - "lenient", - "max_concurrent_shard_requests", - "pre_filter_shard_size", - "preference", - "q", - "request_cache", - "rest_total_hits_as_int", - "routing", - "scroll", - "search_type", - "seq_no_primary_term", - "size", - "sort", - "stats", - "stored_fields", - "suggest_field", - "suggest_mode", - "suggest_size", - "suggest_text", - "terminate_after", - "timeout", - "track_scores", - "track_total_hits", - "typed_keys", - "version", - ) - async def search( - self, - body: Any = None, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns results matching a query. - - - :arg body: The search definition using the Query DSL - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg allow_partial_search_results: Indicate if an error should - be returned if there is a partial search failure or timeout. Default is - True. - :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed. Default is false. - :arg analyzer: The analyzer to use for the query string. 
- :arg batched_reduce_size: The number of shard results that - should be reduced at once on the coordinating node. This value should be - used as a protection mechanism to reduce the memory overhead per search - request if the potential number of shards in the request can be large. - Default is 512. - :arg ccs_minimize_roundtrips: Indicates whether network round- - trips should be minimized as part of cross-cluster search requests - execution. Default is True. - :arg default_operator: The default operator for query string - query (AND or OR). Valid choices are AND, OR. - :arg df: The field to use as default where no field prefix is - given in the query string. - :arg docvalue_fields: Comma-separated list of fields to return - as the docvalue representation of a field for each hit. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg explain: Specify whether to return detailed information - about score computation as part of a hit. - :arg from_: Starting offset. Default is 0. - :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored. - :arg max_concurrent_shard_requests: The number of concurrent - shard requests per node this search executes concurrently. This value - should be used to limit the impact of the search on the cluster in order - to limit the number of concurrent shard requests. Default is 5. - :arg pre_filter_shard_size: Threshold that enforces a pre-filter - round-trip to prefilter search shards based on query rewriting if the - number of shards the search request expands to exceeds the threshold. - This filter round-trip can limit the number of shards significantly if - for instance a shard can not match any documents based on its rewrite - method ie. if date filters are mandatory to match but the shard bounds - and the query are disjoint. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg q: Query in the Lucene query string syntax. - :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting. - :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response. - Default is false. - :arg routing: Comma-separated list of specific routing values. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - :arg search_type: Search operation type. Valid choices are - query_then_fetch, dfs_query_then_fetch. - :arg seq_no_primary_term: Specify whether to return sequence - number and primary term of the last modification of each hit. - :arg size: Number of hits to return. Default is 10. - :arg sort: Comma-separated list of : pairs. - :arg stats: Specific 'tag' of the request for logging and - statistical purposes. - :arg stored_fields: Comma-separated list of stored fields to - return. - :arg suggest_field: Specify which field to use for suggestions. - :arg suggest_mode: Specify suggest mode. Valid choices are - missing, popular, always. - :arg suggest_size: How many suggestions to return in response. 
- :arg suggest_text: The source text for which the suggestions - should be returned. - :arg terminate_after: The maximum number of documents to collect - for each shard, upon reaching which the query execution will terminate - early. - :arg timeout: Operation timeout. - :arg track_scores: Whether to calculate and return scores even - if they are not used for sorting. - :arg track_total_hits: Indicate if the number of documents that - match the query should be tracked. - :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response. - :arg version: Whether to return document version as part of a - hit. - """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_search"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "local", - "preference", - "routing", - ) - async def search_shards( - self, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns information about the indices and shards that a search request would be - executed against. - - - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg routing: Routing value. - """ - return await self.transport.perform_request( - "GET", _make_path(index, "_search_shards"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", - "ccs_minimize_roundtrips", - "expand_wildcards", - "explain", - "ignore_throttled", - "ignore_unavailable", - "preference", - "profile", - "rest_total_hits_as_int", - "routing", - "scroll", - "search_type", - "typed_keys", - ) - async def search_template( - self, - body: Any, - index: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Allows to use the Mustache language to pre-render a search definition. - - - :arg body: The search definition template and its params - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg ccs_minimize_roundtrips: Indicates whether network round- - trips should be minimized as part of cross-cluster search requests - execution. Default is True. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. 
- :arg explain: Specify whether to return detailed information - about score computation as part of a hit. - :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg profile: Specify whether to profile the query execution. - :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response. - Default is false. - :arg routing: Comma-separated list of specific routing values. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - :arg search_type: Search operation type. Valid choices are - query_then_fetch, query_and_fetch, dfs_query_then_fetch, - dfs_query_and_fetch. - :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response. - """ - if body in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'body'.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_search", "template"), - params=params, - headers=headers, - body=body, - ) - - @query_params( - "field_statistics", - "fields", - "offsets", - "payloads", - "positions", - "preference", - "realtime", - "routing", - "term_statistics", - "version", - "version_type", - ) - async def termvectors( - self, - index: Any, - body: Any = None, - id: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns information and statistics about terms in the fields of a particular - document. - - - :arg index: The index in which the document resides. - :arg body: Define parameters and or supply a document to get - termvectors for. See documentation. - :arg id: Document ID. When not specified a doc param should be - supplied. - :arg field_statistics: Specifies if document count, sum of - document frequencies and sum of total term frequencies should be - returned. Default is True. - :arg fields: Comma-separated list of fields to return. - :arg offsets: Specifies if term offsets should be returned. - Default is True. - :arg payloads: Specifies if term payloads should be returned. - Default is True. - :arg positions: Specifies if term positions should be returned. - Default is True. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg realtime: Specifies if request is real-time as opposed to - near-real-time. Default is True. - :arg routing: Routing value. - :arg term_statistics: Specifies if total term frequency and - document frequency should be returned. Default is false. - :arg version: Explicit version number for concurrency control. - :arg version_type: Specific version type. Valid choices are - internal, external, external_gte, force. 
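To make the `termvectors` parameters above concrete, a brief usage sketch follows; the index, document id, and field name are hypothetical:

    from opensearchpy._async.client import AsyncOpenSearch

    async def show_termvectors(client: AsyncOpenSearch) -> None:
        # Request term statistics for one stored document's "title" field.
        tv = await client.termvectors(
            index="movies",
            id="1",
            fields="title",
            term_statistics=True,
        )
        print(tv)
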
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - path = _make_path(index, "_termvectors", id) - - return await self.transport.perform_request( - "POST", path, params=params, headers=headers, body=body - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "if_primary_term", - "if_seq_no", - "lang", - "refresh", - "require_alias", - "retry_on_conflict", - "routing", - "timeout", - "wait_for_active_shards", - ) - async def update( - self, - index: Any, - id: Any, - body: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Updates a document with a script or partial document. - - - :arg index: Index name. - :arg id: Document ID. - :arg body: The request definition requires either `script` or - partial `doc` - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg if_primary_term: only perform the operation if the last - operation that has changed the document has the specified primary term. - :arg if_seq_no: only perform the operation if the last operation - that has changed the document has the specified sequence number. - :arg lang: The script language. Default is painless. - :arg refresh: If `true` then refresh the affected shards to make - this operation visible to search, if `wait_for` then wait for a refresh - to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices are true, false, wait_for. - :arg require_alias: When true, requires destination to be an - alias. Default is false. - :arg retry_on_conflict: Specify how many times should the - operation be retried when a conflict occurs. Default is 0. - :arg routing: Routing value. - :arg timeout: Operation timeout. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - """ - for param in (index, id, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") - - path = _make_path(index, "_update", id) - - return await self.transport.perform_request( - "POST", path, params=params, headers=headers, body=body - ) - - @query_params( - "_source", - "_source_excludes", - "_source_includes", - "allow_no_indices", - "analyze_wildcard", - "analyzer", - "conflicts", - "default_operator", - "df", - "expand_wildcards", - "from_", - "ignore_unavailable", - "lenient", - "max_docs", - "pipeline", - "preference", - "q", - "refresh", - "request_cache", - "requests_per_second", - "routing", - "scroll", - "scroll_size", - "search_timeout", - "search_type", - "size", - "slices", - "sort", - "stats", - "terminate_after", - "timeout", - "version", - "wait_for_active_shards", - "wait_for_completion", - ) - async def update_by_query( - self, - index: Any, - body: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Performs an update on every document in the index without changing the source, - for example to pick up a mapping change. 
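A short sketch of the typical call pattern for the `update_by_query` behavior described above, e.g. rewriting matching documents in place after a mapping change; the index name and query are assumptions:

    from opensearchpy._async.client import AsyncOpenSearch

    async def update_in_place(client: AsyncOpenSearch) -> None:
        # Touch every matching document so it is re-indexed under the current mapping.
        result = await client.update_by_query(
            index="movies",
            body={"query": {"term": {"year": 2011}}},
            conflicts="proceed",
        )
        print(result)
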
- - - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg body: The search definition using the Query DSL - :arg _source: True or false to return the _source field or not, - or a list of fields to return. - :arg _source_excludes: List of fields to exclude from the - returned _source field. - :arg _source_includes: List of fields to extract and return from - the _source field. - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified). - :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed. Default is false. - :arg analyzer: The analyzer to use for the query string. - :arg conflicts: What to do when the operation encounters version - conflicts?. Valid choices are abort, proceed. - :arg default_operator: The default operator for query string - query (AND or OR). Valid choices are AND, OR. - :arg df: The field to use as default where no field prefix is - given in the query string. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg from_: Starting offset. Default is 0. - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). - :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored. - :arg max_docs: Maximum number of documents to process (default: - all documents). - :arg pipeline: The pipeline id to preprocess incoming documents - with. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg q: Query in the Lucene query string syntax. - :arg refresh: Should the affected indexes be refreshed?. - :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. Default is 0. - :arg routing: Comma-separated list of specific routing values. - :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search. - :arg scroll_size: Size on the scroll request powering the - operation. Default is 100. - :arg search_timeout: Explicit timeout for each search request. - Defaults to no timeout. - :arg search_type: Search operation type. Valid choices are - query_then_fetch, dfs_query_then_fetch. - :arg size: Deprecated, please use `max_docs` instead. - :arg slices: The number of slices this task should be divided - into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default is 1. - :arg sort: Comma-separated list of : pairs. - :arg stats: Specific 'tag' of the request for logging and - statistical purposes. - :arg terminate_after: The maximum number of documents to collect - for each shard, upon reaching which the query execution will terminate - early. - :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default is 1m. - :arg version: Whether to return document version as part of a - hit. - :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the operation. Defaults to 1, - meaning the primary shard only. 
Set to `all` for all shard copies, - otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1). Default is 1. - :arg wait_for_completion: Should this request wait until the - operation has completed before returning. Default is True. - """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_update_by_query"), - params=params, - headers=headers, - body=body, - ) - - @query_params("requests_per_second") - async def update_by_query_rethrottle( - self, - task_id: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Changes the number of requests per second for a particular Update By Query - operation. - - - :arg task_id: The task id to rethrottle. - :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. - """ - if task_id in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'task_id'.") - - return await self.transport.perform_request( - "POST", - _make_path("_update_by_query", task_id, "_rethrottle"), - params=params, - headers=headers, - ) - - @query_params() - async def get_script_context( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns all script contexts. - - """ - return await self.transport.perform_request( - "GET", "/_script_context", params=params, headers=headers - ) - - @query_params() - async def get_script_languages( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Returns available script types, languages and contexts. - - """ - return await self.transport.perform_request( - "GET", "/_script_language", params=params, headers=headers - ) - - @query_params( - "allow_partial_pit_creation", - "expand_wildcards", - "keep_alive", - "preference", - "routing", - ) - async def create_pit( - self, - index: Any, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Creates point in time context. - - - :arg index: Comma-separated list of indices; use `_all` or empty - string to perform the operation on all indices. - :arg allow_partial_pit_creation: Allow if point in time can be - created with partial failures. - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices are all, - open, closed, hidden, none. - :arg keep_alive: Specify the keep alive for point in time. - :arg preference: Specify the node or shard the operation should - be performed on. Default is random. - :arg routing: Comma-separated list of specific routing values. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", - _make_path(index, "_search", "point_in_time"), - params=params, - headers=headers, - ) - - @query_params() - async def delete_all_pits( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Deletes all active point in time searches. 
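Since several point-in-time helpers are defined around here, an end-to-end sketch may help; the `pit_id` field names below follow the OpenSearch point-in-time API and, like the index name and keep-alive value, are assumptions rather than something spelled out in this hunk:

    from opensearchpy._async.client import AsyncOpenSearch

    async def pit_roundtrip(client: AsyncOpenSearch) -> None:
        # Open a point-in-time context against an index, then clean it up.
        pit = await client.create_pit(index="movies", keep_alive="1m")
        try:
            print(pit)  # expected to contain a "pit_id" usable in later searches
        finally:
            await client.delete_pit(body={"pit_id": [pit["pit_id"]]})
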
- - """ - return await self.transport.perform_request( - "DELETE", "/_search/point_in_time/_all", params=params, headers=headers - ) - - @query_params() - async def delete_pit( - self, - body: Any = None, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Deletes one or more point in time searches based on the IDs passed. - - - :arg body: The point-in-time ids to be deleted - """ - return await self.transport.perform_request( - "DELETE", - "/_search/point_in_time", - params=params, - headers=headers, - body=body, - ) - - @query_params() - async def get_all_pits( - self, - params: Any = None, - headers: Any = None, - ) -> Any: - """ - Lists all active point in time searches. - - """ - return await self.transport.perform_request( - "GET", "/_search/point_in_time/_all", params=params, headers=headers - ) + # AUTO-GENERATED-API-DEFINITIONS # \ No newline at end of file diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 09549068..4982931c 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -11,8 +11,6 @@ from fnmatch import fnmatch from typing import Any, Optional, Tuple, Type -from six import add_metaclass - from opensearchpy._async.client import AsyncOpenSearch from opensearchpy._async.helpers.index import AsyncIndex from opensearchpy._async.helpers.search import AsyncSearch @@ -25,6 +23,7 @@ ) from opensearchpy.helpers.document import DocumentMeta from opensearchpy.helpers.utils import DOC_META_FIELDS, META_FIELDS, ObjectBase, merge +from six import add_metaclass class AsyncIndexMeta(DocumentMeta): diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py index 2b0501d9..18bfb4f5 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -10,11 +10,10 @@ from typing import Any -from six import iteritems, itervalues - from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy.helpers.faceted_search import FacetedResponse from opensearchpy.helpers.query import MatchAll +from six import iteritems, itervalues class AsyncFacetedSearch(object): diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index 93f04f05..d0660157 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -11,11 +11,10 @@ from itertools import chain from typing import Any -from six import iteritems - from opensearchpy.connection.async_connections import get_connection from opensearchpy.helpers.field import Nested, Text from opensearchpy.helpers.mapping import META_FIELDS, Properties +from six import iteritems class AsyncMapping(object): diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index 7f09ba7f..2812ca01 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -10,8 +10,6 @@ import copy from typing import Any, Sequence -from six import iteritems, string_types - from opensearchpy._async.helpers.actions import aiter, async_scan from opensearchpy.connection.async_connections import get_connection from opensearchpy.exceptions import IllegalOperation, TransportError @@ -20,6 +18,7 @@ from opensearchpy.helpers.response import Response from opensearchpy.helpers.search import AggsProxy, ProxyDescriptor, QueryProxy, Request from opensearchpy.helpers.utils import AttrDict, recursive_to_dict +from six import iteritems, string_types class 
AsyncSearch(Request): diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 60ca210b..2360bb4c 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -9,11 +9,10 @@ from typing import Any -from six import string_types - import opensearchpy from opensearchpy._async.helpers.actions import aiter from opensearchpy.serializer import serializer +from six import string_types class AsyncConnections(object): diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 3f1edc4a..2aedb00d 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -26,10 +26,9 @@ from typing import Any -from six import string_types - import opensearchpy from opensearchpy.serializer import serializer +from six import string_types class Connections(object): diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index 816a29b6..5dbdc130 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -27,7 +27,6 @@ from typing import Any, Optional import six - from opensearchpy.connection.connections import get_connection from .utils import AttrDict, DslBase, merge diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index f3595bcf..048550c4 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -28,10 +28,9 @@ from fnmatch import fnmatch from typing import Any, Tuple, Type -from six import add_metaclass, iteritems - from opensearchpy.connection.connections import get_connection from opensearchpy.exceptions import NotFoundError, RequestError +from six import add_metaclass, iteritems from ..exceptions import IllegalOperation, ValidationException from .field import Field diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index 37d067c1..6979d1cc 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -27,9 +27,8 @@ from datetime import datetime, timedelta from typing import Any, Optional -from six import iteritems, itervalues - from opensearchpy.helpers.aggs import A +from six import iteritems, itervalues from .query import MatchAll, Nested, Range, Terms from .response import Response diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index f75ef19e..dd3ae1a4 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -28,10 +28,9 @@ from itertools import chain from typing import Any -from six import iteritems, itervalues - from opensearchpy.connection.connections import get_connection from opensearchpy.helpers.field import Nested, Text, construct_field +from six import iteritems, itervalues from .utils import DslBase diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 5693e916..d2ac01b5 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -28,11 +28,10 @@ import copy from typing import Any -from six import iteritems, string_types - from opensearchpy.connection.connections import get_connection from opensearchpy.exceptions import TransportError from opensearchpy.helpers import scan +from six import iteritems, string_types from ..exceptions import IllegalOperation from ..helpers.query import Bool, Q diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index c46c374a..333e2cc8 100644 --- 
a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -30,11 +30,10 @@ from copy import copy from typing import Any, Callable, Dict, Optional, Tuple +from opensearchpy.exceptions import UnknownDslObject, ValidationException from six import add_metaclass, iteritems from six.moves import map -from opensearchpy.exceptions import UnknownDslObject, ValidationException - SKIP_VALUES: Tuple[str, None] = ("", None) EXPAND__TO_DOT = True diff --git a/utils/build_dists.py b/utils/build_dists.py index 8e7b43a4..0aab45de 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -45,6 +45,9 @@ @contextlib.contextmanager # type: ignore def set_tmp_dir() -> None: + """ + makes and yields a temporary directory for any working files needed for a process during a build + """ global TMP_DIR TMP_DIR = tempfile.mkdtemp() yield TMP_DIR @@ -53,6 +56,12 @@ def set_tmp_dir() -> None: def run(*argv: Any, expect_exit_code: int = 0) -> None: + """ + runs a command within this script + :param argv: command to run e.g. "git" "checkout" "--" "setup.py" "opensearchpy/" + :param expect_exit_code: code to compare with actual exit code from command. will exit the process if they do not + match the proper exit code + """ global TMP_DIR if TMP_DIR is None: os.chdir(BASE_DIR) @@ -71,6 +80,10 @@ def run(*argv: Any, expect_exit_code: int = 0) -> None: def test_dist(dist: Any) -> None: + """ + validate that the distribution created works + :param dist: base directory of the distribution + """ with set_tmp_dir() as tmp_dir: # type: ignore dist_name = re.match( # type: ignore r"^(opensearchpy\d*)-", @@ -181,6 +194,11 @@ def test_dist(dist: Any) -> None: def main() -> None: + """ + creates a distribution given of the OpenSearch python client + Notes: does not run on MacOS; this script is generally driven by a GitHub Action located in + .github/workflows/unified-release.yml + """ run("git", "checkout", "--", "setup.py", "opensearchpy/") run("rm", "-rf", "build/", "dist/*", "*.egg-info", ".eggs") run("python", "setup.py", "sdist", "bdist_wheel") diff --git a/utils/generate_api.py b/utils/generate_api.py index d671a975..886b2c35 100644 --- a/utils/generate_api.py +++ b/utils/generate_api.py @@ -80,6 +80,10 @@ def blacken(filename: Any) -> None: + """ + runs 'black' https://pypi.org/project/black/ on the given file + :param filename: file to reformant + """ runner = CliRunner() result = runner.invoke(black.main, [str(filename)]) assert result.exit_code == 0, result.output @@ -87,6 +91,11 @@ def blacken(filename: Any) -> None: @lru_cache() def is_valid_url(url: str) -> bool: + """ + makes a call to the url + :param url: url to check + :return: True if status code is between HTTP 200 inclusive and 400 exclusive; False otherwise + """ return 200 <= http.request("HEAD", url).status < 400 @@ -97,9 +106,16 @@ def __init__(self, namespace: str) -> None: self.parse_orig() def add(self, api: Any) -> None: + """ + add an API to the list of modules + :param api: an API object + """ self._apis.append(api) def parse_orig(self) -> None: + """ + reads the written module and updates with important code specific to this client + """ self.orders = [] self.header = "from typing import Any, Collection, Optional, Tuple, Union\n\n" @@ -137,9 +153,15 @@ def _position(self, api: Any) -> Any: return len(self.orders) def sort(self) -> None: + """ + sorts the list of APIs by the Module._position key + """ self._apis.sort(key=self._position) def dump(self) -> None: + """ + writes the module out to disk + """ self.sort() # This code snippet 
adds headers to each generated module indicating that the code is generated. @@ -237,6 +259,9 @@ def dump(self) -> None: @property def filepath(self) -> Any: + """ + :return: absolute path to the module + """ return CODE_ROOT / f"opensearchpy/_async/client/{self.namespace}.py" @@ -287,6 +312,10 @@ def __init__(self, namespace: str, name: str, definition: Any) -> None: @property def all_parts(self) -> Dict[str, str]: + """ + updates the url parts from the specification + :return: dict of updated parts + """ parts = {} for url in self._def["url"]["paths"]: parts.update(url.get("parts", {})) @@ -322,6 +351,9 @@ def ind(item: Any) -> Any: @property def params(self) -> Any: + """ + :return: itertools.chain of required parts of the API + """ parts = self.all_parts params = self._def.get("params", {}) return chain( @@ -337,6 +369,9 @@ def params(self) -> Any: @property def body(self) -> Any: + """ + :return: body of the API spec + """ b = self._def.get("body", {}) if b: b.setdefault("required", False) @@ -344,6 +379,9 @@ def body(self) -> Any: @property def query_params(self) -> Any: + """ + :return: any query string parameters from the specification + """ return ( k for k in sorted(self._def.get("params", {}).keys()) @@ -352,9 +390,9 @@ def query_params(self) -> Any: @property def all_func_params(self) -> Any: - """Parameters that will be in the '@query_params' decorator list + """ + Parameters that will be in the '@query_params' decorator list and parameters that will be in the function signature. - This doesn't include """ params = list(self._def.get("params", {}).keys()) for url in self._def["url"]["paths"]: @@ -365,6 +403,9 @@ def all_func_params(self) -> Any: @property def path(self) -> Any: + """ + :return: the first lexically ordered path in url.paths + """ return max( (path for path in self._def["url"]["paths"]), key=lambda p: len(re.findall(r"\{([^}]+)\}", p["path"])), @@ -372,8 +413,12 @@ def path(self) -> Any: @property def method(self) -> Any: - # To adhere to the HTTP RFC we shouldn't send - # bodies in GET requests. + """ + To adhere to the HTTP RFC we shouldn't send + bodies in GET requests. 
+ :return: + """ + default_method = self.path["methods"][0] if self.name == "refresh" or self.name == "flush": return "POST" @@ -385,6 +430,9 @@ def method(self) -> Any: @property def url_parts(self) -> Any: + """ + :return tuple of boolean (if the path is dynamic), list of url parts + """ path = self.path["path"] dynamic = "{" in path @@ -406,6 +454,9 @@ def url_parts(self) -> Any: @property def required_parts(self) -> Any: + """ + :return: list of parts of the url that are required plus the body + """ parts = self.all_parts required = [p for p in parts if parts[p]["required"]] # type: ignore if self.body.get("required"): @@ -413,6 +464,9 @@ def required_parts(self) -> Any: return required def to_python(self) -> Any: + """ + :return: rendered Jinja template + """ try: t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") except TemplateNotFound: @@ -426,6 +480,12 @@ def to_python(self) -> Any: def read_modules() -> Any: + """ + checks the opensearch-api spec at + https://raw.githubusercontent.com/opensearch-project/opensearch-api-specification/main/OpenSearch.openapi.json + and parses it into one or more API modules + :return: a dict of API objects + """ modules = {} # Load the OpenAPI specification file @@ -644,6 +704,13 @@ def read_modules() -> Any: def apply_patch(namespace: str, name: str, api: Any) -> Any: + """ + applies patches as specified in {name}.json + :param namespace: directory containing overrides + :param name: file to be prepended to ".json" containing override instructions + :param api: specific api to override + :return: modified api + """ override_file_path = ( CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json" ) @@ -655,6 +722,10 @@ def apply_patch(namespace: str, name: str, api: Any) -> Any: def dump_modules(modules: Any) -> None: + """ + writes out modules to disk + :param modules: a list of python modules + """ for mod in modules.values(): mod.dump() diff --git a/utils/license_headers.py b/utils/license_headers.py index 575e9868..9724660b 100644 --- a/utils/license_headers.py +++ b/utils/license_headers.py @@ -47,6 +47,11 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]: def does_file_need_fix(filepath: str) -> bool: + """ + checks if the correct license header exists at the top of the file + :param filepath: an absolute or relative filepath to a file to check + :return: True if the file needs a header, False if it does not + """ if not re.search(r"\.py$", filepath): return False existing_header = "" @@ -64,6 +69,10 @@ def does_file_need_fix(filepath: str) -> bool: def add_header_to_file(filepath: str) -> None: + """ + writes the license header to the beginning of a file + :param filepath: relative or absolute filepath to update + """ with open(filepath, mode="r") as f: lines = list(f) i = 0 @@ -78,6 +87,12 @@ def add_header_to_file(filepath: str) -> None: def main() -> None: + """ + arguments: + fix: find all files without license headers and insert headers at the top of the file + check: prints a list of files without license headers + list of one or more directories: search in these directories + """ mode = sys.argv[1] assert mode in ("fix", "check") sources = [os.path.abspath(x) for x in sys.argv[2:]] From f3a0450818b84b8f70ea71eb681afa78aa47546a Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Sun, 31 Dec 2023 11:21:53 -0500 Subject: [PATCH 07/14] updated functions with missing docstrings or pylint ignore instructions; added a utility to automatically add these ignore instructions to most functions that should be 
self-describing; rolled back some automatically generated code mistakenly changed Signed-off-by: Mark Cohen --- noxfile.py | 7 +- opensearchpy/_async/client/__init__.py | 2008 ++++++++++++++++- opensearchpy/_async/helpers/document.py | 3 +- opensearchpy/_async/helpers/faceted_search.py | 3 +- opensearchpy/_async/helpers/mapping.py | 3 +- opensearchpy/_async/helpers/search.py | 3 +- opensearchpy/connection/async_connections.py | 3 +- opensearchpy/connection/connections.py | 3 +- opensearchpy/helpers/analysis.py | 1 + opensearchpy/helpers/document.py | 3 +- opensearchpy/helpers/faceted_search.py | 3 +- opensearchpy/helpers/mapping.py | 3 +- opensearchpy/helpers/search.py | 3 +- opensearchpy/helpers/utils.py | 3 +- setup.cfg | 2 +- test_opensearchpy/run_tests.py | 15 + test_opensearchpy/test_async/test_client.py | 6 + .../test_async/test_connection.py | 48 +- .../test_async/test_helpers/conftest.py | 7 + .../test_async/test_helpers/test_document.py | 46 + .../test_helpers/test_faceted_search.py | 7 + .../test_async/test_helpers/test_index.py | 13 + .../test_async/test_helpers/test_mapping.py | 8 + .../test_async/test_helpers/test_search.py | 32 + .../test_helpers/test_update_by_query.py | 8 + test_opensearchpy/test_async/test_http.py | 6 + .../test_async/test_http_connection.py | 5 + .../test_async/test_plugins_client.py | 1 + .../test_async/test_server/__init__.py | 10 +- .../test_async/test_server/conftest.py | 1 + .../test_async/test_server/test_clients.py | 7 +- .../test_server/test_helpers/conftest.py | 17 +- .../test_server/test_helpers/test_actions.py | 36 + .../test_server/test_helpers/test_data.py | 4 + .../test_server/test_helpers/test_document.py | 36 + .../test_helpers/test_faceted_search.py | 22 + .../test_server/test_helpers/test_index.py | 6 + .../test_server/test_helpers/test_mapping.py | 4 + .../test_server/test_helpers/test_search.py | 9 + .../test_helpers/test_update_by_query.py | 3 + .../test_server/test_plugins/test_alerting.py | 6 + .../test_plugins/test_index_management.py | 4 + .../test_server/test_rest_api_spec.py | 6 + .../test_security_plugin.py | 21 +- test_opensearchpy/test_async/test_signer.py | 7 + .../test_async/test_transport.py | 38 +- test_opensearchpy/test_cases.py | 8 + test_opensearchpy/test_client/__init__.py | 18 + test_opensearchpy/test_client/test_cluster.py | 3 + test_opensearchpy/test_client/test_http.py | 6 + test_opensearchpy/test_client/test_indices.py | 4 + .../test_client/test_overrides.py | 15 + .../test_client/test_plugins/test_alerting.py | 12 + .../test_plugins/test_index_management.py | 9 + .../test_plugins/test_plugins_client.py | 1 + .../test_client/test_point_in_time.py | 8 + .../test_client/test_remote_store.py | 1 + .../test_client/test_requests.py | 2 + test_opensearchpy/test_client/test_urllib3.py | 3 + test_opensearchpy/test_client/test_utils.py | 16 + .../test_connection/test_base_connection.py | 15 + .../test_requests_http_connection.py | 46 +- .../test_urllib3_http_connection.py | 32 + test_opensearchpy/test_connection_pool.py | 16 +- test_opensearchpy/test_exceptions.py | 2 + test_opensearchpy/test_helpers/conftest.py | 4 + .../test_helpers/test_actions.py | 19 +- test_opensearchpy/test_helpers/test_aggs.py | 31 + .../test_helpers/test_analysis.py | 11 + .../test_helpers/test_document.py | 47 + .../test_helpers/test_faceted_search.py | 7 + test_opensearchpy/test_helpers/test_field.py | 19 + test_opensearchpy/test_helpers/test_index.py | 14 + .../test_helpers/test_mapping.py | 8 + test_opensearchpy/test_helpers/test_query.py | 
57 + test_opensearchpy/test_helpers/test_result.py | 22 + test_opensearchpy/test_helpers/test_search.py | 37 + .../test_helpers/test_update_by_query.py | 9 + test_opensearchpy/test_helpers/test_utils.py | 11 + .../test_helpers/test_validation.py | 10 + .../test_helpers/test_wrappers.py | 5 + test_opensearchpy/test_http_server.py | 9 + test_opensearchpy/test_serializer.py | 25 + test_opensearchpy/test_server/__init__.py | 2 + test_opensearchpy/test_server/conftest.py | 2 + test_opensearchpy/test_server/test_clients.py | 3 + .../test_server/test_helpers/conftest.py | 7 + .../test_server/test_helpers/test_actions.py | 32 + .../test_server/test_helpers/test_analysis.py | 3 + .../test_server/test_helpers/test_count.py | 3 + .../test_server/test_helpers/test_data.py | 4 + .../test_server/test_helpers/test_document.py | 36 + .../test_helpers/test_faceted_search.py | 12 + .../test_server/test_helpers/test_index.py | 6 + .../test_server/test_helpers/test_mapping.py | 4 + .../test_server/test_helpers/test_search.py | 9 + .../test_helpers/test_update_by_query.py | 3 + .../test_server/test_plugins/test_alerting.py | 6 + .../test_plugins/test_index_management.py | 4 + .../test_server/test_rest_api_spec.py | 26 +- .../test_server_secured/test_clients.py | 1 + .../test_security_plugin.py | 15 + test_opensearchpy/test_transport.py | 27 + test_opensearchpy/utils.py | 11 + utils/disable_pylint_check.py | 59 + 105 files changed, 3294 insertions(+), 33 deletions(-) create mode 100644 utils/disable_pylint_check.py diff --git a/noxfile.py b/noxfile.py index d453a3a2..d080903e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,7 +32,7 @@ SOURCE_FILES = ( "setup.py", "noxfile.py", - "opensearchpy/", + # "opensearchpy/", "test_opensearchpy/", "utils/", "samples/", @@ -43,6 +43,7 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) # type: ignore def test(session: Any) -> None: + # pylint: disable=missing-function-docstring session.install(".") # ensure client can be imported without aiohttp session.run("python", "-c", "import opensearchpy\nprint(opensearchpy.OpenSearch())") @@ -59,6 +60,7 @@ def test(session: Any) -> None: @nox.session(python=["3.7"]) # type: ignore def format(session: Any) -> None: + # pylint: disable=missing-function-docstring session.install(".") session.install("black", "isort") @@ -71,6 +73,7 @@ def format(session: Any) -> None: @nox.session(python=["3.7"]) # type: ignore def lint(session: Any) -> None: + # pylint: disable=missing-function-docstring session.install( "flake8", "black", @@ -110,6 +113,7 @@ def lint(session: Any) -> None: @nox.session() # type: ignore def docs(session: Any) -> None: + # pylint: disable=missing-function-docstring session.install(".") session.install(".[docs]") with session.chdir("docs"): @@ -118,6 +122,7 @@ def docs(session: Any) -> None: @nox.session() # type: ignore def generate(session: Any) -> None: + # pylint: disable=missing-function-docstring session.install("-rdev-requirements.txt") session.run("python", "utils/generate_api.py") format(session) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index bf3532da..f88ee0b9 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -34,9 +34,6 @@ # -----------------------------------------------------------------------------------------+ -#replace_token# - - from __future__ import unicode_literals import logging @@ -58,6 +55,7 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks 
import TasksClient +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") @@ -257,4 +255,2006 @@ async def close(self) -> None: """Closes the Transport and all internal connections""" await self.transport.close() - # AUTO-GENERATED-API-DEFINITIONS # \ No newline at end of file + # AUTO-GENERATED-API-DEFINITIONS # + @query_params() + async def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns whether the cluster is running. + + """ + try: + return await self.transport.perform_request( + "HEAD", "/", params=params, headers=headers + ) + except TransportError: + return False + + @query_params() + async def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns basic information about the cluster. + + """ + return await self.transport.perform_request( + "GET", "/", params=params, headers=headers + ) + + @query_params( + "pipeline", + "refresh", + "routing", + "timeout", + "version", + "version_type", + "wait_for_active_shards", + ) + async def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Creates a new document in the index. Returns a 409 response when a document + with a same ID already exists in the index. + + + :arg index: Index name. + :arg id: Document ID. + :arg body: The document + :arg pipeline: The pipeline id to preprocess incoming documents + with. + :arg refresh: If `true` then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh + to make this operation visible to search, if `false` (the default) then + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + """ + for param in (index, id, body): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + path = _make_path(index, "_create", id) + + return await self.transport.perform_request( + "PUT", path, params=params, headers=headers, body=body + ) + + @query_params( + "if_primary_term", + "if_seq_no", + "op_type", + "pipeline", + "refresh", + "require_alias", + "routing", + "timeout", + "version", + "version_type", + "wait_for_active_shards", + ) + async def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Creates or updates a document in an index. + + + :arg index: Index name. + :arg body: The document + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg op_type: Explicit operation type. Defaults to `index` for + requests with an explicit document ID, and to `create` for requests + without an explicit document ID. 
Valid choices are index, create. + :arg pipeline: The pipeline id to preprocess incoming documents + with. + :arg refresh: If `true` then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh + to make this operation visible to search, if `false` (the default) then + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + """ + for param in (index, body): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "POST" if id in SKIP_IN_PATH else "PUT", + _make_path(index, "_doc", id), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "pipeline", + "refresh", + "require_alias", + "routing", + "timeout", + "wait_for_active_shards", + ) + async def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to perform multiple index/update/delete operations in a single request. + + + :arg body: The operation definition and data (action-data + pairs), separated by newlines + :arg index: Default index for items which don't provide one. + :arg _source: True or false to return the _source field or not, + or default list of fields to return, can be overridden on each sub- + request. + :arg _source_excludes: Default list of fields to exclude from + the returned _source field, can be overridden on each sub-request. + :arg _source_includes: Default list of fields to extract and + return from the _source field, can be overridden on each sub-request. + :arg pipeline: The pipeline id to preprocess incoming documents + with. + :arg refresh: If `true` then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh + to make this operation visible to search, if `false` (the default) then + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: Sets require_alias for all incoming + documents. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. 
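+
+        Illustrative usage sketch (not part of the generated reference; assumes an
+        `AsyncOpenSearch` instance named `client` and a caller-built,
+        newline-delimited action/source payload)::
+
+            # "movies" and the document below are hypothetical example data
+            actions = (
+                '{"index": {"_index": "movies", "_id": "1"}}\n'
+                '{"title": "Moneyball", "year": 2011}\n'
+            )
+            response = await client.bulk(body=actions, index="movies")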
+ """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + body = _bulk_body(self.transport.serializer, body) + return await self.transport.perform_request( + "POST", + _make_path(index, "_bulk"), + params=params, + headers=headers, + body=body, + ) + + @query_params() + async def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Explicitly clears the search context for a scroll. + + + :arg body: Comma-separated list of scroll IDs to clear if none + was specified via the scroll_id parameter + :arg scroll_id: Comma-separated list of scroll IDs to clear. + """ + if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: + raise ValueError("You need to supply scroll_id or body.") + elif scroll_id and not body: + body = {"scroll_id": [scroll_id]} + elif scroll_id: + params["scroll_id"] = scroll_id + + return await self.transport.perform_request( + "DELETE", "/_search/scroll", params=params, headers=headers, body=body + ) + + @query_params( + "allow_no_indices", + "analyze_wildcard", + "analyzer", + "default_operator", + "df", + "expand_wildcards", + "ignore_throttled", + "ignore_unavailable", + "lenient", + "min_score", + "preference", + "q", + "routing", + "terminate_after", + ) + async def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns number of documents matching a query. + + + :arg body: Query to restrict the results specified with the + Query DSL (optional) + :arg index: Comma-separated list of indices to restrict the + results. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg analyze_wildcard: Specify whether wildcard and prefix + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg default_operator: The default operator for query string + query (AND or OR). Valid choices are AND, OR. + :arg df: The field to use as default where no field prefix is + given in the query string. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_throttled: Whether specified concrete, expanded or + aliased indices should be ignored when throttled. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg lenient: Specify whether format-based query failures (such + as providing text to a numeric field) should be ignored. + :arg min_score: Include only documents with a specific `_score` + value in the result. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Comma-separated list of specific routing values. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. 
+ """ + return await self.transport.perform_request( + "POST", + _make_path(index, "_count"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "if_primary_term", + "if_seq_no", + "refresh", + "routing", + "timeout", + "version", + "version_type", + "wait_for_active_shards", + ) + async def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Removes a document from the index. + + + :arg index: Index name. + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg refresh: If `true` then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh + to make this operation visible to search, if `false` (the default) then + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "DELETE", _make_path(index, "_doc", id), params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "allow_no_indices", + "analyze_wildcard", + "analyzer", + "conflicts", + "default_operator", + "df", + "expand_wildcards", + "from_", + "ignore_unavailable", + "lenient", + "max_docs", + "preference", + "q", + "refresh", + "request_cache", + "requests_per_second", + "routing", + "scroll", + "scroll_size", + "search_timeout", + "search_type", + "size", + "slices", + "sort", + "stats", + "terminate_after", + "timeout", + "version", + "wait_for_active_shards", + "wait_for_completion", + ) + async def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Deletes documents matching the provided query. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg body: The search definition using the Query DSL + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg analyze_wildcard: Specify whether wildcard and prefix + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. 
Valid choices are abort, proceed. + :arg default_operator: The default operator for query string + query (AND or OR). Valid choices are AND, OR. + :arg df: The field to use as default where no field prefix is + given in the query string. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg lenient: Specify whether format-based query failures (such + as providing text to a numeric field) should be ignored. + :arg max_docs: Maximum number of documents to process (default: + all documents). + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg request_cache: Specify if request cache should be used for + this request or not, defaults to index level setting. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg routing: Comma-separated list of specific routing values. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. + :arg scroll_size: Size on the scroll request powering the + operation. Default is 100. + :arg search_timeout: Explicit timeout for each search request. + Defaults to no timeout. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + :arg size: Deprecated, please use `max_docs` instead. + :arg slices: The number of slices this task should be divided + into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be + set to `auto`. Default is 1. + :arg sort: Comma-separated list of : pairs. + :arg stats: Specific 'tag' of the request for logging and + statistical purposes. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. + :arg timeout: Time each individual bulk request should wait for + shards that are unavailable. Default is 1m. + :arg version: Whether to return document version as part of a + hit. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. 
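+
+        Illustrative usage sketch (hypothetical index and query; assumes an
+        `AsyncOpenSearch` instance named `client`)::
+
+            # deletes documents from the hypothetical "movies" index that match the query
+            await client.delete_by_query(
+                index="movies",
+                body={"query": {"range": {"year": {"lt": 1950}}}},
+            )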
+ """ + # from is a reserved word so it cannot be used, use from_ instead + if "from_" in params: + params["from"] = params.pop("from_") + + for param in (index, body): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_delete_by_query"), + params=params, + headers=headers, + body=body, + ) + + @query_params("requests_per_second") + async def delete_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Changes the number of requests per second for a particular Delete By Query + operation. + + + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. + """ + if task_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'task_id'.") + + return await self.transport.perform_request( + "POST", + _make_path("_delete_by_query", task_id, "_rethrottle"), + params=params, + headers=headers, + ) + + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Deletes a script. + + + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. + """ + if id in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'id'.") + + return await self.transport.perform_request( + "DELETE", _make_path("_scripts", id), params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "preference", + "realtime", + "refresh", + "routing", + "stored_fields", + "version", + "version_type", + ) + async def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about whether a document exists in an index. + + + :arg index: Index name. + :arg id: Document ID. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specify whether to perform the operation in + realtime or search mode. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. 
+ """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "HEAD", _make_path(index, "_doc", id), params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "preference", + "realtime", + "refresh", + "routing", + "version", + "version_type", + ) + async def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about whether a document source exists in an index. + + + :arg index: Index name. + :arg id: Document ID. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specify whether to perform the operation in + realtime or search mode. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + path = _make_path(index, "_source", id) + + return await self.transport.perform_request( + "HEAD", path, params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "analyze_wildcard", + "analyzer", + "default_operator", + "df", + "lenient", + "preference", + "q", + "routing", + "stored_fields", + ) + async def explain( + self, + index: Any, + id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about why a specific matches (or doesn't match) a query. + + + :arg index: Index name. + :arg id: Document ID. + :arg body: The query definition using the Query DSL + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg analyze_wildcard: Specify whether wildcards and prefix + queries in the query string query should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg default_operator: The default operator for query string + query (AND or OR). Valid choices are AND, OR. + :arg df: The default field for query string query. Default is + _all. + :arg lenient: Specify whether format-based query failures (such + as providing text to a numeric field) should be ignored. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. 
+ """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + path = _make_path(index, "_explain", id) + + return await self.transport.perform_request( + "POST", path, params=params, headers=headers, body=body + ) + + @query_params( + "allow_no_indices", + "expand_wildcards", + "fields", + "ignore_unavailable", + "include_unmapped", + ) + async def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns the information about the capabilities of fields among multiple + indices. + + + :arg body: An index filter specified with the Query DSL + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fields: Comma-separated list of field names. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg include_unmapped: Indicates whether unmapped fields should + be included in the response. Default is false. + """ + return await self.transport.perform_request( + "POST", + _make_path(index, "_field_caps"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "preference", + "realtime", + "refresh", + "routing", + "stored_fields", + "version", + "version_type", + ) + async def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns a document. + + + :arg index: Index name. + :arg id: Document ID. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specify whether to perform the operation in + realtime or search mode. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "GET", _make_path(index, "_doc", id), params=params, headers=headers + ) + + @query_params("cluster_manager_timeout", "master_timeout") + async def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns a script. + + + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
+ """ + if id in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'id'.") + + return await self.transport.perform_request( + "GET", _make_path("_scripts", id), params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "preference", + "realtime", + "refresh", + "routing", + "version", + "version_type", + ) + async def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns the source of a document. + + + :arg index: Index name. + :arg id: Document ID. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specify whether to perform the operation in + realtime or search mode. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + """ + for param in (index, id): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + path = _make_path(index, "_source", id) + + return await self.transport.perform_request( + "GET", path, params=params, headers=headers + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "preference", + "realtime", + "refresh", + "routing", + "stored_fields", + ) + async def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to get multiple documents in one request. + + + :arg body: Document identifiers; can be either `docs` + (containing full document information) or `ids` (when index is provided + in the URL. + :arg index: Index name. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specify whether to perform the operation in + realtime or search mode. + :arg refresh: Refresh the shard containing the document before + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_mget"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "ccs_minimize_roundtrips", + "max_concurrent_searches", + "max_concurrent_shard_requests", + "pre_filter_shard_size", + "rest_total_hits_as_int", + "search_type", + "typed_keys", + ) + async def msearch( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to execute several search operations in one request. 
+ + + :arg body: The request definitions (metadata-search request + definition pairs), separated by newlines + :arg index: Comma-separated list of indices to use as default. + :arg ccs_minimize_roundtrips: Indicates whether network round- + trips should be minimized as part of cross-cluster search requests + execution. Default is True. + :arg max_concurrent_searches: Controls the maximum number of + concurrent searches the multi search api will execute. + :arg max_concurrent_shard_requests: The number of concurrent + shard requests each sub search executes concurrently per node. This + value should be used to limit the impact of the search on the cluster in + order to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. + :arg rest_total_hits_as_int: Indicates whether hits.total should + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. + :arg typed_keys: Specify whether aggregation and suggester names + should be prefixed by their respective types in the response. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + body = _bulk_body(self.transport.serializer, body) + return await self.transport.perform_request( + "POST", + _make_path(index, "_msearch"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "ccs_minimize_roundtrips", + "max_concurrent_searches", + "rest_total_hits_as_int", + "search_type", + "typed_keys", + ) + async def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to execute several search template operations in one request. + + + :arg body: The request definitions (metadata-search request + definition pairs), separated by newlines + :arg index: Comma-separated list of indices to use as default. + :arg ccs_minimize_roundtrips: Indicates whether network round- + trips should be minimized as part of cross-cluster search requests + execution. Default is True. + :arg max_concurrent_searches: Controls the maximum number of + concurrent searches the multi search api will execute. + :arg rest_total_hits_as_int: Indicates whether hits.total should + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. + :arg typed_keys: Specify whether aggregation and suggester names + should be prefixed by their respective types in the response. 
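+
+        Illustrative usage sketch (hypothetical template id and index; assumes an
+        `AsyncOpenSearch` instance named `client` and a newline-delimited body)::
+
+            # one metadata line followed by one template-request line per search
+            body = (
+                '{"index": "movies"}\n'
+                '{"id": "movie_template", "params": {"title": "wind"}}\n'
+            )
+            responses = await client.msearch_template(body=body)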
+ """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + body = _bulk_body(self.transport.serializer, body) + return await self.transport.perform_request( + "POST", + _make_path(index, "_msearch", "template"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "field_statistics", + "fields", + "ids", + "offsets", + "payloads", + "positions", + "preference", + "realtime", + "routing", + "term_statistics", + "version", + "version_type", + ) + async def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns multiple termvectors in one request. + + + :arg body: Define ids, documents, parameters or a list of + parameters per document here. You must at least provide a list of + document ids. See documentation. + :arg index: The index in which the document resides. + :arg field_statistics: Specifies if document count, sum of + document frequencies and sum of total term frequencies should be + returned. Applies to all returned documents unless otherwise specified + in body 'params' or 'docs'. Default is True. + :arg fields: Comma-separated list of fields to return. Applies + to all returned documents unless otherwise specified in body 'params' or + 'docs'. + :arg ids: Comma-separated list of documents ids. You must define + ids as parameter or set 'ids' or 'docs' in the request body. + :arg offsets: Specifies if term offsets should be returned. + Applies to all returned documents unless otherwise specified in body + 'params' or 'docs'. Default is True. + :arg payloads: Specifies if term payloads should be returned. + Applies to all returned documents unless otherwise specified in body + 'params' or 'docs'. Default is True. + :arg positions: Specifies if term positions should be returned. + Applies to all returned documents unless otherwise specified in body + 'params' or 'docs'. Default is True. + :arg preference: Specify the node or shard the operation should + be performed on. Applies to all returned documents unless otherwise + specified in body 'params' or 'docs'. Default is random. + :arg realtime: Specifies if requests are real-time as opposed to + near-real-time. Default is True. + :arg routing: Routing value. Applies to all returned documents + unless otherwise specified in body 'params' or 'docs'. + :arg term_statistics: Specifies if total term frequency and + document frequency should be returned. Applies to all returned documents + unless otherwise specified in body 'params' or 'docs'. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + """ + path = _make_path(index, "_mtermvectors") + + return await self.transport.perform_request( + "POST", path, params=params, headers=headers, body=body + ) + + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Creates or updates a script. + + + :arg id: Script ID. + :arg body: The document + :arg context: Script context. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
+ :arg timeout: Operation timeout. + """ + for param in (id, body): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "PUT", + _make_path("_scripts", id, context), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" + ) + async def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to evaluate the quality of ranked search results over a set of typical + search queries. + + + :arg body: The ranking evaluation search definition, including + search requests, document ratings and ranking metric definition. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_rank_eval"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "max_docs", + "refresh", + "requests_per_second", + "scroll", + "slices", + "timeout", + "wait_for_active_shards", + "wait_for_completion", + ) + async def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to copy documents from one index to another, optionally filtering the + source documents by a query, changing the destination index settings, or + fetching the documents from a remote cluster. + + + :arg body: The search definition using the Query DSL and the + prototype for the index request. + :arg max_docs: Maximum number of documents to process (default: + all documents). + :arg refresh: Should the affected indexes be refreshed?. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. + :arg slices: The number of slices this task should be divided + into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be + set to `auto`. Default is 1. + :arg timeout: Time each individual bulk request should wait for + shards that are unavailable. Default is 1m. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. 
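+
+        Illustrative usage sketch (hypothetical source and destination indices;
+        assumes an `AsyncOpenSearch` instance named `client`)::
+
+            # copies every document from "movies" into "movies-backup"
+            await client.reindex(
+                body={"source": {"index": "movies"}, "dest": {"index": "movies-backup"}},
+                wait_for_completion=True,
+            )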
+ """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", "/_reindex", params=params, headers=headers, body=body + ) + + @query_params("requests_per_second") + async def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Changes the number of requests per second for a particular Reindex operation. + + + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. + """ + if task_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'task_id'.") + + return await self.transport.perform_request( + "POST", + _make_path("_reindex", task_id, "_rethrottle"), + params=params, + headers=headers, + ) + + @query_params() + async def render_search_template( + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to use the Mustache language to pre-render a search definition. + + + :arg body: The search definition template and its params + :arg id: The id of the stored search template. + """ + return await self.transport.perform_request( + "POST", + _make_path("_render", "template", id), + params=params, + headers=headers, + body=body, + ) + + @query_params() + async def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows an arbitrary script to be executed and a result to be returned. + + + :arg body: The script to execute + """ + return await self.transport.perform_request( + "POST", + "/_scripts/painless/_execute", + params=params, + headers=headers, + body=body, + ) + + @query_params("rest_total_hits_as_int", "scroll") + async def scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to retrieve a large numbers of results from a single search request. + + + :arg body: The scroll ID if not passed by URL or query + parameter. + :arg scroll_id: Scroll ID. + :arg rest_total_hits_as_int: Indicates whether hits.total should + be rendered as an integer or an object in the rest search response. + Default is false. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. 
+ """ + if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: + raise ValueError("You need to supply scroll_id or body.") + elif scroll_id and not body: + body = {"scroll_id": scroll_id} + elif scroll_id: + params["scroll_id"] = scroll_id + + return await self.transport.perform_request( + "POST", "/_search/scroll", params=params, headers=headers, body=body + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "allow_no_indices", + "allow_partial_search_results", + "analyze_wildcard", + "analyzer", + "batched_reduce_size", + "ccs_minimize_roundtrips", + "default_operator", + "df", + "docvalue_fields", + "expand_wildcards", + "explain", + "from_", + "ignore_throttled", + "ignore_unavailable", + "lenient", + "max_concurrent_shard_requests", + "pre_filter_shard_size", + "preference", + "q", + "request_cache", + "rest_total_hits_as_int", + "routing", + "scroll", + "search_type", + "seq_no_primary_term", + "size", + "sort", + "stats", + "stored_fields", + "suggest_field", + "suggest_mode", + "suggest_size", + "suggest_text", + "terminate_after", + "timeout", + "track_scores", + "track_total_hits", + "typed_keys", + "version", + ) + async def search( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns results matching a query. + + + :arg body: The search definition using the Query DSL + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg allow_partial_search_results: Indicate if an error should + be returned if there is a partial search failure or timeout. Default is + True. + :arg analyze_wildcard: Specify whether wildcard and prefix + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg batched_reduce_size: The number of shard results that + should be reduced at once on the coordinating node. This value should be + used as a protection mechanism to reduce the memory overhead per search + request if the potential number of shards in the request can be large. + Default is 512. + :arg ccs_minimize_roundtrips: Indicates whether network round- + trips should be minimized as part of cross-cluster search requests + execution. Default is True. + :arg default_operator: The default operator for query string + query (AND or OR). Valid choices are AND, OR. + :arg df: The field to use as default where no field prefix is + given in the query string. + :arg docvalue_fields: Comma-separated list of fields to return + as the docvalue representation of a field for each hit. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Specify whether to return detailed information + about score computation as part of a hit. + :arg from_: Starting offset. Default is 0. + :arg ignore_throttled: Whether specified concrete, expanded or + aliased indices should be ignored when throttled. 
+ :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg lenient: Specify whether format-based query failures (such + as providing text to a numeric field) should be ignored. + :arg max_concurrent_shard_requests: The number of concurrent + shard requests per node this search executes concurrently. This value + should be used to limit the impact of the search on the cluster in order + to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg request_cache: Specify if request cache should be used for + this request or not, defaults to index level setting. + :arg rest_total_hits_as_int: Indicates whether hits.total should + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + :arg seq_no_primary_term: Specify whether to return sequence + number and primary term of the last modification of each hit. + :arg size: Number of hits to return. Default is 10. + :arg sort: Comma-separated list of : pairs. + :arg stats: Specific 'tag' of the request for logging and + statistical purposes. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg suggest_field: Specify which field to use for suggestions. + :arg suggest_mode: Specify suggest mode. Valid choices are + missing, popular, always. + :arg suggest_size: How many suggestions to return in response. + :arg suggest_text: The source text for which the suggestions + should be returned. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. + :arg timeout: Operation timeout. + :arg track_scores: Whether to calculate and return scores even + if they are not used for sorting. + :arg track_total_hits: Indicate if the number of documents that + match the query should be tracked. + :arg typed_keys: Specify whether aggregation and suggester names + should be prefixed by their respective types in the response. + :arg version: Whether to return document version as part of a + hit. 
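+
+        Illustrative usage sketch (hypothetical index and query; assumes an
+        `AsyncOpenSearch` instance named `client`)::
+
+            # runs a match query against the hypothetical "movies" index
+            results = await client.search(
+                index="movies",
+                body={"query": {"match": {"title": "wind"}}, "size": 5},
+            )
+            for hit in results["hits"]["hits"]:
+                print(hit["_source"])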
+ """ + # from is a reserved word so it cannot be used, use from_ instead + if "from_" in params: + params["from"] = params.pop("from_") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_search"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "allow_no_indices", + "expand_wildcards", + "ignore_unavailable", + "local", + "preference", + "routing", + ) + async def search_shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about the indices and shards that a search request would be + executed against. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg routing: Routing value. + """ + return await self.transport.perform_request( + "GET", _make_path(index, "_search_shards"), params=params, headers=headers + ) + + @query_params( + "allow_no_indices", + "ccs_minimize_roundtrips", + "expand_wildcards", + "explain", + "ignore_throttled", + "ignore_unavailable", + "preference", + "profile", + "rest_total_hits_as_int", + "routing", + "scroll", + "search_type", + "typed_keys", + ) + async def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Allows to use the Mustache language to pre-render a search definition. + + + :arg body: The search definition template and its params + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg ccs_minimize_roundtrips: Indicates whether network round- + trips should be minimized as part of cross-cluster search requests + execution. Default is True. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Specify whether to return detailed information + about score computation as part of a hit. + :arg ignore_throttled: Whether specified concrete, expanded or + aliased indices should be ignored when throttled. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg profile: Specify whether to profile the query execution. + :arg rest_total_hits_as_int: Indicates whether hits.total should + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. 
+ :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. + :arg typed_keys: Specify whether aggregation and suggester names + should be prefixed by their respective types in the response. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_search", "template"), + params=params, + headers=headers, + body=body, + ) + + @query_params( + "field_statistics", + "fields", + "offsets", + "payloads", + "positions", + "preference", + "realtime", + "routing", + "term_statistics", + "version", + "version_type", + ) + async def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information and statistics about terms in the fields of a particular + document. + + + :arg index: The index in which the document resides. + :arg body: Define parameters and or supply a document to get + termvectors for. See documentation. + :arg id: Document ID. When not specified a doc param should be + supplied. + :arg field_statistics: Specifies if document count, sum of + document frequencies and sum of total term frequencies should be + returned. Default is True. + :arg fields: Comma-separated list of fields to return. + :arg offsets: Specifies if term offsets should be returned. + Default is True. + :arg payloads: Specifies if term payloads should be returned. + Default is True. + :arg positions: Specifies if term positions should be returned. + Default is True. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg realtime: Specifies if request is real-time as opposed to + near-real-time. Default is True. + :arg routing: Routing value. + :arg term_statistics: Specifies if total term frequency and + document frequency should be returned. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. + """ + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + + path = _make_path(index, "_termvectors", id) + + return await self.transport.perform_request( + "POST", path, params=params, headers=headers, body=body + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "if_primary_term", + "if_seq_no", + "lang", + "refresh", + "require_alias", + "retry_on_conflict", + "routing", + "timeout", + "wait_for_active_shards", + ) + async def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Updates a document with a script or partial document. + + + :arg index: Index name. + :arg id: Document ID. + :arg body: The request definition requires either `script` or + partial `doc` + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. 
+ :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg lang: The script language. Default is painless. + :arg refresh: If `true` then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh + to make this operation visible to search, if `false` (the default) then + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. + :arg retry_on_conflict: Specify how many times should the + operation be retried when a conflict occurs. Default is 0. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + """ + for param in (index, id, body): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + path = _make_path(index, "_update", id) + + return await self.transport.perform_request( + "POST", path, params=params, headers=headers, body=body + ) + + @query_params( + "_source", + "_source_excludes", + "_source_includes", + "allow_no_indices", + "analyze_wildcard", + "analyzer", + "conflicts", + "default_operator", + "df", + "expand_wildcards", + "from_", + "ignore_unavailable", + "lenient", + "max_docs", + "pipeline", + "preference", + "q", + "refresh", + "request_cache", + "requests_per_second", + "routing", + "scroll", + "scroll_size", + "search_timeout", + "search_type", + "size", + "slices", + "sort", + "stats", + "terminate_after", + "timeout", + "version", + "wait_for_active_shards", + "wait_for_completion", + ) + async def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Performs an update on every document in the index without changing the source, + for example to pick up a mapping change. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg body: The search definition using the Query DSL + :arg _source: True or false to return the _source field or not, + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). + :arg analyze_wildcard: Specify whether wildcard and prefix + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. + :arg default_operator: The default operator for query string + query (AND or OR). Valid choices are AND, OR. + :arg df: The field to use as default where no field prefix is + given in the query string. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
+ :arg from_: Starting offset. Default is 0. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). + :arg lenient: Specify whether format-based query failures (such + as providing text to a numeric field) should be ignored. + :arg max_docs: Maximum number of documents to process (default: + all documents). + :arg pipeline: The pipeline id to preprocess incoming documents + with. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg refresh: Should the affected indexes be refreshed?. + :arg request_cache: Specify if request cache should be used for + this request or not, defaults to index level setting. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg routing: Comma-separated list of specific routing values. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. + :arg scroll_size: Size on the scroll request powering the + operation. Default is 100. + :arg search_timeout: Explicit timeout for each search request. + Defaults to no timeout. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + :arg size: Deprecated, please use `max_docs` instead. + :arg slices: The number of slices this task should be divided + into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be + set to `auto`. Default is 1. + :arg sort: Comma-separated list of : pairs. + :arg stats: Specific 'tag' of the request for logging and + statistical purposes. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. + :arg timeout: Time each individual bulk request should wait for + shards that are unavailable. Default is 1m. + :arg version: Whether to return document version as part of a + hit. + :arg wait_for_active_shards: Sets the number of shard copies + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. + """ + # from is a reserved word so it cannot be used, use from_ instead + if "from_" in params: + params["from"] = params.pop("from_") + + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_update_by_query"), + params=params, + headers=headers, + body=body, + ) + + @query_params("requests_per_second") + async def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Changes the number of requests per second for a particular Update By Query + operation. + + + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. 
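A minimal usage sketch for the async update-by-query and rethrottle calls documented above, assuming a local cluster and an illustrative "movies" index; the host, query, and throttle values are placeholders, not part of this patch:

import asyncio

from opensearchpy import AsyncOpenSearch


async def throttled_update_by_query() -> None:
    # hypothetical connection details for illustration only
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])

    # start the update-by-query as a background task instead of waiting for it
    resp = await client.update_by_query(
        index="movies",
        body={"query": {"match": {"director": "Nolan"}}},
        conflicts="proceed",        # skip version conflicts instead of aborting
        requests_per_second=100,    # initial throttle
        wait_for_completion=False,  # return a task id immediately
    )
    task_id = resp["task"]

    # later, relax the throttle on the running task (-1 removes it entirely)
    await client.update_by_query_rethrottle(task_id, requests_per_second=-1)

    await client.close()


asyncio.run(throttled_update_by_query())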
+ """ + if task_id in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'task_id'.") + + return await self.transport.perform_request( + "POST", + _make_path("_update_by_query", task_id, "_rethrottle"), + params=params, + headers=headers, + ) + + @query_params() + async def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns all script contexts. + + """ + return await self.transport.perform_request( + "GET", "/_script_context", params=params, headers=headers + ) + + @query_params() + async def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns available script types, languages and contexts. + + """ + return await self.transport.perform_request( + "GET", "/_script_language", params=params, headers=headers + ) + + @query_params( + "allow_partial_pit_creation", + "expand_wildcards", + "keep_alive", + "preference", + "routing", + ) + async def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Creates point in time context. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_partial_pit_creation: Allow if point in time can be + created with partial failures. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg keep_alive: Specify the keep alive for point in time. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg routing: Comma-separated list of specific routing values. + """ + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + + return await self.transport.perform_request( + "POST", + _make_path(index, "_search", "point_in_time"), + params=params, + headers=headers, + ) + + @query_params() + async def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Deletes all active point in time searches. + + """ + return await self.transport.perform_request( + "DELETE", "/_search/point_in_time/_all", params=params, headers=headers + ) + + @query_params() + async def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Deletes one or more point in time searches based on the IDs passed. + + + :arg body: The point-in-time ids to be deleted + """ + return await self.transport.perform_request( + "DELETE", + "/_search/point_in_time", + params=params, + headers=headers, + body=body, + ) + + @query_params() + async def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Lists all active point in time searches. 
+ + """ + return await self.transport.perform_request( + "GET", "/_search/point_in_time/_all", params=params, headers=headers + ) diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 4982931c..09549068 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -11,6 +11,8 @@ from fnmatch import fnmatch from typing import Any, Optional, Tuple, Type +from six import add_metaclass + from opensearchpy._async.client import AsyncOpenSearch from opensearchpy._async.helpers.index import AsyncIndex from opensearchpy._async.helpers.search import AsyncSearch @@ -23,7 +25,6 @@ ) from opensearchpy.helpers.document import DocumentMeta from opensearchpy.helpers.utils import DOC_META_FIELDS, META_FIELDS, ObjectBase, merge -from six import add_metaclass class AsyncIndexMeta(DocumentMeta): diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py index 18bfb4f5..2b0501d9 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -10,10 +10,11 @@ from typing import Any +from six import iteritems, itervalues + from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy.helpers.faceted_search import FacetedResponse from opensearchpy.helpers.query import MatchAll -from six import iteritems, itervalues class AsyncFacetedSearch(object): diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index d0660157..93f04f05 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -11,10 +11,11 @@ from itertools import chain from typing import Any +from six import iteritems + from opensearchpy.connection.async_connections import get_connection from opensearchpy.helpers.field import Nested, Text from opensearchpy.helpers.mapping import META_FIELDS, Properties -from six import iteritems class AsyncMapping(object): diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index 2812ca01..7f09ba7f 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -10,6 +10,8 @@ import copy from typing import Any, Sequence +from six import iteritems, string_types + from opensearchpy._async.helpers.actions import aiter, async_scan from opensearchpy.connection.async_connections import get_connection from opensearchpy.exceptions import IllegalOperation, TransportError @@ -18,7 +20,6 @@ from opensearchpy.helpers.response import Response from opensearchpy.helpers.search import AggsProxy, ProxyDescriptor, QueryProxy, Request from opensearchpy.helpers.utils import AttrDict, recursive_to_dict -from six import iteritems, string_types class AsyncSearch(Request): diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 2360bb4c..60ca210b 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -9,10 +9,11 @@ from typing import Any +from six import string_types + import opensearchpy from opensearchpy._async.helpers.actions import aiter from opensearchpy.serializer import serializer -from six import string_types class AsyncConnections(object): diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 2aedb00d..3f1edc4a 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -26,9 +26,10 @@ from 
typing import Any +from six import string_types + import opensearchpy from opensearchpy.serializer import serializer -from six import string_types class Connections(object): diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index 5dbdc130..816a29b6 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -27,6 +27,7 @@ from typing import Any, Optional import six + from opensearchpy.connection.connections import get_connection from .utils import AttrDict, DslBase, merge diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index 048550c4..f3595bcf 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -28,9 +28,10 @@ from fnmatch import fnmatch from typing import Any, Tuple, Type +from six import add_metaclass, iteritems + from opensearchpy.connection.connections import get_connection from opensearchpy.exceptions import NotFoundError, RequestError -from six import add_metaclass, iteritems from ..exceptions import IllegalOperation, ValidationException from .field import Field diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index 6979d1cc..37d067c1 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -27,9 +27,10 @@ from datetime import datetime, timedelta from typing import Any, Optional -from opensearchpy.helpers.aggs import A from six import iteritems, itervalues +from opensearchpy.helpers.aggs import A + from .query import MatchAll, Nested, Range, Terms from .response import Response from .search import Search diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index dd3ae1a4..f75ef19e 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -28,9 +28,10 @@ from itertools import chain from typing import Any +from six import iteritems, itervalues + from opensearchpy.connection.connections import get_connection from opensearchpy.helpers.field import Nested, Text, construct_field -from six import iteritems, itervalues from .utils import DslBase diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index d2ac01b5..5693e916 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -28,10 +28,11 @@ import copy from typing import Any +from six import iteritems, string_types + from opensearchpy.connection.connections import get_connection from opensearchpy.exceptions import TransportError from opensearchpy.helpers import scan -from six import iteritems, string_types from ..exceptions import IllegalOperation from ..helpers.query import Bool, Q diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 333e2cc8..c46c374a 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -30,10 +30,11 @@ from copy import copy from typing import Any, Callable, Dict, Optional, Tuple -from opensearchpy.exceptions import UnknownDslObject, ValidationException from six import add_metaclass, iteritems from six.moves import map +from opensearchpy.exceptions import UnknownDslObject, ValidationException + SKIP_VALUES: Tuple[str, None] = ("", None) EXPAND__TO_DOT = True diff --git a/setup.cfg b/setup.cfg index 301efb34..d65bae6a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,4 +28,4 @@ good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long, invalid-name, pointless-statement 
+enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index b37fd598..152cf2e8 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -41,6 +41,11 @@ def fetch_opensearch_repo() -> None: + """ + runs a git fetch origin on configured opensearch core repo + :return: None if environmental variables TEST_OPENSEARCH_YAML_DIR is set or TEST_OPENSEARCH_NOFETCH is set to False; + else returns nothing + """ # user is manually setting YAML dir, don't tamper with it if "TEST_OPENSEARCH_YAML_DIR" in environ: return @@ -90,6 +95,16 @@ def fetch_opensearch_repo() -> None: def run_all(argv: Any = None) -> None: + """ + run all the tests given arguments and environment variables + - sets defaults if argv is None, running "pytest --cov=opensearchpy --junitxml= + --log-level=DEBUG --cache-clear -vv --cov-report= None: + # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) assert count == self.client.transport.call_count def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: + # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) assert (method, url) in self.client.transport.calls calls = self.client.transport.calls[(method, url)] @@ -74,17 +76,21 @@ def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: return calls def setup_method(self, method: Any) -> None: + # pylint: disable=missing-function-docstring self.client = AsyncOpenSearch(transport_class=DummyTransport) class TestClient(OpenSearchTestCaseWithDummyTransport): async def test_our_transport_used(self) -> None: + # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) async def test_start_with_0_call(self) -> None: + # pylint: disable=missing-function-docstring self.assert_call_count_equals(0) async def test_each_call_is_recorded(self) -> None: + # pylint: disable=missing-function-docstring await self.client.transport.perform_request("GET", "/") await self.client.transport.perform_request( "DELETE", "/42", params={}, body="body" diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 2ab2529c..d2992d5f 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -63,12 +63,15 @@ async def _get_mock_connection( def _dummy_request(*args: Any, **kwargs: Any) -> Any: class DummyResponse: async def __aenter__(self, *_: Any, **__: Any) -> Any: + # pylint: disable=missing-function-docstring return self async def __aexit__(self, *_: Any, **__: Any) -> None: + # pylint: disable=missing-function-docstring pass async def text(self) -> Any: + # pylint: disable=missing-function-docstring return response_body.decode("utf-8", "surrogatepass") dummy_response: Any = DummyResponse() @@ -81,6 +84,7 @@ async def text(self) -> Any: return con async def test_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring try: context = ssl.create_default_context() except AttributeError: @@ -97,10 +101,12 @@ async def test_ssl_context(self) -> None: assert con.session.connector._ssl == context async def test_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" async def test_no_http_compression(self) -> None: + # pylint: 
disable=missing-function-docstring con = await self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers @@ -114,6 +120,7 @@ async def test_no_http_compression(self) -> None: assert "content-encoding" not in kwargs["headers"] async def test_http_compression(self) -> None: + # pylint: disable=missing-function-docstring con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" @@ -140,6 +147,7 @@ async def test_http_compression(self) -> None: assert "content-encoding" not in kwargs["headers"] async def test_url_prefix(self) -> None: + # pylint: disable=missing-function-docstring con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) @@ -152,6 +160,7 @@ async def test_url_prefix(self) -> None: assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" async def test_default_user_agent(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( __versionstr__, @@ -159,10 +168,12 @@ async def test_default_user_agent(self) -> None: ) async def test_timeout_set(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection(timeout=42) assert 42 == con.timeout async def test_keep_alive_is_on_by_default(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert { "connection": "keep-alive", @@ -171,6 +182,7 @@ async def test_keep_alive_is_on_by_default(self) -> None: } == con.headers async def test_http_auth(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -180,6 +192,7 @@ async def test_http_auth(self) -> None: } == con.headers async def test_http_auth_tuple(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -189,6 +202,7 @@ async def test_http_auth_tuple(self) -> None: } == con.headers async def test_http_auth_list(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -198,6 +212,7 @@ async def test_http_auth_list(self) -> None: } == con.headers async def test_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) @@ -211,6 +226,7 @@ async def test_uses_https_if_verify_certs_is_off(self) -> None: assert con.host == "https://localhost:9200" async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -221,16 +237,19 @@ async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: assert isinstance(con.session, aiohttp.ClientSession) async def test_doesnt_use_https_if_not_specified(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert not con.use_ssl async def test_no_warning_when_using_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring ctx = 
ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring kwargs: Any for kwargs in ( {"ssl_show_warn": False}, @@ -255,6 +274,7 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> N @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> None: + # pylint: disable=missing-function-docstring path = tmp_path / "ca_certs.pem" path.touch() ssl_context.return_value.load_verify_locations.return_value = None @@ -265,6 +285,7 @@ async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> Non @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_default_ca_certs(self, ssl_context: Any) -> None: + # pylint: disable=missing-function-docstring ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True) ssl_context.return_value.load_verify_locations.assert_called_once_with( @@ -273,11 +294,13 @@ async def test_uses_default_ca_certs(self, ssl_context: Any) -> None: @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_no_ca_certs(self, ssl_context: Any) -> None: + # pylint: disable=missing-function-docstring ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True, verify_certs=False) ssl_context.return_value.load_verify_locations.assert_not_called() async def test_trust_env(self) -> None: + # pylint: disable=missing-function-docstring con: Any = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() @@ -285,6 +308,7 @@ async def test_trust_env(self) -> None: assert con.session.trust_env is True async def test_trust_env_default_value_is_false(self) -> None: + # pylint: disable=missing-function-docstring con = AIOHttpConnection() await con._create_aiohttp_session() @@ -293,6 +317,7 @@ async def test_trust_env_default_value_is_false(self) -> None: @patch("opensearchpy.connection.base.logger") async def test_uncompressed_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -304,6 +329,7 @@ async def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) async def test_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = await self._get_mock_connection() @@ -314,6 +340,7 @@ async def test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") async def test_failure_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = await self._get_mock_connection(response_code=404) with pytest.raises(NotFoundError) as e: await con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -327,6 +354,7 @@ async def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) async def test_failure_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = await 
self._get_mock_connection(response_code=404) @@ -338,6 +366,7 @@ async def test_failure_body_not_logged(self, logger: Any) -> None: assert logger.debug.call_count == 0 async def test_surrogatepass_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") @@ -345,6 +374,7 @@ async def test_surrogatepass_into_bytes(self) -> None: @pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore async def test_recursion_error_reraised(self, exception_cls: Any) -> None: + # pylint: disable=missing-function-docstring conn = AIOHttpConnection() def request_raise(*_: Any, **__: Any) -> Any: @@ -358,6 +388,7 @@ def request_raise(*_: Any, **__: Any) -> Any: assert str(e.value) == "Wasn't modified!" async def test_json_errors_are_parsed(self) -> None: + # pylint: disable=missing-function-docstring con = await self._get_mock_connection( response_code=400, response_body=b'{"error": {"type": "snapshot_in_progress_exception"}}', @@ -379,21 +410,27 @@ class TestConnectionHttpServer: @classmethod def setup_class(cls) -> None: - # Start server + """ + Start server + """ cls.server = TestHTTPServer(port=8081) cls.server.start() @classmethod def teardown_class(cls) -> None: - # Stop server + """ + stop server + """ cls.server.stop() async def httpserver(self, conn: Any, **kwargs: Any) -> Any: + # pylint: disable=missing-function-docstring status, headers, data = await conn.perform_request("GET", "/", **kwargs) data = json.loads(data) return (status, data) async def test_aiohttp_connection(self) -> None: + # pylint: disable=missing-function-docstring # Defaults conn = AIOHttpConnection("localhost", port=8081, use_ssl=False) user_agent = conn._get_default_user_agent() @@ -454,12 +491,14 @@ async def test_aiohttp_connection(self) -> None: } async def test_aiohttp_connection_error(self) -> None: + # pylint: disable=missing-function-docstring conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") async def test_default_connection_is_returned_by_default() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() con, con2 = object(), object() @@ -471,6 +510,7 @@ async def test_default_connection_is_returned_by_default() -> None: async def test_get_connection_created_connection_if_needed() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -484,6 +524,7 @@ async def test_get_connection_created_connection_if_needed() -> None: async def test_configure_preserves_unchanged_connections() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( @@ -503,6 +544,7 @@ async def test_configure_preserves_unchanged_connections() -> None: async def test_remove_connection_removes_both_conn_and_conf() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( @@ -520,6 +562,7 @@ async def test_remove_connection_removes_both_conn_and_conf() -> None: async def test_create_connection_constructs_client() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) @@ -528,6 +571,7 @@ async def 
test_create_connection_constructs_client() -> None: async def test_create_connection_adds_our_serializer() -> None: + # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) result = await c.get_connection("testing") diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index 7b2a16f6..7f809dcf 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -22,6 +22,10 @@ @fixture # type: ignore async def mock_client(dummy_response: Any) -> Any: + """ + yields a mock client with the dummy_response param + :param dummy_response: any kind of response for test + """ client = Mock() client.search.return_value = dummy_response await add_connection("mock", client) @@ -32,6 +36,7 @@ async def mock_client(dummy_response: Any) -> Any: @fixture # type: ignore def dummy_response() -> Any: + # pylint: disable=missing-function-docstring return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -81,6 +86,7 @@ def dummy_response() -> Any: @fixture # type: ignore def aggs_search() -> Any: + # pylint: disable=missing-function-docstring from opensearchpy._async.helpers.search import AsyncSearch s = AsyncSearch(index="flat-git") @@ -96,6 +102,7 @@ def aggs_search() -> Any: @fixture # type: ignore def aggs_data() -> Any: + # pylint: disable=missing-function-docstring return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 26c854c2..00dfb137 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -126,6 +126,7 @@ class Index: async def test_range_serializes_properly() -> None: + # pylint: disable=missing-function-docstring class DocumentD(document.AsyncDocument): lr: Any = field.LongRange() @@ -139,6 +140,7 @@ class DocumentD(document.AsyncDocument): async def test_range_deserializes_properly() -> None: + # pylint: disable=missing-function-docstring class DocumentD(InnerDoc): lr = field.LongRange() @@ -149,12 +151,14 @@ class DocumentD(InnerDoc): async def test_resolve_nested() -> None: + # pylint: disable=missing-function-docstring nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: + # pylint: disable=missing-function-docstring class DocumentA(document.AsyncDocument): name = field.Text() @@ -170,17 +174,20 @@ class DocumentB(document.AsyncDocument): async def test_ip_address_serializes_properly() -> None: + # pylint: disable=missing-function-docstring host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() async def test_matches_uses_index() -> None: + # pylint: disable=missing-function-docstring assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) async def test_matches_with_no_name_always_matches() -> None: + # pylint: disable=missing-function-docstring class DocumentD(document.AsyncDocument): pass @@ -189,6 +196,7 @@ class DocumentD(document.AsyncDocument): async def test_matches_accepts_wildcards() -> None: + # pylint: disable=missing-function-docstring class 
MyDoc(document.AsyncDocument): class Index: name = "my-*" @@ -198,6 +206,7 @@ class Index: async def test_assigning_attrlist_to_field() -> None: + # pylint: disable=missing-function-docstring sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -206,12 +215,14 @@ async def test_assigning_attrlist_to_field() -> None: async def test_optional_inner_objects_are_not_validated_if_missing() -> None: + # pylint: disable=missing-function-docstring d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None async def test_custom_field() -> None: + # pylint: disable=missing-function-docstring s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -223,12 +234,14 @@ async def test_custom_field() -> None: async def test_custom_field_mapping() -> None: + # pylint: disable=missing-function-docstring assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() async def test_custom_field_in_nested() -> None: + # pylint: disable=missing-function-docstring s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -237,6 +250,7 @@ async def test_custom_field_in_nested() -> None: async def test_multi_works_after_doc_has_been_saved() -> None: + # pylint: disable=missing-function-docstring c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -245,6 +259,7 @@ async def test_multi_works_after_doc_has_been_saved() -> None: async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: + # pylint: disable=missing-function-docstring # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -254,12 +269,14 @@ async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: async def test_null_value_for_object() -> None: + # pylint: disable=missing-function-docstring d = MyDoc(inner=None) assert d.inner is None async def test_inherited_doc_types_can_override_index() -> None: + # pylint: disable=missing-function-docstring class MyDocDifferentIndex(MySubDoc): _index: Any @@ -294,6 +311,7 @@ class Index: async def test_to_dict_with_meta() -> None: + # pylint: disable=missing-function-docstring d = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -305,6 +323,7 @@ async def test_to_dict_with_meta() -> None: async def test_to_dict_with_meta_includes_custom_index() -> None: + # pylint: disable=missing-function-docstring d = MySubDoc(title="hello") d.meta.index = "other-index" @@ -312,6 +331,7 @@ async def test_to_dict_with_meta_includes_custom_index() -> None: async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: + # pylint: disable=missing-function-docstring d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() @@ -319,6 +339,7 @@ async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: async def test_attribute_can_be_removed() -> None: + # pylint: disable=missing-function-docstring d = MyDoc(title="hello") del d.title @@ -326,6 +347,7 @@ async def test_attribute_can_be_removed() -> None: async def test_doc_type_can_be_correctly_pickled() -> None: + # pylint: disable=missing-function-docstring d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -341,6 +363,7 @@ async def test_doc_type_can_be_correctly_pickled() -> None: async def test_meta_is_accessible_even_on_empty_doc() -> None: + # pylint: disable=missing-function-docstring d = MyDoc() assert d.meta == {} @@ -349,6 +372,7 @@ async def 
test_meta_is_accessible_even_on_empty_doc() -> None: async def test_meta_field_mapping() -> None: + # pylint: disable=missing-function-docstring class User(document.AsyncDocument): username = field.Text() @@ -368,6 +392,7 @@ class Meta: async def test_multi_value_fields() -> None: + # pylint: disable=missing-function-docstring class Blog(document.AsyncDocument): tags = field.Keyword(multi=True) @@ -379,6 +404,7 @@ class Blog(document.AsyncDocument): async def test_docs_with_properties() -> None: + # pylint: disable=missing-function-docstring class User(document.AsyncDocument): pwd_hash: Any = field.Text() @@ -407,6 +433,7 @@ def password(self, pwd: Any) -> None: async def test_nested_can_be_assigned_to() -> None: + # pylint: disable=missing-function-docstring d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -418,12 +445,14 @@ async def test_nested_can_be_assigned_to() -> None: async def test_nested_can_be_none() -> None: + # pylint: disable=missing-function-docstring d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() async def test_nested_defaults_to_list_and_can_be_updated() -> None: + # pylint: disable=missing-function-docstring md = DocWithNested() assert [] == md.comments @@ -433,6 +462,7 @@ async def test_nested_defaults_to_list_and_can_be_updated() -> None: async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] @@ -445,12 +475,14 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: async def test_to_dict_ignores_empty_collections() -> None: + # pylint: disable=missing-function-docstring md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() async def test_declarative_mapping_definition() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MyDoc, document.AsyncDocument) assert hasattr(MyDoc, "_doc_type") assert { @@ -464,6 +496,7 @@ async def test_declarative_mapping_definition() -> None: async def test_you_can_supply_own_mapping_instance() -> None: + # pylint: disable=missing-function-docstring class MyD(document.AsyncDocument): title = field.Text() @@ -478,6 +511,7 @@ class Meta: async def test_document_can_be_created_dynamically() -> None: + # pylint: disable=missing-function-docstring n = datetime.now() md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" 
@@ -499,6 +533,7 @@ async def test_document_can_be_created_dynamically() -> None: async def test_invalid_date_will_raise_exception() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -506,6 +541,7 @@ async def test_invalid_date_will_raise_exception() -> None: async def test_document_inheritance() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.AsyncDocument) assert hasattr(MySubDoc, "_doc_type") @@ -520,6 +556,7 @@ async def test_document_inheritance() -> None: async def test_child_class_can_override_parent() -> None: + # pylint: disable=missing-function-docstring class DocumentA(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -538,6 +575,7 @@ class DocumentB(DocumentA): async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: + # pylint: disable=missing-function-docstring md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -548,6 +586,7 @@ async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: async def test_index_inheritance() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.AsyncDocument) @@ -565,6 +604,7 @@ async def test_index_inheritance() -> None: async def test_meta_fields_can_be_set_directly_in_init() -> None: + # pylint: disable=missing-function-docstring p = object() md: Any = MyDoc(_id=p, title="Hello World!") @@ -572,24 +612,28 @@ async def test_meta_fields_can_be_set_directly_in_init() -> None: async def test_save_no_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): await md.save(using="mock") async def test_delete_no_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): await md.delete(using="mock") async def test_update_no_fields() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(IllegalOperation): await md.update() async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -599,6 +643,7 @@ async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: async def test_from_opensearch_respects_underscored_non_meta_fields() -> None: + # pylint: disable=missing-function-docstring doc: Any = { "_index": "test-index", "_id": "opensearch", @@ -623,6 +668,7 @@ class Index: async def test_nested_and_object_inner_doc() -> None: + # pylint: disable=missing-function-docstring class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 40f27871..74d7c6d0 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -33,6 +33,7 @@ class BlogSearch(AsyncFacetedSearch): async def test_query_is_created_properly() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search") s = bs.build_search() @@ -56,6 +57,7 @@ async def 
test_query_is_created_properly() -> None: async def test_query_is_created_properly_with_sort_tuple() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -80,6 +82,7 @@ async def test_query_is_created_properly_with_sort_tuple() -> None: async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -103,6 +106,7 @@ async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -137,6 +141,7 @@ async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: async def test_date_histogram_facet_with_1970_01_01_date() -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -170,6 +175,7 @@ async def test_date_histogram_facet_with_1970_01_01_date() -> None: ], ) async def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -182,6 +188,7 @@ async def test_date_histogram_interval_types(interval_type: Any, interval: Any) async def test_date_histogram_no_interval_keyerror() -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index eccbe773..4a494a97 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -28,6 +28,7 @@ class Post(AsyncDocument): async def test_multiple_doc_types_will_combine_mappings() -> None: + # pylint: disable=missing-function-docstring class User(AsyncDocument): username = Text() @@ -46,6 +47,7 @@ class User(AsyncDocument): async def test_search_is_limited_to_index_name() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("my-index") s = i.search() @@ -53,6 +55,7 @@ async def test_search_is_limited_to_index_name() -> None: async def test_cloned_index_has_copied_settings_and_using() -> None: + # pylint: disable=missing-function-docstring client = object() i = AsyncIndex("my-index", using=client) i.settings(number_of_shards=1) @@ -66,6 +69,7 @@ async def test_cloned_index_has_copied_settings_and_using() -> None: async def test_cloned_index_has_analysis_attribute() -> None: + # pylint: disable=missing-function-docstring """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
@@ -86,6 +90,7 @@ async def test_cloned_index_has_analysis_attribute() -> None: async def test_settings_are_saved() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -94,6 +99,7 @@ async def test_settings_are_saved() -> None: async def test_registered_doc_type_included_in_to_dict() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("i", using="alias") i.document(Post) @@ -108,6 +114,7 @@ async def test_registered_doc_type_included_in_to_dict() -> None: async def test_registered_doc_type_included_in_search() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("i", using="alias") i.document(Post) @@ -117,6 +124,7 @@ async def test_registered_doc_type_included_in_search() -> None: async def test_aliases_add_to_object() -> None: + # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -127,6 +135,7 @@ async def test_aliases_add_to_object() -> None: async def test_aliases_returned_from_to_dict() -> None: + # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -137,6 +146,7 @@ async def test_aliases_returned_from_to_dict() -> None: async def test_analyzers_added_to_object() -> None: + # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -153,6 +163,7 @@ async def test_analyzers_added_to_object() -> None: async def test_analyzers_returned_from_to_dict() -> None: + # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -166,6 +177,7 @@ async def test_analyzers_returned_from_to_dict() -> None: async def test_conflicting_analyzer_raises_error() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -174,6 +186,7 @@ async def test_conflicting_analyzer_raises_error() -> None: async def test_index_template_can_have_order() -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("i-*") it = i.as_template("i", order=2) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 05264985..7c75db70 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -15,6 +15,7 @@ async def test_mapping_can_has_fields() -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("name", "text").field("tags", "keyword") @@ -24,6 +25,7 @@ async def test_mapping_can_has_fields() -> None: async def test_mapping_update_is_recursive() -> None: + # pylint: disable=missing-function-docstring m1 = mapping.AsyncMapping() m1.field("title", "text") m1.field("author", "object") @@ -57,6 +59,7 @@ async def test_mapping_update_is_recursive() -> None: async def test_properties_can_iterate_over_all_the_fields() -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) 
m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -67,6 +70,7 @@ async def test_properties_can_iterate_over_all_the_fields() -> None: async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -140,6 +144,7 @@ async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: async def test_mapping_can_collect_multiple_analyzers() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -186,6 +191,7 @@ async def test_mapping_can_collect_multiple_analyzers() -> None: async def test_even_non_custom_analyzers_can_have_params() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.AsyncMapping() m.field("title", "text", analyzer=a1) @@ -196,6 +202,7 @@ async def test_even_non_custom_analyzers_can_have_params() -> None: async def test_resolve_field_can_resolve_multifields() -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("title", "text", fields={"keyword": Keyword()}) @@ -203,6 +210,7 @@ async def test_resolve_field_can_resolve_multifields() -> None: async def test_resolve_nested() -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index d01f0b80..14d0f498 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -24,12 +24,14 @@ async def test_expand__to_dot_is_respected() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() async def test_execute_uses_cache() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() r = object() s._response = r @@ -38,6 +40,7 @@ async def test_execute_uses_cache() -> None: async def test_cache_isnt_cloned() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s._response = object() @@ -45,12 +48,14 @@ async def test_cache_isnt_cloned() -> None: async def test_search_starts_with_no_query() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert s.query._proxied is None async def test_search_query_combines_query() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s2 = s.query("match", f=42) @@ -63,6 +68,7 @@ async def test_search_query_combines_query() -> None: async def test_query_can_be_assigned_to() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() q = Q("match", title="python") @@ -72,6 +78,7 @@ async def test_query_can_be_assigned_to() -> None: async def test_query_can_be_wrapped() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -87,6 +94,7 @@ async def test_query_can_be_wrapped() -> None: async def test_using() -> None: + # pylint: disable=missing-function-docstring o = object() o2 = object() s = search.AsyncSearch(using=o) @@ -97,18 +105,21 
@@ async def test_using() -> None: async def test_methods_are_proxied_to_the_query() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match_all") assert s.query.to_dict() == {"match_all": {}} async def test_query_always_returns_search() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert isinstance(s.query("match", f=42), search.AsyncSearch) async def test_source_copied_on_clone() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -123,6 +134,7 @@ async def test_source_copied_on_clone() -> None: async def test_copy_clones() -> None: + # pylint: disable=missing-function-docstring from copy import copy s1 = search.AsyncSearch().source(["some", "fields"]) @@ -133,6 +145,7 @@ async def test_copy_clones() -> None: async def test_aggs_allow_two_metric() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -143,6 +156,7 @@ async def test_aggs_allow_two_metric() -> None: async def test_aggs_get_copied_on_change() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" @@ -175,6 +189,7 @@ async def test_aggs_get_copied_on_change() -> None: async def test_search_index() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch(index="i") assert s._index == ["i"] s = s.index("i2") @@ -206,6 +221,7 @@ async def test_search_index() -> None: async def test_doc_type_document_class() -> None: + # pylint: disable=missing-function-docstring class MyDocument(AsyncDocument): pass @@ -219,6 +235,7 @@ class MyDocument(AsyncDocument): async def test_sort() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.sort("fielda", "-fieldb") @@ -231,6 +248,7 @@ async def test_sort() -> None: async def test_sort_by_score() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -241,6 +259,7 @@ async def test_sort_by_score() -> None: async def test_slice() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -250,11 +269,13 @@ async def test_slice() -> None: async def test_index() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {"from": 3, "size": 1} == s[3].to_dict() async def test_search_to_dict() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {} == s.to_dict() @@ -284,6 +305,7 @@ async def test_search_to_dict() -> None: async def test_complex_example() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = ( s.query("match", title="python") @@ -335,6 +357,7 @@ async def test_complex_example() -> None: async def test_reverse() -> None: + # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -385,12 +408,14 @@ async def test_reverse() -> None: async def test_from_dict_doesnt_need_query() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() async def test_source() -> None: + # pylint: 
disable=missing-function-docstring assert {} == search.AsyncSearch().source().to_dict() assert { @@ -407,6 +432,7 @@ async def test_source() -> None: async def test_source_on_clone() -> None: + # pylint: disable=missing-function-docstring assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -422,6 +448,7 @@ async def test_source_on_clone() -> None: async def test_source_on_clear() -> None: + # pylint: disable=missing-function-docstring assert ( {} == search.AsyncSearch() @@ -432,6 +459,7 @@ async def test_source_on_clear() -> None: async def test_suggest_accepts_global_text() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch.from_dict( { "suggest": { @@ -454,6 +482,7 @@ async def test_suggest_accepts_global_text() -> None: async def test_suggest() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -463,6 +492,7 @@ async def test_suggest() -> None: async def test_exclude() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.exclude("match", title="python") @@ -476,6 +506,7 @@ async def test_exclude() -> None: async def test_update_from_dict() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -487,6 +518,7 @@ async def test_update_from_dict() -> None: async def test_rescore_query_to_dict() -> None: + # pylint: disable=missing-function-docstring s = search.AsyncSearch(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index b5380eeb..bf576511 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -20,12 +20,14 @@ async def test_ubq_starts_with_no_query() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() assert ubq.query._proxied is None async def test_ubq_to_dict() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() assert {} == ubq.to_dict() @@ -44,6 +46,7 @@ async def test_ubq_to_dict() -> None: async def test_complex_example() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -82,6 +85,7 @@ async def test_complex_example() -> None: async def test_exclude() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -95,6 +99,7 @@ async def test_exclude() -> None: async def test_reverse() -> None: + # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -131,12 +136,14 @@ async def test_reverse() -> None: async def test_from_dict_doesnt_need_query() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() async def test_overwrite_script() -> None: + # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} @@ -153,6 
+160,7 @@ async def test_overwrite_script() -> None: async def test_update_by_query_response_success() -> None: + # pylint: disable=missing-function-docstring ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_async/test_http.py b/test_opensearchpy/test_async/test_http.py index 61323bd7..41f62c53 100644 --- a/test_opensearchpy/test_async/test_http.py +++ b/test_opensearchpy/test_async/test_http.py @@ -17,26 +17,31 @@ class TestHttpClient(OpenSearchTestCaseWithDummyTransport): async def test_head(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.head("/") self.assert_call_count_equals(1) assert [(None, None, None)] == self.assert_url_called("HEAD", "/", 1) async def test_get(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.get("/") self.assert_call_count_equals(1) assert [(None, None, None)] == self.assert_url_called("GET", "/", 1) async def test_put(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.put(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called("PUT", "/xyz", 1) async def test_post(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.post(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called("POST", "/xyz", 1) async def test_post_with_headers(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.post( url="/xyz", headers={"A": "B"}, params={"X": "Y"}, body="body" ) @@ -46,6 +51,7 @@ async def test_post_with_headers(self) -> None: ) async def test_delete(self) -> None: + # pylint: disable=missing-function-docstring await self.client.http.delete(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called( diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 415f34cc..c45dc12b 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -38,18 +38,21 @@ class TestAsyncHttpConnection: def test_auth_as_tuple(self) -> None: + # pylint: disable=missing-function-docstring c = AsyncHttpConnection(http_auth=("username", "password")) assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" def test_auth_as_string(self) -> None: + # pylint: disable=missing-function-docstring c = AsyncHttpConnection(http_auth="username:password") assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" def test_auth_as_callable(self) -> None: + # pylint: disable=missing-function-docstring def auth_fn() -> None: pass @@ -59,6 +62,7 @@ def auth_fn() -> None: @pytest.mark.asyncio # type: ignore @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_basicauth_in_request_session(self, mock_request: Any) -> None: + # pylint: disable=missing-function-docstring async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() @@ -91,6 +95,7 @@ async def do_request(*args: Any, **kwargs: Any) -> Any: @pytest.mark.asyncio # type: ignore 
@mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_callable_in_request_session(self, mock_request: Any) -> None: + # pylint: disable=missing-function-docstring def auth_fn(*args: Any, **kwargs: Any) -> Any: return { "Test": "PASSED", diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index 49222b08..72933257 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -20,6 +20,7 @@ class TestPluginsClient: async def test_plugins_client(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: client = AsyncOpenSearch() # testing double-init here diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index f62ef33c..6a84337c 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -34,13 +34,19 @@ class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): # type: ignore - async def asyncSetUp(self) -> None: # pylint: disable=invalid-name + async def asyncSetUp( + self, + ) -> None: + # pylint: disable=invalid-name,missing-function-docstring self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: # pylint: disable=invalid-name + async def asyncTearDown( + self, + ) -> None: + # pylint: disable=invalid-name,missing-function-docstring wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 42deb3cc..666a7904 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ b/test_opensearchpy/test_async/test_server/conftest.py @@ -41,6 +41,7 @@ @pytest.fixture(scope="function") # type: ignore async def async_client() -> Any: + # pylint: disable=missing-function-docstring client = None try: if not hasattr(opensearchpy, "AsyncOpenSearch"): diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index cee6bc7b..506f1c8d 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -37,11 +37,13 @@ class TestUnicode: async def test_indices_analyze(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: async def test_bulk_works_with_string_body(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -49,6 +51,7 @@ async def test_bulk_works_with_string_body(self, async_client: Any) -> None: assert len(response["items"]) == 1 async def test_bulk_works_with_bytestring_body(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -60,7 +63,9 @@ class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( self, async_client: Any, monkeypatch: Any ) -> None: - # This is a defensive test case for if aiohttp suddenly stops using 
yarl. + """ + This is a defensive test case for if aiohttp suddenly stops using yarl. + """ from opensearchpy._async import http_aiohttp monkeypatch.setattr(http_aiohttp, "yarl", False) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 1aa9bb18..21d20285 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -36,6 +36,7 @@ @fixture(scope="function") # type: ignore async def client() -> Any: + # pylint: disable=missing-function-docstring client = await get_test_client(verify_certs=False, http_auth=("admin", "admin")) await add_connection("default", client) return client @@ -43,6 +44,7 @@ async def client() -> Any: @fixture(scope="function") # type: ignore async def opensearch_version(client: Any) -> Any: + # pylint: disable=missing-function-docstring info = await client.info() print(info) yield tuple( @@ -53,6 +55,7 @@ async def opensearch_version(client: Any) -> Any: @fixture # type: ignore async def write_client(client: Any) -> Any: + # pylint: disable=missing-function-docstring yield client await client.indices.delete("test-*", ignore=404) await client.indices.delete_template("test-template", ignore=404) @@ -60,7 +63,9 @@ async def write_client(client: Any) -> Any: @fixture # type: ignore async def data_client(client: Any) -> Any: - # create mappings + """ + create mappings for the git indices and load the test data + """ await create_git_index(client, "git") await create_flat_git_index(client, "flat-git") # load data @@ -73,6 +78,11 @@ async def data_client(client: Any) -> Any: @fixture # type: ignore async def pull_request(write_client: Any) -> Any: + """ + create a dummy PullRequest instance for tests + :param write_client: fixture dependency, not used directly (TODO: review) + :return: a PullRequest instance + """ await PullRequest.init() pr = PullRequest( _id=42, @@ -97,6 +107,11 @@ async def pull_request(write_client: Any) -> Any: @fixture # type: ignore async def setup_ubq_tests(client: Any) -> str: + """ + set up an index of git test data for the update-by-query (ubq) tests (TODO: review whether this fixture is still needed)
+ :param client: + :return: an index name + """ index = "test-git" await create_git_index(client, index) await async_bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) return index diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index c6c54df0..9e779baf 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -60,6 +60,9 @@ def __init__( self._fail_with = fail_with async def bulk(self, *args: Any, **kwargs: Any) -> Any: + """ + increments the number of times bulk has been called and raises the configured exception when the call count is in fail_at + """ self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -68,6 +71,7 @@ class TestStreamingBulk(object): async def test_actions_remain_unchanged(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" ): assert ok assert [{"_id": 1}, {"_id": 2}] == actions1 async def test_all_documents_get_inserted(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -88,6 +93,7 @@ async def test_all_documents_get_inserted(self, async_client: Any) -> None: ] async def test_documents_data_types(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring async def async_gen() -> Any: for x in range(100): await asyncio.sleep(0) @@ -124,6 +130,7 @@ def sync_gen() -> Any: async def test_all_errors_from_chunk_are_raised_on_failure( self, async_client: Any ) -> None: + # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -144,6 +151,7 @@ async def test_all_errors_from_chunk_are_raised_on_failure( assert False, "exception should have been raised" async def test_different_op_types(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.index(index="i", id=45, body={}) await async_client.index(index="i", id=42, body={}) docs = [ @@ -159,6 +167,7 @@ async def test_different_op_types(self, async_client: Any) -> None: assert {"f": "v"} == (await async_client.get(index="i", id=47))["_source"] async def test_transport_error_can_becaught(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient(async_client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -193,6 +202,7 @@ async def test_transport_error_can_becaught(self, async_client: Any) -> None: } == results[1][1] async def test_rejected_documents_are_retried(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -223,6 +233,7 @@ async def test_rejected_documents_are_retried(self, async_client: Any) -> None: async def test_rejected_documents_are_retried_at_most_max_retries_times( self, async_client: Any ) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!",
{}) ) @@ -254,6 +265,7 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( async def test_transport_error_is_raised_with_max_retries( self, async_client: Any ) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_at=(1, 2, 3, 4), @@ -280,6 +292,7 @@ async def streaming_bulk() -> Any: class TestBulk(object): async def test_bulk_works_with_single_item(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -293,6 +306,7 @@ async def test_bulk_works_with_single_item(self, async_client: Any) -> None: ] async def test_all_documents_get_inserted(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -306,6 +320,7 @@ async def test_all_documents_get_inserted(self, async_client: Any) -> None: ] async def test_stats_only_reports_numbers(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -316,6 +331,7 @@ async def test_stats_only_reports_numbers(self, async_client: Any) -> None: assert 100 == (await async_client.count(index="test-index"))["count"] async def test_errors_are_reported_correctly(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -343,6 +359,7 @@ async def test_errors_are_reported_correctly(self, async_client: Any) -> None: ) or "mapper_parsing_exception" in repr(error["index"]["error"]) async def test_error_is_raised(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -356,6 +373,7 @@ async def test_error_is_raised(self, async_client: Any) -> None: await actions.async_bulk(async_client, [{"a": 42}, {"a": "c"}], index="i") async def test_ignore_error_if_raised(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring # ignore the status code 400 in tuple await actions.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -389,6 +407,7 @@ async def test_ignore_error_if_raised(self, async_client: Any) -> None: ) async def test_errors_are_collected_properly(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -446,6 +465,7 @@ def __await__(self) -> Any: @pytest.fixture(scope="function") # type: ignore async def scan_teardown(async_client: Any) -> Any: + # pylint: disable=missing-function-docstring yield await async_client.clear_scroll(scroll_id="_all") @@ -454,6 +474,7 @@ class TestScan(object): async def test_order_can_be_preserved( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -477,6 +498,7 @@ async def test_order_can_be_preserved( async def test_all_documents_are_read( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ 
-493,6 +515,7 @@ async def test_all_documents_are_read( assert set(range(100)) == set(d["_source"]["answer"] for d in docs) async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -531,6 +554,7 @@ async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None async def test_initial_search_error( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring with patch.object(async_client, "clear_scroll", new_callable=AsyncMock): with patch.object( async_client, @@ -583,6 +607,7 @@ async def test_initial_search_error( async def test_no_scroll_id_fast_route( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -601,6 +626,7 @@ async def test_no_scroll_id_fast_route( async def test_logger( self, logger_mock: Any, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -642,6 +668,7 @@ async def test_logger( ) async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -688,6 +715,7 @@ async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None async def test_scan_auth_kwargs_forwarded( self, async_client: Any, scan_teardown: Any, kwargs: Any ) -> None: + # pylint: disable=missing-function-docstring ((key, val),) = kwargs.items() with patch.object( @@ -730,6 +758,7 @@ async def test_scan_auth_kwargs_forwarded( async def test_scan_auth_kwargs_favor_scroll_kwargs_option( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring with patch.object( async_client, "search", @@ -779,6 +808,7 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( async def test_async_scan_with_missing_hits_key( self, async_client: Any, scan_teardown: Any ) -> None: + # pylint: disable=missing-function-docstring with patch.object( async_client, "search", @@ -807,6 +837,7 @@ async def test_async_scan_with_missing_hits_key( @pytest.fixture(scope="function") # type: ignore async def reindex_setup(async_client: Any) -> Any: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -825,6 +856,7 @@ class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client: Any, reindex_setup: Any ) -> None: + # pylint: disable=missing-function-docstring await actions.async_reindex( async_client, "test_index", @@ -846,6 +878,7 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( async def test_reindex_accepts_a_query( self, async_client: Any, reindex_setup: Any ) -> None: + # pylint: disable=missing-function-docstring await actions.async_reindex( async_client, "test_index", @@ -867,6 +900,7 @@ async def test_reindex_accepts_a_query( async def test_all_documents_get_moved( self, async_client: Any, reindex_setup: Any ) -> None: + # pylint: disable=missing-function-docstring await 
actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() @@ -889,6 +923,7 @@ async def test_all_documents_get_moved( @pytest.fixture(scope="function") # type: ignore async def parent_reindex_setup(async_client: Any) -> None: + # pylint: disable=missing-function-docstring body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -919,6 +954,7 @@ class TestParentChildReindex: async def test_children_are_reindexed_correctly( self, async_client: Any, parent_reindex_setup: Any ) -> None: + # pylint: disable=missing-function-docstring await actions.async_reindex(async_client, "test-index", "real-index") assert {"question_answer": "question"} == ( await async_client.get(index="real-index", id=42) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index b0f396f2..667c1a33 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -13,6 +13,7 @@ async def create_flat_git_index(client: Any, index: Any) -> None: + # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -56,6 +57,7 @@ async def create_flat_git_index(client: Any, index: Any) -> None: async def create_git_index(client: Any, index: Any) -> None: + # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1078,6 +1080,7 @@ async def create_git_index(client: Any, index: Any) -> None: def flatten_doc(d: Any) -> Dict[str, Any]: + # pylint: disable=missing-function-docstring src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1087,6 +1090,7 @@ def flatten_doc(d: Any) -> Dict[str, Any]: def create_test_git_data(d: Any) -> Dict[str, Any]: + # pylint: disable=missing-function-docstring src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index bf02161d..0f452090 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -117,6 +117,7 @@ class Index: async def test_serialization(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await SerializationDoc.init() await write_client.index( index="test-serialization", @@ -147,6 +148,7 @@ async def test_serialization(write_client: Any) -> None: async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring history_query = Q( "nested", path="comments.history", @@ -175,6 +177,7 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring s = PullRequest.search().query( "nested", inner_hits={}, @@ -190,6 +193,7 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> async def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring s = 
PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -202,6 +206,7 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: async def test_update_object_field(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -222,6 +227,7 @@ async def test_update_object_field(write_client: Any) -> None: async def test_update_script(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -232,6 +238,7 @@ async def test_update_script(write_client: Any) -> None: async def test_update_retry_on_conflict(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -253,6 +260,7 @@ async def test_update_retry_on_conflict(write_client: Any) -> None: async def test_update_conflicting_version( write_client: Any, retry_on_conflict: bool ) -> None: + # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -270,6 +278,7 @@ async def test_update_conflicting_version( async def test_save_and_update_return_doc_meta(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -294,32 +303,38 @@ async def test_save_and_update_return_doc_meta(write_client: Any) -> None: async def test_init(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") async def test_get_raises_404_on_index_missing(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") async def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): await Repository.get("opensearch-dsl-php") async def test_get_returns_none_if_404_ignored(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert None is await Repository.get("opensearch-dsl-php", ignore=404) async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring assert None is await Repository.get("42", index="not-there", ignore=404) async def test_get(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -328,14 +343,17 @@ async def test_get(data_client: Any) -> None: async def test_exists_return_true(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert await Repository.exists("opensearch-py") async def test_exists_false(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert not await Repository.exists("opensearch-dsl-php") async def test_get_with_tz_date(data_client: Any) -> None: + # pylint: disable=missing-function-docstring first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -348,6 +366,7 @@ async def 
test_get_with_tz_date(data_client: Any) -> None: async def test_save_with_tz_date(data_client: Any) -> None: + # pylint: disable=missing-function-docstring tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -375,6 +394,7 @@ async def test_save_with_tz_date(data_client: Any) -> None: async def test_mget(data_client: Any) -> None: + # pylint: disable=missing-function-docstring commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -385,11 +405,13 @@ async def test_mget(data_client: Any) -> None: async def test_mget_raises_exception_when_missing_param_is_invalid( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") @@ -397,12 +419,14 @@ async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> async def test_mget_ignores_missing_docs_when_missing_param_is_skip( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" async def test_update_works_from_search_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -414,6 +438,7 @@ async def test_update_works_from_search_response(data_client: Any) -> None: async def test_update(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -438,6 +463,7 @@ async def test_update(data_client: Any) -> None: async def test_save_updates_existing_doc(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -453,6 +479,7 @@ async def test_save_updates_existing_doc(data_client: Any) -> None: async def test_save_automatically_uses_seq_no_and_primary_term( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -463,6 +490,7 @@ async def test_save_automatically_uses_seq_no_and_primary_term( async def test_delete_automatically_uses_seq_no_and_primary_term( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -471,12 +499,14 @@ async def test_delete_automatically_uses_seq_no_and_primary_term( async def assert_doc_equals(expected: Any, actual: Any) -> None: + # pylint: disable=missing-function-docstring async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] async def test_can_save_to_different_index(write_client: Any) -> None: + # pylint: disable=missing-function-docstring test_repo = Repository(description="testing", meta={"id": 42}) assert await 
test_repo.save(index="test-document") @@ -494,6 +524,7 @@ async def test_can_save_to_different_index(write_client: Any) -> None: async def test_save_without_skip_empty_will_include_empty_fields( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -509,6 +540,7 @@ async def test_save_without_skip_empty_will_include_empty_fields( async def test_delete(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await write_client.create( index="test-document", id="opensearch-py", @@ -530,10 +562,12 @@ async def test_delete(write_client: Any) -> None: async def test_search(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert await Repository.search().count() == 1 async def test_search_returns_proper_doc_classes(data_client: Any) -> None: + # pylint: disable=missing-function-docstring result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -543,6 +577,7 @@ async def test_search_returns_proper_doc_classes(data_client: Any) -> None: async def test_refresh_mapping(data_client: Any) -> None: + # pylint: disable=missing-function-docstring class Commit(AsyncDocument): _index: Any @@ -559,6 +594,7 @@ class Index: async def test_highlight_in_meta(data_client: Any) -> None: + # pylint: disable=missing-function-docstring commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 783f902d..0a12e0a5 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -56,6 +56,10 @@ class MetricSearch(AsyncFacetedSearch): @pytest.fixture(scope="function") # type: ignore def commit_search_cls(opensearch_version: Any) -> Any: + """ + :param opensearch_version: not used (TODO: consider removing this parameter) + :return: an AsyncFacetedSearch for git commits + """ interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(AsyncFacetedSearch): @@ -81,6 +85,11 @@ class CommitSearch(AsyncFacetedSearch): @pytest.fixture(scope="function") # type: ignore def repo_search_cls(opensearch_version: Any) -> Any: + """ + creates an AsyncFacetedSearch subclass for searching a git repo + :param opensearch_version: not used directly + :return: an AsyncFacetedSearch for the git repo + """ interval_type = "calendar_interval" class RepoSearch(AsyncFacetedSearch): @@ -102,6 +111,10 @@ def search(self) -> Any: @pytest.fixture(scope="function") # type: ignore def pr_search_cls(opensearch_version: Any) -> Any: + """ + :param opensearch_version: not used here (TODO: consider removing this parameter)
+ :return: an AsyncFacetedSearch for pull requests + """ interval_type = "calendar_interval" class PRSearch(AsyncFacetedSearch): @@ -120,6 +133,7 @@ class PRSearch(AsyncFacetedSearch): async def test_facet_with_custom_metric(data_client: Any) -> None: + # pylint: disable=missing-function-docstring ms = MetricSearch() r = await ms.execute() @@ -129,6 +143,7 @@ async def test_facet_with_custom_metric(data_client: Any) -> None: async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring prs = pr_search_cls() r = await prs.execute() @@ -137,6 +152,7 @@ async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -149,6 +165,7 @@ async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) - async def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring rs = repo_search_cls() r = await rs.execute() @@ -157,6 +174,7 @@ async def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> No async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring rs = repo_search_cls() r = await rs.execute() @@ -169,6 +187,7 @@ async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: async def test_empty_search_finds_everything( data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls() r = await cs.execute() assert r.hits.total.value == 52 @@ -215,6 +234,7 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = await cs.execute() @@ -261,6 +281,7 @@ async def test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"deletions": "better"}) r = await cs.execute() @@ -269,6 +290,7 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( async def test_pagination(data_client: Any, commit_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index e2670e55..8c9e6f42 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -26,6 +26,7 @@ class Post(AsyncDocument): async def test_index_template_works(write_client: Any) -> None: + # pylint: disable=missing-function-docstring it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -47,6 +48,7 @@ async def test_index_template_works(write_client: Any) -> None: async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: + # pylint: 
disable=missing-function-docstring i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -62,6 +64,7 @@ async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: async def test_index_exists(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() @@ -69,6 +72,7 @@ async def test_index_exists(data_client: Any) -> None: async def test_index_can_be_created_with_settings_and_mappings( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -94,6 +98,7 @@ async def test_index_can_be_created_with_settings_and_mappings( async def test_delete(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -105,6 +110,7 @@ async def test_delete(write_client: Any) -> None: async def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: + # pylint: disable=missing-function-docstring i1: Any = AsyncIndex("test-index-1", using=write_client) i2: Any = AsyncIndex("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 0aa3c0b8..5975ad20 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -21,6 +21,7 @@ async def test_mapping_saved_into_opensearch(write_client: Any) -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -43,6 +44,7 @@ async def test_mapping_saved_into_opensearch(write_client: Any) -> None: async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -68,6 +70,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -97,6 +100,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_with_anal async def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: + # pylint: disable=missing-function-docstring await write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index a42daf6c..2f6d9c79 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -43,6 +43,7 @@ class Index: async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: + # pylint: disable=missing-function-docstring has_tests_query = 
Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -64,6 +65,7 @@ async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> N async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -80,6 +82,7 @@ async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = AsyncSearch(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -91,6 +94,7 @@ async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: async def test_scan_respects_doc_types(data_client: Any) -> None: + # pylint: disable=missing-function-docstring result = Repository.search().scan() repos = await get_result(result) @@ -100,6 +104,7 @@ async def test_scan_respects_doc_types(data_client: Any) -> None: async def test_scan_iterates_through_all_docs(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = AsyncSearch(index="flat-git") result = s.scan() commits = await get_result(result) @@ -109,6 +114,7 @@ async def test_scan_iterates_through_all_docs(data_client: Any) -> None: async def get_result(b: Any) -> Any: + # pylint: disable=missing-function-docstring a = [] async for i in b: a.append(i) @@ -116,6 +122,7 @@ async def get_result(b: Any) -> Any: async def test_multi_search(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = AsyncSearch(index="flat-git") @@ -133,6 +140,7 @@ async def test_multi_search(data_client: Any) -> None: async def test_multi_missing(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = AsyncSearch(index="flat-git") s3 = AsyncSearch(index="does_not_exist") @@ -156,6 +164,7 @@ async def test_multi_missing(data_client: Any) -> None: async def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = AsyncSearch(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) r = await s.execute() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index a26d046e..68e98e97 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -21,6 +21,7 @@ async def test_update_by_query_no_script( write_client: Any, setup_ubq_tests: Any ) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -42,6 +43,7 @@ async def test_update_by_query_no_script( async def test_update_by_query_with_script( write_client: Any, setup_ubq_tests: Any ) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -61,6 +63,7 @@ async def test_update_by_query_with_script( async def test_delete_by_query_with_script( write_client: Any, setup_ubq_tests: Any ) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py 
b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 4e742c2f..31596875 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -28,6 +28,7 @@ class TestAlertingPlugin(AsyncOpenSearchTestCase): "Plugin not supported for opensearch version", ) async def test_create_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -44,6 +45,7 @@ async def test_create_destination(self) -> None: "Plugin not supported for opensearch version", ) async def test_get_destination(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy destination await self.test_create_destination() @@ -59,6 +61,7 @@ async def test_get_destination(self) -> None: "Plugin not supported for opensearch version", ) async def test_create_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy destination await self.test_create_destination() @@ -124,6 +127,7 @@ async def test_create_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_search_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() @@ -142,6 +146,7 @@ async def test_search_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_get_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() @@ -166,6 +171,7 @@ async def test_get_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_run_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index 0ffae69f..22d19490 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -69,6 +69,7 @@ class TestIndexManagementPlugin(AsyncOpenSearchTestCase): } async def test_create_policy(self) -> None: + # pylint: disable=missing-function-docstring # Test to create policy response = await self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -78,6 +79,7 @@ async def test_create_policy(self) -> None: self.assertIn("_id", response) async def test_get_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() @@ -89,6 +91,7 @@ async def test_get_policy(self) -> None: self.assertEqual(response["_id"], self.POLICY_NAME) async def test_update_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() @@ -111,6 +114,7 @@ async def test_update_policy(self) -> None: self.assertIn("_id", response) async def test_delete_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index c56ed70c..91be3623 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ 
b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -54,6 +54,10 @@ async def await_if_coro(x: Any) -> Any: + """ + awaits if x is a coroutine + :return: x + """ if inspect.iscoroutine(x): return await x return x @@ -211,6 +215,7 @@ async def _feature_enabled(self, name: str) -> Any: @pytest.fixture(scope="function") # type: ignore def async_runner(async_client: Any) -> AsyncYamlRunner: + # pylint: disable=missing-function-docstring return AsyncYamlRunner(async_client) @@ -218,6 +223,7 @@ def async_runner(async_client: Any) -> AsyncYamlRunner: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore async def test_rest_api_spec(test_spec: Any, async_runner: Any) -> None: + # pylint: disable=missing-function-docstring if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index e9b8f329..415e10ee 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -40,17 +40,20 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase): # type: ignore USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - async def asyncSetUp(self) -> None: # pylint: disable=invalid-name + async def asyncSetUp(self) -> None: + # pylint: disable=invalid-name, missing-function-docstring self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: # pylint: disable=invalid-name + async def asyncTearDown(self) -> None: + # pylint: disable=missing-function-docstring, invalid-name if self.client: await self.client.close() async def test_create_role(self) -> None: + # pylint: disable=missing-function-docstring # Test to create role response = await self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -60,6 +63,7 @@ async def test_create_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_create_role_with_body_param_empty(self) -> None: + # pylint: disable=missing-function-docstring try: await self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -68,6 +72,7 @@ async def test_create_role_with_body_param_empty(self) -> None: assert False async def test_get_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -78,6 +83,7 @@ async def test_get_role(self) -> None: self.assertIn(self.ROLE_NAME, response) async def test_update_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -93,6 +99,7 @@ async def test_update_role(self) -> None: self.assertEqual("OK", response.get("status")) async def test_delete_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -106,6 +113,7 @@ async def test_delete_role(self) -> None: response = await self.client.security.get_role(self.ROLE_NAME) async def test_create_user(self) -> None: + # pylint: disable=missing-function-docstring # Test to create user response = await self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -115,6 +123,7 @@ async def test_create_user(self) -> None: 
self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_create_user_with_body_param_empty(self) -> None: + # pylint: disable=missing-function-docstring try: await self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -123,6 +132,7 @@ async def test_create_user_with_body_param_empty(self) -> None: assert False async def test_create_user_with_role(self) -> None: + # pylint: disable=missing-function-docstring await self.test_create_role() # Test to create user @@ -138,6 +148,7 @@ async def test_create_user_with_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_get_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -148,6 +159,7 @@ async def test_get_user(self) -> None: self.assertIn(self.USER_NAME, response) async def test_update_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -163,6 +175,7 @@ async def test_update_user(self) -> None: self.assertEqual("OK", response.get("status")) async def test_delete_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -176,11 +189,13 @@ async def test_delete_user(self) -> None: response = await self.client.security.get_user(self.USER_NAME) async def test_health_check(self) -> None: + # pylint: disable=missing-function-docstring response = await self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) async def test_health(self) -> None: + # pylint: disable=missing-function-docstring response = await self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -214,6 +229,7 @@ async def test_health(self) -> None: } async def test_update_audit_config(self) -> None: + # pylint: disable=missing-function-docstring response = await self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) @@ -221,6 +237,7 @@ async def test_update_audit_config(self) -> None: self.assertEqual("OK", response.get("status")) async def test_update_audit_configuration(self) -> None: + # pylint: disable=missing-function-docstring response = await self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 664f6a95..8500a231 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -18,6 +18,7 @@ class TestAsyncSigner: def mock_session(self) -> Mock: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -31,6 +32,7 @@ def mock_session(self) -> Mock: return dummy_session async def test_aws_signer_async_as_http_auth(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -42,6 +44,7 @@ async def test_aws_signer_async_as_http_auth(self) -> None: assert "X-Amz-Security-Token" in headers async def test_aws_signer_async_when_region_is_null(self) -> None: + # pylint: disable=missing-function-docstring session = self.mock_session() from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -55,6 +58,7 @@ async def test_aws_signer_async_when_region_is_null(self) -> None: assert str(e.value) == "Region cannot be 
empty" async def test_aws_signer_async_when_credentials_is_null(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-1" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -64,6 +68,7 @@ async def test_aws_signer_async_when_credentials_is_null(self) -> None: assert str(e.value) == "Credentials cannot be empty" async def test_aws_signer_async_when_service_is_specified(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" service = "aoss" @@ -79,6 +84,7 @@ async def test_aws_signer_async_when_service_is_specified(self) -> None: class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner): def mock_session(self, disable_get_frozen: bool = True) -> Mock: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -91,6 +97,7 @@ def mock_session(self, disable_get_frozen: bool = True) -> Mock: return dummy_session async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index e119b020..846103da 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -64,6 +64,7 @@ async def perform_request(self, *args: Any, **kwargs: Any) -> Any: return self.status, self.headers, self.data async def close(self) -> None: + # pylint: disable=missing-function-docstring if self.closed: raise RuntimeError("This connection is already closed") self.closed = True @@ -122,6 +123,7 @@ async def close(self) -> None: class TestTransport: async def test_single_connection_uses_dummy_connection_pool(self) -> None: + # pylint: disable=missing-function-docstring t1: Any = AsyncTransport([{}]) await t1._async_call() assert isinstance(t1.connection_pool, DummyConnectionPool) @@ -130,6 +132,7 @@ async def test_single_connection_uses_dummy_connection_pool(self) -> None: assert isinstance(t2.connection_pool, DummyConnectionPool) async def test_request_timeout_extracted_from_params_and_passed(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -142,6 +145,7 @@ async def test_request_timeout_extracted_from_params_and_passed(self) -> None: } == t.get_connection().calls[0][1] async def test_timeout_extracted_from_params_and_passed(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) @@ -154,6 +158,7 @@ async def test_timeout_extracted_from_params_and_passed(self) -> None: } == t.get_connection().calls[0][1] async def test_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], opaque_id="app-1", connection_class=DummyConnection ) @@ -178,6 +183,7 @@ async def test_opaque_id(self) -> None: } == t.get_connection().calls[1][1] async def test_request_with_custom_user_agent_header(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request( @@ -191,6 +197,7 @@ async def test_request_with_custom_user_agent_header(self) -> None: } == t.get_connection().calls[0][1] async def 
test_send_get_body_as_source(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -200,6 +207,7 @@ async def test_send_get_body_as_source(self) -> None: assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] async def test_send_get_body_as_post(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -209,6 +217,7 @@ async def test_send_get_body_as_post(self) -> None: assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] async def test_body_gets_encoded_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") @@ -221,6 +230,7 @@ async def test_body_gets_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_body_bytes_get_passed_untouched(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -229,6 +239,7 @@ async def test_body_bytes_get_passed_untouched(self) -> None: assert ("GET", "/", None, body) == t.get_connection().calls[0][0] async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好\uda6a") @@ -241,18 +252,21 @@ async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_kwargs_passed_on_to_connections(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{"host": "google.com"}], port=123) await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host async def test_kwargs_passed_on_to_connection_pool(self) -> None: + # pylint: disable=missing-function-docstring dt = object() t: Any = AsyncTransport([{}, {}], dead_timeout=dt) await t._async_call() assert dt is t.connection_pool.dead_timeout async def test_custom_connection_class(self) -> None: + # pylint: disable=missing-function-docstring class MyConnection(object): def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs @@ -263,6 +277,7 @@ def __init__(self, **kwargs: Any) -> None: assert isinstance(t.connection_pool.connections[0], MyConnection) async def test_add_connection(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) @@ -270,6 +285,7 @@ async def test_add_connection(self) -> None: assert "http://google.com:1234" == t.connection_pool.connections[1].host async def test_request_will_fail_after_x_retries(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -285,6 +301,7 @@ async def test_request_will_fail_after_x_retries(self) -> None: assert 4 == len(t.get_connection().calls) async def test_failed_connection_will_be_marked_as_dead(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, 
@@ -302,6 +319,7 @@ async def test_failed_connection_will_be_marked_as_dead(self) -> None: async def test_resurrected_connection_will_be_marked_as_live_on_success( self, ) -> None: + # pylint: disable=missing-function-docstring for method in ("GET", "HEAD"): t: Any = AsyncTransport([{}, {}], connection_class=DummyConnection) await t._async_call() @@ -315,6 +333,7 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success( assert 1 == len(t.connection_pool.dead_count) async def test_sniff_will_use_seed_connections(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection ) @@ -326,6 +345,7 @@ async def test_sniff_will_use_seed_connections(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -338,6 +358,7 @@ async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -352,6 +373,7 @@ async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: ].calls[0] async def test_sniff_uses_sniff_timeout(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -365,6 +387,7 @@ async def test_sniff_uses_sniff_timeout(self) -> None: ].calls[0] async def test_sniff_reuses_connection_instances_if_possible(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, @@ -379,6 +402,7 @@ async def test_sniff_reuses_connection_instances_if_possible(self) -> None: assert connection is t.get_connection() async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"exception": ConnectionError(None, "abandon ship", Exception())}, @@ -407,6 +431,7 @@ async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: async def test_sniff_on_fail_failing_does_not_prevent_retires( self, sniff_hosts: Any ) -> None: + # pylint: disable=missing-function-docstring sniff_hosts.side_effect = [TransportError("sniff failed")] t: Any = AsyncTransport( [ @@ -428,6 +453,7 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires( assert 1 == len(conn_data.calls) async def test_sniff_after_n_seconds(self, event_loop: Any) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -449,8 +475,10 @@ async def test_sniff_after_n_seconds(self, event_loop: Any) -> None: assert event_loop.time() - 1 < t.last_sniff < event_loop.time() + 0.01 async def test_sniff_7x_publish_host(self) -> None: - # Test the response shaped when a 7.x node has publish_host set - # and the returend data is shaped in the fqdn/ip:port format. + """ + Test the response shaped when a 7.x node has publish_host set + and the returned data is shaped in the fqdn/ip:port format. 
+ """ t: Any = AsyncTransport( [{"data": CLUSTER_NODES_7X_PUBLISH_HOST}], connection_class=DummyConnection, @@ -465,6 +493,7 @@ async def test_sniff_7x_publish_host(self) -> None: } async def test_transport_close_closes_all_pool_connections(self) -> None: + # pylint: disable=missing-function-docstring t1: Any = AsyncTransport([{}], connection_class=DummyConnection) await t1._async_call() @@ -482,6 +511,7 @@ async def test_transport_close_closes_all_pool_connections(self) -> None: async def test_sniff_on_start_error_if_no_sniffed_hosts( self, event_loop: Any ) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"data": ""}, @@ -501,6 +531,7 @@ async def test_sniff_on_start_error_if_no_sniffed_hosts( async def test_sniff_on_start_waits_for_sniff_to_complete( self, event_loop: Any ) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"delay": 1, "data": ""}, @@ -539,6 +570,7 @@ async def test_sniff_on_start_waits_for_sniff_to_complete( async def test_sniff_on_start_close_unlocks_async_calls( self, event_loop: Any ) -> None: + # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"delay": 10, "data": CLUSTER_NODES}, @@ -566,6 +598,7 @@ async def test_sniff_on_start_close_unlocks_async_calls( assert duration < 1 async def test_init_connection_pool_with_many_hosts(self) -> None: + # pylint: disable=missing-function-docstring """ Check init of connection pool with multiple connections. @@ -584,6 +617,7 @@ async def test_init_connection_pool_with_many_hosts(self) -> None: await t._async_call() async def test_init_pool_with_connection_class_to_many_hosts(self) -> None: + # pylint: disable=missing-function-docstring """ Check init of connection pool with user specified connection_class. 
diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index a03f0e44..7652be71 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -36,6 +36,7 @@ class DummyTransport(object): def __init__( self, hosts: Sequence[str], responses: Any = None, **kwargs: Any ) -> None: + # pylint: disable=missing-function-docstring self.hosts = hosts self.responses = responses self.call_count: int = 0 @@ -49,6 +50,7 @@ def perform_request( body: Optional[bytes] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: + # pylint: disable=missing-function-docstring resp: Any = (200, {}) if self.responses: resp = self.responses[self.call_count] @@ -59,13 +61,16 @@ def perform_request( class OpenSearchTestCase(TestCase): def setUp(self) -> None: + # pylint: disable=missing-function-docstring super(OpenSearchTestCase, self).setUp() self.client: Any = OpenSearch(transport_class=DummyTransport) # type: ignore def assert_call_count_equals(self, count: int) -> None: + # pylint: disable=missing-function-docstring self.assertEqual(count, self.client.transport.call_count) def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: + # pylint: disable=missing-function-docstring self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) @@ -74,12 +79,15 @@ def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: class TestOpenSearchTestCase(OpenSearchTestCase): def test_our_transport_used(self) -> None: + # pylint: disable=missing-function-docstring self.assertIsInstance(self.client.transport, DummyTransport) def test_start_with_0_call(self) -> None: + # pylint: disable=missing-function-docstring self.assert_call_count_equals(0) def test_each_call_is_recorded(self) -> None: + # pylint: disable=missing-function-docstring self.client.transport.perform_request("GET", "/") self.client.transport.perform_request("DELETE", "/42", params={}, body="body") self.assert_call_count_equals(2) diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index 55fcd4a9..6b4d683d 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -37,12 +37,15 @@ class TestNormalizeHosts(TestCase): def test_none_uses_defaults(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual([{}], _normalize_hosts(None)) def test_strings_are_used_as_hostnames(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"])) def test_strings_are_parsed_for_port_and_user(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( [ {"host": "elastic.co", "port": 42}, @@ -52,6 +55,7 @@ def test_strings_are_parsed_for_port_and_user(self) -> None: ) def test_strings_are_parsed_for_scheme(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( [ {"host": "elastic.co", "port": 42, "use_ssl": True}, @@ -69,22 +73,26 @@ def test_strings_are_parsed_for_scheme(self) -> None: ) def test_dicts_are_left_unchanged(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( [{"host": "local", "extra": 123}], _normalize_hosts([{"host": "local", "extra": 123}]), ) def test_single_string_is_wrapped_in_list(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co")) class 
TestClient(OpenSearchTestCase): def test_request_timeout_is_passed_through_unescaped(self) -> None: + # pylint: disable=missing-function-docstring self.client.ping(request_timeout=0.1) calls = self.assert_url_called("HEAD", "/") self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls) def test_params_is_copied_when(self) -> None: + # pylint: disable=missing-function-docstring rt = object() params = dict(request_timeout=rt) self.client.ping(params=params) @@ -97,6 +105,7 @@ def test_params_is_copied_when(self) -> None: self.assertFalse(calls[0][0] is calls[1][0]) def test_headers_is_copied_when(self) -> None: + # pylint: disable=missing-function-docstring hv = "value" headers = dict(Authentication=hv) self.client.ping(headers=headers) @@ -109,39 +118,47 @@ def test_headers_is_copied_when(self) -> None: self.assertFalse(calls[0][0] is calls[1][0]) def test_from_in_search(self) -> None: + # pylint: disable=missing-function-docstring self.client.search(index="i", from_=10) calls = self.assert_url_called("POST", "/i/_search") self.assertEqual([({"from": "10"}, {}, None)], calls) def test_repr_contains_hosts(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual("", repr(self.client)) def test_repr_subclass(self) -> None: + # pylint: disable=missing-function-docstring class OtherOpenSearch(OpenSearch): pass self.assertEqual("", repr(OtherOpenSearch())) def test_repr_contains_hosts_passed_in(self) -> None: + # pylint: disable=missing-function-docstring self.assertIn("opensearchpy.org", repr(OpenSearch(["opensearch.org:123"]))) def test_repr_truncates_host_to_5(self) -> None: + # pylint: disable=missing-function-docstring hosts = [{"host": "opensearch" + str(i)} for i in range(10)] client = OpenSearch(hosts) self.assertNotIn("opensearch5", repr(client)) self.assertIn("...", repr(client)) def test_index_uses_post_if_id_is_empty(self) -> None: + # pylint: disable=missing-function-docstring self.client.index(index="my-index", id="", body={}) self.assert_url_called("POST", "/my-index/_doc") def test_index_uses_put_if_id_is_not_empty(self) -> None: + # pylint: disable=missing-function-docstring self.client.index(index="my-index", id=0, body={}) self.assert_url_called("PUT", "/my-index/_doc/0") def test_tasks_get_without_task_id_deprecated(self) -> None: + # pylint: disable=missing-function-docstring warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get() @@ -156,6 +173,7 @@ def test_tasks_get_without_task_id_deprecated(self) -> None: ) def test_tasks_get_with_task_id_not_deprecated(self) -> None: + # pylint: disable=missing-function-docstring warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get("task-1") diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index 3e9ad987..605eac4c 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ b/test_opensearchpy/test_client/test_cluster.py @@ -30,10 +30,12 @@ class TestCluster(OpenSearchTestCase): def test_stats_without_node_id(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") def test_stats_with_node_id(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.stats("node-1") self.assert_url_called("GET", "/_cluster/stats/nodes/node-1") @@ -41,6 +43,7 @@ def test_stats_with_node_id(self) -> None: 
self.assert_url_called("GET", "/_cluster/stats/nodes/node-2") def test_state_with_index_without_metric_defaults_to_all(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") diff --git a/test_opensearchpy/test_client/test_http.py b/test_opensearchpy/test_client/test_http.py index 145cbb6d..3d230c88 100644 --- a/test_opensearchpy/test_client/test_http.py +++ b/test_opensearchpy/test_client/test_http.py @@ -13,16 +13,19 @@ class TestHttp(OpenSearchTestCase): def test_http_get(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.get("/") self.assert_call_count_equals(1) self.assertEqual([(None, None, None)], self.assert_url_called("GET", "/", 1)) def test_http_head(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.head("/") self.assert_call_count_equals(1) self.assertEqual([(None, None, None)], self.assert_url_called("HEAD", "/", 1)) def test_http_put(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.put("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( @@ -30,6 +33,7 @@ def test_http_put(self) -> None: ) def test_http_post(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.post("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( @@ -37,6 +41,7 @@ def test_http_post(self) -> None: ) def test_http_post_with_params(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.post( "/xyz", headers={"X": "Y"}, params={"A": "B"}, body="body" ) @@ -47,6 +52,7 @@ def test_http_post_with_params(self) -> None: ) def test_http_delete(self) -> None: + # pylint: disable=missing-function-docstring self.client.http.delete("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index d45405e5..bf4e0f71 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -30,18 +30,22 @@ class TestIndices(OpenSearchTestCase): def test_create_one_index(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.create("test-index") self.assert_url_called("PUT", "/test-index") def test_delete_multiple_indices(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.delete(["test-index", "second.index", "third/index"]) self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex") def test_exists_index(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.exists("second.index,third/index") self.assert_url_called("HEAD", "/second.index,third%2Findex") def test_passing_empty_value_for_required_param_raises_exception(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(ValueError, self.client.indices.exists, index=None) self.assertRaises(ValueError, self.client.indices.exists, index=[]) self.assertRaises(ValueError, self.client.indices.exists, index="") diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py index 160a8bdd..9b69fc9f 100644 --- a/test_opensearchpy/test_client/test_overrides.py +++ b/test_opensearchpy/test_client/test_overrides.py @@ -32,34 +32,42 @@ class TestOverriddenUrlTargets(OpenSearchTestCase): def test_create(self) -> None: + # pylint: 
disable=missing-function-docstring self.client.create(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_create/test-id") def test_delete(self) -> None: + # pylint: disable=missing-function-docstring self.client.delete(index="test-index", id="test-id") self.assert_url_called("DELETE", "/test-index/_doc/test-id") def test_exists(self) -> None: + # pylint: disable=missing-function-docstring self.client.exists(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_doc/test-id") def test_explain(self) -> None: + # pylint: disable=missing-function-docstring self.client.explain(index="test-index", id="test-id") self.assert_url_called("POST", "/test-index/_explain/test-id") def test_get(self) -> None: + # pylint: disable=missing-function-docstring self.client.get(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_doc/test-id") def test_get_source(self) -> None: + # pylint: disable=missing-function-docstring self.client.get_source(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_source/test-id") def test_exists_source(self) -> None: + # pylint: disable=missing-function-docstring self.client.exists_source(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_source/test-id") def test_index(self) -> None: + # pylint: disable=missing-function-docstring self.client.index(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_doc") @@ -67,6 +75,7 @@ def test_index(self) -> None: self.assert_url_called("PUT", "/test-index/_doc/test-id") def test_termvectors(self) -> None: + # pylint: disable=missing-function-docstring self.client.termvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_termvectors") @@ -74,14 +83,17 @@ def test_termvectors(self) -> None: self.assert_url_called("POST", "/test-index/_termvectors/test-id") def test_mtermvectors(self) -> None: + # pylint: disable=missing-function-docstring self.client.mtermvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_mtermvectors") def test_update(self) -> None: + # pylint: disable=missing-function-docstring self.client.update(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_update/test-id") def test_cluster_state(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") @@ -92,6 +104,7 @@ def test_cluster_state(self) -> None: self.assert_url_called("GET", "/_cluster/state/test-metric/test-index") def test_cluster_stats(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") @@ -99,6 +112,7 @@ def test_cluster_stats(self) -> None: self.assert_url_called("GET", "/_cluster/stats/nodes/test-node") def test_indices_put_mapping(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.put_mapping(body={}) self.assert_url_called("PUT", "/_all/_mapping") @@ -106,5 +120,6 @@ def test_indices_put_mapping(self) -> None: self.assert_url_called("PUT", "/test-index/_mapping") def test_tasks_get(self) -> None: + # pylint: disable=missing-function-docstring with pytest.warns(DeprecationWarning): self.client.tasks.get() diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index f012ccbb..7a9b3df8 100644 --- 
a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -12,40 +12,48 @@ class TestAlerting(OpenSearchTestCase): def test_create_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Test Post Method self.client.alerting.create_monitor({}) self.assert_url_called("POST", "/_plugins/_alerting/monitors") def test_run_monitor(self) -> None: + # pylint: disable=missing-function-docstring self.client.alerting.run_monitor("...") self.assert_url_called("POST", "/_plugins/_alerting/monitors/.../_execute") def test_get_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Test Get Method self.client.alerting.get_monitor("...") self.assert_url_called("GET", "/_plugins/_alerting/monitors/...") def test_search_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Test Search Method self.client.alerting.search_monitor({}) self.assert_url_called("GET", "/_plugins/_alerting/monitors/_search") def test_update_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Test Update Method self.client.alerting.update_monitor("...") self.assert_url_called("PUT", "/_plugins/_alerting/monitors/...") def test_delete_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Test Delete Method self.client.alerting.delete_monitor("...") self.assert_url_called("DELETE", "/_plugins/_alerting/monitors/...") def test_create_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test Post Method self.client.alerting.create_destination({}) self.assert_url_called("POST", "/_plugins/_alerting/destinations") def test_get_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test Get Method # Get a specific destination @@ -57,20 +65,24 @@ def test_get_destination(self) -> None: self.assert_url_called("GET", "/_plugins/_alerting/destinations") def test_update_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test Update Method self.client.alerting.update_destination("...") self.assert_url_called("PUT", "/_plugins/_alerting/destinations/...") def test_delete_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test Delete Method self.client.alerting.delete_destination("...") self.assert_url_called("DELETE", "/_plugins/_alerting/destinations/...") def test_get_alerts(self) -> None: + # pylint: disable=missing-function-docstring self.client.alerting.get_alerts() self.assert_url_called("GET", "/_plugins/_alerting/monitors/alerts") def test_acknowledge_alerts(self) -> None: + # pylint: disable=missing-function-docstring self.client.alerting.acknowledge_alert("...") self.assert_url_called( "POST", "/_plugins/_alerting/monitors/.../_acknowledge/alerts" diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index a2052163..53a5cc16 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -12,10 +12,12 @@ class TestIndexManagement(OpenSearchTestCase): def test_create_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.put_policy("...") self.assert_url_called("PUT", "/_plugins/_ism/policies/...") def test_update_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.put_policy( "...", params={"if_seq_no": 7, 
"if_primary_term": 1} ) @@ -25,26 +27,32 @@ def test_update_policy(self) -> None: ) def test_add_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.add_policy("...") self.assert_url_called("POST", "/_plugins/_ism/add/...") def test_get_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.get_policy("...") self.assert_url_called("GET", "/_plugins/_ism/policies/...") def test_remove_policy_from_index(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.remove_policy_from_index("...") self.assert_url_called("POST", "/_plugins/_ism/remove/...") def test_change_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.change_policy("...") self.assert_url_called("POST", "/_plugins/_ism/change_policy/...") def test_retry(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.retry("...") self.assert_url_called("POST", "/_plugins/_ism/retry/...") def test_explain_index(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.explain_index("...", show_policy=True) self.assertEqual( [({"show_policy": b"true"}, {}, None)], @@ -52,5 +60,6 @@ def test_explain_index(self) -> None: ) def test_delete_policy(self) -> None: + # pylint: disable=missing-function-docstring self.client.index_management.delete_policy("...") self.assert_url_called("DELETE", "/_plugins/_ism/policies/...") diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index 793afda2..a92b07cb 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -14,6 +14,7 @@ class TestPluginsClient(TestCase): def test_plugins_client(self) -> None: + # pylint: disable=missing-function-docstring with self.assertWarns(Warning) as w: client = OpenSearch() # double-init diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 38a4b8cc..cb9331f4 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -12,35 +12,43 @@ class TestPointInTime(OpenSearchTestCase): def test_create_one_point_in_time(self) -> None: + # pylint: disable=missing-function-docstring index_name = "test-index" self.client.create_point_in_time(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") def test_delete_one_point_in_time(self) -> None: + # pylint: disable=missing-function-docstring self.client.delete_point_in_time(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") def test_delete_all_point_in_time(self) -> None: + # pylint: disable=missing-function-docstring self.client.delete_point_in_time(all=True) self.assert_url_called("DELETE", "/_search/point_in_time/_all") def test_list_all_point_in_time(self) -> None: + # pylint: disable=missing-function-docstring self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") def test_create_pit(self) -> None: + # pylint: disable=missing-function-docstring index_name = "test-index" self.client.create_pit(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") def test_delete_pit(self) -> None: + # pylint: disable=missing-function-docstring 
self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") def test_delete_all_pits(self) -> None: + # pylint: disable=missing-function-docstring self.client.delete_all_pits() self.assert_url_called("DELETE", "/_search/point_in_time/_all") def test_get_all_pits(self) -> None: + # pylint: disable=missing-function-docstring self.client.get_all_pits() self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index faf91297..8d83d13d 100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -11,5 +11,6 @@ class TestRemoteStore(OpenSearchTestCase): def test_remote_store_restore(self) -> None: + # pylint: disable=missing-function-docstring self.client.remote_store.restore(body=["index-1"]) self.assert_url_called("POST", "/_remotestore/_restore") diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index b3ac3d6f..53db865e 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -14,6 +14,7 @@ class TestRequests(TestCase): def test_connection_class(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=RequestsHttpConnection) self.assertEqual(client.transport.pool_maxsize, None) self.assertEqual(client.transport.connection_class, RequestsHttpConnection) @@ -22,6 +23,7 @@ def test_connection_class(self) -> None: ) def test_pool_maxsize(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index d30c85e7..599f1cf2 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -16,11 +16,13 @@ class TestUrlLib3(TestCase): def test_default(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch() self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertEqual(client.transport.pool_maxsize, None) def test_connection_class(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=Urllib3HttpConnection) self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertIsInstance( @@ -31,6 +33,7 @@ def test_connection_class(self) -> None: ) def test_pool_maxsize(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index efed662a..44c106d0 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -36,13 +36,16 @@ class TestQueryParams(TestCase): def setup_method(self, _: Any) -> None: + # pylint: disable=missing-function-docstring self.calls: Any = [] @query_params("simple_param") def func_to_wrap(self, *args: Any, **kwargs: Any) -> None: + # pylint: disable=missing-function-docstring self.calls.append((args, kwargs)) def 
test_handles_params(self) -> None: + # pylint: disable=missing-function-docstring self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, @@ -58,18 +61,21 @@ def test_handles_params(self) -> None: ) def test_handles_headers(self) -> None: + # pylint: disable=missing-function-docstring self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) def test_handles_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "request-id"}})] ) def test_handles_empty_none_and_normalization(self) -> None: + # pylint: disable=missing-function-docstring self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) @@ -86,6 +92,7 @@ def test_handles_empty_none_and_normalization(self) -> None: self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) def test_non_escaping_params(self) -> None: + # pylint: disable=missing-function-docstring # the query_params decorator doesn't validate "timeout" it simply avoids escaping as it did self.func_to_wrap(simple_param="x", timeout="4s") self.assertEqual( @@ -111,6 +118,7 @@ def test_non_escaping_params(self) -> None: ) def test_per_call_authentication(self) -> None: + # pylint: disable=missing-function-docstring self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], @@ -156,6 +164,7 @@ def test_per_call_authentication(self) -> None: class TestMakePath(TestCase): def test_handles_unicode(self) -> None: + # pylint: disable=missing-function-docstring id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) @@ -164,28 +173,34 @@ def test_handles_unicode(self) -> None: class TestEscape(TestCase): def test_handles_ascii(self) -> None: + # pylint: disable=missing-function-docstring string = "abc123" self.assertEqual(b"abc123", _escape(string)) def test_handles_unicode(self) -> None: + # pylint: disable=missing-function-docstring string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) def test_handles_bytestring(self) -> None: + # pylint: disable=missing-function-docstring string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): def test_proper_bulk_body_as_string_is_not_modified(self) -> None: + # pylint: disable=missing-function-docstring string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, _bulk_body(None, string_body)) def test_proper_bulk_body_as_bytestring_is_not_modified(self) -> None: + # pylint: disable=missing-function-docstring bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) def test_bulk_body_as_string_adds_trailing_newline(self) -> None: + # pylint: disable=missing-function-docstring string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', @@ -193,6 +208,7 @@ def test_bulk_body_as_string_adds_trailing_newline(self) -> None: ) def test_bulk_body_as_bytestring_adds_trailing_newline(self) -> None: + # pylint: disable=missing-function-docstring bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( 
b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 6725849a..f599d7cf 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -46,6 +46,7 @@ class TestBaseConnection(TestCase): def test_empty_warnings(self) -> None: + # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as w: con._raise_warnings(()) @@ -54,6 +55,7 @@ def test_empty_warnings(self) -> None: self.assertEqual(w, []) def test_raises_warnings(self) -> None: + # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as warn: @@ -76,6 +78,7 @@ def test_raises_warnings(self) -> None: ) def test_raises_warnings_when_folded(self) -> None: + # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings( @@ -88,6 +91,7 @@ def test_raises_warnings_when_folded(self) -> None: self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) def test_ipv6_host_and_port(self) -> None: + # pylint: disable=missing-function-docstring for kwargs, expected_host in [ ({"host": "::1"}, "http://[::1]:9200"), ({"host": "::1", "port": 443}, "http://[::1]:443"), @@ -99,6 +103,7 @@ def test_ipv6_host_and_port(self) -> None: assert conn.host == expected_host def test_compatibility_accept_header(self) -> None: + # pylint: disable=missing-function-docstring try: conn = Connection() assert "accept" not in conn.headers @@ -119,28 +124,33 @@ def test_compatibility_accept_header(self) -> None: os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") def test_ca_certs_ssl_cert_file(self) -> None: + # pylint: disable=missing-function-docstring cert = "/path/to/clientcert.pem" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_FILE", cert) assert Connection.default_ca_certs() == cert def test_ca_certs_ssl_cert_dir(self) -> None: + # pylint: disable=missing-function-docstring cert = "/path/to/clientcert/dir" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_DIR", cert) assert Connection.default_ca_certs() == cert def test_ca_certs_certifi(self) -> None: + # pylint: disable=missing-function-docstring import certifi assert Connection.default_ca_certs() == certifi.where() def test_no_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring with MonkeyPatch().context() as monkeypatch: monkeypatch.setitem(sys.modules, "certifi", None) assert Connection.default_ca_certs() is None def test_default_connection_is_returned_by_default(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() con, con2 = object(), object() @@ -151,6 +161,7 @@ def test_default_connection_is_returned_by_default(self) -> None: assert c.get_connection() is con def test_get_connection_created_connection_if_needed(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -166,6 +177,7 @@ def test_get_connection_created_connection_if_needed(self) -> None: assert [{"host": "localhost"}] == local.transport.hosts def test_configure_preserves_unchanged_connections(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( @@ -184,6 +196,7 @@ def 
test_configure_preserves_unchanged_connections(self) -> None: assert new_default is not default def test_remove_connection_removes_both_conn_and_conf(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( @@ -200,6 +213,7 @@ def test_remove_connection_removes_both_conn_and_conf(self) -> None: c.get_connection("default") def test_create_connection_constructs_client(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) @@ -207,6 +221,7 @@ def test_create_connection_constructs_client(self) -> None: assert [{"host": "opensearch.com"}] == con.transport.hosts def test_create_connection_adds_our_serializer(self) -> None: + # pylint: disable=missing-function-docstring c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index a081fd05..b594b179 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -84,20 +84,24 @@ def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any: return args[0] def test_custom_http_auth_is_allowed(self) -> None: + # pylint: disable=missing-function-docstring auth = AuthBase() c = RequestsHttpConnection(http_auth=auth) self.assertEqual(auth, c.session.auth) def test_timeout_set(self) -> None: + # pylint: disable=missing-function-docstring con = RequestsHttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring con = RequestsHttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") def test_no_http_compression(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() self.assertFalse(con.http_compress) @@ -110,6 +114,7 @@ def test_no_http_compression(self) -> None: self.assertNotIn("accept-encoding", req.headers) def test_http_compression(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection( {"http_compress": True}, ) @@ -133,6 +138,7 @@ def test_http_compression(self) -> None: self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") def test_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( {"use_ssl": True, "url_prefix": "url", "verify_certs": False} @@ -150,19 +156,23 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: self.assertEqual(None, request.body) def test_uses_given_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring path = "/path/to/my/ca_certs.pem" c = RequestsHttpConnection(ca_certs=path) self.assertEqual(path, c.session.verify) def test_uses_default_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring c = RequestsHttpConnection() self.assertEqual(Connection.default_ca_certs(), c.session.verify) def test_uses_no_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring c = RequestsHttpConnection(verify_certs=False) self.assertFalse(c.session.verify) def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( 
{ @@ -181,6 +191,7 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: self.assertEqual(None, request.body) def test_merge_headers(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection( connection_params={"headers": {"h1": "v1", "h2": "v2"}} ) @@ -190,12 +201,14 @@ def test_merge_headers(self) -> None: self.assertEqual(req.headers["h3"], "v3") def test_default_headers(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() req = self._get_request(con, "GET", "/") self.assertEqual(req.headers["content-type"], "application/json") self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) def test_custom_headers(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() req = self._get_request( con, @@ -210,37 +223,45 @@ def test_custom_headers(self) -> None: self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") def test_http_auth(self) -> None: + # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth="username:secret") self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_tuple(self) -> None: + # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth=("username", "secret")) self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_list(self) -> None: + # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth=["username", "secret"]) self.assertEqual(("username", "secret"), con.session.auth) def test_repr(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) self.assertEqual( "", repr(con) ) def test_conflict_error_is_returned_on_409(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") def test_not_found_error_is_returned_on_404(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") def test_request_error_is_returned_on_400(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=400) self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @@ -248,6 +269,7 @@ def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection( response_body=b'{"answer": 42}', response_code=500 ) @@ -276,6 +298,7 @@ def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: + # pylint: disable=missing-function-docstring con = 
self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", @@ -315,6 +338,7 @@ def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_uncompressed_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -338,6 +362,7 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection() @@ -348,6 +373,7 @@ def test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_failure_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) with pytest.raises(NotFoundError) as e: con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -361,6 +387,7 @@ def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_failure_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection(response_code=404) @@ -372,6 +399,7 @@ def test_failure_body_not_logged(self, logger: Any) -> None: self.assertEqual(logger.debug.call_count, 0) def test_defaults(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request(con, "GET", "/") @@ -380,6 +408,7 @@ def test_defaults(self) -> None: self.assertEqual(None, request.body) def test_params_properly_encoded(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request( con, "GET", "/", params={"param": "value with spaces"} @@ -390,6 +419,7 @@ def test_params_properly_encoded(self) -> None: self.assertEqual(None, request.body) def test_body_attached(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request(con, "GET", "/", body='{"answer": 42}') @@ -398,6 +428,7 @@ def test_body_attached(self) -> None: self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) def test_http_auth_attached(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection({"http_auth": "username:secret"}) request = self._get_request(con, "GET", "/") @@ -405,6 +436,7 @@ def test_http_auth_attached(self) -> None: @patch("opensearchpy.connection.base.tracer") def test_url_prefix(self, tracer: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 @@ -422,12 +454,14 @@ def test_url_prefix(self, tracer: Any) -> None: ) def test_surrogatepass_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip def test_recursion_error_reraised(self) -> None: + # 
pylint: disable=missing-function-docstring conn = RequestsHttpConnection() def send_raise(*_: Any, **__: Any) -> Any: @@ -440,6 +474,7 @@ def send_raise(*_: Any, **__: Any) -> Any: self.assertEqual(str(e.value), "Wasn't modified!") def mock_session(self) -> Any: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -452,6 +487,7 @@ def mock_session(self) -> Any: return dummy_session def test_aws_signer_as_http_auth(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" import requests @@ -470,6 +506,7 @@ def test_aws_signer_as_http_auth(self) -> None: self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) def test_aws_signer_when_service_is_specified(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -489,6 +526,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") def test_aws_signer_signs_with_query_string(self, mock_sign: Any) -> None: + # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -514,7 +552,7 @@ class TestRequestsConnectionRedirect(TestCase): @classmethod def setup_class(cls) -> None: - # Start servers + """Start servers""" cls.server1 = TestHTTPServer(port=8080) cls.server1.start() cls.server2 = TestHTTPServer(port=8090) @@ -522,12 +560,13 @@ def setup_class(cls) -> None: @classmethod def teardown_class(cls) -> None: - # Stop servers + """Stop servers""" cls.server2.stop() cls.server1.stop() # allow_redirects = False def test_redirect_failure_when_allow_redirect_false(self) -> None: + # pylint: disable=missing-function-docstring conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) with pytest.raises(TransportError) as e: conn.perform_request("GET", "/redirect", allow_redirects=False) @@ -535,6 +574,7 @@ def test_redirect_failure_when_allow_redirect_false(self) -> None: # allow_redirects = True (Default) def test_redirect_success_when_allow_redirect_true(self) -> None: + # pylint: disable=missing-function-docstring conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) user_agent = conn._get_default_user_agent() status, headers, data = conn.perform_request("GET", "/redirect") @@ -552,6 +592,7 @@ def test_redirect_success_when_allow_redirect_true(self) -> None: class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): def mock_session(self) -> Any: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -566,6 +607,7 @@ def mock_session(self) -> Any: def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth( self, ) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" import requests diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index cca1945d..3e220bb2 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -66,6 +66,7 @@ def _dummy_urlopen(*args: Any, **kwargs: Any) -> Any: return con def test_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring try: context = ssl.create_default_context() except AttributeError: @@ -82,10 +83,12 @@ def test_ssl_context(self) -> None: self.assertTrue(con.use_ssl) def 
test_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") def test_no_http_compression(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection() self.assertFalse(con.http_compress) self.assertNotIn("accept-encoding", con.headers) @@ -99,6 +102,7 @@ def test_no_http_compression(self) -> None: self.assertNotIn("content-encoding", kwargs["headers"]) def test_http_compression(self) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection({"http_compress": True}) self.assertTrue(con.http_compress) self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") @@ -126,6 +130,7 @@ def test_http_compression(self) -> None: self.assertNotIn("content-encoding", kwargs["headers"]) def test_default_user_agent(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), @@ -133,10 +138,12 @@ def test_default_user_agent(self) -> None: ) def test_timeout_set(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_keep_alive_is_on_by_default(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertEqual( { @@ -148,6 +155,7 @@ def test_keep_alive_is_on_by_default(self) -> None: ) def test_http_auth(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth="username:secret") self.assertEqual( { @@ -160,6 +168,7 @@ def test_http_auth(self) -> None: ) def test_http_auth_tuple(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth=("username", "secret")) self.assertEqual( { @@ -172,6 +181,7 @@ def test_http_auth_tuple(self) -> None: ) def test_http_auth_list(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth=["username", "secret"]) self.assertEqual( { @@ -188,6 +198,7 @@ def test_http_auth_list(self) -> None: return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), ) def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: + # pylint: disable=missing-function-docstring from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") @@ -205,6 +216,7 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: self.assertIn("X-Amz-Content-SHA256", headers) def test_aws_signer_as_http_auth(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -217,6 +229,7 @@ def test_aws_signer_as_http_auth(self) -> None: self.assertIn("X-Amz-Content-SHA256", headers) def test_aws_signer_when_region_is_null(self) -> None: + # pylint: disable=missing-function-docstring session = self.mock_session() from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -230,6 +243,7 @@ def test_aws_signer_when_region_is_null(self) -> None: self.assertEqual(str(e.value), "Region cannot be empty") def test_aws_signer_when_credentials_is_null(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-1" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -243,6 +257,7 @@ def test_aws_signer_when_credentials_is_null(self) -> None: self.assertEqual(str(e.value), 
"Credentials cannot be empty") def test_aws_signer_when_service_is_specified(self) -> None: + # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -256,6 +271,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: self.assertIn("X-Amz-Security-Token", headers) def mock_session(self) -> Any: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -268,6 +284,7 @@ def mock_session(self) -> Any: return dummy_session def test_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) @@ -279,6 +296,7 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: + # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -288,16 +306,19 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_doesnt_use_https_if_not_specified(self) -> None: + # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) def test_no_warning_when_using_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: Urllib3HttpConnection(ssl_context=ctx) self.assertEqual(0, len(w)) def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + # pylint: disable=missing-function-docstring kwargs: Any for kwargs in ( {"ssl_show_warn": False}, @@ -321,20 +342,24 @@ def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: ) def test_uses_given_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring path = "/path/to/my/ca_certs.pem" c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) self.assertEqual(path, c.pool.ca_certs) def test_uses_default_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring c = Urllib3HttpConnection(use_ssl=True) self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) def test_uses_no_ca_certs(self) -> None: + # pylint: disable=missing-function-docstring c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertIsNone(c.pool.ca_certs) @patch("opensearchpy.connection.base.logger") def test_uncompressed_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -346,6 +371,7 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection() @@ -356,6 +382,7 @@ def test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_failure_body_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) 
with pytest.raises(NotFoundError) as e: con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -369,6 +396,7 @@ def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_failure_body_not_logged(self, logger: Any) -> None: + # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection(response_code=404) @@ -380,12 +408,14 @@ def test_failure_body_not_logged(self, logger: Any) -> None: self.assertEqual(logger.debug.call_count, 0) def test_surrogatepass_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip def test_recursion_error_reraised(self) -> None: + # pylint: disable=missing-function-docstring conn = Urllib3HttpConnection() def urlopen_raise(*_: Any, **__: Any) -> Any: @@ -400,6 +430,7 @@ def urlopen_raise(*_: Any, **__: Any) -> Any: class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): def mock_session(self) -> Any: + # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -414,6 +445,7 @@ def mock_session(self) -> Any: def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth( self, ) -> None: + # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 9f1a7d9a..8aea61e9 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -41,15 +41,18 @@ class TestConnectionPool(TestCase): def test_dummy_cp_raises_exception_on_more_connections(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, DummyConnectionPool, []) self.assertRaises( ImproperlyConfigured, DummyConnectionPool, [object(), object()] ) def test_raises_exception_when_no_connections_defined(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, ConnectionPool, []) def test_default_round_robin(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) connections = set() @@ -58,6 +61,7 @@ def test_default_round_robin(self) -> None: self.assertEqual(connections, set(range(100))) def test_disable_shuffling(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)], randomize_hosts=False) connections = [] @@ -66,6 +70,7 @@ def test_disable_shuffling(self) -> None: self.assertEqual(connections, list(range(100))) def test_selectors_have_access_to_connection_opts(self) -> None: + # pylint: disable=missing-function-docstring class MySelector(RoundRobinSelector): def select(self, connections: Any) -> Any: return self.connection_opts[ @@ -84,6 +89,7 @@ def select(self, connections: Any) -> Any: self.assertEqual(connections, [x * x for x in range(100)]) def test_dead_nodes_are_removed_from_active_connections(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -93,6 +99,7 @@ def test_dead_nodes_are_removed_from_active_connections(self) -> None: 
self.assertEqual((now + 60, 42), pool.dead.get()) def test_connection_is_skipped_when_dead(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(2)]) pool.mark_dead(0) @@ -102,6 +109,7 @@ def test_connection_is_skipped_when_dead(self) -> None: ) def test_new_connection_is_not_marked_dead(self) -> None: + # pylint: disable=missing-function-docstring # Create 10 connections pool = ConnectionPool([(Connection(), {}) for _ in range(10)]) @@ -112,9 +120,10 @@ def test_new_connection_is_not_marked_dead(self) -> None: # Nothing should be marked dead self.assertEqual(0, len(pool.dead_count)) - def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible( + def test_connection_is_forcibly_resurrected_when_no_live_ones_are_available( self, ) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(2)]) pool.dead_count[0] = 1 pool.mark_dead(0) # failed twice, longer timeout @@ -125,6 +134,7 @@ def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible( self.assertEqual([1], pool.connections) def test_connection_is_resurrected_after_its_timeout(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -134,6 +144,7 @@ def test_connection_is_resurrected_after_its_timeout(self) -> None: self.assertEqual(100, len(pool.connections)) def test_force_resurrect_always_returns_a_connection(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(0, {})]) pool.connections = [] @@ -142,6 +153,7 @@ def test_force_resurrect_always_returns_a_connection(self) -> None: self.assertTrue(pool.dead.empty()) def test_already_failed_connection_has_longer_timeout(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 @@ -151,6 +163,7 @@ def test_already_failed_connection_has_longer_timeout(self) -> None: self.assertEqual((now + 4 * 60, 42), pool.dead.get()) def test_timeout_for_failed_connections_is_limitted(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 245 @@ -160,6 +173,7 @@ def test_timeout_for_failed_connections_is_limitted(self) -> None: self.assertEqual((now + 32 * 60, 42), pool.dead.get()) def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self) -> None: + # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index a918e2b0..8f845382 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -32,6 +32,7 @@ class TestTransformError(TestCase): def test_transform_error_parse_with_error_reason(self) -> None: + # pylint: disable=missing-function-docstring e = TransportError( 500, "InternalServerError", @@ -43,6 +44,7 @@ def test_transform_error_parse_with_error_reason(self) -> None: ) def test_transform_error_parse_with_error_string(self) -> None: + # pylint: disable=missing-function-docstring e = TransportError( 500, "InternalServerError", {"error": "something error message"} ) diff --git a/test_opensearchpy/test_helpers/conftest.py b/test_opensearchpy/test_helpers/conftest.py index 06355e24..d5ebd933 100644 --- a/test_opensearchpy/test_helpers/conftest.py 
+++ b/test_opensearchpy/test_helpers/conftest.py @@ -35,6 +35,7 @@ @fixture # type: ignore def mock_client(dummy_response: Any) -> Any: + # pylint: disable=missing-function-docstring client = Mock() client.search.return_value = dummy_response add_connection("mock", client) @@ -45,6 +46,7 @@ def mock_client(dummy_response: Any) -> Any: @fixture # type: ignore def dummy_response() -> Any: + # pylint: disable=missing-function-docstring return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -94,6 +96,7 @@ def dummy_response() -> Any: @fixture # type: ignore def aggs_search() -> Any: + # pylint: disable=missing-function-docstring from opensearchpy import Search s = Search(index="flat-git") @@ -109,6 +112,7 @@ def aggs_search() -> Any: @fixture # type: ignore def aggs_data() -> Any: + # pylint: disable=missing-function-docstring return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index c43c7322..26913b47 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -62,6 +62,7 @@ class TestParallelBulk(TestCase): side_effect=mock_process_bulk_chunk, ) def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: + # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) @@ -69,6 +70,7 @@ def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: @mock.patch("opensearchpy.OpenSearch.bulk") def test_with_all_options(self, _bulk: Any) -> None: + # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) list( helpers.parallel_bulk( @@ -92,6 +94,7 @@ def test_with_all_options(self, _bulk: Any) -> None: def test_process_bulk_chunk_with_all_options( self, _process_bulk_chunk: Any ) -> None: + # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) client = OpenSearch() list( @@ -127,6 +130,7 @@ def test_process_bulk_chunk_with_all_options( ], ) def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> None: + # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -136,15 +140,20 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> No class TestChunkActions(TestCase): def setup_method(self, _: Any) -> None: + """ + creates some documents for testing + """ self.actions: Any = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip def test_expand_action(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) ) def test_expand_action_actions(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( helpers.expand_action( {"_op_type": "delete", "_id": "id", "_index": "index"} @@ -176,6 +185,7 @@ def test_expand_action_actions(self) -> None: ) def test_expand_action_options(self) -> None: + # pylint: disable=missing-function-docstring for option in ( "_id", "_index", @@ -207,6 +217,7 @@ def test_expand_action_options(self) -> None: ) def test__source_metadata_or_source(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( helpers.expand_action({"_source": {"key": "val"}}), 
({"index": {}}, {"key": "val"}), @@ -235,6 +246,7 @@ def test__source_metadata_or_source(self) -> None: ) def test_chunks_are_chopped_by_byte_size(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( 100, len( @@ -243,6 +255,7 @@ def test_chunks_are_chopped_by_byte_size(self) -> None: ) def test_chunks_are_chopped_by_chunk_size(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( 10, len( @@ -253,6 +266,7 @@ def test_chunks_are_chopped_by_chunk_size(self) -> None: ) def test_chunks_are_chopped_by_byte_size_properly(self) -> None: + # pylint: disable=missing-function-docstring max_byte_size = 170 chunks = list( helpers._chunk_actions( @@ -268,6 +282,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self) -> None: class TestExpandActions(TestCase): def test_string_actions_are_marked_as_simple_inserts(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) @@ -280,7 +295,9 @@ class TestScanFunction(TestCase): def test_scan_with_missing_hits_key( self, mock_search: Mock, mock_scroll: Mock, mock_clear_scroll: Mock ) -> None: - # Simulate a response where the 'hits' key is missing + """ + Simulate a response where the 'hits' key is missing + """ mock_search.return_value = {"_scroll_id": "dummy_scroll_id", "_shards": {}} mock_scroll.side_effect = [{"_scroll_id": "dummy_scroll_id", "_shards": {}}] diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 006edbe4..ea01cf58 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -30,6 +30,7 @@ def test_repr() -> None: + # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -37,6 +38,7 @@ def test_repr() -> None: def test_meta() -> None: + # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -50,6 +52,7 @@ def test_meta() -> None: def test_meta_from_dict() -> None: + # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -59,6 +62,7 @@ def test_meta_from_dict() -> None: def test_aggs_creates_proper_agg() -> None: + # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) @@ -66,6 +70,7 @@ def test_aggs_creates_proper_agg() -> None: def test_aggs_handles_nested_aggs_properly() -> None: + # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -74,11 +79,13 @@ def test_aggs_handles_nested_aggs_properly() -> None: def test_aggs_passes_aggs_through() -> None: + # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") assert aggs.A(a) is a def test_aggs_from_dict() -> None: + # pylint: disable=missing-function-docstring d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -95,6 +102,7 @@ def test_aggs_from_dict() -> None: def test_aggs_fails_with_incorrect_dict() -> None: + # pylint: disable=missing-function-docstring correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -115,6 +123,7 @@ def 
test_aggs_fails_with_incorrect_dict() -> None: def test_aggs_fails_with_agg_and_params() -> None: + # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") with raises(Exception): @@ -122,6 +131,7 @@ def test_aggs_fails_with_agg_and_params() -> None: def test_buckets_are_nestable() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -131,6 +141,7 @@ def test_buckets_are_nestable() -> None: def test_metric_inside_buckets() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.metric("max_score", "max", field="score") @@ -140,6 +151,7 @@ def test_metric_inside_buckets() -> None: def test_buckets_equals_counts_subaggs() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") b = aggs.Terms(field="tags") @@ -148,6 +160,7 @@ def test_buckets_equals_counts_subaggs() -> None: def test_buckets_to_dict() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") @@ -166,6 +179,7 @@ def test_buckets_to_dict() -> None: def test_nested_buckets_are_reachable_as_getitem() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -174,6 +188,7 @@ def test_nested_buckets_are_reachable_as_getitem() -> None: def test_nested_buckets_are_settable_as_getitem() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a["per_author"] = aggs.A("terms", field="author.raw") @@ -181,6 +196,7 @@ def test_nested_buckets_are_settable_as_getitem() -> None: def test_filter_can_be_instantiated_using_positional_args() -> None: + # pylint: disable=missing-function-docstring a = aggs.Filter(query.Q("term", f=42)) assert {"filter": {"term": {"f": 42}}} == a.to_dict() @@ -189,6 +205,7 @@ def test_filter_can_be_instantiated_using_positional_args() -> None: def test_filter_aggregation_as_nested_agg() -> None: + # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("filtered", "filter", query.Q("term", f=42)) @@ -199,6 +216,7 @@ def test_filter_aggregation_as_nested_agg() -> None: def test_filter_aggregation_with_nested_aggs() -> None: + # pylint: disable=missing-function-docstring a = aggs.Filter(query.Q("term", f=42)) a.bucket("testing", "terms", field="tags") @@ -209,6 +227,7 @@ def test_filter_aggregation_with_nested_aggs() -> None: def test_filters_correctly_identifies_the_hash() -> None: + # pylint: disable=missing-function-docstring a = aggs.A( "filters", filters={ @@ -229,6 +248,7 @@ def test_filters_correctly_identifies_the_hash() -> None: def test_bucket_sort_agg() -> None: + # pylint: disable=missing-function-docstring bucket_sort_agg = aggs.BucketSort(sort=[{"total_sales": {"order": "desc"}}], size=3) assert bucket_sort_agg.to_dict() == { "bucket_sort": {"sort": [{"total_sales": {"order": "desc"}}], "size": 3} @@ -254,6 +274,7 @@ def test_bucket_sort_agg() -> None: def test_bucket_sort_agg_only_trnunc() -> None: + # pylint: disable=missing-function-docstring bucket_sort_agg = aggs.BucketSort(**{"from": 1, "size": 1}) assert bucket_sort_agg.to_dict() == {"bucket_sort": {"from": 1, "size": 1}} @@ -266,24 +287,28 @@ def test_bucket_sort_agg_only_trnunc() -> None: def test_geohash_grid_aggregation() -> None: + # pylint: disable=missing-function-docstring a = 
aggs.GeohashGrid(**{"field": "centroid", "precision": 3}) assert {"geohash_grid": {"field": "centroid", "precision": 3}} == a.to_dict() def test_geotile_grid_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.GeotileGrid(**{"field": "centroid", "precision": 3}) assert {"geotile_grid": {"field": "centroid", "precision": 3}} == a.to_dict() def test_boxplot_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.Boxplot(field="load_time") assert {"boxplot": {"field": "load_time"}} == a.to_dict() def test_rare_terms_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.RareTerms(field="the-field") a.bucket("total_sales", "sum", field="price") a.bucket( @@ -305,17 +330,20 @@ def test_rare_terms_aggregation() -> None: def test_variable_width_histogram_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.VariableWidthHistogram(field="price", buckets=2) assert {"variable_width_histogram": {"buckets": 2, "field": "price"}} == a.to_dict() def test_median_absolute_deviation_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.MedianAbsoluteDeviation(field="rating") assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() def test_t_test_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.TTest( a={"field": "startup_time_before"}, b={"field": "startup_time_after"}, @@ -332,6 +360,7 @@ def test_t_test_aggregation() -> None: def test_inference_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.Inference(model_id="model-id", buckets_path={"agg_name": "agg_name"}) assert { "inference": {"buckets_path": {"agg_name": "agg_name"}, "model_id": "model-id"} @@ -339,6 +368,7 @@ def test_inference_aggregation() -> None: def test_moving_percentiles_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.DateHistogram() a.bucket("the_percentile", "percentiles", field="price", percents=[1.0, 99.0]) a.pipeline( @@ -359,6 +389,7 @@ def test_moving_percentiles_aggregation() -> None: def test_normalize_aggregation() -> None: + # pylint: disable=missing-function-docstring a = aggs.Normalize(buckets_path="normalized", method="percent_of_sum") assert { "normalize": {"buckets_path": "normalized", "method": "percent_of_sum"} diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index d335b565..4c645df2 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -30,12 +30,14 @@ def test_analyzer_serializes_as_name() -> None: + # pylint: disable=missing-function-docstring a = analysis.analyzer("my_analyzer") assert "my_analyzer" == a.to_dict() def test_analyzer_has_definition() -> None: + # pylint: disable=missing-function-docstring a = analysis.CustomAnalyzer( "my_analyzer", tokenizer="keyword", filter=["lowercase"] ) @@ -48,6 +50,7 @@ def test_analyzer_has_definition() -> None: def test_simple_multiplexer_filter() -> None: + # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -76,6 +79,7 @@ def test_simple_multiplexer_filter() -> None: def test_multiplexer_with_custom_filter() -> None: + # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -107,6 +111,7 @@ def test_multiplexer_with_custom_filter() -> None: def test_conditional_token_filter() -> None: + # pylint: 
disable=missing-function-docstring a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -144,6 +149,7 @@ def test_conditional_token_filter() -> None: def test_conflicting_nested_filters_cause_error() -> None: + # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -166,12 +172,14 @@ def test_conflicting_nested_filters_cause_error() -> None: def test_normalizer_serializes_as_name() -> None: + # pylint: disable=missing-function-docstring n = analysis.normalizer("my_normalizer") assert "my_normalizer" == n.to_dict() def test_normalizer_has_definition() -> None: + # pylint: disable=missing-function-docstring n = analysis.CustomNormalizer( "my_normalizer", filter=["lowercase", "asciifolding"], char_filter=["quote"] ) @@ -184,6 +192,7 @@ def test_normalizer_has_definition() -> None: def test_tokenizer() -> None: + # pylint: disable=missing-function-docstring t = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) assert t.to_dict() == "trigram" @@ -191,6 +200,7 @@ def test_tokenizer() -> None: def test_custom_analyzer_can_collect_custom_items() -> None: + # pylint: disable=missing-function-docstring trigram = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) my_stop = analysis.token_filter("my_stop", "stop", stopwords=["a", "b"]) umlauts = analysis.char_filter("umlauts", "pattern_replace", mappings=["ü=>ue"]) @@ -218,6 +228,7 @@ def test_custom_analyzer_can_collect_custom_items() -> None: def test_stemmer_analyzer_can_pass_name() -> None: + # pylint: disable=missing-function-docstring t = analysis.token_filter( "my_english_filter", name="minimal_english", type="stemmer" ) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index d60dd6b2..08351c0d 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -136,6 +136,7 @@ class Index: def test_range_serializes_properly() -> None: + # pylint: disable=missing-function-docstring class DocumentD(document.Document): lr = field.LongRange() @@ -149,6 +150,7 @@ class DocumentD(document.Document): def test_range_deserializes_properly() -> None: + # pylint: disable=missing-function-docstring class DocumentD(document.InnerDoc): lr = field.LongRange() @@ -159,12 +161,14 @@ class DocumentD(document.InnerDoc): def test_resolve_nested() -> None: + # pylint: disable=missing-function-docstring nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: + # pylint: disable=missing-function-docstring class DocumentA(document.Document): name = field.Text() @@ -180,17 +184,20 @@ class DocumentB(document.Document): def test_ip_address_serializes_properly() -> None: + # pylint: disable=missing-function-docstring host: Any = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() def test_matches_uses_index() -> None: + # pylint: disable=missing-function-docstring assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) def test_matches_with_no_name_always_matches() -> None: + # pylint: disable=missing-function-docstring class DocumentD(document.Document): pass @@ -199,6 +206,7 @@ class DocumentD(document.Document): def test_matches_accepts_wildcards() -> None: + # pylint: 
disable=missing-function-docstring class MyDoc(document.Document): class Index: name = "my-*" @@ -208,6 +216,7 @@ class Index: def test_assigning_attrlist_to_field() -> None: + # pylint: disable=missing-function-docstring sc: Any = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -216,12 +225,14 @@ def test_assigning_attrlist_to_field() -> None: def test_optional_inner_objects_are_not_validated_if_missing() -> None: + # pylint: disable=missing-function-docstring d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None def test_custom_field() -> None: + # pylint: disable=missing-function-docstring s1: Any = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s1.to_dict() @@ -233,12 +244,14 @@ def test_custom_field() -> None: def test_custom_field_mapping() -> None: + # pylint: disable=missing-function-docstring assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() def test_custom_field_in_nested() -> None: + # pylint: disable=missing-function-docstring s: Any = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -247,6 +260,7 @@ def test_custom_field_in_nested() -> None: def test_multi_works_after_doc_has_been_saved() -> None: + # pylint: disable=missing-function-docstring c: Any = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -255,6 +269,7 @@ def test_multi_works_after_doc_has_been_saved() -> None: def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: + # pylint: disable=missing-function-docstring # Issue #359 c: Any = DocWithNested(comments=[Comment(title="First!")]) @@ -264,12 +279,14 @@ def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: def test_null_value_for_object() -> None: + # pylint: disable=missing-function-docstring d: Any = MyDoc(inner=None) assert d.inner is None def test_inherited_doc_types_can_override_index() -> None: + # pylint: disable=missing-function-docstring class MyDocDifferentIndex(MySubDoc): _index: Any @@ -304,6 +321,7 @@ class Index: def test_to_dict_with_meta() -> None: + # pylint: disable=missing-function-docstring d: Any = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -315,6 +333,7 @@ def test_to_dict_with_meta() -> None: def test_to_dict_with_meta_includes_custom_index() -> None: + # pylint: disable=missing-function-docstring d: Any = MySubDoc(title="hello") d.meta.index = "other-index" @@ -322,6 +341,7 @@ def test_to_dict_with_meta_includes_custom_index() -> None: def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: + # pylint: disable=missing-function-docstring d: Any = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() @@ -329,6 +349,7 @@ def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: def test_attribute_can_be_removed() -> None: + # pylint: disable=missing-function-docstring d: Any = MyDoc(title="hello") del d.title @@ -336,6 +357,7 @@ def test_attribute_can_be_removed() -> None: def test_doc_type_can_be_correctly_pickled() -> None: + # pylint: disable=missing-function-docstring d: Any = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -351,6 +373,7 @@ def test_doc_type_can_be_correctly_pickled() -> None: def test_meta_is_accessible_even_on_empty_doc() -> None: + # pylint: disable=missing-function-docstring d1: Any = MyDoc() assert d1.meta == {} @@ -359,6 +382,7 @@ def test_meta_is_accessible_even_on_empty_doc() -> None: def test_meta_field_mapping() -> None: + # 
pylint: disable=missing-function-docstring class User(document.Document): username = field.Text() @@ -378,6 +402,7 @@ class Meta: def test_multi_value_fields() -> None: + # pylint: disable=missing-function-docstring class Blog(document.Document): tags = field.Keyword(multi=True) @@ -389,6 +414,7 @@ class Blog(document.Document): def test_docs_with_properties() -> None: + # pylint: disable=missing-function-docstring class User(document.Document): pwd_hash: Any = field.Text() @@ -417,6 +443,7 @@ def password(self, pwd: Any) -> None: def test_nested_can_be_assigned_to() -> None: + # pylint: disable=missing-function-docstring d1: Any = DocWithNested(comments=[Comment(title="First!")]) d2: Any = DocWithNested() @@ -428,12 +455,14 @@ def test_nested_can_be_assigned_to() -> None: def test_nested_can_be_none() -> None: + # pylint: disable=missing-function-docstring d: Any = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() def test_nested_defaults_to_list_and_can_be_updated() -> None: + # pylint: disable=missing-function-docstring md: Any = DocWithNested() assert [] == md.comments @@ -443,6 +472,7 @@ def test_nested_defaults_to_list_and_can_be_updated() -> None: def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] @@ -455,12 +485,14 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: def test_to_dict_ignores_empty_collections() -> None: + # pylint: disable=missing-function-docstring md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() def test_declarative_mapping_definition() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MyDoc, document.Document) assert hasattr(MyDoc, "_doc_type") assert { @@ -474,6 +506,7 @@ def test_declarative_mapping_definition() -> None: def test_you_can_supply_own_mapping_instance() -> None: + # pylint: disable=missing-function-docstring class MyD(document.Document): title = field.Text() @@ -488,6 +521,7 @@ class Meta: def test_document_can_be_created_dynamically() -> None: + # pylint: disable=missing-function-docstring n = datetime.now() md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" 
@@ -509,6 +543,7 @@ def test_document_can_be_created_dynamically() -> None: def test_invalid_date_will_raise_exception() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -516,6 +551,7 @@ def test_invalid_date_will_raise_exception() -> None: def test_document_inheritance() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.Document) assert hasattr(MySubDoc, "_doc_type") @@ -530,6 +566,7 @@ def test_document_inheritance() -> None: def test_child_class_can_override_parent() -> None: + # pylint: disable=missing-function-docstring class DocumentA(document.Document): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -548,6 +585,7 @@ class DocumentB(DocumentA): def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: + # pylint: disable=missing-function-docstring md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -558,6 +596,7 @@ def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: def test_index_inheritance() -> None: + # pylint: disable=missing-function-docstring assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.Document) @@ -575,6 +614,7 @@ def test_index_inheritance() -> None: def test_meta_fields_can_be_set_directly_in_init() -> None: + # pylint: disable=missing-function-docstring p = object() md: Any = MyDoc(_id=p, title="Hello World!") @@ -582,24 +622,28 @@ def test_meta_fields_can_be_set_directly_in_init() -> None: def test_save_no_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): md.save(using="mock") def test_delete_no_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): md.delete(using="mock") def test_update_no_fields() -> None: + # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(IllegalOperation): md.update() def test_search_with_custom_alias_and_index(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -609,6 +653,7 @@ def test_search_with_custom_alias_and_index(mock_client: Any) -> None: def test_from_opensearch_respects_underscored_non_meta_fields() -> None: + # pylint: disable=missing-function-docstring doc = { "_index": "test-index", "_id": "opensearch", @@ -633,6 +678,7 @@ class Index: def test_nested_and_object_inner_doc() -> None: + # pylint: disable=missing-function-docstring class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) @@ -650,6 +696,7 @@ class MySubDocWithNested(MyDoc): def test_save_double(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring class MyDocumentWithDouble(MyDoc): a_double: Union[float, field.Double] = field.Double() diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index d1874541..922d6989 100644 --- a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -50,6 +50,7 @@ class BlogSearch(FacetedSearch): def test_query_is_created_properly() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search") s = bs.build_search() @@ 
-73,6 +74,7 @@ def test_query_is_created_properly() -> None: def test_query_is_created_properly_with_sort_tuple() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -97,6 +99,7 @@ def test_query_is_created_properly_with_sort_tuple() -> None: def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -120,6 +123,7 @@ def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: def test_filters_are_applied_to_search_ant_relevant_facets() -> None: + # pylint: disable=missing-function-docstring bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -154,6 +158,7 @@ def test_filters_are_applied_to_search_ant_relevant_facets() -> None: def test_date_histogram_facet_with_1970_01_01_date() -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -187,6 +192,7 @@ def test_date_histogram_facet_with_1970_01_01_date() -> None: ], ) def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -199,6 +205,7 @@ def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> Non def test_date_histogram_no_interval_keyerror() -> None: + # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index 65dbab5a..6afb7684 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -37,6 +37,7 @@ def test_date_range_deserialization() -> None: + # pylint: disable=missing-function-docstring data = {"lt": "2018-01-01T00:30:10"} r = field.DateRange().deserialize(data) @@ -46,6 +47,7 @@ def test_date_range_deserialization() -> None: def test_boolean_deserialization() -> None: + # pylint: disable=missing-function-docstring bf = field.Boolean() assert not bf.deserialize("false") @@ -59,6 +61,7 @@ def test_boolean_deserialization() -> None: def test_date_field_can_have_default_tz() -> None: + # pylint: disable=missing-function-docstring f: Any = field.Date(default_timezone="UTC") now = datetime.now() @@ -74,6 +77,7 @@ def test_date_field_can_have_default_tz() -> None: def test_custom_field_car_wrap_other_field() -> None: + # pylint: disable=missing-function-docstring class MyField(field.CustomField): @property def builtin_type(self) -> Any: @@ -85,6 +89,7 @@ def builtin_type(self) -> Any: def test_field_from_dict() -> None: + # pylint: disable=missing-function-docstring f = field.construct_field({"type": "text", "index": "not_analyzed"}) assert isinstance(f, field.Text) @@ -92,6 +97,7 @@ def test_field_from_dict() -> None: def test_multi_fields_are_accepted_and_parsed() -> None: + # pylint: disable=missing-function-docstring f = field.construct_field( "text", fields={"raw": {"type": "keyword"}, "eng": field.Text(analyzer="english")}, @@ -108,6 +114,7 @@ def 
test_multi_fields_are_accepted_and_parsed() -> None: def test_nested_provides_direct_access_to_its_fields() -> None: + # pylint: disable=missing-function-docstring f = field.Nested(properties={"name": {"type": "text", "index": "not_analyzed"}}) assert "name" in f @@ -115,6 +122,7 @@ def test_nested_provides_direct_access_to_its_fields() -> None: def test_field_supports_multiple_analyzers() -> None: + # pylint: disable=missing-function-docstring f = field.Text(analyzer="snowball", search_analyzer="keyword") assert { "analyzer": "snowball", @@ -124,6 +132,7 @@ def test_field_supports_multiple_analyzers() -> None: def test_multifield_supports_multiple_analyzers() -> None: + # pylint: disable=missing-function-docstring f = field.Text( fields={ "f1": field.Text(search_analyzer="keyword", analyzer="snowball"), @@ -144,6 +153,7 @@ def test_multifield_supports_multiple_analyzers() -> None: def test_scaled_float() -> None: + # pylint: disable=missing-function-docstring with pytest.raises(TypeError): field.ScaledFloat() # type: ignore f: Any = field.ScaledFloat(scaling_factor=123) @@ -151,6 +161,7 @@ def test_scaled_float() -> None: def test_ipaddress() -> None: + # pylint: disable=missing-function-docstring f = field.Ip() assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1") assert f.deserialize("::1") == ip_address("::1") @@ -161,6 +172,7 @@ def test_ipaddress() -> None: def test_float() -> None: + # pylint: disable=missing-function-docstring f = field.Float() assert f.deserialize("42") == 42.0 assert f.deserialize(None) is None @@ -169,6 +181,7 @@ def test_float() -> None: def test_integer() -> None: + # pylint: disable=missing-function-docstring f = field.Integer() assert f.deserialize("42") == 42 assert f.deserialize(None) is None @@ -177,6 +190,7 @@ def test_integer() -> None: def test_binary() -> None: + # pylint: disable=missing-function-docstring f = field.Binary() assert f.deserialize(base64.b64encode(b"42")) == b"42" assert f.deserialize(f.serialize(b"42")) == b"42" @@ -184,27 +198,32 @@ def test_binary() -> None: def test_constant_keyword() -> None: + # pylint: disable=missing-function-docstring f = field.ConstantKeyword() assert f.to_dict() == {"type": "constant_keyword"} def test_rank_features() -> None: + # pylint: disable=missing-function-docstring f = field.RankFeatures() assert f.to_dict() == {"type": "rank_features"} def test_object_dynamic_values() -> None: + # pylint: disable=missing-function-docstring for dynamic in True, False, "strict": f = field.Object(dynamic=dynamic) assert f.to_dict()["dynamic"] == dynamic def test_object_disabled() -> None: + # pylint: disable=missing-function-docstring f = field.Object(enabled=False) assert f.to_dict() == {"type": "object", "enabled": False} def test_object_constructor() -> None: + # pylint: disable=missing-function-docstring expected = {"type": "object", "properties": {"inner_int": {"type": "integer"}}} class Inner(InnerDoc): diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index eac720b4..11dbd418 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -39,6 +39,7 @@ class Post(Document): def test_multiple_doc_types_will_combine_mappings() -> None: + # pylint: disable=missing-function-docstring class User(Document): username = Text() @@ -57,6 +58,7 @@ class User(Document): def test_search_is_limited_to_index_name() -> None: + # pylint: disable=missing-function-docstring i = Index("my-index") s = i.search() @@ -64,6 
+66,7 @@ def test_search_is_limited_to_index_name() -> None: def test_cloned_index_has_copied_settings_and_using() -> None: + # pylint: disable=missing-function-docstring client = object() i: Any = Index("my-index", using=client) i.settings(number_of_shards=1) @@ -77,6 +80,7 @@ def test_cloned_index_has_copied_settings_and_using() -> None: def test_cloned_index_has_analysis_attribute() -> None: + # pylint: disable=missing-function-docstring """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. @@ -97,6 +101,7 @@ def test_cloned_index_has_analysis_attribute() -> None: def test_settings_are_saved() -> None: + # pylint: disable=missing-function-docstring i: Any = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -105,6 +110,7 @@ def test_settings_are_saved() -> None: def test_registered_doc_type_included_in_to_dict() -> None: + # pylint: disable=missing-function-docstring i: Any = Index("i", using="alias") i.document(Post) @@ -119,6 +125,7 @@ def test_registered_doc_type_included_in_to_dict() -> None: def test_registered_doc_type_included_in_search() -> None: + # pylint: disable=missing-function-docstring i: Any = Index("i", using="alias") i.document(Post) @@ -128,6 +135,7 @@ def test_registered_doc_type_included_in_search() -> None: def test_aliases_add_to_object() -> None: + # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -138,6 +146,7 @@ def test_aliases_add_to_object() -> None: def test_aliases_returned_from_to_dict() -> None: + # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -148,6 +157,7 @@ def test_aliases_returned_from_to_dict() -> None: def test_analyzers_added_to_object() -> None: + # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -164,6 +174,7 @@ def test_analyzers_added_to_object() -> None: def test_analyzers_returned_from_to_dict() -> None: + # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -177,6 +188,7 @@ def test_analyzers_returned_from_to_dict() -> None: def test_conflicting_analyzer_raises_error() -> None: + # pylint: disable=missing-function-docstring i: Any = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -185,6 +197,7 @@ def test_conflicting_analyzer_raises_error() -> None: def test_index_template_can_have_order() -> None: + # pylint: disable=missing-function-docstring i: Any = Index("i-*") it = i.as_template("i", order=2) @@ -192,6 +205,7 @@ def test_index_template_can_have_order() -> None: def test_index_template_save_result(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring it: Any = IndexTemplate("test-template", "test-*") assert it.save(using="mock") == mock_client.indices.put_template() diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index 6e4af163..98ca7e42 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -31,6 +31,7 @@ def 
test_mapping_can_has_fields() -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("name", "text").field("tags", "keyword") @@ -40,6 +41,7 @@ def test_mapping_can_has_fields() -> None: def test_mapping_update_is_recursive() -> None: + # pylint: disable=missing-function-docstring m1 = mapping.Mapping() m1.field("title", "text") m1.field("author", "object") @@ -73,6 +75,7 @@ def test_mapping_update_is_recursive() -> None: def test_properties_can_iterate_over_all_the_fields() -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -83,6 +86,7 @@ def test_properties_can_iterate_over_all_the_fields() -> None: def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -156,6 +160,7 @@ def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: def test_mapping_can_collect_multiple_analyzers() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -202,6 +207,7 @@ def test_mapping_can_collect_multiple_analyzers() -> None: def test_even_non_custom_analyzers_can_have_params() -> None: + # pylint: disable=missing-function-docstring a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.Mapping() m.field("title", "text", analyzer=a1) @@ -212,6 +218,7 @@ def test_even_non_custom_analyzers_can_have_params() -> None: def test_resolve_field_can_resolve_multifields() -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("title", "text", fields={"keyword": Keyword()}) @@ -219,6 +226,7 @@ def test_resolve_field_can_resolve_multifields() -> None: def test_resolve_nested() -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 7fb2f3a0..65d798a0 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -32,6 +32,7 @@ def test_empty_query_is_match_all() -> None: + # pylint: disable=missing-function-docstring q = query.Q() assert isinstance(q, query.MatchAll) @@ -39,40 +40,48 @@ def test_empty_query_is_match_all() -> None: def test_match_to_dict() -> None: + # pylint: disable=missing-function-docstring assert {"match": {"f": "value"}} == query.Match(f="value").to_dict() def test_match_to_dict_extra() -> None: + # pylint: disable=missing-function-docstring assert {"match": {"f": "value", "boost": 2}} == query.Match( f="value", boost=2 ).to_dict() def test_fuzzy_to_dict() -> None: + # pylint: disable=missing-function-docstring assert {"fuzzy": {"f": "value"}} == query.Fuzzy(f="value").to_dict() def test_prefix_to_dict() -> None: + # pylint: disable=missing-function-docstring assert {"prefix": {"f": "value"}} == query.Prefix(f="value").to_dict() def test_term_to_dict() -> None: + # pylint: disable=missing-function-docstring assert {"term": {"_type": "article"}} == query.Term(_type="article").to_dict() def test_bool_to_dict() -> None: + # pylint: disable=missing-function-docstring bool = query.Bool(must=[query.Match(f="value")], should=[]) assert {"bool": {"must": 
[{"match": {"f": "value"}}]}} == bool.to_dict() def test_dismax_to_dict() -> None: + # pylint: disable=missing-function-docstring assert {"dis_max": {"queries": [{"term": {"_type": "article"}}]}} == query.DisMax( queries=[query.Term(_type="article")] ).to_dict() def test_bool_from_dict_issue_318() -> None: + # pylint: disable=missing-function-docstring d = {"bool": {"must_not": {"match": {"field": "value"}}}} q = query.Q(d) @@ -80,12 +89,14 @@ def test_bool_from_dict_issue_318() -> None: def test_repr() -> None: + # pylint: disable=missing-function-docstring bool = query.Bool(must=[query.Match(f="value")], should=[]) assert "Bool(must=[Match(f='value')])" == repr(bool) def test_query_clone() -> None: + # pylint: disable=missing-function-docstring bool = query.Bool( must=[query.Match(x=42)], should=[query.Match(g="v2")], @@ -98,6 +109,7 @@ def test_query_clone() -> None: def test_bool_converts_its_init_args_to_queries() -> None: + # pylint: disable=missing-function-docstring q = query.Bool(must=[{"match": {"f": "value"}}]) assert len(q.must) == 1 @@ -105,6 +117,7 @@ def test_bool_converts_its_init_args_to_queries() -> None: def test_two_queries_make_a_bool() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f="value1") q2 = query.Match(message={"query": "this is a test", "opeartor": "and"}) q = q1 & q2 @@ -114,6 +127,7 @@ def test_two_queries_make_a_bool() -> None: def test_other_and_bool_appends_other_to_must() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f="value1") qb = query.Bool() @@ -123,6 +137,7 @@ def test_other_and_bool_appends_other_to_must() -> None: def test_bool_and_other_appends_other_to_must() -> None: + # pylint: disable=missing-function-docstring q1: Any = query.Match(f="value1") qb: Any = query.Bool() @@ -132,6 +147,7 @@ def test_bool_and_other_appends_other_to_must() -> None: def test_bool_and_other_sets_min_should_match_if_needed() -> None: + # pylint: disable=missing-function-docstring q1 = query.Q("term", category=1) q2 = query.Q( "bool", should=[query.Q("term", name="aaa"), query.Q("term", name="bbb")] @@ -146,6 +162,7 @@ def test_bool_and_other_sets_min_should_match_if_needed() -> None: def test_bool_with_different_minimum_should_match_should_not_be_combined() -> None: + # pylint: disable=missing-function-docstring q1 = query.Q( "bool", minimum_should_match=2, @@ -185,10 +202,12 @@ def test_bool_with_different_minimum_should_match_should_not_be_combined() -> No def test_empty_bool_has_min_should_match_0() -> None: + # pylint: disable=missing-function-docstring assert 0 == query.Bool()._min_should_match def test_query_and_query_creates_bool() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.Match(g=47) @@ -198,6 +217,7 @@ def test_query_and_query_creates_bool() -> None: def test_match_all_and_query_equals_other() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchAll() @@ -206,24 +226,28 @@ def test_match_all_and_query_equals_other() -> None: def test_not_match_all_is_match_none() -> None: + # pylint: disable=missing-function-docstring q = query.MatchAll() assert ~q == query.MatchNone() def test_not_match_none_is_match_all() -> None: + # pylint: disable=missing-function-docstring q = query.MatchNone() assert ~q == query.MatchAll() def test_invert_empty_bool_is_match_none() -> None: + # pylint: disable=missing-function-docstring q = query.Bool() assert ~q == query.MatchNone() def test_match_none_or_query_equals_query() -> None: + # 
pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchNone() @@ -231,6 +255,7 @@ def test_match_none_or_query_equals_query() -> None: def test_match_none_and_query_equals_match_none() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchNone() @@ -238,6 +263,7 @@ def test_match_none_and_query_equals_match_none() -> None: def test_bool_and_bool() -> None: + # pylint: disable=missing-function-docstring qt1, qt2, qt3 = query.Match(f=1), query.Match(f=2), query.Match(f=3) q1 = query.Bool(must=[qt1], should=[qt2]) @@ -254,6 +280,7 @@ def test_bool_and_bool() -> None: def test_bool_and_bool_with_min_should_match() -> None: + # pylint: disable=missing-function-docstring qt1, qt2 = query.Match(f=1), query.Match(f=2) q1 = query.Q("bool", minimum_should_match=1, should=[qt1]) q2 = query.Q("bool", minimum_should_match=1, should=[qt2]) @@ -262,18 +289,21 @@ def test_bool_and_bool_with_min_should_match() -> None: def test_inverted_query_becomes_bool_with_must_not() -> None: + # pylint: disable=missing-function-docstring q = query.Match(f=42) assert ~q == query.Bool(must_not=[query.Match(f=42)]) def test_inverted_query_with_must_not_become_should() -> None: + # pylint: disable=missing-function-docstring q = query.Q("bool", must_not=[query.Q("match", f=1), query.Q("match", f=2)]) assert ~q == query.Q("bool", should=[query.Q("match", f=1), query.Q("match", f=2)]) def test_inverted_query_with_must_and_must_not() -> None: + # pylint: disable=missing-function-docstring q = query.Q( "bool", must=[query.Q("match", f=3), query.Q("match", f=4)], @@ -294,12 +324,14 @@ def test_inverted_query_with_must_and_must_not() -> None: def test_double_invert_returns_original_query() -> None: + # pylint: disable=missing-function-docstring q = query.Match(f=42) assert q == ~~q def test_bool_query_gets_inverted_internally() -> None: + # pylint: disable=missing-function-docstring q = query.Bool(must_not=[query.Match(f=42)], must=[query.Match(g="v")]) assert ~q == query.Bool( @@ -313,6 +345,7 @@ def test_bool_query_gets_inverted_internally() -> None: def test_match_all_or_something_is_match_all() -> None: + # pylint: disable=missing-function-docstring q1 = query.MatchAll() q2 = query.Match(f=42) @@ -321,6 +354,7 @@ def test_match_all_or_something_is_match_all() -> None: def test_or_produces_bool_with_should() -> None: + # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.Match(g="v") @@ -329,6 +363,7 @@ def test_or_produces_bool_with_should() -> None: def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: + # pylint: disable=missing-function-docstring q = query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -337,6 +372,7 @@ def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: + # pylint: disable=missing-function-docstring q = ~query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -348,6 +384,7 @@ def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: def test_bool_will_append_another_query_with_or() -> None: + # pylint: disable=missing-function-docstring qb = query.Bool(should=[query.Match(f="v"), query.Match(f="v2")]) q = query.Match(g=42) @@ -355,6 +392,7 @@ def test_bool_will_append_another_query_with_or() -> None: def test_bool_queries_with_only_should_get_concatenated() -> None: + # pylint: disable=missing-function-docstring q1 = query.Bool(should=[query.Match(f=1), query.Match(f=2)]) q2 = query.Bool(should=[query.Match(f=3), 
query.Match(f=4)]) @@ -364,6 +402,7 @@ def test_bool_queries_with_only_should_get_concatenated() -> None: def test_two_bool_queries_append_one_to_should_if_possible() -> None: + # pylint: disable=missing-function-docstring q1 = query.Bool(should=[query.Match(f="v")]) q2 = query.Bool(must=[query.Match(f="v")]) @@ -376,11 +415,13 @@ def test_two_bool_queries_append_one_to_should_if_possible() -> None: def test_queries_are_registered() -> None: + # pylint: disable=missing-function-docstring assert "match" in query.Query._classes assert query.Query._classes["match"] is query.Match def test_defining_query_registers_it() -> None: + # pylint: disable=missing-function-docstring class MyQuery(query.Query): name = "my_query" @@ -389,12 +430,14 @@ class MyQuery(query.Query): def test_query_passes_query_through() -> None: + # pylint: disable=missing-function-docstring q = query.Match(f="value1") assert query.Q(q) is q def test_query_constructs_query_by_name() -> None: + # pylint: disable=missing-function-docstring q = query.Q("match", f="value") assert isinstance(q, query.Match) @@ -402,18 +445,21 @@ def test_query_constructs_query_by_name() -> None: def test_query_translates_double_underscore_to_dots_in_param_names() -> None: + # pylint: disable=missing-function-docstring q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params def test_query_doesn_translate_double_underscore_to_dots_in_param_names() -> None: + # pylint: disable=missing-function-docstring q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params def test_query_constructs_simple_query_from_dict() -> None: + # pylint: disable=missing-function-docstring q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) @@ -421,17 +467,20 @@ def test_query_constructs_simple_query_from_dict() -> None: def test_query_constructs_compound_query_from_dict() -> None: + # pylint: disable=missing-function-docstring q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) def test_query_raises_error_when_passed_in_dict_and_params() -> None: + # pylint: disable=missing-function-docstring with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") def test_query_raises_error_when_passed_in_query_and_params() -> None: + # pylint: disable=missing-function-docstring q = query.Match(f="value1") with raises(Exception): @@ -439,11 +488,13 @@ def test_query_raises_error_when_passed_in_query_and_params() -> None: def test_query_raises_error_on_unknown_query() -> None: + # pylint: disable=missing-function-docstring with raises(Exception): query.Q("not a query", f="value") def test_match_all_and_anything_is_anything() -> None: + # pylint: disable=missing-function-docstring q = query.MatchAll() s = query.Match(f=42) @@ -452,6 +503,7 @@ def test_match_all_and_anything_is_anything() -> None: def test_function_score_with_functions() -> None: + # pylint: disable=missing-function-docstring q = query.Q( "function_score", functions=[query.SF("script_score", script="doc['comment_count'] * _score")], @@ -465,6 +517,7 @@ def test_function_score_with_functions() -> None: def test_function_score_with_no_function_is_boost_factor() -> None: + # pylint: disable=missing-function-docstring q = query.Q( "function_score", functions=[query.SF({"weight": 20, "filter": query.Q("term", f=42)})], @@ -476,6 +529,7 @@ def test_function_score_with_no_function_is_boost_factor() -> None: def 
test_function_score_to_dict() -> None: + # pylint: disable=missing-function-docstring q = query.Q( "function_score", query=query.Q("match", title="python"), @@ -505,6 +559,7 @@ def test_function_score_to_dict() -> None: def test_function_score_with_single_function() -> None: + # pylint: disable=missing-function-docstring d = { "function_score": { "filter": {"term": {"tags": "python"}}, @@ -523,6 +578,7 @@ def test_function_score_with_single_function() -> None: def test_function_score_from_dict() -> None: + # pylint: disable=missing-function-docstring d = { "function_score": { "filter": {"term": {"tags": "python"}}, @@ -552,6 +608,7 @@ def test_function_score_from_dict() -> None: def test_script_score() -> None: + # pylint: disable=missing-function-docstring d = { "script_score": { "query": {"match_all": {}}, diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 302abdc5..9c21e618 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -38,10 +38,16 @@ @fixture # type: ignore def agg_response(aggs_search: Any, aggs_data: Any) -> Any: + """ + :param aggs_search: aggregation search + :param aggs_data: data to aggregate + :return: the aggregated data + """ return response.Response(aggs_search, aggs_data) def test_agg_response_is_pickleable(agg_response: Any) -> None: + # pylint: disable=missing-function-docstring assert agg_response.hits == [] r = pickle.loads(pickle.dumps(agg_response)) @@ -51,6 +57,7 @@ def test_agg_response_is_pickleable(agg_response: Any) -> None: def test_response_is_pickleable(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert res.hits r = pickle.loads(pickle.dumps(res)) @@ -61,6 +68,7 @@ def test_response_is_pickleable(dummy_response: Any) -> None: def test_hit_is_pickleable(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = pickle.loads(pickle.dumps(res.hits)) @@ -69,6 +77,7 @@ def test_hit_is_pickleable(dummy_response: Any) -> None: def test_response_stores_search(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring s = Search() r = response.Response(s, dummy_response) @@ -76,6 +85,7 @@ def test_response_stores_search(dummy_response: Any) -> None: def test_interactive_helpers(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = res.hits h = hits[0] @@ -99,6 +109,7 @@ def test_interactive_helpers(dummy_response: Any) -> None: def test_empty_response_is_false(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring dummy_response["hits"]["hits"] = [] res = response.Response(Search(), dummy_response) @@ -106,11 +117,13 @@ def test_empty_response_is_false(dummy_response: Any) -> None: def test_len_response(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert len(res) == 4 def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = list(h for h in res) @@ -128,6 +141,7 @@ def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None: def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> None: + # pylint: 
disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = res.hits @@ -136,6 +150,7 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> No def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) h = res.hits[0] @@ -152,6 +167,7 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> No def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None: + # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert res[0] is res.hits[0] @@ -159,15 +175,18 @@ def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None: def test_aggregation_base(agg_response: Any) -> None: + # pylint: disable=missing-function-docstring assert agg_response.aggs is agg_response.aggregations assert isinstance(agg_response.aggs, response.AggResponse) def test_metric_agg_works(agg_response: Any) -> None: + # pylint: disable=missing-function-docstring assert 25052.0 == agg_response.aggs.sum_lines.value def test_aggregations_can_be_iterated_over(agg_response: Any) -> None: + # pylint: disable=missing-function-docstring aggs = [a for a in agg_response.aggs] assert len(aggs) == 3 @@ -177,6 +196,7 @@ def test_aggregations_can_be_iterated_over(agg_response: Any) -> None: def test_aggregations_can_be_retrieved_by_name( agg_response: Any, aggs_search: Any ) -> None: + # pylint: disable=missing-function-docstring a = agg_response.aggs["popular_files"] assert isinstance(a, BucketData) @@ -185,6 +205,7 @@ def test_aggregations_can_be_retrieved_by_name( def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None: + # pylint: disable=missing-function-docstring popular_files = agg_response.aggregations.popular_files buckets = [b for b in popular_files] @@ -193,6 +214,7 @@ def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None: def test_bucket_keys_get_deserialized(aggs_data: Any, aggs_search: Any) -> None: + # pylint: disable=missing-function-docstring class Commit(Document): info = Object(properties={"committed_date": Date()}) diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index c7fa20c5..5950cd05 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -35,12 +35,14 @@ def test_expand__to_dot_is_respected() -> None: + # pylint: disable=missing-function-docstring s = search.Search().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() def test_execute_uses_cache() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() r: Any = object() s._response = r @@ -49,6 +51,7 @@ def test_execute_uses_cache() -> None: def test_cache_can_be_ignored(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search(using="mock") r: Any = object() s._response = r @@ -58,6 +61,7 @@ def test_cache_can_be_ignored(mock_client: Any) -> None: def test_iter_iterates_over_hits() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() s._response = [1, 2, 3] @@ -65,6 +69,7 @@ def test_iter_iterates_over_hits() -> None: def test_cache_isnt_cloned() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() s._response = object() @@ -72,12 +77,14 @@ def test_cache_isnt_cloned() -> None: 
def test_search_starts_with_no_query() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() assert s.query._proxied is None def test_search_query_combines_query() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() s2 = s.query("match", f=42) @@ -90,6 +97,7 @@ def test_search_query_combines_query() -> None: def test_query_can_be_assigned_to() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() q = Q("match", title="python") @@ -99,6 +107,7 @@ def test_query_can_be_assigned_to() -> None: def test_query_can_be_wrapped() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -114,6 +123,7 @@ def test_query_can_be_wrapped() -> None: def test_using() -> None: + # pylint: disable=missing-function-docstring o: Any = object() o2: Any = object() s: Any = search.Search(using=o) @@ -124,18 +134,21 @@ def test_using() -> None: def test_methods_are_proxied_to_the_query() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search().query("match_all") assert s.query.to_dict() == {"match_all": {}} def test_query_always_returns_search() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() assert isinstance(s.query("match", f=42), search.Search) def test_source_copied_on_clone() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -150,6 +163,7 @@ def test_source_copied_on_clone() -> None: def test_copy_clones() -> None: + # pylint: disable=missing-function-docstring from copy import copy s1: Any = search.Search().source(["some", "fields"]) @@ -160,6 +174,7 @@ def test_copy_clones() -> None: def test_aggs_allow_two_metric() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -170,6 +185,7 @@ def test_aggs_allow_two_metric() -> None: def test_aggs_get_copied_on_change() -> None: + # pylint: disable=missing-function-docstring s: Any = search.Search().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" @@ -202,6 +218,7 @@ def test_aggs_get_copied_on_change() -> None: def test_search_index() -> None: + # pylint: disable=missing-function-docstring s = search.Search(index="i") assert s._index == ["i"] s = s.index("i2") @@ -233,6 +250,7 @@ def test_search_index() -> None: def test_doc_type_document_class() -> None: + # pylint: disable=missing-function-docstring class MyDocument(Document): pass @@ -246,6 +264,7 @@ class MyDocument(Document): def test_sort() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s = s.sort("fielda", "-fieldb") @@ -258,6 +277,7 @@ def test_sort() -> None: def test_sort_by_score() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -268,6 +288,7 @@ def test_sort_by_score() -> None: def test_collapse() -> None: + # pylint: disable=missing-function-docstring s = search.Search() inner_hits = {"name": "most_recent", "size": 5, "sort": [{"@timestamp": "desc"}]} @@ -302,6 +323,7 @@ def test_collapse() -> None: def test_slice() -> None: + # pylint: disable=missing-function-docstring s = search.Search() assert {"from": 3, "size": 7} == 
s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -311,11 +333,13 @@ def test_slice() -> None: def test_index() -> None: + # pylint: disable=missing-function-docstring s = search.Search() assert {"from": 3, "size": 1} == s[3].to_dict() def test_search_to_dict() -> None: + # pylint: disable=missing-function-docstring s = search.Search() assert {} == s.to_dict() @@ -345,6 +369,7 @@ def test_search_to_dict() -> None: def test_complex_example() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s = ( s.query("match", title="python") @@ -396,6 +421,7 @@ def test_complex_example() -> None: def test_reverse() -> None: + # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -446,12 +472,14 @@ def test_reverse() -> None: def test_from_dict_doesnt_need_query() -> None: + # pylint: disable=missing-function-docstring s = search.Search.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() def test_params_being_passed_to_search(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring s = search.Search(using="mock") s = s.params(routing="42") s.execute() @@ -460,6 +488,7 @@ def test_params_being_passed_to_search(mock_client: Any) -> None: def test_source() -> None: + # pylint: disable=missing-function-docstring assert {} == search.Search().source().to_dict() assert { @@ -474,6 +503,7 @@ def test_source() -> None: def test_source_on_clone() -> None: + # pylint: disable=missing-function-docstring assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -489,6 +519,7 @@ def test_source_on_clone() -> None: def test_source_on_clear() -> None: + # pylint: disable=missing-function-docstring assert ( {} == search.Search() @@ -499,6 +530,7 @@ def test_source_on_clear() -> None: def test_suggest_accepts_global_text() -> None: + # pylint: disable=missing-function-docstring s = search.Search.from_dict( { "suggest": { @@ -521,6 +553,7 @@ def test_suggest_accepts_global_text() -> None: def test_suggest() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -530,6 +563,7 @@ def test_suggest() -> None: def test_exclude() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s = s.exclude("match", title="python") @@ -543,6 +577,7 @@ def test_exclude() -> None: def test_delete_by_query(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring s = search.Search(using="mock").query("match", lang="java") s.delete() @@ -552,6 +587,7 @@ def test_delete_by_query(mock_client: Any) -> None: def test_update_from_dict() -> None: + # pylint: disable=missing-function-docstring s = search.Search() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -563,6 +599,7 @@ def test_update_from_dict() -> None: def test_rescore_query_to_dict() -> None: + # pylint: disable=missing-function-docstring s = search.Search(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index afcfabc9..8545782c 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -32,12 +32,14 @@ def test_ubq_starts_with_no_query() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery() assert 
ubq.query._proxied is None def test_ubq_to_dict() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery() assert {} == ubq.to_dict() @@ -54,6 +56,7 @@ def test_ubq_to_dict() -> None: def test_complex_example() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -92,6 +95,7 @@ def test_complex_example() -> None: def test_exclude() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -105,6 +109,7 @@ def test_exclude() -> None: def test_reverse() -> None: + # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -141,12 +146,14 @@ def test_reverse() -> None: def test_from_dict_doesnt_need_query() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() def test_params_being_passed_to_search(mock_client: Any) -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery(using="mock") ubq = ubq.params(routing="42") ubq.execute() @@ -157,6 +164,7 @@ def test_params_being_passed_to_search(mock_client: Any) -> None: def test_overwrite_script() -> None: + # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} @@ -173,6 +181,7 @@ def test_overwrite_script() -> None: def test_update_by_query_response_success() -> None: + # pylint: disable=missing-function-docstring ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index c67bcb8b..8c45a23c 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -33,6 +33,7 @@ def test_attrdict_pickle() -> None: + # pylint: disable=missing-function-docstring ad = utils.AttrDict({}) pickled_ad = pickle.dumps(ad) @@ -40,6 +41,7 @@ def test_attrdict_pickle() -> None: def test_attrlist_pickle() -> None: + # pylint: disable=missing-function-docstring al = utils.AttrList([]) pickled_al = pickle.dumps(al) @@ -47,6 +49,7 @@ def test_attrlist_pickle() -> None: def test_attrlist_slice() -> None: + # pylint: disable=missing-function-docstring class MyAttrDict(utils.AttrDict): pass @@ -55,6 +58,7 @@ class MyAttrDict(utils.AttrDict): def test_merge() -> None: + # pylint: disable=missing-function-docstring a = utils.AttrDict({"a": {"b": 42, "c": 47}}) b = {"a": {"b": 123, "d": -12}, "e": [1, 2, 3]} @@ -64,6 +68,7 @@ def test_merge() -> None: def test_merge_conflict() -> None: + # pylint: disable=missing-function-docstring for d in ( {"a": 42}, {"a": {"b": 47}}, @@ -74,6 +79,7 @@ def test_merge_conflict() -> None: def test_attrdict_bool() -> None: + # pylint: disable=missing-function-docstring d = utils.AttrDict({}) assert not d @@ -82,6 +88,7 @@ def test_attrdict_bool() -> None: def test_attrlist_items_get_wrapped_during_iteration() -> None: + # pylint: disable=missing-function-docstring al = utils.AttrList([1, object(), [1], {}]) ls = list(iter(al)) @@ -91,6 +98,7 @@ def test_attrlist_items_get_wrapped_during_iteration() -> None: def test_serializer_deals_with_attr_versions() -> None: + # pylint: disable=missing-function-docstring d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( @@ -99,6 
+107,7 @@ def test_serializer_deals_with_attr_versions() -> None: def test_serializer_deals_with_objects_with_to_dict() -> None: + # pylint: disable=missing-function-docstring class MyClass(object): def to_dict(self) -> int: return 42 @@ -107,12 +116,14 @@ def to_dict(self) -> int: def test_recursive_to_dict() -> None: + # pylint: disable=missing-function-docstring assert utils.recursive_to_dict({"k": [1, (1.0, {"v": Q("match", key="val")})]}) == { "k": [1, (1.0, {"v": {"match": {"key": "val"}}})] } def test_attrdict_get() -> None: + # pylint: disable=missing-function-docstring a = utils.AttrDict({"a": {"b": 42, "c": 47}}) assert a.get("a", {}).get("b", 0) == 42 assert a.get("a", {}).get("e", 0) == 0 diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index 67f2fe50..b076ed1e 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -75,6 +75,7 @@ class Log(Document): def test_required_int_can_be_0() -> None: + # pylint: disable=missing-function-docstring class DT(Document): i = Integer(required=True) @@ -83,6 +84,7 @@ class DT(Document): def test_required_field_cannot_be_empty_list() -> None: + # pylint: disable=missing-function-docstring class DT(Document): i = Integer(required=True) @@ -92,6 +94,7 @@ class DT(Document): def test_validation_works_for_lists_of_values() -> None: + # pylint: disable=missing-function-docstring class DT(Document): i = Date(required=True) @@ -104,6 +107,7 @@ class DT(Document): def test_field_with_custom_clean() -> None: + # pylint: disable=missing-function-docstring ls = Log() ls.full_clean() @@ -111,6 +115,7 @@ def test_field_with_custom_clean() -> None: def test_empty_object() -> None: + # pylint: disable=missing-function-docstring d: Any = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) d.inner = {} @@ -118,6 +123,7 @@ def test_empty_object() -> None: def test_missing_required_field_raises_validation_exception() -> None: + # pylint: disable=missing-function-docstring d1: Any = BlogPost() with raises(ValidationException): d1.full_clean() @@ -133,6 +139,7 @@ def test_missing_required_field_raises_validation_exception() -> None: def test_boolean_doesnt_treat_false_as_empty() -> None: + # pylint: disable=missing-function-docstring d: Any = BlogPostWithStatus() with raises(ValidationException): d.full_clean() @@ -143,6 +150,7 @@ def test_boolean_doesnt_treat_false_as_empty() -> None: def test_custom_validation_on_nested_gets_run() -> None: + # pylint: disable=missing-function-docstring d: Any = BlogPost( authors=[Author(name="Guian", email="king@example.com")], created=None ) @@ -154,6 +162,7 @@ def test_custom_validation_on_nested_gets_run() -> None: def test_accessing_known_fields_returns_empty_value() -> None: + # pylint: disable=missing-function-docstring d: Any = BlogPost() assert [] == d.authors @@ -164,6 +173,7 @@ def test_accessing_known_fields_returns_empty_value() -> None: def test_empty_values_are_not_serialized() -> None: + # pylint: disable=missing-function-docstring d: Any = BlogPost( authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None ) diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index 6826b83e..b53e7eb0 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -45,6 +45,7 @@ ], ) def test_range_contains(kwargs: Any, item: Any) -> None: + # 
pylint: disable=missing-function-docstring assert item in Range(**kwargs) @@ -59,6 +60,7 @@ def test_range_contains(kwargs: Any, item: Any) -> None: ], ) def test_range_not_contains(kwargs: Any, item: Any) -> None: + # pylint: disable=missing-function-docstring assert item not in Range(**kwargs) @@ -73,6 +75,7 @@ def test_range_not_contains(kwargs: Any, item: Any) -> None: ], ) def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> None: + # pylint: disable=missing-function-docstring with pytest.raises(ValueError): Range(*args, **kwargs) @@ -87,6 +90,7 @@ def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> Non ], ) def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: + # pylint: disable=missing-function-docstring assert (lower, inclusive) == range.lower @@ -100,4 +104,5 @@ def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: ], ) def test_range_upper(range: Any, upper: Any, inclusive: Any) -> None: + # pylint: disable=missing-function-docstring assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_http_server.py b/test_opensearchpy/test_http_server.py index 713d1931..b0ef4a8e 100644 --- a/test_opensearchpy/test_http_server.py +++ b/test_opensearchpy/test_http_server.py @@ -17,6 +17,9 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler): __test__ = False def do_GET(self) -> None: # pylint: disable=invalid-name + """ + writes an HTTP response back to the client based on the path and headers of the request + """ headers = self.headers if self.path == "/redirect": @@ -49,6 +52,9 @@ def __init__(self, host: str = "localhost", port: int = 8080) -> None: self._server_thread = None def start(self) -> None: + """ + start the test HTTP server + """ if self._server_thread is not None: return @@ -56,6 +62,9 @@ def start(self) -> None: self._server_thread.start() def stop(self) -> None: + """ + stop the test HTTP server + """ if self._server_thread is None: return self.socket.close() diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index 524887d0..dc1656fb 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -49,18 +49,21 @@ def requires_numpy_and_pandas() -> None: + # pylint: disable=missing-function-docstring if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): def test_datetime_serialization(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) def test_decimal_serialization(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): @@ -68,6 +71,7 @@ def test_decimal_serialization(self) -> None: self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) def test_uuid_serialization(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( @@ -76,11 +80,13 @@ def test_uuid_serialization(self) -> None: ) def test_serializes_numpy_bool(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) def test_serializes_numpy_integers(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() ser = JSONSerializer() @@ -102,6
def test_serializes_numpy_integers(self) -> None: self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') def test_serializes_numpy_floats(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() ser = JSONSerializer() @@ -113,6 +120,7 @@ def test_serializes_numpy_floats(self) -> None: self.assertRegex(ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$') def test_serializes_numpy_datetime(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -121,6 +129,7 @@ def test_serializes_numpy_datetime(self) -> None: ) def test_serializes_numpy_ndarray(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -134,6 +143,7 @@ def test_serializes_numpy_ndarray(self) -> None: ) def test_serializes_numpy_nan_to_nan(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -142,6 +152,7 @@ def test_serializes_numpy_nan_to_nan(self) -> None: ) def test_serializes_pandas_timestamp(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -150,6 +161,7 @@ def test_serializes_pandas_timestamp(self) -> None: ) def test_serializes_pandas_series(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -158,6 +170,7 @@ def test_serializes_pandas_series(self) -> None: ) def test_serializes_pandas_na(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 @@ -168,6 +181,7 @@ def test_serializes_pandas_na(self) -> None: ) def test_raises_serialization_error_pandas_nat(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if not hasattr(pd, "NaT"): @@ -175,6 +189,7 @@ def test_raises_serialization_error_pandas_nat(self) -> None: self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) def test_serializes_pandas_category(self) -> None: + # pylint: disable=missing-function-docstring requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) @@ -190,33 +205,41 @@ def test_serializes_pandas_category(self) -> None: ) def test_raises_serialization_error_on_dump_error(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, JSONSerializer().dumps, object()) def test_raises_serialization_error_on_load_error(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") def test_strings_are_left_untouched(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): def test_strings_are_left_untouched(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual("你好", TextSerializer().dumps("你好")) def test_raises_serialization_error_on_dump_error(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, TextSerializer().dumps, {}) class TestDeserializer(TestCase): def setup_method(self, _: Any) -> None: + # pylint: disable=missing-function-docstring self.de = Deserializer(DEFAULT_SERIALIZERS) def test_deserializes_json_by_default(self) -> None: + # pylint: 
disable=missing-function-docstring self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}')) def test_deserializes_text_with_correct_ct(self) -> None: + # pylint: disable=missing-function-docstring self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain") ) @@ -226,9 +249,11 @@ def test_deserializes_text_with_correct_ct(self) -> None: ) def test_raises_serialization_error_on_unknown_mimetype(self) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, self.de.loads, "{}", "text/html") def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized( self, ) -> None: + # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, Deserializer, {}) diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index 36b548b5..b3c17950 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -35,6 +35,7 @@ def get_client(**kwargs: Any) -> Any: + # pylint: disable=missing-function-docstring global client if client is False: raise SkipTest("No client is available") @@ -54,6 +55,7 @@ def get_client(**kwargs: Any) -> Any: def setup_module() -> None: + # pylint: disable=missing-function-docstring get_client() diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index e1f83e4b..063a55f0 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -45,6 +45,7 @@ @pytest.fixture(scope="session") # type: ignore def sync_client_factory() -> Any: + # pylint: disable=missing-function-docstring client = None try: # Configure the client optionally with an HTTP conn class @@ -85,6 +86,7 @@ def sync_client_factory() -> Any: @pytest.fixture(scope="function") # type: ignore def sync_client(sync_client_factory: Any) -> Any: + # pylint: disable=missing-function-docstring try: yield sync_client_factory finally: diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index e945b69a..f128d497 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -32,11 +32,13 @@ class TestUnicode(OpenSearchTestCase): def test_indices_analyze(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.analyze(body='{"text": "привет"}') class TestBulk(OpenSearchTestCase): def test_bulk_works_with_string_body(self) -> None: + # pylint: disable=missing-function-docstring docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = self.client.bulk(body=docs) @@ -44,6 +46,7 @@ def test_bulk_works_with_string_body(self) -> None: self.assertEqual(1, len(response["items"])) def test_bulk_works_with_bytestring_body(self) -> None: + # pylint: disable=missing-function-docstring docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = self.client.bulk(body=docs) diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 4bcc90f8..81a4088e 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -46,6 +46,7 @@ @fixture(scope="session") # type: ignore def client() -> Any: + # pylint: disable=missing-function-docstring client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) 
add_connection("default", client) return client @@ -53,6 +54,7 @@ def client() -> Any: @fixture(scope="session") # type: ignore def opensearch_version(client: Any) -> Any: + # pylint: disable=missing-function-docstring info = client.info() print(info) yield tuple( @@ -63,6 +65,7 @@ def opensearch_version(client: Any) -> Any: @fixture # type: ignore def write_client(client: Any) -> Any: + # pylint: disable=missing-function-docstring yield client client.indices.delete("test-*", ignore=404) client.indices.delete_template("test-template", ignore=404) @@ -70,6 +73,7 @@ def write_client(client: Any) -> Any: @fixture(scope="session") # type: ignore def data_client(client: Any) -> Any: + # pylint: disable=missing-function-docstring # create mappings create_git_index(client, "git") create_flat_git_index(client, "flat-git") @@ -83,6 +87,7 @@ def data_client(client: Any) -> Any: @fixture # type: ignore def pull_request(write_client: Any) -> Any: + # pylint: disable=missing-function-docstring PullRequest.init() pr = PullRequest( _id=42, @@ -107,6 +112,8 @@ def pull_request(write_client: Any) -> Any: @fixture # type: ignore def setup_ubq_tests(client: Any) -> str: + # pylint: disable=missing-function-docstring + # todo what's a ubq test? index = "test-git" create_git_index(client, index) bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index fad77eeb..beb4d388 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -50,6 +50,7 @@ def __init__( self._fail_with = fail_with def bulk(self, *args: Any, **kwargs: Any) -> Any: + # pylint: disable=missing-function-docstring self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -58,6 +59,7 @@ def bulk(self, *args: Any, **kwargs: Any) -> Any: class TestStreamingBulk(OpenSearchTestCase): def test_actions_remain_unchanged(self) -> None: + # pylint: disable=missing-function-docstring actions = [{"_id": 1}, {"_id": 2}] for ok, item in helpers.streaming_bulk( self.client, actions, index="test-index" @@ -66,6 +68,7 @@ def test_actions_remain_unchanged(self) -> None: self.assertEqual([{"_id": 1}, {"_id": 2}], actions) def test_all_documents_get_inserted(self) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( self.client, docs, index="test-index", refresh=True @@ -78,6 +81,7 @@ def test_all_documents_get_inserted(self) -> None: ) def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -98,6 +102,7 @@ def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: assert False, "exception should have been raised" def test_different_op_types(self) -> Any: + # pylint: disable=missing-function-docstring if self.opensearch_version() < (0, 90, 1): raise SkipTest("update supported since 0.90.1") self.client.index(index="i", id=45, body={}) @@ -120,6 +125,7 @@ def test_different_op_types(self) -> Any: self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"]) def test_transport_error_can_becaught(self) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient(self.client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -156,6 +162,7 @@ def test_transport_error_can_becaught(self) -> 
None: ) def test_rejected_documents_are_retried(self) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -183,6 +190,7 @@ def test_rejected_documents_are_retried(self) -> None: self.assertEqual(4, failing_client._called) def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -211,6 +219,7 @@ def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: self.assertEqual(4, failing_client._called) def test_transport_error_is_raised_with_max_retries(self) -> None: + # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_at=(1, 2, 3, 4), @@ -235,6 +244,7 @@ def streaming_bulk() -> Any: class TestBulk(OpenSearchTestCase): def test_bulk_works_with_single_item(self) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": 42, "_id": 1}] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -248,6 +258,7 @@ def test_bulk_works_with_single_item(self) -> None: ) def test_all_documents_get_inserted(self) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -261,6 +272,7 @@ def test_all_documents_get_inserted(self) -> None: ) def test_stats_only_reports_numbers(self) -> None: + # pylint: disable=missing-function-docstring docs = [{"answer": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True, stats_only=True @@ -271,6 +283,7 @@ def test_stats_only_reports_numbers(self) -> None: self.assertEqual(100, self.client.count(index="test-index")["count"]) def test_errors_are_reported_correctly(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -298,6 +311,7 @@ def test_errors_are_reported_correctly(self) -> None: ) def test_error_is_raised(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -316,6 +330,7 @@ def test_error_is_raised(self) -> None: ) def test_ignore_error_if_raised(self) -> None: + # pylint: disable=missing-function-docstring # ignore the status code 400 in tuple helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -349,6 +364,7 @@ def test_ignore_error_if_raised(self) -> None: helpers.bulk(failing_client, [{"a": 42}], index="i", ignore_status=(599,)) def test_errors_are_collected_properly(self) -> None: + # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -388,6 +404,7 @@ def teardown_method(self, m: Any) -> None: super(TestScan, self).teardown_method(m) def test_order_can_be_preserved(self) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -408,6 +425,7 @@ def test_order_can_be_preserved(self) -> None: self.assertEqual(list(range(100)), list(d["_source"]["answer"] for d in docs)) def test_all_documents_are_read(self) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -421,6 +439,7 @@ def test_all_documents_are_read(self) -> None: 
self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) def test_scroll_error(self) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -456,6 +475,7 @@ def test_scroll_error(self) -> None: self.assertEqual(data[-1], {"scroll_data": 42}) def test_initial_search_error(self) -> None: + # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -482,6 +502,7 @@ def test_initial_search_error(self) -> None: client_mock.scroll.assert_not_called() def test_no_scroll_id_fast_route(self) -> None: + # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = {"no": "_scroll_id"} data = list(helpers.scan(self.client, index="test_index")) @@ -491,6 +512,7 @@ def test_no_scroll_id_fast_route(self) -> None: client_mock.clear_scroll.assert_not_called() def test_scan_auth_kwargs_forwarded(self) -> None: + # pylint: disable=missing-function-docstring for key, val in { "api_key": ("name", "value"), "http_auth": ("username", "password"), @@ -523,6 +545,7 @@ def test_scan_auth_kwargs_forwarded(self) -> None: self.assertEqual(api_mock.call_args[1][key], val) def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: + # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", @@ -555,6 +578,7 @@ def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: @patch("opensearchpy.helpers.actions.logger") def test_logger(self, logger_mock: Any) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -590,6 +614,7 @@ def test_logger(self, logger_mock: Any) -> None: logger_mock.warning.assert_called() def test_clear_scroll(self) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -617,6 +642,7 @@ def test_clear_scroll(self) -> None: spy.assert_not_called() def test_shards_no_skipped_field(self) -> None: + # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -646,6 +672,7 @@ def test_shards_no_skipped_field(self) -> None: class TestReindex(OpenSearchTestCase): def setup_method(self, _: Any) -> None: + # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -659,6 +686,7 @@ def setup_method(self, _: Any) -> None: self.client.bulk(bulk, refresh=True) def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: + # pylint: disable=missing-function-docstring helpers.reindex( self.client, "test_index", @@ -678,6 +706,7 @@ def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: ) def test_reindex_accepts_a_query(self) -> None: + # pylint: disable=missing-function-docstring helpers.reindex( self.client, "test_index", @@ -697,6 +726,7 @@ def test_reindex_accepts_a_query(self) -> None: ) def test_all_documents_get_moved(self) -> None: + # pylint: disable=missing-function-docstring helpers.reindex(self.client, "test_index", "prod_index") self.client.indices.refresh() @@ -716,6 +746,7 @@ def test_all_documents_get_moved(self) -> None: class 
TestParentChildReindex(OpenSearchTestCase): def setup_method(self, _: Any) -> None: + # pylint: disable=missing-function-docstring body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -742,6 +773,7 @@ def setup_method(self, _: Any) -> None: self.client.indices.refresh(index="test-index") def test_children_are_reindexed_correctly(self) -> None: + # pylint: disable=missing-function-docstring helpers.reindex(self.client, "test-index", "real-index") self.assertEqual( diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index 9f8c69f6..b4502551 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -30,6 +30,7 @@ def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: + # pylint: disable=missing-function-docstring a = analyzer("my-analyzer", tokenizer="keyword") tokens = a.simulate("Hello World!", using=client).tokens @@ -38,6 +39,7 @@ def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: def test_simulate_complex(client: Any) -> None: + # pylint: disable=missing-function-docstring a = analyzer( "my-analyzer", tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"), @@ -51,6 +53,7 @@ def test_simulate_complex(client: Any) -> None: def test_simulate_builtin(client: Any) -> None: + # pylint: disable=missing-function-docstring a = analyzer("my-analyzer", "english") tokens = a.simulate("fixes running").tokens diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index 20b43608..62264090 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -30,11 +30,13 @@ def test_count_all(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Search(using=data_client).index("git") assert 53 == s.count() def test_count_prefetch(data_client: Any, mocker: Any) -> None: + # pylint: disable=missing-function-docstring mocker.spy(data_client, "count") search = Search(using=data_client).index("git") @@ -48,6 +50,7 @@ def test_count_prefetch(data_client: Any, mocker: Any) -> None: def test_count_filter(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas")) # initial commit + repo document assert 2 == s.count() diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 9118ffed..b1c27be8 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -30,6 +30,7 @@ def create_flat_git_index(client: Any, index: Any) -> None: + # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -73,6 +74,7 @@ def create_flat_git_index(client: Any, index: Any) -> None: def create_git_index(client: Any, index: Any) -> None: + # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1095,6 +1097,7 @@ def create_git_index(client: Any, index: Any) -> None: def flatten_doc(d: Any) -> Dict[str, Any]: + # pylint: 
disable=missing-function-docstring src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1104,6 +1107,7 @@ def flatten_doc(d: Any) -> Dict[str, Any]: def create_test_git_data(d: Any) -> Dict[str, Any]: + # pylint: disable=missing-function-docstring src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 53e24173..47b0fd4e 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -132,6 +132,7 @@ class Index: def test_serialization(write_client: Any) -> None: + # pylint: disable=missing-function-docstring SerializationDoc.init() write_client.index( index="test-serialization", @@ -162,6 +163,7 @@ def test_serialization(write_client: Any) -> None: def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring history_query = Q( "nested", path="comments.history", @@ -190,6 +192,7 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring s = PullRequest.search().query( "nested", inner_hits={}, @@ -205,6 +208,7 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: + # pylint: disable=missing-function-docstring s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -217,6 +221,7 @@ def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: def test_update_object_field(write_client: Any) -> None: + # pylint: disable=missing-function-docstring Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -237,6 +242,7 @@ def test_update_object_field(write_client: Any) -> None: def test_update_script(write_client: Any) -> None: + # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -247,6 +253,7 @@ def test_update_script(write_client: Any) -> None: def test_update_retry_on_conflict(write_client: Any) -> None: + # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -262,6 +269,7 @@ def test_update_retry_on_conflict(write_client: Any) -> None: @pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) -> None: + # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -279,6 +287,7 @@ def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) - def test_save_and_update_return_doc_meta(write_client: Any) -> None: + # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = w.save(return_doc_meta=True) @@ -303,32 +312,38 @@ def test_save_and_update_return_doc_meta(write_client: Any) -> None: def test_init(write_client: Any) -> None: + # pylint: disable=missing-function-docstring Repository.init(index="test-git") assert write_client.indices.exists(index="test-git") def 
test_get_raises_404_on_index_missing(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): Repository.get("opensearch-dsl-php", index="not-there") def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): Repository.get("opensearch-dsl-php") def test_get_returns_none_if_404_ignored(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert None is Repository.get("opensearch-dsl-php", ignore=404) def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( data_client: Any, ) -> None: + # pylint: disable=missing-function-docstring assert None is Repository.get("42", index="not-there", ignore=404) def test_get(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -337,14 +352,17 @@ def test_get(data_client: Any) -> None: def test_exists_return_true(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert Repository.exists("opensearch-py") def test_exists_false(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert not Repository.exists("opensearch-dsl-php") def test_get_with_tz_date(data_client: Any) -> None: + # pylint: disable=missing-function-docstring first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -357,6 +375,7 @@ def test_get_with_tz_date(data_client: Any) -> None: def test_save_with_tz_date(data_client: Any) -> None: + # pylint: disable=missing-function-docstring tzinfo = timezone("Europe/Prague") first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -384,6 +403,7 @@ def test_save_with_tz_date(data_client: Any) -> None: def test_mget(data_client: Any) -> None: + # pylint: disable=missing-function-docstring commits = Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -392,22 +412,26 @@ def test_mget(data_client: Any) -> None: def test_mget_raises_exception_when_missing_param_is_invalid(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(ValueError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: + # pylint: disable=missing-function-docstring with raises(NotFoundError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client: Any) -> None: + # pylint: disable=missing-function-docstring commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" def test_update_works_from_search_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.search().execute()[0] opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -419,6 +443,7 @@ def test_update_works_from_search_response(data_client: Any) -> None: def test_update(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -443,6 +468,7 @@ def test_update(data_client: Any) -> None: def test_save_updates_existing_doc(data_client: Any) 
-> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -456,6 +482,7 @@ def test_save_updates_existing_doc(data_client: Any) -> None: def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -464,6 +491,7 @@ def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> No def test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: + # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -472,12 +500,14 @@ def test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> def assert_doc_equals(expected: Any, actual: Any) -> None: + # pylint: disable=missing-function-docstring for f in expected: assert f in actual assert actual[f] == expected[f] def test_can_save_to_different_index(write_client: Any) -> None: + # pylint: disable=missing-function-docstring test_repo = Repository(description="testing", meta={"id": 42}) assert test_repo.save(index="test-document") @@ -493,6 +523,7 @@ def test_can_save_to_different_index(write_client: Any) -> None: def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> None: + # pylint: disable=missing-function-docstring test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert test_repo.save(index="test-document", skip_empty=False) @@ -508,6 +539,7 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> def test_delete(write_client: Any) -> None: + # pylint: disable=missing-function-docstring write_client.create( index="test-document", id="opensearch-py", @@ -529,10 +561,12 @@ def test_delete(write_client: Any) -> None: def test_search(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert Repository.search().count() == 1 def test_search_returns_proper_doc_classes(data_client: Any) -> None: + # pylint: disable=missing-function-docstring result = Repository.search().execute() opensearch_repo = result.hits[0] @@ -542,6 +576,7 @@ def test_search_returns_proper_doc_classes(data_client: Any) -> None: def test_refresh_mapping(data_client: Any) -> None: + # pylint: disable=missing-function-docstring class Commit(Document): class Index: name = "git" @@ -558,6 +593,7 @@ class Index: def test_highlight_in_meta(data_client: Any) -> None: + # pylint: disable=missing-function-docstring commit = ( Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 54e49c9d..522b6e05 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -68,6 +68,7 @@ class MetricSearch(FacetedSearch): @pytest.fixture(scope="session") # type: ignore def commit_search_cls(opensearch_version: Any) -> Any: + # pylint: disable=missing-function-docstring interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(FacetedSearch): @@ -93,6 +94,7 @@ class CommitSearch(FacetedSearch): @pytest.fixture(scope="session") # type: ignore def repo_search_cls(opensearch_version: Any) -> Any: + # pylint: disable=missing-function-docstring interval_type = "calendar_interval" class 
RepoSearch(FacetedSearch): @@ -114,6 +116,7 @@ def search(self) -> Any: @pytest.fixture(scope="session") # type: ignore def pr_search_cls(opensearch_version: Any) -> Any: + # pylint: disable=missing-function-docstring interval_type = "calendar_interval" class PRSearch(FacetedSearch): @@ -132,6 +135,7 @@ class PRSearch(FacetedSearch): def test_facet_with_custom_metric(data_client: Any) -> None: + # pylint: disable=missing-function-docstring ms = MetricSearch() r = ms.execute() @@ -141,6 +145,7 @@ def test_facet_with_custom_metric(data_client: Any) -> None: def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring prs = pr_search_cls() r = prs.execute() @@ -149,6 +154,7 @@ def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = prs.execute() @@ -161,6 +167,7 @@ def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring rs = repo_search_cls() r = rs.execute() @@ -169,6 +176,7 @@ def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring rs = repo_search_cls() r = rs.execute() @@ -181,6 +189,7 @@ def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: def test_empty_search_finds_everything( data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls() r = cs.execute() assert r.hits.total.value == 52 @@ -227,6 +236,7 @@ def test_empty_search_finds_everything( def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = cs.execute() @@ -273,6 +283,7 @@ def test_term_filters_are_shown_as_selected_and_data_is_filtered( def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"deletions": "better"}) r = cs.execute() @@ -281,6 +292,7 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered( def test_pagination(data_client: Any, commit_search_cls: Any) -> None: + # pylint: disable=missing-function-docstring cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 5b8250b4..8a7566e0 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -36,6 +36,7 @@ class Post(Document): def test_index_template_works(write_client: Any) -> None: + # pylint: disable=missing-function-docstring it = IndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -57,6 +58,7 @@ def test_index_template_works(write_client: Any) -> None: def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: + # pylint: disable=missing-function-docstring i = Index("test-blog", using=write_client) 
i.settings(number_of_shards=3, number_of_replicas=0) i.save() @@ -69,11 +71,13 @@ def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: def test_index_exists(data_client: Any) -> None: + # pylint: disable=missing-function-docstring assert Index("git").exists() assert not Index("not-there").exists() def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> None: + # pylint: disable=missing-function-docstring i = Index("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -99,6 +103,7 @@ def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> N def test_delete(write_client: Any) -> None: + # pylint: disable=missing-function-docstring write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -110,6 +115,7 @@ def test_delete(write_client: Any) -> None: def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: + # pylint: disable=missing-function-docstring i1 = Index("test-index-1", using=write_client) i2 = Index("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index 62d608a6..675b645a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -33,6 +33,7 @@ def test_mapping_saved_into_opensearch(write_client: Any) -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -55,6 +56,7 @@ def test_mapping_saved_into_opensearch(write_client: Any) -> None: def test_mapping_saved_into_opensearch_when_index_already_exists_closed( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -80,6 +82,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_closed( def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client: Any, ) -> None: + # pylint: disable=missing-function-docstring m = mapping.Mapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -109,6 +112,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: + # pylint: disable=missing-function-docstring write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index bbad6cc3..1f37471d 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -64,6 +64,7 @@ class Index: def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: + # pylint: disable=missing-function-docstring has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -85,6 +86,7 @@ def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Commit.search()[0:0] 
s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -101,6 +103,7 @@ def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Search(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -112,6 +115,7 @@ def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: def test_scan_respects_doc_types(data_client: Any) -> None: + # pylint: disable=missing-function-docstring repos = list(Repository.search().scan()) assert 1 == len(repos) @@ -120,6 +124,7 @@ def test_scan_respects_doc_types(data_client: Any) -> None: def test_scan_iterates_through_all_docs(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Search(index="flat-git") commits = list(s.scan()) @@ -129,6 +134,7 @@ def test_scan_iterates_through_all_docs(data_client: Any) -> None: def test_response_is_cached(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Repository.search() repos = list(s) @@ -137,6 +143,7 @@ def test_response_is_cached(data_client: Any) -> None: def test_multi_search(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = Search(index="flat-git") @@ -154,6 +161,7 @@ def test_multi_search(data_client: Any) -> None: def test_multi_missing(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = Search(index="flat-git") s3 = Search(index="does_not_exist") @@ -177,6 +185,7 @@ def test_multi_missing(data_client: Any) -> None: def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: + # pylint: disable=missing-function-docstring s = Search(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index 07d592c7..c1d30a6e 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -31,6 +31,7 @@ def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -50,6 +51,7 @@ def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> N def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -67,6 +69,7 @@ def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> def test_delete_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: + # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index 6ecac372..0bf05c54 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -23,6 +23,7 @@ class TestAlertingPlugin(OpenSearchTestCase): "Plugin not supported for opensearch version", ) def test_create_destination(self) -> None: + # pylint: disable=missing-function-docstring # Test to create alert destination dummy_destination = { "name": "my-destination", @@ 
-39,6 +40,7 @@ def test_create_destination(self) -> None: "Plugin not supported for opensearch version", ) def test_get_destination(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy destination self.test_create_destination() @@ -54,6 +56,7 @@ def test_get_destination(self) -> None: "Plugin not supported for opensearch version", ) def test_create_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy destination self.test_create_destination() @@ -119,6 +122,7 @@ def test_create_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_search_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() @@ -137,6 +141,7 @@ def test_search_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_get_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() @@ -161,6 +166,7 @@ def test_get_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_run_monitor(self) -> None: + # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index 1d2b696f..77fabee5 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -64,6 +64,7 @@ class TestIndexManagementPlugin(OpenSearchTestCase): } def test_create_policy(self) -> None: + # pylint: disable=missing-function-docstring # Test to create policy response = self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -73,6 +74,7 @@ def test_create_policy(self) -> None: self.assertIn("_id", response) def test_get_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() @@ -84,6 +86,7 @@ def test_get_policy(self) -> None: self.assertEqual(response["_id"], self.POLICY_NAME) def test_update_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() @@ -106,6 +109,7 @@ def test_update_policy(self) -> None: self.assertIn("_id", response) def test_delete_policy(self) -> None: + # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 8df3895a..e1cd315e 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -143,6 +143,7 @@ class YamlRunner: def __init__(self, client: Any) -> None: + # pylint: disable=missing-function-docstring self.client = client self.last_response: Any = None @@ -152,12 +153,13 @@ def __init__(self, client: Any) -> None: self._state: Any = {} def use_spec(self, test_spec: Any) -> None: + # pylint: disable=missing-function-docstring self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) def setup(self) -> Any: - # Pull skips from individual tests to not do unnecessary setup. 
+ """Pull skips from individual tests to not do unnecessary setup.""" skip_code: Any = [] for action in self._run_code: assert len(action) == 1 @@ -175,11 +177,13 @@ def setup(self) -> Any: self.run_code(self._setup_code) def teardown(self) -> Any: + # pylint: disable=missing-function-docstring if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) def opensearch_version(self) -> Any: + # pylint: disable=missing-function-docstring global OPENSEARCH_VERSION if OPENSEARCH_VERSION is None: version_string = (self.client.info())["version"]["number"] @@ -190,9 +194,11 @@ def opensearch_version(self) -> Any: return OPENSEARCH_VERSION def section(self, name: str) -> None: + # pylint: disable=missing-function-docstring print(("=" * 10) + " " + name + " " + ("=" * 10)) def run(self) -> Any: + # pylint: disable=missing-function-docstring try: self.setup() self.section("test") @@ -216,6 +222,7 @@ def run_code(self, test: Any) -> Any: raise RuntimeError("Invalid action type %r" % (action_type,)) def run_do(self, action: Any) -> Any: + # pylint: disable=missing-function-docstring api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -282,6 +289,7 @@ def run_do(self, action: Any) -> Any: ) def run_catch(self, catch: Any, exception: Any) -> None: + # pylint: disable=missing-function-docstring if catch == "param": assert isinstance(exception, TypeError) return @@ -297,6 +305,7 @@ def run_catch(self, catch: Any, exception: Any) -> None: self.last_response = exception.info def run_skip(self, skip: Any) -> Any: + # pylint: disable=missing-function-docstring global IMPLEMENTED_FEATURES if "features" in skip: @@ -319,31 +328,37 @@ def run_skip(self, skip: Any) -> Any: pytest.skip(reason) def run_gt(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value def run_gte(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value def run_lt(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value def run_lte(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value def run_set(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) def run_is_false(self, action: Any) -> None: + # pylint: disable=missing-function-docstring try: value = self._lookup(action) except AssertionError: @@ -352,16 +367,19 @@ def run_is_false(self, action: Any) -> None: assert value in FALSEY_VALUES def run_is_true(self, action: Any) -> None: + # pylint: disable=missing-function-docstring value = self._lookup(action) assert value not in FALSEY_VALUES def run_length(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) def run_match(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) @@ -380,6 +398,7 @@ def run_match(self, action: Any) -> None: 
self._assert_match_equals(value, expected) def run_contains(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] @@ -388,6 +407,7 @@ def run_contains(self, action: Any) -> None: raise AssertionError("%s is not contained by %s" % (expected, value)) def run_transform_and_set(self, action: Any) -> None: + # pylint: disable=missing-function-docstring for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: @@ -463,6 +483,7 @@ def _assert_match_equals(self, a: Any, b: Any) -> None: @pytest.fixture(scope="function") # type: ignore def sync_runner(sync_client: Any) -> Any: + # pylint: disable=missing-function-docstring return YamlRunner(sync_client) @@ -472,7 +493,7 @@ def sync_runner(sync_client: Any) -> Any: def load_rest_api_tests() -> None: - # Try loading the REST API test specs from OpenSearch core. + """Try loading the REST API test specs from OpenSearch core.""" try: # Construct the HTTP and OpenSearch client http = urllib3.PoolManager(retries=10) @@ -542,6 +563,7 @@ def load_rest_api_tests() -> None: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore def test_rest_api_spec(test_spec: Any, sync_runner: Any) -> None: + # pylint: disable=missing-function-docstring if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py index c885b58d..52f33ccc 100644 --- a/test_opensearchpy/test_server_secured/test_clients.py +++ b/test_opensearchpy/test_server_secured/test_clients.py @@ -15,6 +15,7 @@ class TestSecurity(TestCase): def test_security(self) -> None: + # pylint: disable=missing-function-docstring client = OpenSearch( OPENSEARCH_URL, http_auth=("admin", "admin"), diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 8f8c6937..2196eb73 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -44,6 +44,7 @@ def tearDown(self) -> None: self.client.close() def test_create_role(self) -> None: + # pylint: disable=missing-function-docstring # Test to create role response = self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -53,6 +54,7 @@ def test_create_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_create_role_with_body_param_empty(self) -> None: + # pylint: disable=missing-function-docstring try: self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -61,6 +63,7 @@ def test_create_role_with_body_param_empty(self) -> None: assert False def test_get_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role self.test_create_role() @@ -71,6 +74,7 @@ def test_get_role(self) -> None: self.assertIn(self.ROLE_NAME, response) def test_update_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role self.test_create_role() @@ -84,6 +88,7 @@ def test_update_role(self) -> None: self.assertEqual("OK", response.get("status")) def test_delete_role(self) -> None: + # pylint: disable=missing-function-docstring # Create a role 
self.test_create_role() @@ -97,6 +102,7 @@ def test_delete_role(self) -> None: response = self.client.security.get_role(self.ROLE_NAME) def test_create_user(self) -> None: + # pylint: disable=missing-function-docstring # Test to create user response = self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -106,6 +112,7 @@ def test_create_user(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_create_user_with_body_param_empty(self) -> None: + # pylint: disable=missing-function-docstring try: self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -114,6 +121,7 @@ def test_create_user_with_body_param_empty(self) -> None: assert False def test_create_user_with_role(self) -> None: + # pylint: disable=missing-function-docstring self.test_create_role() # Test to create user @@ -129,6 +137,7 @@ def test_create_user_with_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_get_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user self.test_create_user() @@ -139,6 +148,7 @@ def test_get_user(self) -> None: self.assertIn(self.USER_NAME, response) def test_update_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user self.test_create_user() @@ -152,6 +162,7 @@ def test_update_user(self) -> None: self.assertEqual("OK", response.get("status")) def test_delete_user(self) -> None: + # pylint: disable=missing-function-docstring # Create a user self.test_create_user() @@ -165,11 +176,13 @@ def test_delete_user(self) -> None: response = self.client.security.get_user(self.USER_NAME) def test_health_check(self) -> None: + # pylint: disable=missing-function-docstring response = self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) def test_health(self) -> None: + # pylint: disable=missing-function-docstring response = self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -203,6 +216,7 @@ def test_health(self) -> None: } def test_update_audit_config(self) -> None: + # pylint: disable=missing-function-docstring response = self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) @@ -210,6 +224,7 @@ def test_update_audit_config(self) -> None: self.assertEqual("OK", response.get("status")) def test_update_audit_configuration(self) -> None: + # pylint: disable=missing-function-docstring response = self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 6a09b83b..93e12fdc 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -109,6 +109,7 @@ def perform_request(self, *args: Any, **kwargs: Any) -> Any: class TestHostsInfoCallback(TestCase): def test_cluster_manager_only_nodes_are_ignored(self) -> None: + # pylint: disable=missing-function-docstring nodes = [ {"roles": ["cluster_manager"]}, {"roles": ["cluster_manager", "data", "ingest"]}, @@ -126,12 +127,14 @@ def test_cluster_manager_only_nodes_are_ignored(self) -> None: class TestTransport(TestCase): def test_single_connection_uses_dummy_connection_pool(self) -> None: + # pylint: disable=missing-function-docstring t1: Any = Transport([{}]) self.assertIsInstance(t1.connection_pool, DummyConnectionPool) t2: Any = Transport([{"host": "localhost"}]) self.assertIsInstance(t2.connection_pool, 
DummyConnectionPool) def test_request_timeout_extracted_from_params_and_passed(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -143,6 +146,7 @@ def test_request_timeout_extracted_from_params_and_passed(self) -> None: ) def test_timeout_extracted_from_params_and_passed(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"timeout": 84}) @@ -154,6 +158,7 @@ def test_timeout_extracted_from_params_and_passed(self) -> None: ) def test_opaque_id(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) t.perform_request("GET", "/") @@ -174,6 +179,7 @@ def test_opaque_id(self) -> None: ) def test_request_with_custom_user_agent_header(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) @@ -188,6 +194,7 @@ def test_request_with_custom_user_agent_header(self) -> None: ) def test_send_get_body_as_source(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -199,6 +206,7 @@ def test_send_get_body_as_source(self) -> None: ) def test_send_get_body_as_post(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -208,6 +216,7 @@ def test_send_get_body_as_post(self) -> None: self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) def test_body_gets_encoded_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好") @@ -218,6 +227,7 @@ def test_body_gets_encoded_into_bytes(self) -> None: ) def test_body_bytes_get_passed_untouched(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -226,6 +236,7 @@ def test_body_bytes_get_passed_untouched(self) -> None: self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好\uda6a") @@ -236,16 +247,19 @@ def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) def test_kwargs_passed_on_to_connections(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{"host": "google.com"}], port=123) self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) def test_kwargs_passed_on_to_connection_pool(self) -> None: + # pylint: disable=missing-function-docstring dt = object() t: Any = Transport([{}, {}], dead_timeout=dt) self.assertIs(dt, t.connection_pool.dead_timeout) def test_custom_connection_class(self) -> None: + # pylint: disable=missing-function-docstring class MyConnection(Connection): def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs @@ -255,6 +269,7 @@ def __init__(self, **kwargs: Any) -> None: 
self.assertIsInstance(t.connection_pool.connections[0], MyConnection) def test_add_connection(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) @@ -264,6 +279,7 @@ def test_add_connection(self) -> None: ) def test_request_will_fail_after_x_retries(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -273,6 +289,7 @@ def test_request_will_fail_after_x_retries(self) -> None: self.assertEqual(4, len(t.get_connection().calls)) def test_failed_connection_will_be_marked_as_dead(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, @@ -282,6 +299,7 @@ def test_failed_connection_will_be_marked_as_dead(self) -> None: self.assertEqual(0, len(t.connection_pool.connections)) def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: + # pylint: disable=missing-function-docstring for method in ("GET", "HEAD"): t: Any = Transport([{}, {}], connection_class=DummyConnection) con1 = t.connection_pool.get_connection() @@ -294,6 +312,7 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: self.assertEqual(1, len(t.connection_pool.dead_count)) def test_sniff_will_use_seed_connections(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t.set_connections([{"data": "invalid"}]) @@ -302,6 +321,7 @@ def test_sniff_will_use_seed_connections(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -311,6 +331,7 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_ignores_sniff_timeout(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -323,6 +344,7 @@ def test_sniff_on_start_ignores_sniff_timeout(self) -> None: ) def test_sniff_uses_sniff_timeout(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -335,6 +357,7 @@ def test_sniff_uses_sniff_timeout(self) -> None: ) def test_sniff_reuses_connection_instances_if_possible(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, @@ -347,6 +370,7 @@ def test_sniff_reuses_connection_instances_if_possible(self) -> None: self.assertIs(connection, t.get_connection()) def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [ {"exception": ConnectionError(None, "abandon ship", Exception())}, @@ -366,6 +390,7 @@ def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: def test_sniff_on_fail_failing_does_not_prevent_retires( self, sniff_hosts: Any ) -> None: + # pylint: disable=missing-function-docstring sniff_hosts.side_effect = [TransportError("sniff 
failed")] t: Any = Transport( [ @@ -386,6 +411,7 @@ def test_sniff_on_fail_failing_does_not_prevent_retires( self.assertEqual(1, len(conn_data.calls)) def test_sniff_after_n_seconds(self) -> None: + # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -404,6 +430,7 @@ def test_sniff_after_n_seconds(self) -> None: self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01) def test_sniff_7x_publish_host(self) -> None: + # pylint: disable=missing-function-docstring # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. t: Any = Transport( diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index d4469600..6f315517 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -60,6 +60,7 @@ def wipe_cluster(client: Any) -> None: def wipe_cluster_settings(client: Any) -> None: + # pylint: disable=missing-function-docstring settings = client.cluster.get_settings() new_settings: Any = {} for name, value in settings.items(): @@ -97,6 +98,7 @@ def wipe_snapshots(client: Any) -> None: def wipe_data_streams(client: Any) -> None: + # pylint: disable=missing-function-docstring try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: @@ -104,6 +106,7 @@ def wipe_data_streams(client: Any) -> None: def wipe_indices(client: Any) -> None: + # pylint: disable=missing-function-docstring client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", @@ -112,6 +115,7 @@ def wipe_indices(client: Any) -> None: def wipe_searchable_snapshot_indices(client: Any) -> None: + # pylint: disable=missing-function-docstring cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", @@ -122,16 +126,19 @@ def wipe_searchable_snapshot_indices(client: Any) -> None: def wipe_slm_policies(client: Any) -> None: + # pylint: disable=missing-function-docstring for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) def wipe_auto_follow_patterns(client: Any) -> None: + # pylint: disable=missing-function-docstring for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: client.ccr.delete_auto_follow_pattern(name=pattern["name"]) def wipe_node_shutdown_metadata(client: Any) -> None: + # pylint: disable=missing-function-docstring shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. 
@@ -144,6 +151,7 @@ def wipe_node_shutdown_metadata(client: Any) -> None: def wipe_tasks(client: Any) -> None: + # pylint: disable=missing-function-docstring tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): @@ -151,6 +159,7 @@ def wipe_tasks(client: Any) -> None: def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: + # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True).split("\n") @@ -159,6 +168,7 @@ def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: + # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: if ( @@ -172,6 +182,7 @@ def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: def wait_for_cluster_state_updates_to_finish(client: Any, timeout: int = 30) -> None: + # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): diff --git a/utils/disable_pylint_check.py b/utils/disable_pylint_check.py new file mode 100644 index 00000000..85ddec39 --- /dev/null +++ b/utils/disable_pylint_check.py @@ -0,0 +1,59 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +import os +import re +from typing import Generator + +import black + + +def test_files() -> Generator[str, None, None]: + """ + generator to yield full paths to files that look like unit tests + """ + # TODO should support tests in __init__.py files + test_source_files_re = re.compile(r".*test_[^/]+\.py$") + include_dirs = ["test_opensearchpy", "samples", "benchmarks"] + for top in include_dirs: + for root, dirs, files in os.walk(top, topdown=True): + for name in files: + full_path = os.path.join(root, name) + if test_source_files_re.match(full_path): + yield full_path + + +if __name__ == "__main__": + """ + adds a disable instruction for test_ methods for missing-function-docstring. test methods typically have good names + and can go without docstring for comments. this is destructive so use git as part of the process. 
+ + """ + MISSING_FUNCTION_DOCSTRING_DISABLE = "# pylint: disable=missing-function-docstring" + test_method_re = re.compile( + r"(?P[^\S\r\n]*)(?Pasync)*(?P\s*def\stest_.*:)" + r"(?P(\n|.)*?)(?P\s*#\spylint.*)*", + flags=re.MULTILINE, + ) + for file in test_files(): + new_file_contents = "" # pylint: disable=C0103 + with open(file) as test_file: + print(f"Working on {file}") + full_file = test_file.read() + # TODO multiline function declarations are not supported + new_file_contents = re.sub( + test_method_re, + r"\g\g\g\n\g\g" + + MISSING_FUNCTION_DOCSTRING_DISABLE, + full_file, + ) + new_file_contents = black.format_str( + new_file_contents, mode=black.FileMode() + ) + with open(f"{file}", "w") as new_file: + new_file.write(new_file_contents) From d26eba5ccd646ffce4110841fee14ff46be7d2e6 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Mon, 8 Jan 2024 10:02:58 -0500 Subject: [PATCH 08/14] * ignoring opensearchpy for pylint and then added it back to noxfile.py * fixed some lints; created a feature flag for newer dynamic pylint so now lints can be fixed first in legacy code and then enabled by multiple people * extracted a method for per-folder linting * updated noxfile.lint_per_folder with type hints * enabled unspecified-encoding in pylint * added disable missing-function-docstring pragma to test_clients.py in test_async and test_server * added more encodings to pass unspecified-encoding pylint tests * updated changelog Signed-off-by: Mark Cohen --- CHANGELOG.md | 1 + noxfile.py | 63 ++++++++++++++++++- setup.cfg | 3 +- setup.py | 6 +- .../test_async/test_server/test_clients.py | 2 + test_opensearchpy/test_server/test_clients.py | 2 + utils/build_dists.py | 10 +-- utils/disable_pylint_check.py | 4 +- utils/generate_api.py | 14 ++--- utils/license_headers.py | 6 +- 10 files changed, 89 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 835c403d..c002d868 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added +- Added pylint `unspecified-encoding` and `missing-function-docstring` and ignored opensearchpy for lints; added new linting per directory option behind feature flag (([#643](https://github.com/opensearch-project/opensearch-py/pull/643))) - Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) - Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611)) - Added a log collection guide ([#579](https://github.com/opensearch-project/opensearch-py/pull/579)) diff --git a/noxfile.py b/noxfile.py index d080903e..be11c083 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,7 +32,7 @@ SOURCE_FILES = ( "setup.py", "noxfile.py", - # "opensearchpy/", + "opensearchpy/", "test_opensearchpy/", "utils/", "samples/", @@ -92,7 +92,15 @@ def lint(session: Any) -> None: session.run("isort", "--check", *SOURCE_FILES) session.run("black", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) - session.run("pylint", *SOURCE_FILES) + if ( + # run export NOXFILE_PYLINT_PARAMS_FEATURE=true on the command line to run this code + "NOXFILE_PYLINT_PARAMS_FEATURE" in session.env + and session.env["NOXFILE_PYLINT_PARAMS_FEATURE"] + ): + lint_per_folder(session) + else: + session.run("pylint", *SOURCE_FILES) + session.run("python", "utils/license_headers.py", "check", *SOURCE_FILES) # Workaround to make '-r' to still work despite uninstalling aiohttp below. 
@@ -111,6 +119,57 @@ def lint(session: Any) -> None: session.run("mypy", "--strict", "test_opensearchpy/test_types/sync_types.py") +def lint_per_folder(session: Any) -> None: + """ + allows configuration of pylint rules per folder and runs a pylint command for each folder + :param session: the current nox session + """ + # tests should not require function docstrings - tests function names describe themselves; + # opensearchpy is generated; may require in the generator code some places + default_enable = [ + "line-too-long", + "invalid-name", + "pointless-statement", + "unspecified-encoding", + "missing-function-docstring", + ] + override_enable = { + "test_opensearchpy/": [ + "line-too-long", + # "invalid-name", lots of short functions with one or two character names + "pointless-statement", + "unspecified-encoding", + "redefined-outer-name", + ], + # "opensearchpy/": [""], + } + # import-outside-toplevel + # enable = line-too-long, invalid-name, pointless-statement, unspecified-encoding, + # missing-function-docstring + # should fail the build: redefined-outer-name, , line-too-long, invalid-name, + # pointless-statement, + # import-outside-toplevel, unused-variable, unexpected-keyword-arg, + # raise-missing-from, invalid-unary-operand-type, + # attribute-defined-outside-init, unspecified-encoding + # should be warnings: super-with-arguments, too-few-public-methods, redefined-builtin, + # too-many-arguments + # (how many is too many?), useless-object-inheritance, too-many-locals, + # too-many-branches, dangerous-default-value, + # arguments-renamed + # warn, then fail later (low priority): too-many-locals, unnecessary-dunder-call, + # too-many-public-methods, + # no-else-return, invalid-overridden-method, cyclic-import + # does this conflict with isort? 
wrong-import-position + for source_file in SOURCE_FILES: + args = ["--disable=all"] + if source_file in override_enable: + args.append(f"--enable={','.join(override_enable[source_file])}") + else: + args.append(f"--enable={','.join(default_enable)}") + args.append(source_file) + session.run("pylint", *args) + + @nox.session() # type: ignore def docs(session: Any) -> None: # pylint: disable=missing-function-docstring diff --git a/setup.cfg b/setup.cfg index d65bae6a..74529987 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,4 +28,5 @@ good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring +enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring, unspecified-encoding +ignore=opensearchpy diff --git a/setup.py b/setup.py index 78cc2a23..057fda18 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,9 @@ PACKAGE_VERSION = "" BASE_DIR = abspath(dirname(__file__)) -with open(join(BASE_DIR, PACKAGE_NAME.replace("-", ""), "_version.py")) as f: +with open( + join(BASE_DIR, PACKAGE_NAME.replace("-", ""), "_version.py"), encoding="utf-8" +) as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) if m: @@ -42,7 +44,7 @@ else: raise Exception(f"Invalid version: {data}") -with open(join(BASE_DIR, "README.md")) as f: +with open(join(BASE_DIR, "README.md"), encoding="utf-8") as f: long_description = f.read().strip() MODULE_DIR = PACKAGE_NAME.replace("-", "") diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index f544a858..226efd96 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -76,11 +76,13 @@ async def test_aiohttp_connection_works_without_yarl( class TestClose: async def test_close_doesnt_break_client(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring await async_client.cluster.health() await async_client.close() await async_client.cluster.health() async def test_with_doesnt_break_client(self, async_client: Any) -> None: + # pylint: disable=missing-function-docstring for _ in range(2): async with async_client as client: await client.cluster.health() diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index a236299e..4bc6170a 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -56,11 +56,13 @@ def test_bulk_works_with_bytestring_body(self) -> None: class TestClose(OpenSearchTestCase): def test_close_doesnt_break_client(self) -> None: + # pylint: disable=missing-function-docstring self.client.cluster.health() self.client.close() self.client.cluster.health() def test_with_doesnt_break_client(self) -> None: + # pylint: disable=missing-function-docstring for _ in range(2): with self.client as client: client.cluster.health() diff --git a/utils/build_dists.py b/utils/build_dists.py index 0aab45de..dce731c2 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -205,7 +205,7 @@ def main() -> None: # Grab the major version to be used as a suffix. 
version_path = os.path.join(BASE_DIR, "opensearchpy/_version.py") - with open(version_path) as f: + with open(version_path, encoding="utf-8") as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) if m: @@ -272,22 +272,22 @@ def main() -> None: # Ensure that the version within 'opensearchpy/_version.py' is correct. version_path = os.path.join(BASE_DIR, f"opensearchpy{suffix}/_version.py") - with open(version_path) as f: + with open(version_path, encoding="utf-8") as f: version_data = f.read() version_data = re.sub( r"__versionstr__: str = \"[^\"]+\"", '__versionstr__: str = "%s"' % version, version_data, ) - with open(version_path, "w") as f: + with open(version_path, "w", encoding="utf-8") as f: f.truncate() f.write(version_data) # Rewrite setup.py with the new name. setup_py_path = os.path.join(BASE_DIR, "setup.py") - with open(setup_py_path) as f: + with open(setup_py_path, encoding="utf-8") as f: setup_py = f.read() - with open(setup_py_path, "w") as f: + with open(setup_py_path, "w", encoding="utf-8") as f: f.truncate() assert 'PACKAGE_NAME = "opensearch-py"' in setup_py f.write( diff --git a/utils/disable_pylint_check.py b/utils/disable_pylint_check.py index 85ddec39..ae40d216 100644 --- a/utils/disable_pylint_check.py +++ b/utils/disable_pylint_check.py @@ -42,7 +42,7 @@ def test_files() -> Generator[str, None, None]: ) for file in test_files(): new_file_contents = "" # pylint: disable=C0103 - with open(file) as test_file: + with open(file, encoding="utf-8") as test_file: print(f"Working on {file}") full_file = test_file.read() # TODO multiline function declarations are not supported @@ -55,5 +55,5 @@ def test_files() -> Generator[str, None, None]: new_file_contents = black.format_str( new_file_contents, mode=black.FileMode() ) - with open(f"{file}", "w") as new_file: + with open(f"{file}", "w", encoding="utf-8") as new_file: new_file.write(new_file_contents) diff --git a/utils/generate_api.py b/utils/generate_api.py index 886b2c35..5daef74f 100644 --- a/utils/generate_api.py +++ b/utils/generate_api.py @@ -122,7 +122,7 @@ def parse_orig(self) -> None: namespace_new = "".join(word.capitalize() for word in self.namespace.split("_")) self.header += "class " + namespace_new + "Client(NamespacedClient):" if os.path.exists(self.filepath): - with open(self.filepath) as f: + with open(self.filepath, encoding="utf-8") as f: content = f.read() header_lines = [] for line in content.split("\n"): @@ -175,7 +175,7 @@ def dump(self) -> None: # Identifying the insertion point for the "THIS CODE IS AUTOMATICALLY GENERATED" header. if os.path.exists(self.filepath): - with open(self.filepath, "r") as f: + with open(self.filepath, "r", encoding="utf-8") as f: content = f.read() if header_separator in content: update_header = False @@ -202,7 +202,7 @@ def dump(self) -> None: generated_file_header_path = os.path.join( current_script_folder, "generated_file_headers.txt" ) - with open(generated_file_header_path, "r") as header_file: + with open(generated_file_header_path, "r", encoding="utf-8") as header_file: header_content = header_file.read() # Imports are temporarily removed from the header and are regenerated later to ensure imports are updated after code generation. 
@@ -210,7 +210,7 @@ def dump(self) -> None:
             line for line in self.header.split("\n") if "from .utils import" not in line
         )

-        with open(self.filepath, "w") as f:
+        with open(self.filepath, "w", encoding="utf-8") as f:
             if update_header is True:
                 f.write(
                     self.header[:license_position]
@@ -233,7 +233,7 @@ def dump(self) -> None:
         # Generating imports for each module
         utils_imports = ""
         file_content = ""
-        with open(self.filepath, "r") as f:
+        with open(self.filepath, "r", encoding="utf-8") as f:
             content = f.read()
             keywords = [
                 "SKIP_IN_PATH",
@@ -254,7 +254,7 @@ def dump(self) -> None:
             utils_imports = result
         file_content = content.replace("#replace_token#", utils_imports)

-        with open(self.filepath, "w") as f:
+        with open(self.filepath, "w", encoding="utf-8") as f:
             f.write(file_content)

     @property
@@ -715,7 +715,7 @@ def apply_patch(namespace: str, name: str, api: Any) -> Any:
         CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json"
     )
     if os.path.exists(override_file_path):
-        with open(override_file_path) as f:
+        with open(override_file_path, encoding="utf-8") as f:
             override_json = json.load(f)
         api = deepmerge.always_merger.merge(api, override_json)
     return api
diff --git a/utils/license_headers.py b/utils/license_headers.py
index 9724660b..8e630130 100644
--- a/utils/license_headers.py
+++ b/utils/license_headers.py
@@ -55,7 +55,7 @@ def does_file_need_fix(filepath: str) -> bool:
     if not re.search(r"\.py$", filepath):
         return False
     existing_header = ""
-    with open(filepath, mode="r") as f:
+    with open(filepath, mode="r", encoding="utf-8") as f:
         for line in f:
             line = line.strip()
             if len(line) == 0 or line in LINES_TO_KEEP:
@@ -73,14 +73,14 @@ def add_header_to_file(filepath: str) -> None:
     writes the license header to the beginning of a file
     :param filepath: relative or absolute filepath to update
     """
-    with open(filepath, mode="r") as f:
+    with open(filepath, mode="r", encoding="utf-8") as f:
         lines = list(f)
     i = 0
     for i, line in enumerate(lines):
         if len(line) > 0 and line not in LINES_TO_KEEP:
             break
     lines = lines[:i] + [LICENSE_HEADER] + lines[i:]
-    with open(filepath, mode="w") as f:
+    with open(filepath, mode="w", encoding="utf-8") as f:
         f.truncate()
         f.write("".join(lines))
     print(f"Fixed {os.path.relpath(filepath, os.getcwd())}")

From 0a2302f4434166bcf2a6f752a4d031e9a40967ba Mon Sep 17 00:00:00 2001
From: Mark Cohen
Date: Mon, 8 Jan 2024 17:09:43 -0500
Subject: [PATCH 09/14] updated CHANGELOG.md entry

removed the feature flag for pylint lint_per_folder
fixed failures from mypy and pylint
removed pylint MESSAGE CONTROL config from setup.cfg after relocating to lint_per_folder method

Signed-off-by: Mark Cohen
---
 CHANGELOG.md                                  |   2 +-
 benchmarks/bench_async.py                     |   5 +-
 benchmarks/bench_info_sync.py                 |  13 +-
 benchmarks/bench_sync.py                      |  30 ++--
 docs/source/conf.py                           |   2 +-
 noxfile.py                                    |  43 ++----
 .../advanced_index_actions_sample.py          |   4 +-
 samples/aws/search_requests.py                |   9 +-
 samples/aws/search_urllib3.py                 |   7 +-
 samples/bulk/bulk_array.py                    |   2 +-
 samples/bulk/bulk_helpers.py                  |   6 +-
 samples/bulk/bulk_ld.py                       |   2 +-
 .../document_lifecycle_sample.py              |   3 +-
 samples/hello/hello.py                        |  17 ++-
 samples/hello/hello_async.py                  |  11 +-
 .../index_template/index_template_sample.py   |   1 +
 samples/json/json_hello.py                    |  12 +-
 samples/json/json_hello_async.py              |  15 +-
 samples/logging/log_collection_sample.py      |  11 +-
 setup.cfg                                     |   4 -
 test_opensearchpy/run_tests.py                |   7 +-
 .../test_async/test_connection.py             |   7 +-
 .../test_async/test_plugins_client.py         |   3 +-
 .../test_server/test_helpers/conftest.py      |  15 +-
.../test_plugins/test_plugins_client.py | 3 +- .../test_requests_http_connection.py | 15 +- .../test_urllib3_http_connection.py | 6 +- .../test_helpers/test_actions.py | 4 +- .../test_server/test_helpers/conftest.py | 22 ++- .../test_server/test_helpers/test_actions.py | 4 +- .../test_server/test_rest_api_spec.py | 20 +-- utils/build_dists.py | 39 ++--- utils/disable_pylint_check.py | 11 +- utils/generate_api.py | 140 +++++++++--------- utils/license_headers.py | 14 +- 35 files changed, 277 insertions(+), 232 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c002d868..14592afc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added -- Added pylint `unspecified-encoding` and `missing-function-docstring` and ignored opensearchpy for lints; added new linting per directory option behind feature flag (([#643](https://github.com/opensearch-project/opensearch-py/pull/643))) +- Added pylint `unspecified-encoding` and `missing-function-docstring` and ignored opensearchpy for lints (([#643](https://github.com/opensearch-project/opensearch-py/pull/643))) - Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) - Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611)) - Added a log collection guide ([#579](https://github.com/opensearch-project/opensearch-py/pull/579)) diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index 3df4e0ed..251b0b99 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -36,8 +36,9 @@ async def index_records(client: Any, index_name: str, item_count: int) -> None: async def test_async(client_count: int = 1, item_count: int = 1) -> None: """ - asynchronously index with item_count records and run client_count clients. This function can be used to - test balancing the number of items indexed with the number of documents. + asynchronously index with item_count records and run client_count + clients. This function can be used to test balancing the number of + items indexed with the number of documents. 
""" host = "localhost" port = 9200 diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 36e59814..88fb8698 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -22,13 +22,12 @@ def get_info(client: Any, request_count: int) -> float: """get info from client""" - tt: float = 0 - for n in range(request_count): + total_time: float = 0 + for request in range(request_count): start = time.time() * 1000 client.info() - total_time = time.time() * 1000 - start - tt += total_time - return tt + total_time += time.time() * 1000 - start + return total_time def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: @@ -71,8 +70,8 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) - thread.start() latency = 0 - for t in threads: - latency += t.join() + for thread in threads: + latency += thread.join() print(f"latency={latency}") diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index d86085d2..3580e596 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -23,29 +23,29 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any: """bulk index item_count records into index_name""" - tt = 0 - for n in range(10): + total_time = 0 + for iteration in range(10): data: Any = [] - for i in range(item_count): + for item in range(item_count): data.append( json.dumps({"index": {"_index": index_name, "_id": str(uuid.uuid4())}}) ) - data.append(json.dumps({"value": i})) + data.append(json.dumps({"value": item})) data = "\n".join(data) start = time.time() * 1000 - rc = client.bulk(data) - if rc["errors"]: - raise Exception(rc["errors"]) + response = client.bulk(data) + if response["errors"]: + raise Exception(response["errors"]) - server_time = rc["took"] - total_time = time.time() * 1000 - start + server_time = response["took"] + this_time = time.time() * 1000 - start - if total_time < server_time: - raise Exception(f"total={total_time} < server={server_time}") + if this_time < server_time: + raise Exception(f"total={this_time} < server={server_time}") - tt += total_time - server_time - return tt + total_time += this_time - server_time + return total_time def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: @@ -105,8 +105,8 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N thread.start() latency = 0 - for t in threads: - latency += t.join() + for thread in threads: + latency += thread.join() clients[0].indices.refresh(index=index_name) count = clients[0].count(index=index_name) diff --git a/docs/source/conf.py b/docs/source/conf.py index 64ff3c52..3f59c07d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -108,7 +108,7 @@ # The default options for autodoc directives. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_default_options autodoc_default_options: Any = { - # If set, autodoc will generate document for the members of the target module, class or exception. # noqa: E501 + # If set, autodoc will generate document for the members of the target module, class or exception. 
# noqa: E501, pylint: disable=line-too-long # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-option-automodule-members "members": True, "show-inheritance": True, diff --git a/noxfile.py b/noxfile.py index be11c083..0bce70ac 100644 --- a/noxfile.py +++ b/noxfile.py @@ -92,14 +92,8 @@ def lint(session: Any) -> None: session.run("isort", "--check", *SOURCE_FILES) session.run("black", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) - if ( - # run export NOXFILE_PYLINT_PARAMS_FEATURE=true on the command line to run this code - "NOXFILE_PYLINT_PARAMS_FEATURE" in session.env - and session.env["NOXFILE_PYLINT_PARAMS_FEATURE"] - ): - lint_per_folder(session) - else: - session.run("pylint", *SOURCE_FILES) + + lint_per_folder(session) session.run("python", "utils/license_headers.py", "check", *SOURCE_FILES) @@ -124,14 +118,16 @@ def lint_per_folder(session: Any) -> None: allows configuration of pylint rules per folder and runs a pylint command for each folder :param session: the current nox session """ - # tests should not require function docstrings - tests function names describe themselves; - # opensearchpy is generated; may require in the generator code some places + + # TODO determine how to separate generated code from generator code for linting + exclude_path_from_linting = ["opensearchpy/"] default_enable = [ "line-too-long", "invalid-name", "pointless-statement", "unspecified-encoding", "missing-function-docstring", + "unspecified-encoding", ] override_enable = { "test_opensearchpy/": [ @@ -139,28 +135,15 @@ def lint_per_folder(session: Any) -> None: # "invalid-name", lots of short functions with one or two character names "pointless-statement", "unspecified-encoding", - "redefined-outer-name", - ], - # "opensearchpy/": [""], + "missing-function-docstring", + "unspecified-encoding", + ] } - # import-outside-toplevel - # enable = line-too-long, invalid-name, pointless-statement, unspecified-encoding, - # missing-function-docstring - # should fail the build: redefined-outer-name, , line-too-long, invalid-name, - # pointless-statement, - # import-outside-toplevel, unused-variable, unexpected-keyword-arg, - # raise-missing-from, invalid-unary-operand-type, - # attribute-defined-outside-init, unspecified-encoding - # should be warnings: super-with-arguments, too-few-public-methods, redefined-builtin, - # too-many-arguments - # (how many is too many?), useless-object-inheritance, too-many-locals, - # too-many-branches, dangerous-default-value, - # arguments-renamed - # warn, then fail later (low priority): too-many-locals, unnecessary-dunder-call, - # too-many-public-methods, - # no-else-return, invalid-overridden-method, cyclic-import - # does this conflict with isort? wrong-import-position + for source_file in SOURCE_FILES: + if source_file in exclude_path_from_linting: + continue + args = ["--disable=all"] if source_file in override_enable: args.append(f"--enable={','.join(override_enable[source_file])}") diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index b5af6be4..a5df9368 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -19,8 +19,8 @@ def main() -> None: """ - demonstrates various functions to operate on the index (e.g. clear different levels of cache, refreshing the - index) + demonstrates various functions to operate on the index + (e.g. 
clear different levels of cache, refreshing the index) """ # Set up client = OpenSearch( diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py index 743d3d96..7b86602a 100644 --- a/samples/aws/search_requests.py +++ b/samples/aws/search_requests.py @@ -21,10 +21,13 @@ def main() -> None: """ - connects to a cluster specified in environment variables, creates an index, inserts documents, + connects to a cluster specified in environment variables, + creates an index, inserts documents, searches the index, deletes the document, deletes the index. - the environment variables are "ENDPOINT" for the cluster endpoint, AWS_REGION for the region in which the cluster - is hosted, and SERVICE to indicate if this is an ES 7.10.2 compatible cluster + the environment variables are "ENDPOINT" for the cluster + endpoint, AWS_REGION for the region in which the cluster + is hosted, and SERVICE to indicate if this is an ES 7.10.2 + compatible cluster """ # verbose logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py index c7382d0d..675856c2 100644 --- a/samples/aws/search_urllib3.py +++ b/samples/aws/search_urllib3.py @@ -21,9 +21,10 @@ def main() -> None: """ - 1. connects to an OpenSearch cluster on AWS defined by environment variables (i.e. ENDPOINT - cluster endpoint like - my-test-domain.us-east-1.es.amazonaws.com; AWS_REGION like us-east-1, us-west-2; and SERVICE like es which - differentiates beteween serverless and the managed service. + 1. connects to an OpenSearch cluster on AWS defined by environment variables + (i.e. ENDPOINT - cluster endpoint like my-test-domain.us-east-1.es. + amazonaws.com; AWS_REGION like us-east-1, us-west-2; and SERVICE like es which + differentiates between serverless and the managed service. 2. creates an index called "movies" and adds a single document 3. queries for that document 4. 
deletes the document diff --git a/samples/bulk/bulk_array.py b/samples/bulk/bulk_array.py index a7814ddb..38ddc2fe 100755 --- a/samples/bulk/bulk_array.py +++ b/samples/bulk/bulk_array.py @@ -53,7 +53,7 @@ def main() -> None: data.append({"index": {"_index": index_name, "_id": i}}) data.append({"value": i}) - rc = client.bulk(data) + rc = client.bulk(data) # pylint: disable=invalid-name if rc["errors"]: print("There were errors:") for item in rc["items"]: diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py index 02150e25..ff1d5a98 100755 --- a/samples/bulk/bulk_helpers.py +++ b/samples/bulk/bulk_helpers.py @@ -18,8 +18,8 @@ def main() -> None: """ - demonstrates how to bulk load data using opensearchpy.helpers including examples of serial, parallel, and streaming - bulk load + demonstrates how to bulk load data using opensearchpy.helpers + including examples of serial, parallel, and streaming bulk load """ # connect to an instance of OpenSearch @@ -56,7 +56,7 @@ def main() -> None: data.append({"_index": index_name, "_id": i, "value": i}) # serialized bulk raising an exception on error - rc = helpers.bulk(client, data) + rc = helpers.bulk(client, data) # pylint: disable=invalid-name print(f"Bulk-inserted {rc[0]} items (bulk).") # parallel bulk with explicit error checking diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py index a6c3a585..67dc2cfe 100755 --- a/samples/bulk/bulk_ld.py +++ b/samples/bulk/bulk_ld.py @@ -55,7 +55,7 @@ def main() -> None: data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" data += json.dumps({"value": i}) + "\n" - rc = client.bulk(data) + rc = client.bulk(data) # pylint: disable=invalid-name if rc["errors"]: print("There were errors:") for item in rc["items"]: diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index b195262b..e4330e55 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -18,7 +18,8 @@ def main() -> None: """ - provides samples for different ways to handle documents including indexing, searching, updating, and deleting + provides samples for different ways to handle documents + including indexing, searching, updating, and deleting """ # Connect to OpenSearch client = OpenSearch( diff --git a/samples/hello/hello.py b/samples/hello/hello.py index 39744752..b3d4cb10 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -17,8 +17,9 @@ def main() -> None: """ - an example showing how to create an synchronous connection to OpenSearch, create an index, index a document - and search to return the document + an example showing how to create a synchronous connection to + OpenSearch, create an index, index a document and search to + return the document """ host = "localhost" port = 9200 @@ -49,19 +50,21 @@ def main() -> None: document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - id = "1" + doc_id = "1" - response = client.index(index=index_name, body=document, id=id, refresh=True) + response = client.index(index=index_name, body=document, id=doc_id, refresh=True) print(response) # search for a document - q = "miller" + user_query = "miller" query = { "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + "query": { + "multi_match": {"query": user_query, "fields": ["title^2", "director"]} + }, } response = client.search(body=query, index=index_name) @@ -70,7 +73,7 @@ def
main() -> None: # delete the document - response = client.delete(index=index_name, id=id) + response = client.delete(index=index_name, id=doc_id) print(response) diff --git a/samples/hello/hello_async.py b/samples/hello/hello_async.py index a3620dba..7fbba5a2 100755 --- a/samples/hello/hello_async.py +++ b/samples/hello/hello_async.py @@ -17,8 +17,9 @@ async def main() -> None: """ - an example showing how to create an asynchronous connection to OpenSearch, create an index, index a document - and search to return the document + an example showing how to create an asynchronous connection + to OpenSearch, create an index, index a document and + search to return the document """ # connect to OpenSearch host = "localhost" @@ -68,11 +69,13 @@ async def main() -> None: await client.indices.refresh(index=index_name) # search for a document - q = "miller" + user_query = "miller" query = { "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + "query": { + "multi_match": {"query": user_query, "fields": ["title^2", "director"]} + }, } results = await client.search(body=query, index=index_name) diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index 00978aba..65b75a70 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -13,6 +13,7 @@ def main() -> None: """ + # pylint: disable=line-too-long 1. connects to an OpenSearch instance running on localhost 2. Create an index template named `books` with default settings and mappings for indices of the `books-*` pattern. You can create an index template to define default settings and mappings for indices diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py index 9a8c213f..df6bd324 100755 --- a/samples/json/json_hello.py +++ b/samples/json/json_hello.py @@ -46,24 +46,26 @@ def main() -> None: document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - id = "1" + doc_id = "1" - print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) + print(client.http.put(f"/{index_name}/_doc/{doc_id}?refresh=true", body=document)) # search for a document - q = "miller" + user_query = "miller" query = { "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + "query": { + "multi_match": {"query": user_query, "fields": ["title^2", "director"]} + }, } print(client.http.post(f"/{index_name}/_search", body=query)) # delete the document - print(client.http.delete(f"/{index_name}/_doc/{id}")) + print(client.http.delete(f"/{index_name}/_doc/{doc_id}")) # delete the index diff --git a/samples/json/json_hello_async.py b/samples/json/json_hello_async.py index 499def22..2f02b826 100755 --- a/samples/json/json_hello_async.py +++ b/samples/json/json_hello_async.py @@ -17,7 +17,8 @@ async def main() -> None: """ - this sample uses asyncio and AsyncOpenSearch to asynchronously connect to local OpenSearch cluster, create an index, + this sample uses asyncio and AsyncOpenSearch to asynchronously + connect to local OpenSearch cluster, create an index, index data, search the index, delete the document, delete the index """ # connect to OpenSearch @@ -51,28 +52,30 @@ async def main() -> None: document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - id = "1" + doc_id = "1" print( await client.http.put( - f"/{index_name}/_doc/{id}?refresh=true", body=document + f"/{index_name}/_doc/{doc_id}?refresh=true", body=document ) ) 
# search for a document - q = "miller" + user_query = "miller" query = { "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + "query": { + "multi_match": {"query": user_query, "fields": ["title^2", "director"]} + }, } print(await client.http.post(f"/{index_name}/_search", body=query)) # delete the document - print(await client.http.delete(f"/{index_name}/_doc/{id}")) + print(await client.http.delete(f"/{index_name}/_doc/{doc_id}")) # delete the index diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py index 84ff0194..6559b468 100644 --- a/samples/logging/log_collection_sample.py +++ b/samples/logging/log_collection_sample.py @@ -24,8 +24,9 @@ def main() -> None: """ - sample for custom logging; this shows how to create a console handler, connect to OpenSearch, define a custom - logger and log to an OpenSearch index + sample for custom logging; this shows how to create a + console handler, connect to OpenSearch, define a custom + logger, and log to an OpenSearch index """ print("Collecting logs.") @@ -90,9 +91,9 @@ def emit(self, record: logging.LogRecord) -> None: index=self._build_index_name(), body=document, ) - except Exception as e: - print(f"Failed to send log to OpenSearch: {e}") - logging.warning(f"Failed to send log to OpenSearch: {e}") + except Exception as ex: + print(f"Failed to send log to OpenSearch: {ex}") + logging.warning(f"Failed to send log to OpenSearch: {ex}") raise print("Creating an instance of OpenSearchHandler and adding it to the logger...") diff --git a/setup.cfg b/setup.cfg index 74529987..62150074 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,7 +26,3 @@ ignore_missing_imports=True max-line-length = 240 good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names -[pylint.MESSAGE CONTROL] -disable = all -enable = line-too-long, invalid-name, pointless-statement, missing-function-docstring, unspecified-encoding -ignore=opensearchpy diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index 152cf2e8..e5e3e933 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -43,8 +43,8 @@ def fetch_opensearch_repo() -> None: """ runs a git fetch origin on configured opensearch core repo - :return: None if environmental variables TEST_OPENSEARCH_YAML_DIR is set or TEST_OPENSEARCH_NOFETCH is set to False; - else returns nothing + :return: None if environmental variables TEST_OPENSEARCH_YAML_DIR + is set or TEST_OPENSEARCH_NOFETCH is set to False; else returns nothing """ # user is manually setting YAML dir, don't tamper with it if "TEST_OPENSEARCH_YAML_DIR" in environ: @@ -97,7 +97,8 @@ def fetch_opensearch_repo() -> None: def run_all(argv: Any = None) -> None: """ run all the tests given arguments and environment variables - - sets defaults if argv is None, running "pytest --cov=opensearchpy --junitxml= + - sets defaults if argv is None, running "pytest --cov=opensearchpy + --junitxml= --log-level=DEBUG --cache-clear -vv --cov-report= None: # it means SSLContext is not available for that version of python # and we should skip this test. 
pytest.skip( - "Test test_ssl_context is skipped cause SSLContext is not available for this version of Python" + "Test test_ssl_context is skipped cause SSLContext is " + "not available for this version of Python" ) con = AIOHttpConnection(use_ssl=True, ssl_context=context) @@ -217,8 +218,8 @@ async def test_uses_https_if_verify_certs_is_off(self) -> None: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) assert ( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure." - == str(w[0].message) + "Connecting to https://localhost:9200 using SSL with " + "verify_certs=False is insecure." == str(w[0].message) ) assert con.use_ssl diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index 72933257..5852fa5e 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -27,5 +27,6 @@ async def test_plugins_client(self) -> None: client.plugins.__init__(client) # type: ignore assert ( str(w[0].message) - == "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. Use `AsyncOpenSearch.plugin.alerting` instead." + == "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. Use " + "`AsyncOpenSearch.plugin.alerting` instead." ) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 21d20285..305b74cf 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -47,10 +47,17 @@ async def opensearch_version(client: Any) -> Any: # pylint: disable=missing-function-docstring info = await client.info() print(info) - yield tuple( - int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore - ) + yield tuple(int(x) for x in match_version(info)) + + +async def match_version(info: Any) -> Any: + """ + matches the major version from the given client info + :param info: + """ + match = re.match(r"^([0-9.]+)", info["version"]["number"]) + assert match is not None + yield match.group(1).split(".") @fixture # type: ignore diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index a92b07cb..2146905c 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -21,5 +21,6 @@ def test_plugins_client(self) -> None: client.plugins.__init__(client) # type: ignore self.assertEqual( str(w.warnings[0].message), - "Cannot load `alerting` directly to OpenSearch as it already exists. Use `OpenSearch.plugin.alerting` instead.", + "Cannot load `alerting` directly to OpenSearch as " + "it already exists. 
Use `OpenSearch.plugin.alerting` instead.", ) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index b594b179..ee648610 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -145,7 +145,8 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: ) self.assertEqual(1, len(w)) self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + "Connecting to https://localhost:9200 using SSL with " + "verify_certs=False is insecure.", str(w[0].message), ) @@ -309,8 +310,12 @@ def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: # trace request self.assertEqual(1, tracer.info.call_count) + trace_curl_cmd = """ + curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' + -d '{\n "question": "what\\u0027s that?"\n}' + """ self.assertEqual( - """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n "question": "what\\u0027s that?"\n}'""", + trace_curl_cmd, tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) # trace response @@ -447,9 +452,13 @@ def test_url_prefix(self, tracer: Any) -> None: self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) # trace request + trace_curl_cmd = ( + "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' " + "-d '{\n \"answer\": 42\n}'" + ) self.assertEqual(1, tracer.info.call_count) self.assertEqual( - "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", + trace_curl_cmd, tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 3e220bb2..0479adf2 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -74,7 +74,8 @@ def test_ssl_context(self) -> None: # it means SSLContext is not available for that version of python # and we should skip this test.
raise SkipTest( - "Test test_ssl_context is skipped cause SSLContext is not available for this version of python" + "Test test_ssl_context is skipped cause SSLContext" + " is not available for this version of python" ) con = Urllib3HttpConnection(use_ssl=True, ssl_context=context) @@ -289,7 +290,8 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + "Connecting to https://localhost:9200 using SSL with " + "verify_certs=False is insecure.", str(w[0].message), ) diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 26913b47..26d79861 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -143,7 +143,9 @@ def setup_method(self, _: Any) -> None: """ creates some documents for testing """ - self.actions: Any = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip + self.actions: Any = [ + ({"index": {}}, {"some": "datá", "i": i}) for i in range(100) + ] def test_expand_action(self) -> None: # pylint: disable=missing-function-docstring diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 81a4088e..e35f9a60 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -54,13 +54,23 @@ def client() -> Any: @fixture(scope="session") # type: ignore def opensearch_version(client: Any) -> Any: - # pylint: disable=missing-function-docstring - info = client.info() + """ + yields a major version from the client + :param client: client to connect to opensearch + """ + info: Any = client.info() print(info) - yield tuple( - int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore - ) + yield tuple(int(x) for x in match_version(info)) + + +def match_version(info: Any) -> Any: + """ + matches the major version from the given client info + :param info: + """ + match = re.match(r"^([0-9.]+)", info["version"]["number"]) + assert match is not None + yield match.group(1).split(".") @fixture # type: ignore diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index beb4d388..32a17b50 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -531,7 +531,9 @@ def test_scan_auth_kwargs_forwarded(self) -> None: } client_mock.clear_scroll.return_value = {} - data = list(helpers.scan(self.client, index="test_index", **{key: val})) # type: ignore + data = list( + helpers.scan(self.client, index="test_index", **{key: val}) # type: ignore + ) self.assertEqual(data, [{"search_data": 1}]) diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index e1cd315e..612fd6b2 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -74,19 +74,19 @@ # Warning about date_histogram.interval deprecation is raised randomly "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipeline/10_basic", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/pit/10_basic", - 
"OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/clone/40_wait_for_completion[0]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/forcemerge/20_wait_for_completion[0]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/open/30_wait_for_completion[0]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/shrink/50_wait_for_completion[0]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/split/40_wait_for_completion[0]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/clone/40_wait_for_completion[0]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/forcemerge/20_wait_for_completion[0]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/open/30_wait_for_completion[0]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/shrink/50_wait_for_completion[0]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/split/40_wait_for_completion[0]", # pylint: disable=line-too-long "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/nodes/10_basic[1]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/nodeattrs/10_basic[1]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[2]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[3]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[2]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[3]", # pylint: disable=line-too-long "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/indices/10_basic[2]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/10_basic[6]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/20_request_timeout", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search/aggregation/20_terms[4]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/10_basic[6]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/20_request_timeout", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search/aggregation/20_terms[4]", # pylint: disable=line-too-long "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/tasks/list/10_basic[0]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/index/90_unsigned_long[1]", "search/aggregation/250_moving_fn[1]", diff --git a/utils/build_dists.py b/utils/build_dists.py index dce731c2..5f3ee283 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -59,7 +59,8 @@ def run(*argv: Any, expect_exit_code: int = 0) -> None: """ runs a command within this script :param argv: command to run e.g. "git" "checkout" "--" "setup.py" "opensearchpy/" - :param expect_exit_code: code to compare with actual exit code from command. will exit the process if they do not + :param expect_exit_code: code to compare with actual exit code from command. 
+ will exit the process if they do not match the proper exit code """ global TMP_DIR @@ -124,7 +125,7 @@ def test_dist(dist: Any) -> None: run( venv_python, "-c", - f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", + f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", # pylint: disable=line-too-long ) # Install aiohttp and see that async is now available @@ -133,7 +134,7 @@ def test_dist(dist: Any) -> None: run( venv_python, "-c", - f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", + f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", # pylint: disable=line-too-long ) # Only need to test 'async_types' for non-aliased package @@ -205,11 +206,13 @@ def main() -> None: # Grab the major version to be used as a suffix. version_path = os.path.join(BASE_DIR, "opensearchpy/_version.py") - with open(version_path, encoding="utf-8") as f: - data = f.read() - m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) - if m: - version = m.group(1) + with open(version_path, encoding="utf-8") as file: + data = file.read() + version_match = re.search( + r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M + ) + if version_match: + version = version_match.group(1) else: raise Exception(f"Invalid version: {data}") @@ -272,25 +275,25 @@ def main() -> None: # Ensure that the version within 'opensearchpy/_version.py' is correct. version_path = os.path.join(BASE_DIR, f"opensearchpy{suffix}/_version.py") - with open(version_path, encoding="utf-8") as f: - version_data = f.read() + with open(version_path, encoding="utf-8") as file: + version_data = file.read() version_data = re.sub( r"__versionstr__: str = \"[^\"]+\"", '__versionstr__: str = "%s"' % version, version_data, ) - with open(version_path, "w", encoding="utf-8") as f: - f.truncate() - f.write(version_data) + with open(version_path, "w", encoding="utf-8") as file: + file.truncate() + file.write(version_data) # Rewrite setup.py with the new name. setup_py_path = os.path.join(BASE_DIR, "setup.py") - with open(setup_py_path, encoding="utf-8") as f: - setup_py = f.read() - with open(setup_py_path, "w", encoding="utf-8") as f: - f.truncate() + with open(setup_py_path, encoding="utf-8") as file: + setup_py = file.read() + with open(setup_py_path, "w", encoding="utf-8") as file: + file.truncate() assert 'PACKAGE_NAME = "opensearch-py"' in setup_py - f.write( + file.write( setup_py.replace( 'PACKAGE_NAME = "opensearch-py"', 'PACKAGE_NAME = "opensearch-py%s"' % suffix, diff --git a/utils/disable_pylint_check.py b/utils/disable_pylint_check.py index ae40d216..be24c8ef 100644 --- a/utils/disable_pylint_check.py +++ b/utils/disable_pylint_check.py @@ -30,13 +30,14 @@ def test_files() -> Generator[str, None, None]: if __name__ == "__main__": """ - adds a disable instruction for test_ methods for missing-function-docstring. test methods typically have good names - and can go without docstring for comments. this is destructive so use git as part of the process. - + adds a disable instruction for test_ methods for missing-function-docstring. + test methods typically have good names and can go without docstring for + comments. this is destructive so use git as part of the process. 
""" MISSING_FUNCTION_DOCSTRING_DISABLE = "# pylint: disable=missing-function-docstring" test_method_re = re.compile( - r"(?P[^\S\r\n]*)(?Pasync)*(?P\s*def\stest_.*:)" + r"(?P[^\S\r\n]*)(?Pasync)*" + r"(?P\s*def\stest_.*:)" r"(?P(\n|.)*?)(?P\s*#\spylint.*)*", flags=re.MULTILINE, ) @@ -48,7 +49,7 @@ def test_files() -> Generator[str, None, None]: # TODO multiline function declarations are not supported new_file_contents = re.sub( test_method_re, - r"\g\g\g\n\g\g" + r"\g\g\g\n\g\g" # pylint: disable=line-too-long + MISSING_FUNCTION_DOCSTRING_DISABLE, full_file, ) diff --git a/utils/generate_api.py b/utils/generate_api.py index 5daef74f..86868f65 100644 --- a/utils/generate_api.py +++ b/utils/generate_api.py @@ -122,8 +122,8 @@ def parse_orig(self) -> None: namespace_new = "".join(word.capitalize() for word in self.namespace.split("_")) self.header += "class " + namespace_new + "Client(NamespacedClient):" if os.path.exists(self.filepath): - with open(self.filepath, encoding="utf-8") as f: - content = f.read() + with open(self.filepath, encoding="utf-8") as file: + content = file.read() header_lines = [] for line in content.split("\n"): header_lines.append(line) @@ -138,7 +138,7 @@ def parse_orig(self) -> None: if "security.py" in str(self.filepath): # TODO: FIXME, import code header_lines.append( - " from ._patch import health_check, update_audit_config # type: ignore" + " from ._patch import health_check, update_audit_config # type: ignore" # pylint: disable=line-too-long ) break self.header = "\n".join(header_lines) @@ -164,9 +164,10 @@ def dump(self) -> None: """ self.sort() - # This code snippet adds headers to each generated module indicating that the code is generated. - # The separator is the last line in the "THIS CODE IS AUTOMATICALLY GENERATED" header. - header_separator = "# -----------------------------------------------------------------------------------------+" + # This code snippet adds headers to each generated module indicating + # that the code is generated.The separator is the last line in the + # "THIS CODE IS AUTOMATICALLY GENERATED" header. + header_separator = "# -----------------------------------------------------------------------------------------+" # pylint: disable=line-too-long license_header_end_1 = "# GitHub history for details." license_header_end_2 = "# under the License." @@ -175,8 +176,8 @@ def dump(self) -> None: # Identifying the insertion point for the "THIS CODE IS AUTOMATICALLY GENERATED" header. if os.path.exists(self.filepath): - with open(self.filepath, "r", encoding="utf-8") as f: - content = f.read() + with open(self.filepath, "r", encoding="utf-8") as file: + content = file.read() if header_separator in content: update_header = False header_end_position = ( @@ -205,14 +206,15 @@ def dump(self) -> None: with open(generated_file_header_path, "r", encoding="utf-8") as header_file: header_content = header_file.read() - # Imports are temporarily removed from the header and are regenerated later to ensure imports are updated after code generation. + # Imports are temporarily removed from the header and are regenerated + # later to ensure imports are updated after code generation. 
self.header = "\n".join( line for line in self.header.split("\n") if "from .utils import" not in line ) - with open(self.filepath, "w", encoding="utf-8") as f: + with open(self.filepath, "w", encoding="utf-8") as file: if update_header is True: - f.write( + file.write( self.header[:license_position] + "\n" + header_content @@ -221,20 +223,20 @@ def dump(self) -> None: + self.header[license_position:] ) else: - f.write( + file.write( self.header[:header_position] + "\n" + "#replace_token#\n" + self.header[header_position:] ) for api in self._apis: - f.write(api.to_python()) + file.write(api.to_python()) # Generating imports for each module utils_imports = "" file_content = "" - with open(self.filepath, "r", encoding="utf-8") as f: - content = f.read() + with open(self.filepath, "r", encoding="utf-8") as file: + content = file.read() keywords = [ "SKIP_IN_PATH", "_normalize_hosts", @@ -254,8 +256,8 @@ def dump(self) -> None: utils_imports = result file_content = content.replace("#replace_token#", utils_imports) - with open(self.filepath, "w", encoding="utf-8") as f: - f.write(file_content) + with open(self.filepath, "w", encoding="utf-8") as file: + file.write(file_content) @property def filepath(self) -> Any: @@ -320,12 +322,12 @@ def all_parts(self) -> Dict[str, str]: for url in self._def["url"]["paths"]: parts.update(url.get("parts", {})) - for p in parts: - if "required" not in parts[p]: - parts[p]["required"] = all( - p in url.get("parts", {}) for url in self._def["url"]["paths"] + for part in parts: + if "required" not in parts[part]: + parts[part]["required"] = all( + part in url.get("parts", {}) for url in self._def["url"]["paths"] ) - parts[p]["type"] = "Any" + parts[part]["type"] = "Any" # This piece of logic corresponds to calling # client.tasks.get() w/o a task_id which was erroneously @@ -372,10 +374,10 @@ def body(self) -> Any: """ :return: body of the API spec """ - b = self._def.get("body", {}) - if b: - b.setdefault("required", False) - return b + body_api_spec = self._def.get("body", {}) + if body_api_spec: + body_api_spec.setdefault("required", False) + return body_api_spec @property def query_params(self) -> Any: @@ -383,9 +385,9 @@ def query_params(self) -> Any: :return: any query string parameters from the specification """ return ( - k - for k in sorted(self._def.get("params", {}).keys()) - if k not in self.all_parts + key + for key in sorted(self._def.get("params", {}).keys()) + if key not in self.all_parts ) @property @@ -468,11 +470,11 @@ def to_python(self) -> Any: :return: rendered Jinja template """ try: - t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") + template = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") except TemplateNotFound: - t = jinja_env.get_template("base") + template = jinja_env.get_template("base") - return t.render( + return template.render( api=self, substitutions={v: k for k, v in SUBSTITUTIONS.items()}, global_query_params=GLOBAL_QUERY_PARAMS, @@ -490,48 +492,51 @@ def read_modules() -> Any: # Load the OpenAPI specification file response = requests.get( - "https://raw.githubusercontent.com/opensearch-project/opensearch-api-specification/main/OpenSearch.openapi.json" + "https://raw.githubusercontent.com/opensearch-project/opensearch-api-" + "specification/main/OpenSearch.openapi.json" ) data = response.json() list_of_dicts = [] for path in data["paths"]: - for x in data["paths"][path]: - if data["paths"][path][x]["x-operation-group"] == "nodes.hot_threads": - if "deprecated" in data["paths"][path][x]: 
+ for param in data["paths"][path]: # pylint: disable=invalid-name + if data["paths"][path][param]["x-operation-group"] == "nodes.hot_threads": + if "deprecated" in data["paths"][path][param]: continue - data["paths"][path][x].update({"path": path, "method": x}) - list_of_dicts.append(data["paths"][path][x]) + data["paths"][path][param].update({"path": path, "method": param}) + list_of_dicts.append(data["paths"][path][param]) # Update parameters in each endpoint - for p in list_of_dicts: - if "parameters" in p: + for param_dict in list_of_dicts: + if "parameters" in param_dict: params = [] parts = [] # Iterate over the list of parameters and update them - for x in p["parameters"]: - if "schema" in x and "$ref" in x["schema"]: - schema_path_ref = x["schema"]["$ref"].split("/")[-1] - x["schema"] = data["components"]["schemas"][schema_path_ref] - params.append(x) + for param in param_dict["parameters"]: + if "schema" in param and "$ref" in param["schema"]: + schema_path_ref = param["schema"]["$ref"].split("/")[-1] + param["schema"] = data["components"]["schemas"][schema_path_ref] + params.append(param) else: - params.append(x) + params.append(param) # Iterate over the list of updated parameters to separate "parts" from "params" - k = params.copy() - for q in k: - if q["in"] == "path": - parts.append(q) - params.remove(q) + params_copy = params.copy() + for param in params_copy: + if param["in"] == "path": + parts.append(param) + params.remove(param) # Convert "params" and "parts" into the structure required for generator. params_new = {} parts_new = {} - for m in params: - a = dict(type=m["schema"]["type"], description=m["description"]) + for m in params: # pylint: disable=invalid-name + a = dict( # pylint: disable=invalid-name + type=m["schema"]["type"], description=m["description"] + ) # pylint: disable=invalid-name if "default" in m["schema"]: a.update({"default": m["schema"]["default"]}) @@ -548,22 +553,25 @@ def read_modules() -> Any: params_new.update({m["name"]: a}) # Removing the deprecated "type" - if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: + if ( + param_dict["x-operation-group"] != "nodes.hot_threads" + and "type" in params_new + ): params_new.pop("type") if ( - p["x-operation-group"] == "cluster.health" + param_dict["x-operation-group"] == "cluster.health" and "ensure_node_commissioned" in params_new ): params_new.pop("ensure_node_commissioned") if bool(params_new): - p.update({"params": params_new}) + param_dict.update({"params": params_new}) - p.pop("parameters") + param_dict.pop("parameters") - for n in parts: - b = dict(type=n["schema"]["type"]) + for n in parts: # pylint: disable=invalid-name + b = dict(type=n["schema"]["type"]) # pylint: disable=invalid-name if "description" in n: b.update({"description": n["description"]}) @@ -586,7 +594,7 @@ def read_modules() -> Any: parts_new.update({n["name"]: b}) if bool(parts_new): - p.update({"parts": parts_new}) + param_dict.update({"parts": parts_new}) # Sort the input list by the value of the "x-operation-group" key list_of_dicts = sorted(list_of_dicts, key=itemgetter("x-operation-group")) @@ -610,7 +618,7 @@ def read_modules() -> Any: # Extract the HTTP methods from the data in the current subgroup methods = [] parts_final = {} - for z in value2: + for z in value2: # pylint: disable=invalid-name methods.append(z["method"].upper()) # Update 'api' dictionary @@ -630,9 +638,9 @@ def read_modules() -> Any: body = {"required": False} if "required" in z["requestBody"]: body.update({"required": 
z["requestBody"]["required"]}) - q = z["requestBody"]["content"]["application/json"]["schema"][ - "$ref" - ].split("/")[-1] + q = z["requestBody"]["content"][ # pylint: disable=invalid-name + "application/json" + ]["schema"]["$ref"].split("/")[-1] if "description" in data["components"]["schemas"][q]: body.update( { @@ -715,8 +723,8 @@ def apply_patch(namespace: str, name: str, api: Any) -> Any: CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json" ) if os.path.exists(override_file_path): - with open(override_file_path, encoding="utf-8") as f: - override_json = json.load(f) + with open(override_file_path, encoding="utf-8") as file: + override_json = json.load(file) api = deepmerge.always_merger.merge(api, override_json) return api diff --git a/utils/license_headers.py b/utils/license_headers.py index 8e630130..0405476e 100644 --- a/utils/license_headers.py +++ b/utils/license_headers.py @@ -55,8 +55,8 @@ def does_file_need_fix(filepath: str) -> bool: if not re.search(r"\.py$", filepath): return False existing_header = "" - with open(filepath, mode="r", encoding="utf-8") as f: - for line in f: + with open(filepath, mode="r", encoding="utf-8") as file: + for line in file: line = line.strip() if len(line) == 0 or line in LINES_TO_KEEP: pass @@ -73,16 +73,16 @@ def add_header_to_file(filepath: str) -> None: writes the license header to the beginning of a file :param filepath: relative or absolute filepath to update """ - with open(filepath, mode="r", encoding="utf-8") as f: - lines = list(f) + with open(filepath, mode="r", encoding="utf-8") as file: + lines = list(file) i = 0 for i, line in enumerate(lines): if len(line) > 0 and line not in LINES_TO_KEEP: break lines = lines[:i] + [LICENSE_HEADER] + lines[i:] - with open(filepath, mode="w", encoding="utf-8") as f: - f.truncate() - f.write("".join(lines)) + with open(filepath, mode="w", encoding="utf-8") as file: + file.truncate() + file.write("".join(lines)) print(f"Fixed {os.path.relpath(filepath, os.getcwd())}") From fcc65a3e36732c92b744b8299d68020d9445256a Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Mon, 8 Jan 2024 17:09:43 -0500 Subject: [PATCH 10/14] removed pylint ignore missing-function-docstring Signed-off-by: Mark Cohen --- docs/source/conf.py | 2 +- noxfile.py | 35 ++++++++--- test_opensearchpy/run_tests.py | 2 +- test_opensearchpy/test_async/test_client.py | 6 -- .../test_async/test_connection.py | 40 ------------- .../test_async/test_helpers/conftest.py | 3 - .../test_async/test_helpers/test_document.py | 46 -------------- .../test_helpers/test_faceted_search.py | 7 --- .../test_async/test_helpers/test_index.py | 13 ---- .../test_async/test_helpers/test_mapping.py | 8 --- .../test_async/test_helpers/test_search.py | 32 ---------- .../test_helpers/test_update_by_query.py | 8 --- test_opensearchpy/test_async/test_http.py | 6 -- .../test_async/test_http_connection.py | 5 -- .../test_async/test_plugins_client.py | 1 - .../test_async/test_server/conftest.py | 1 - .../test_async/test_server/test_clients.py | 5 -- .../test_server/test_helpers/conftest.py | 16 ++--- .../test_server/test_helpers/test_actions.py | 33 ---------- .../test_server/test_helpers/test_data.py | 4 -- .../test_server/test_helpers/test_document.py | 36 ----------- .../test_helpers/test_faceted_search.py | 9 --- .../test_server/test_helpers/test_index.py | 6 -- .../test_server/test_helpers/test_mapping.py | 4 -- .../test_server/test_helpers/test_search.py | 9 --- .../test_helpers/test_update_by_query.py | 3 - 
.../test_server/test_plugins/test_alerting.py | 6 -- .../test_plugins/test_index_management.py | 4 -- .../test_server/test_rest_api_spec.py | 2 - .../test_security_plugin.py | 17 +----- test_opensearchpy/test_async/test_signer.py | 7 --- .../test_async/test_transport.py | 32 ---------- test_opensearchpy/test_cases.py | 8 --- test_opensearchpy/test_client/__init__.py | 18 ------ test_opensearchpy/test_client/test_cluster.py | 3 - test_opensearchpy/test_client/test_http.py | 6 -- test_opensearchpy/test_client/test_indices.py | 4 -- .../test_client/test_overrides.py | 15 ----- .../test_client/test_plugins/test_alerting.py | 12 ---- .../test_plugins/test_index_management.py | 9 --- .../test_plugins/test_plugins_client.py | 1 - .../test_client/test_point_in_time.py | 8 --- .../test_client/test_remote_store.py | 1 - .../test_client/test_requests.py | 2 - test_opensearchpy/test_client/test_urllib3.py | 3 - test_opensearchpy/test_client/test_utils.py | 16 ----- .../test_connection/test_base_connection.py | 15 ----- .../test_requests_http_connection.py | 47 +-------------- .../test_urllib3_http_connection.py | 32 ---------- test_opensearchpy/test_connection_pool.py | 14 ----- test_opensearchpy/test_exceptions.py | 2 - test_opensearchpy/test_helpers/conftest.py | 4 -- .../test_helpers/test_actions.py | 12 ---- test_opensearchpy/test_helpers/test_aggs.py | 31 ---------- .../test_helpers/test_analysis.py | 11 ---- .../test_helpers/test_document.py | 47 --------------- .../test_helpers/test_faceted_search.py | 7 --- test_opensearchpy/test_helpers/test_field.py | 19 ------ test_opensearchpy/test_helpers/test_index.py | 14 ----- .../test_helpers/test_mapping.py | 8 --- test_opensearchpy/test_helpers/test_query.py | 57 ------------------ test_opensearchpy/test_helpers/test_result.py | 17 ------ test_opensearchpy/test_helpers/test_search.py | 37 ------------ .../test_helpers/test_update_by_query.py | 9 --- test_opensearchpy/test_helpers/test_utils.py | 11 ---- .../test_helpers/test_validation.py | 10 ---- .../test_helpers/test_wrappers.py | 5 -- test_opensearchpy/test_serializer.py | 25 -------- test_opensearchpy/test_server/__init__.py | 2 - test_opensearchpy/test_server/conftest.py | 2 - test_opensearchpy/test_server/test_clients.py | 5 -- .../test_server/test_helpers/conftest.py | 7 +-- .../test_server/test_helpers/test_actions.py | 32 ---------- .../test_server/test_helpers/test_analysis.py | 3 - .../test_server/test_helpers/test_count.py | 3 - .../test_server/test_helpers/test_data.py | 4 -- .../test_server/test_helpers/test_document.py | 36 ----------- .../test_helpers/test_faceted_search.py | 12 ---- .../test_server/test_helpers/test_index.py | 6 -- .../test_server/test_helpers/test_mapping.py | 4 -- .../test_server/test_helpers/test_search.py | 9 --- .../test_helpers/test_update_by_query.py | 3 - .../test_server/test_plugins/test_alerting.py | 6 -- .../test_plugins/test_index_management.py | 4 -- .../test_server/test_rest_api_spec.py | 22 ------- .../test_server_secured/test_clients.py | 1 - .../test_security_plugin.py | 15 ----- test_opensearchpy/test_transport.py | 27 --------- test_opensearchpy/utils.py | 11 ---- utils/build_dists.py | 4 +- utils/disable_pylint_check.py | 60 ------------------- 91 files changed, 42 insertions(+), 1184 deletions(-) delete mode 100644 utils/disable_pylint_check.py diff --git a/docs/source/conf.py b/docs/source/conf.py index 3f59c07d..64ff3c52 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -108,7 +108,7 @@ # The default options for autodoc 
directives. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_default_options autodoc_default_options: Any = { - # If set, autodoc will generate document for the members of the target module, class or exception. # noqa: E501, pylint: disable=line-too-long + # If set, autodoc will generate document for the members of the target module, class or exception. # noqa: E501 # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-option-automodule-members "members": True, "show-inheritance": True, diff --git a/noxfile.py b/noxfile.py index 0bce70ac..8b88a2df 100644 --- a/noxfile.py +++ b/noxfile.py @@ -43,7 +43,10 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) # type: ignore def test(session: Any) -> None: - # pylint: disable=missing-function-docstring + """ + runs all tests with a fresh python environment using "python setup.py test" + :param session: current nox session + """ session.install(".") # ensure client can be imported without aiohttp session.run("python", "-c", "import opensearchpy\nprint(opensearchpy.OpenSearch())") @@ -60,7 +63,10 @@ def test(session: Any) -> None: @nox.session(python=["3.7"]) # type: ignore def format(session: Any) -> None: - # pylint: disable=missing-function-docstring + """ + runs black and isort to format the files accordingly + :param session: current nox session + """ session.install(".") session.install("black", "isort") @@ -73,7 +79,10 @@ def format(session: Any) -> None: @nox.session(python=["3.7"]) # type: ignore def lint(session: Any) -> None: - # pylint: disable=missing-function-docstring + """ + runs isort, black, flake8, pylint, and mypy to check the files according to each utility's function + :param session: current nox session + """ session.install( "flake8", "black", @@ -127,7 +136,6 @@ def lint_per_folder(session: Any) -> None: "pointless-statement", "unspecified-encoding", "missing-function-docstring", - "unspecified-encoding", ] override_enable = { "test_opensearchpy/": [ @@ -135,8 +143,7 @@ def lint_per_folder(session: Any) -> None: # "invalid-name", lots of short functions with one or two character names "pointless-statement", "unspecified-encoding", - "missing-function-docstring", - "unspecified-encoding", + # "missing-function-docstring", test names usually are, self describing ] } @@ -144,7 +151,11 @@ def lint_per_folder(session: Any) -> None: if source_file in exclude_path_from_linting: continue - args = ["--disable=all"] + args = [ + "--disable=all", + "--max-line-length=240", + "--good-names-rgxs=^[_a-z][_a-z0-9]?$", + ] if source_file in override_enable: args.append(f"--enable={','.join(override_enable[source_file])}") else: @@ -155,7 +166,10 @@ def lint_per_folder(session: Any) -> None: @nox.session() # type: ignore def docs(session: Any) -> None: - # pylint: disable=missing-function-docstring + """ + builds the html documentation for the client + :param session: current nox session + """ session.install(".") session.install(".[docs]") with session.chdir("docs"): @@ -164,7 +178,10 @@ def docs(session: Any) -> None: @nox.session() # type: ignore def generate(session: Any) -> None: - # pylint: disable=missing-function-docstring + """ + generates the base API code + :param session: current nox session + """ session.install("-rdev-requirements.txt") session.run("python", "utils/generate_api.py") format(session) diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index e5e3e933..b54218a0 100755 --- a/test_opensearchpy/run_tests.py +++ 
b/test_opensearchpy/run_tests.py @@ -87,7 +87,7 @@ def fetch_opensearch_repo() -> None: "cd %s && git remote add origin https://github.com/opensearch-project/opensearch.git" % repo_path, shell=True, - ) + ) # TODO this fails when the remote already exists; should clean up or ignore? # fetch the sha commit, version from info() print("Fetching opensearch repo...") diff --git a/test_opensearchpy/test_async/test_client.py b/test_opensearchpy/test_async/test_client.py index 5da17c79..5badce18 100644 --- a/test_opensearchpy/test_async/test_client.py +++ b/test_opensearchpy/test_async/test_client.py @@ -63,12 +63,10 @@ async def perform_request( class OpenSearchTestCaseWithDummyTransport: def assert_call_count_equals(self, count: int) -> None: - # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) assert count == self.client.transport.call_count def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: - # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) assert (method, url) in self.client.transport.calls calls = self.client.transport.calls[(method, url)] @@ -76,21 +74,17 @@ def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: return calls def setup_method(self, method: Any) -> None: - # pylint: disable=missing-function-docstring self.client = AsyncOpenSearch(transport_class=DummyTransport) class TestClient(OpenSearchTestCaseWithDummyTransport): async def test_our_transport_used(self) -> None: - # pylint: disable=missing-function-docstring assert isinstance(self.client.transport, DummyTransport) async def test_start_with_0_call(self) -> None: - # pylint: disable=missing-function-docstring self.assert_call_count_equals(0) async def test_each_call_is_recorded(self) -> None: - # pylint: disable=missing-function-docstring await self.client.transport.perform_request("GET", "/") await self.client.transport.perform_request( "DELETE", "/42", params={}, body="body" diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index e0252e14..d154e7ac 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -63,15 +63,12 @@ async def _get_mock_connection( def _dummy_request(*args: Any, **kwargs: Any) -> Any: class DummyResponse: async def __aenter__(self, *_: Any, **__: Any) -> Any: - # pylint: disable=missing-function-docstring return self async def __aexit__(self, *_: Any, **__: Any) -> None: - # pylint: disable=missing-function-docstring pass async def text(self) -> Any: - # pylint: disable=missing-function-docstring return response_body.decode("utf-8", "surrogatepass") dummy_response: Any = DummyResponse() @@ -84,7 +81,6 @@ async def text(self) -> Any: return con async def test_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring try: context = ssl.create_default_context() except AttributeError: @@ -102,12 +98,10 @@ async def test_ssl_context(self) -> None: assert con.session.connector._ssl == context async def test_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" async def test_no_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers @@ -121,7 +115,6 @@ async def 
test_no_http_compression(self) -> None: assert "content-encoding" not in kwargs["headers"] async def test_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" @@ -148,7 +141,6 @@ async def test_http_compression(self) -> None: assert "content-encoding" not in kwargs["headers"] async def test_url_prefix(self) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) @@ -161,7 +153,6 @@ async def test_url_prefix(self) -> None: assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" async def test_default_user_agent(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( __versionstr__, @@ -169,12 +160,10 @@ async def test_default_user_agent(self) -> None: ) async def test_timeout_set(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection(timeout=42) assert 42 == con.timeout async def test_keep_alive_is_on_by_default(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert { "connection": "keep-alive", @@ -183,7 +172,6 @@ async def test_keep_alive_is_on_by_default(self) -> None: } == con.headers async def test_http_auth(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -193,7 +181,6 @@ async def test_http_auth(self) -> None: } == con.headers async def test_http_auth_tuple(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -203,7 +190,6 @@ async def test_http_auth_tuple(self) -> None: } == con.headers async def test_http_auth_list(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -213,7 +199,6 @@ async def test_http_auth_list(self) -> None: } == con.headers async def test_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) @@ -227,7 +212,6 @@ async def test_uses_https_if_verify_certs_is_off(self) -> None: assert con.host == "https://localhost:9200" async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -238,19 +222,16 @@ async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: assert isinstance(con.session, aiohttp.ClientSession) async def test_doesnt_use_https_if_not_specified(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection() assert not con.use_ssl async def test_no_warning_when_using_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) async def 
test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring kwargs: Any for kwargs in ( {"ssl_show_warn": False}, @@ -275,7 +256,6 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> N @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> None: - # pylint: disable=missing-function-docstring path = tmp_path / "ca_certs.pem" path.touch() ssl_context.return_value.load_verify_locations.return_value = None @@ -286,7 +266,6 @@ async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> Non @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_default_ca_certs(self, ssl_context: Any) -> None: - # pylint: disable=missing-function-docstring ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True) ssl_context.return_value.load_verify_locations.assert_called_once_with( @@ -295,13 +274,11 @@ async def test_uses_default_ca_certs(self, ssl_context: Any) -> None: @patch("ssl.SSLContext", return_value=MagicMock()) async def test_uses_no_ca_certs(self, ssl_context: Any) -> None: - # pylint: disable=missing-function-docstring ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True, verify_certs=False) ssl_context.return_value.load_verify_locations.assert_not_called() async def test_trust_env(self) -> None: - # pylint: disable=missing-function-docstring con: Any = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() @@ -309,7 +286,6 @@ async def test_trust_env(self) -> None: assert con.session.trust_env is True async def test_trust_env_default_value_is_false(self) -> None: - # pylint: disable=missing-function-docstring con = AIOHttpConnection() await con._create_aiohttp_session() @@ -318,7 +294,6 @@ async def test_trust_env_default_value_is_false(self) -> None: @patch("opensearchpy.connection.base.logger") async def test_uncompressed_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -330,7 +305,6 @@ async def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) async def test_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = await self._get_mock_connection() @@ -341,7 +315,6 @@ async def test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") async def test_failure_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection(response_code=404) with pytest.raises(NotFoundError) as e: await con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -355,7 +328,6 @@ async def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) async def test_failure_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = await self._get_mock_connection(response_code=404) @@ -367,7 +339,6 @@ async def test_failure_body_not_logged(self, logger: Any) -> None: assert logger.debug.call_count == 0 async def 
test_surrogatepass_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") @@ -375,7 +346,6 @@ async def test_surrogatepass_into_bytes(self) -> None: @pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore async def test_recursion_error_reraised(self, exception_cls: Any) -> None: - # pylint: disable=missing-function-docstring conn = AIOHttpConnection() def request_raise(*_: Any, **__: Any) -> Any: @@ -389,7 +359,6 @@ def request_raise(*_: Any, **__: Any) -> Any: assert str(e.value) == "Wasn't modified!" async def test_json_errors_are_parsed(self) -> None: - # pylint: disable=missing-function-docstring con = await self._get_mock_connection( response_code=400, response_body=b'{"error": {"type": "snapshot_in_progress_exception"}}', @@ -425,13 +394,11 @@ def teardown_class(cls) -> None: cls.server.stop() async def httpserver(self, conn: Any, **kwargs: Any) -> Any: - # pylint: disable=missing-function-docstring status, headers, data = await conn.perform_request("GET", "/", **kwargs) data = json.loads(data) return (status, data) async def test_aiohttp_connection(self) -> None: - # pylint: disable=missing-function-docstring # Defaults conn = AIOHttpConnection("localhost", port=8081, use_ssl=False) user_agent = conn._get_default_user_agent() @@ -492,14 +459,12 @@ async def test_aiohttp_connection(self) -> None: } async def test_aiohttp_connection_error(self) -> None: - # pylint: disable=missing-function-docstring conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") async def test_default_connection_is_returned_by_default() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() con, con2 = object(), object() @@ -511,7 +476,6 @@ async def test_default_connection_is_returned_by_default() -> None: async def test_get_connection_created_connection_if_needed() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -525,7 +489,6 @@ async def test_get_connection_created_connection_if_needed() -> None: async def test_configure_preserves_unchanged_connections() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( @@ -545,7 +508,6 @@ async def test_configure_preserves_unchanged_connections() -> None: async def test_remove_connection_removes_both_conn_and_conf() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.configure( @@ -563,7 +525,6 @@ async def test_remove_connection_removes_both_conn_and_conf() -> None: async def test_create_connection_constructs_client() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) @@ -572,7 +533,6 @@ async def test_create_connection_constructs_client() -> None: async def test_create_connection_adds_our_serializer() -> None: - # pylint: disable=missing-function-docstring c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) result = await c.get_connection("testing") diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py 
b/test_opensearchpy/test_async/test_helpers/conftest.py index 7f809dcf..b0d4a68b 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -36,7 +36,6 @@ async def mock_client(dummy_response: Any) -> Any: @fixture # type: ignore def dummy_response() -> Any: - # pylint: disable=missing-function-docstring return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -86,7 +85,6 @@ def dummy_response() -> Any: @fixture # type: ignore def aggs_search() -> Any: - # pylint: disable=missing-function-docstring from opensearchpy._async.helpers.search import AsyncSearch s = AsyncSearch(index="flat-git") @@ -102,7 +100,6 @@ def aggs_search() -> Any: @fixture # type: ignore def aggs_data() -> Any: - # pylint: disable=missing-function-docstring return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 00dfb137..26c854c2 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -126,7 +126,6 @@ class Index: async def test_range_serializes_properly() -> None: - # pylint: disable=missing-function-docstring class DocumentD(document.AsyncDocument): lr: Any = field.LongRange() @@ -140,7 +139,6 @@ class DocumentD(document.AsyncDocument): async def test_range_deserializes_properly() -> None: - # pylint: disable=missing-function-docstring class DocumentD(InnerDoc): lr = field.LongRange() @@ -151,14 +149,12 @@ class DocumentD(InnerDoc): async def test_resolve_nested() -> None: - # pylint: disable=missing-function-docstring nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - # pylint: disable=missing-function-docstring class DocumentA(document.AsyncDocument): name = field.Text() @@ -174,20 +170,17 @@ class DocumentB(document.AsyncDocument): async def test_ip_address_serializes_properly() -> None: - # pylint: disable=missing-function-docstring host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() async def test_matches_uses_index() -> None: - # pylint: disable=missing-function-docstring assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) async def test_matches_with_no_name_always_matches() -> None: - # pylint: disable=missing-function-docstring class DocumentD(document.AsyncDocument): pass @@ -196,7 +189,6 @@ class DocumentD(document.AsyncDocument): async def test_matches_accepts_wildcards() -> None: - # pylint: disable=missing-function-docstring class MyDoc(document.AsyncDocument): class Index: name = "my-*" @@ -206,7 +198,6 @@ class Index: async def test_assigning_attrlist_to_field() -> None: - # pylint: disable=missing-function-docstring sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -215,14 +206,12 @@ async def test_assigning_attrlist_to_field() -> None: async def test_optional_inner_objects_are_not_validated_if_missing() -> None: - # pylint: disable=missing-function-docstring d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None async def test_custom_field() -> None: - # pylint: disable=missing-function-docstring s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == 
s.to_dict() @@ -234,14 +223,12 @@ async def test_custom_field() -> None: async def test_custom_field_mapping() -> None: - # pylint: disable=missing-function-docstring assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() async def test_custom_field_in_nested() -> None: - # pylint: disable=missing-function-docstring s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -250,7 +237,6 @@ async def test_custom_field_in_nested() -> None: async def test_multi_works_after_doc_has_been_saved() -> None: - # pylint: disable=missing-function-docstring c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -259,7 +245,6 @@ async def test_multi_works_after_doc_has_been_saved() -> None: async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: - # pylint: disable=missing-function-docstring # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -269,14 +254,12 @@ async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: async def test_null_value_for_object() -> None: - # pylint: disable=missing-function-docstring d = MyDoc(inner=None) assert d.inner is None async def test_inherited_doc_types_can_override_index() -> None: - # pylint: disable=missing-function-docstring class MyDocDifferentIndex(MySubDoc): _index: Any @@ -311,7 +294,6 @@ class Index: async def test_to_dict_with_meta() -> None: - # pylint: disable=missing-function-docstring d = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -323,7 +305,6 @@ async def test_to_dict_with_meta() -> None: async def test_to_dict_with_meta_includes_custom_index() -> None: - # pylint: disable=missing-function-docstring d = MySubDoc(title="hello") d.meta.index = "other-index" @@ -331,7 +312,6 @@ async def test_to_dict_with_meta_includes_custom_index() -> None: async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: - # pylint: disable=missing-function-docstring d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() @@ -339,7 +319,6 @@ async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: async def test_attribute_can_be_removed() -> None: - # pylint: disable=missing-function-docstring d = MyDoc(title="hello") del d.title @@ -347,7 +326,6 @@ async def test_attribute_can_be_removed() -> None: async def test_doc_type_can_be_correctly_pickled() -> None: - # pylint: disable=missing-function-docstring d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -363,7 +341,6 @@ async def test_doc_type_can_be_correctly_pickled() -> None: async def test_meta_is_accessible_even_on_empty_doc() -> None: - # pylint: disable=missing-function-docstring d = MyDoc() assert d.meta == {} @@ -372,7 +349,6 @@ async def test_meta_is_accessible_even_on_empty_doc() -> None: async def test_meta_field_mapping() -> None: - # pylint: disable=missing-function-docstring class User(document.AsyncDocument): username = field.Text() @@ -392,7 +368,6 @@ class Meta: async def test_multi_value_fields() -> None: - # pylint: disable=missing-function-docstring class Blog(document.AsyncDocument): tags = field.Keyword(multi=True) @@ -404,7 +379,6 @@ class Blog(document.AsyncDocument): async def test_docs_with_properties() -> None: - # pylint: disable=missing-function-docstring class User(document.AsyncDocument): pwd_hash: Any = field.Text() @@ -433,7 +407,6 @@ def password(self, pwd: Any) -> None: async def test_nested_can_be_assigned_to() -> None: - # pylint: 
disable=missing-function-docstring d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -445,14 +418,12 @@ async def test_nested_can_be_assigned_to() -> None: async def test_nested_can_be_none() -> None: - # pylint: disable=missing-function-docstring d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() async def test_nested_defaults_to_list_and_can_be_updated() -> None: - # pylint: disable=missing-function-docstring md = DocWithNested() assert [] == md.comments @@ -462,7 +433,6 @@ async def test_nested_defaults_to_list_and_can_be_updated() -> None: async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] @@ -475,14 +445,12 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: async def test_to_dict_ignores_empty_collections() -> None: - # pylint: disable=missing-function-docstring md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() async def test_declarative_mapping_definition() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MyDoc, document.AsyncDocument) assert hasattr(MyDoc, "_doc_type") assert { @@ -496,7 +464,6 @@ async def test_declarative_mapping_definition() -> None: async def test_you_can_supply_own_mapping_instance() -> None: - # pylint: disable=missing-function-docstring class MyD(document.AsyncDocument): title = field.Text() @@ -511,7 +478,6 @@ class Meta: async def test_document_can_be_created_dynamically() -> None: - # pylint: disable=missing-function-docstring n = datetime.now() md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" 
@@ -533,7 +499,6 @@ async def test_document_can_be_created_dynamically() -> None: async def test_invalid_date_will_raise_exception() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -541,7 +506,6 @@ async def test_invalid_date_will_raise_exception() -> None: async def test_document_inheritance() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.AsyncDocument) assert hasattr(MySubDoc, "_doc_type") @@ -556,7 +520,6 @@ async def test_document_inheritance() -> None: async def test_child_class_can_override_parent() -> None: - # pylint: disable=missing-function-docstring class DocumentA(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -575,7 +538,6 @@ class DocumentB(DocumentA): async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: - # pylint: disable=missing-function-docstring md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -586,7 +548,6 @@ async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: async def test_index_inheritance() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.AsyncDocument) @@ -604,7 +565,6 @@ async def test_index_inheritance() -> None: async def test_meta_fields_can_be_set_directly_in_init() -> None: - # pylint: disable=missing-function-docstring p = object() md: Any = MyDoc(_id=p, title="Hello World!") @@ -612,28 +572,24 @@ async def test_meta_fields_can_be_set_directly_in_init() -> None: async def test_save_no_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): await md.save(using="mock") async def test_delete_no_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): await md.delete(using="mock") async def test_update_no_fields() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(IllegalOperation): await md.update() async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -643,7 +599,6 @@ async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: async def test_from_opensearch_respects_underscored_non_meta_fields() -> None: - # pylint: disable=missing-function-docstring doc: Any = { "_index": "test-index", "_id": "opensearch", @@ -668,7 +623,6 @@ class Index: async def test_nested_and_object_inner_doc() -> None: - # pylint: disable=missing-function-docstring class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 74d7c6d0..40f27871 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -33,7 +33,6 @@ class BlogSearch(AsyncFacetedSearch): async def test_query_is_created_properly() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search") s = bs.build_search() @@ -57,7 +56,6 @@ async def 
test_query_is_created_properly() -> None: async def test_query_is_created_properly_with_sort_tuple() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -82,7 +80,6 @@ async def test_query_is_created_properly_with_sort_tuple() -> None: async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -106,7 +103,6 @@ async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -141,7 +137,6 @@ async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: async def test_date_histogram_facet_with_1970_01_01_date() -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -175,7 +170,6 @@ async def test_date_histogram_facet_with_1970_01_01_date() -> None: ], ) async def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -188,7 +182,6 @@ async def test_date_histogram_interval_types(interval_type: Any, interval: Any) async def test_date_histogram_no_interval_keyerror() -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index 4a494a97..eccbe773 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -28,7 +28,6 @@ class Post(AsyncDocument): async def test_multiple_doc_types_will_combine_mappings() -> None: - # pylint: disable=missing-function-docstring class User(AsyncDocument): username = Text() @@ -47,7 +46,6 @@ class User(AsyncDocument): async def test_search_is_limited_to_index_name() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("my-index") s = i.search() @@ -55,7 +53,6 @@ async def test_search_is_limited_to_index_name() -> None: async def test_cloned_index_has_copied_settings_and_using() -> None: - # pylint: disable=missing-function-docstring client = object() i = AsyncIndex("my-index", using=client) i.settings(number_of_shards=1) @@ -69,7 +66,6 @@ async def test_cloned_index_has_copied_settings_and_using() -> None: async def test_cloned_index_has_analysis_attribute() -> None: - # pylint: disable=missing-function-docstring """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
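As background for the clone tests above (the Issue #582 regression), here is a brief illustrative sketch, separate from the patch itself, of the behavior they pin down: a cloned index keeps both its settings and its analysis definitions. The module paths and the analyzer name below are assumptions, not taken from this change.

from opensearchpy._async.helpers.index import AsyncIndex  # assumed module path
from opensearchpy.helpers.analysis import analyzer  # assumed module path

# register settings and a custom analyzer on the source index
source = AsyncIndex("source-index")
source.settings(number_of_shards=1)
source.analyzer(analyzer("my_analyzer", tokenizer="keyword", filter=["lowercase"]))

# clone() copies the configuration without touching the cluster; the copy is
# expected to carry both the settings and the analysis definition along
copy = source.clone("cloned-index")
assert copy.to_dict()["settings"]["number_of_shards"] == 1
assert "my_analyzer" in copy.to_dict()["settings"]["analysis"]["analyzer"]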
@@ -90,7 +86,6 @@ async def test_cloned_index_has_analysis_attribute() -> None: async def test_settings_are_saved() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -99,7 +94,6 @@ async def test_settings_are_saved() -> None: async def test_registered_doc_type_included_in_to_dict() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("i", using="alias") i.document(Post) @@ -114,7 +108,6 @@ async def test_registered_doc_type_included_in_to_dict() -> None: async def test_registered_doc_type_included_in_search() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("i", using="alias") i.document(Post) @@ -124,7 +117,6 @@ async def test_registered_doc_type_included_in_search() -> None: async def test_aliases_add_to_object() -> None: - # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -135,7 +127,6 @@ async def test_aliases_add_to_object() -> None: async def test_aliases_returned_from_to_dict() -> None: - # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -146,7 +137,6 @@ async def test_aliases_returned_from_to_dict() -> None: async def test_analyzers_added_to_object() -> None: - # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -163,7 +153,6 @@ async def test_analyzers_added_to_object() -> None: async def test_analyzers_returned_from_to_dict() -> None: - # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -177,7 +166,6 @@ async def test_analyzers_returned_from_to_dict() -> None: async def test_conflicting_analyzer_raises_error() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -186,7 +174,6 @@ async def test_conflicting_analyzer_raises_error() -> None: async def test_index_template_can_have_order() -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("i-*") it = i.as_template("i", order=2) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 7c75db70..05264985 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -15,7 +15,6 @@ async def test_mapping_can_has_fields() -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("name", "text").field("tags", "keyword") @@ -25,7 +24,6 @@ async def test_mapping_can_has_fields() -> None: async def test_mapping_update_is_recursive() -> None: - # pylint: disable=missing-function-docstring m1 = mapping.AsyncMapping() m1.field("title", "text") m1.field("author", "object") @@ -59,7 +57,6 @@ async def test_mapping_update_is_recursive() -> None: async def test_properties_can_iterate_over_all_the_fields() -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) 
m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -70,7 +67,6 @@ async def test_properties_can_iterate_over_all_the_fields() -> None: async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -144,7 +140,6 @@ async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: async def test_mapping_can_collect_multiple_analyzers() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -191,7 +186,6 @@ async def test_mapping_can_collect_multiple_analyzers() -> None: async def test_even_non_custom_analyzers_can_have_params() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.AsyncMapping() m.field("title", "text", analyzer=a1) @@ -202,7 +196,6 @@ async def test_even_non_custom_analyzers_can_have_params() -> None: async def test_resolve_field_can_resolve_multifields() -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("title", "text", fields={"keyword": Keyword()}) @@ -210,7 +203,6 @@ async def test_resolve_field_can_resolve_multifields() -> None: async def test_resolve_nested() -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 14d0f498..d01f0b80 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -24,14 +24,12 @@ async def test_expand__to_dot_is_respected() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() async def test_execute_uses_cache() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() r = object() s._response = r @@ -40,7 +38,6 @@ async def test_execute_uses_cache() -> None: async def test_cache_isnt_cloned() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s._response = object() @@ -48,14 +45,12 @@ async def test_cache_isnt_cloned() -> None: async def test_search_starts_with_no_query() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert s.query._proxied is None async def test_search_query_combines_query() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s2 = s.query("match", f=42) @@ -68,7 +63,6 @@ async def test_search_query_combines_query() -> None: async def test_query_can_be_assigned_to() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() q = Q("match", title="python") @@ -78,7 +72,6 @@ async def test_query_can_be_assigned_to() -> None: async def test_query_can_be_wrapped() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -94,7 +87,6 @@ async def test_query_can_be_wrapped() -> None: async def test_using() -> None: - # pylint: disable=missing-function-docstring o = object() o2 = object() s = search.AsyncSearch(using=o) @@ -105,21 +97,18 
@@ async def test_using() -> None: async def test_methods_are_proxied_to_the_query() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match_all") assert s.query.to_dict() == {"match_all": {}} async def test_query_always_returns_search() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert isinstance(s.query("match", f=42), search.AsyncSearch) async def test_source_copied_on_clone() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -134,7 +123,6 @@ async def test_source_copied_on_clone() -> None: async def test_copy_clones() -> None: - # pylint: disable=missing-function-docstring from copy import copy s1 = search.AsyncSearch().source(["some", "fields"]) @@ -145,7 +133,6 @@ async def test_copy_clones() -> None: async def test_aggs_allow_two_metric() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -156,7 +143,6 @@ async def test_aggs_allow_two_metric() -> None: async def test_aggs_get_copied_on_change() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" @@ -189,7 +175,6 @@ async def test_aggs_get_copied_on_change() -> None: async def test_search_index() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch(index="i") assert s._index == ["i"] s = s.index("i2") @@ -221,7 +206,6 @@ async def test_search_index() -> None: async def test_doc_type_document_class() -> None: - # pylint: disable=missing-function-docstring class MyDocument(AsyncDocument): pass @@ -235,7 +219,6 @@ class MyDocument(AsyncDocument): async def test_sort() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.sort("fielda", "-fieldb") @@ -248,7 +231,6 @@ async def test_sort() -> None: async def test_sort_by_score() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -259,7 +241,6 @@ async def test_sort_by_score() -> None: async def test_slice() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -269,13 +250,11 @@ async def test_slice() -> None: async def test_index() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {"from": 3, "size": 1} == s[3].to_dict() async def test_search_to_dict() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() assert {} == s.to_dict() @@ -305,7 +284,6 @@ async def test_search_to_dict() -> None: async def test_complex_example() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = ( s.query("match", title="python") @@ -357,7 +335,6 @@ async def test_complex_example() -> None: async def test_reverse() -> None: - # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -408,14 +385,12 @@ async def test_reverse() -> None: async def test_from_dict_doesnt_need_query() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() async def test_source() -> None: - # pylint: 
disable=missing-function-docstring assert {} == search.AsyncSearch().source().to_dict() assert { @@ -432,7 +407,6 @@ async def test_source() -> None: async def test_source_on_clone() -> None: - # pylint: disable=missing-function-docstring assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -448,7 +422,6 @@ async def test_source_on_clone() -> None: async def test_source_on_clear() -> None: - # pylint: disable=missing-function-docstring assert ( {} == search.AsyncSearch() @@ -459,7 +432,6 @@ async def test_source_on_clear() -> None: async def test_suggest_accepts_global_text() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch.from_dict( { "suggest": { @@ -482,7 +454,6 @@ async def test_suggest_accepts_global_text() -> None: async def test_suggest() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -492,7 +463,6 @@ async def test_suggest() -> None: async def test_exclude() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s = s.exclude("match", title="python") @@ -506,7 +476,6 @@ async def test_exclude() -> None: async def test_update_from_dict() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -518,7 +487,6 @@ async def test_update_from_dict() -> None: async def test_rescore_query_to_dict() -> None: - # pylint: disable=missing-function-docstring s = search.AsyncSearch(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index bf576511..b5380eeb 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -20,14 +20,12 @@ async def test_ubq_starts_with_no_query() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() assert ubq.query._proxied is None async def test_ubq_to_dict() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() assert {} == ubq.to_dict() @@ -46,7 +44,6 @@ async def test_ubq_to_dict() -> None: async def test_complex_example() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -85,7 +82,6 @@ async def test_complex_example() -> None: async def test_exclude() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -99,7 +95,6 @@ async def test_exclude() -> None: async def test_reverse() -> None: - # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -136,14 +131,12 @@ async def test_reverse() -> None: async def test_from_dict_doesnt_need_query() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() async def test_overwrite_script() -> None: - # pylint: disable=missing-function-docstring ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} @@ -160,7 
+153,6 @@ async def test_overwrite_script() -> None: async def test_update_by_query_response_success() -> None: - # pylint: disable=missing-function-docstring ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_async/test_http.py b/test_opensearchpy/test_async/test_http.py index 41f62c53..61323bd7 100644 --- a/test_opensearchpy/test_async/test_http.py +++ b/test_opensearchpy/test_async/test_http.py @@ -17,31 +17,26 @@ class TestHttpClient(OpenSearchTestCaseWithDummyTransport): async def test_head(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.head("/") self.assert_call_count_equals(1) assert [(None, None, None)] == self.assert_url_called("HEAD", "/", 1) async def test_get(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.get("/") self.assert_call_count_equals(1) assert [(None, None, None)] == self.assert_url_called("GET", "/", 1) async def test_put(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.put(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called("PUT", "/xyz", 1) async def test_post(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.post(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called("POST", "/xyz", 1) async def test_post_with_headers(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.post( url="/xyz", headers={"A": "B"}, params={"X": "Y"}, body="body" ) @@ -51,7 +46,6 @@ async def test_post_with_headers(self) -> None: ) async def test_delete(self) -> None: - # pylint: disable=missing-function-docstring await self.client.http.delete(url="/xyz", params={"X": "Y"}, body="body") self.assert_call_count_equals(1) assert [({"X": "Y"}, None, "body")] == self.assert_url_called( diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index c45dc12b..415f34cc 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -38,21 +38,18 @@ class TestAsyncHttpConnection: def test_auth_as_tuple(self) -> None: - # pylint: disable=missing-function-docstring c = AsyncHttpConnection(http_auth=("username", "password")) assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" def test_auth_as_string(self) -> None: - # pylint: disable=missing-function-docstring c = AsyncHttpConnection(http_auth="username:password") assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" def test_auth_as_callable(self) -> None: - # pylint: disable=missing-function-docstring def auth_fn() -> None: pass @@ -62,7 +59,6 @@ def auth_fn() -> None: @pytest.mark.asyncio # type: ignore @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_basicauth_in_request_session(self, mock_request: Any) -> None: - # pylint: disable=missing-function-docstring async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() @@ -95,7 +91,6 @@ async def do_request(*args: Any, **kwargs: Any) -> Any: @pytest.mark.asyncio # type: ignore 
@mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_callable_in_request_session(self, mock_request: Any) -> None: - # pylint: disable=missing-function-docstring def auth_fn(*args: Any, **kwargs: Any) -> Any: return { "Test": "PASSED", diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index 5852fa5e..04eaa7eb 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -20,7 +20,6 @@ class TestPluginsClient: async def test_plugins_client(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: client = AsyncOpenSearch() # testing double-init here diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 666a7904..42deb3cc 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ b/test_opensearchpy/test_async/test_server/conftest.py @@ -41,7 +41,6 @@ @pytest.fixture(scope="function") # type: ignore async def async_client() -> Any: - # pylint: disable=missing-function-docstring client = None try: if not hasattr(opensearchpy, "AsyncOpenSearch"): diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 226efd96..d95d68f9 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -37,13 +37,11 @@ class TestUnicode: async def test_indices_analyze(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: async def test_bulk_works_with_string_body(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -51,7 +49,6 @@ async def test_bulk_works_with_string_body(self, async_client: Any) -> None: assert len(response["items"]) == 1 async def test_bulk_works_with_bytestring_body(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -76,13 +73,11 @@ async def test_aiohttp_connection_works_without_yarl( class TestClose: async def test_close_doesnt_break_client(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.cluster.health() await async_client.close() await async_client.cluster.health() async def test_with_doesnt_break_client(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring for _ in range(2): async with async_client as client: await client.cluster.health() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 305b74cf..50aca89d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -36,7 +36,6 @@ @fixture(scope="function") # type: ignore async def client() -> Any: - # pylint: disable=missing-function-docstring client = await get_test_client(verify_certs=False, http_auth=("admin", "admin")) await add_connection("default", client) return client @@ -44,16 +43,20 @@ async 
def client() -> Any: @fixture(scope="function") # type: ignore async def opensearch_version(client: Any) -> Any: - # pylint: disable=missing-function-docstring + """ + yields the version of the OpenSearch cluster + :param client: + :return: + """ info = await client.info() print(info) - yield tuple(int(x) for x in match_version(info)) + yield tuple(int(x) for x in await match_version(info)) async def match_version(info: Any) -> Any: """ - matches the major version from the given client info - :param info: + matches the full semver server version with the given info + :param info: response from the OpenSearch cluster """ match = re.match(r"^([0-9.]+)", info["version"]["number"]) assert match is not None @@ -62,7 +65,6 @@ async def match_version(info: Any) -> Any: @fixture # type: ignore async def write_client(client: Any) -> Any: - # pylint: disable=missing-function-docstring yield client await client.indices.delete("test-*", ignore=404) await client.indices.delete_template("test-template", ignore=404) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 9e779baf..c1a5dc7a 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -71,7 +71,6 @@ async def bulk(self, *args: Any, **kwargs: Any) -> Any: class TestStreamingBulk(object): async def test_actions_remain_unchanged(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" @@ -80,7 +79,6 @@ async def test_actions_remain_unchanged(self, async_client: Any) -> None: assert [{"_id": 1}, {"_id": 2}] == actions1 async def test_all_documents_get_inserted(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -93,7 +91,6 @@ async def test_all_documents_get_inserted(self, async_client: Any) -> None: ] async def test_documents_data_types(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring async def async_gen() -> Any: for x in range(100): await asyncio.sleep(0) @@ -130,7 +127,6 @@ def sync_gen() -> Any: async def test_all_errors_from_chunk_are_raised_on_failure( self, async_client: Any ) -> None: - # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -151,7 +147,6 @@ async def test_all_errors_from_chunk_are_raised_on_failure( assert False, "exception should have been raised" async def test_different_op_types(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.index(index="i", id=45, body={}) await async_client.index(index="i", id=42, body={}) docs = [ @@ -167,7 +162,6 @@ async def test_different_op_types(self, async_client: Any) -> None: assert {"f": "v"} == (await async_client.get(index="i", id=47))["_source"] async def test_transport_error_can_becaught(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient(async_client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -202,7 +196,6 @@ async def test_transport_error_can_becaught(self, async_client: Any) -> None: } == results[1][1] async def
test_rejected_documents_are_retried(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -233,7 +226,6 @@ async def test_rejected_documents_are_retried(self, async_client: Any) -> None: async def test_rejected_documents_are_retried_at_most_max_retries_times( self, async_client: Any ) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -265,7 +257,6 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( async def test_transport_error_is_raised_with_max_retries( self, async_client: Any ) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( async_client, fail_at=(1, 2, 3, 4), @@ -292,7 +283,6 @@ async def streaming_bulk() -> Any: class TestBulk(object): async def test_bulk_works_with_single_item(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -306,7 +296,6 @@ async def test_bulk_works_with_single_item(self, async_client: Any) -> None: ] async def test_all_documents_get_inserted(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -320,7 +309,6 @@ async def test_all_documents_get_inserted(self, async_client: Any) -> None: ] async def test_stats_only_reports_numbers(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -331,7 +319,6 @@ async def test_stats_only_reports_numbers(self, async_client: Any) -> None: assert 100 == (await async_client.count(index="test-index"))["count"] async def test_errors_are_reported_correctly(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -359,7 +346,6 @@ async def test_errors_are_reported_correctly(self, async_client: Any) -> None: ) or "mapper_parsing_exception" in repr(error["index"]["error"]) async def test_error_is_raised(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -373,7 +359,6 @@ async def test_error_is_raised(self, async_client: Any) -> None: await actions.async_bulk(async_client, [{"a": 42}, {"a": "c"}], index="i") async def test_ignore_error_if_raised(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring # ignore the status code 400 in tuple await actions.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -407,7 +392,6 @@ async def test_ignore_error_if_raised(self, async_client: Any) -> None: ) async def test_errors_are_collected_properly(self, async_client: Any) -> None: - # pylint: disable=missing-function-docstring await async_client.indices.create( "i", { @@ -465,7 +449,6 @@ def __await__(self) -> Any: @pytest.fixture(scope="function") # type: ignore async def scan_teardown(async_client: Any) -> Any: - # pylint: disable=missing-function-docstring yield await async_client.clear_scroll(scroll_id="_all") 
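The bulk tests above revolve around the async_bulk helper; as a standalone illustration, separate from the patch, the sketch below shows the call shape they exercise, including the ignore_status option. The host, credentials, index name, and the helper's import path are assumptions.

import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy.helpers import async_bulk  # assumed import path for the async bulk helper


async def main() -> None:
    # placeholder connection details, mirroring the test fixtures
    client = AsyncOpenSearch(
        hosts=[{"host": "localhost", "port": 9200}],
        http_auth=("admin", "admin"),
        use_ssl=True,
        verify_certs=False,
    )
    docs = [{"_index": "test-index", "_id": x, "answer": x} for x in range(100)]
    # returns (successful action count, error details); 400 responses are ignored,
    # as in test_ignore_error_if_raised above
    success, errors = await async_bulk(client, docs, refresh=True, ignore_status=(400,))
    print(success, errors)
    await client.close()


asyncio.run(main())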
@@ -474,7 +457,6 @@ class TestScan(object): async def test_order_can_be_preserved( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -498,7 +480,6 @@ async def test_order_can_be_preserved( async def test_all_documents_are_read( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -515,7 +496,6 @@ async def test_all_documents_are_read( assert set(range(100)) == set(d["_source"]["answer"] for d in docs) async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -554,7 +534,6 @@ async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None async def test_initial_search_error( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring with patch.object(async_client, "clear_scroll", new_callable=AsyncMock): with patch.object( async_client, @@ -607,7 +586,6 @@ async def test_initial_search_error( async def test_no_scroll_id_fast_route( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -626,7 +604,6 @@ async def test_no_scroll_id_fast_route( async def test_logger( self, logger_mock: Any, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -668,7 +645,6 @@ async def test_logger( ) async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -715,7 +691,6 @@ async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None async def test_scan_auth_kwargs_forwarded( self, async_client: Any, scan_teardown: Any, kwargs: Any ) -> None: - # pylint: disable=missing-function-docstring ((key, val),) = kwargs.items() with patch.object( @@ -758,7 +733,6 @@ async def test_scan_auth_kwargs_forwarded( async def test_scan_auth_kwargs_favor_scroll_kwargs_option( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring with patch.object( async_client, "search", @@ -808,7 +782,6 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( async def test_async_scan_with_missing_hits_key( self, async_client: Any, scan_teardown: Any ) -> None: - # pylint: disable=missing-function-docstring with patch.object( async_client, "search", @@ -837,7 +810,6 @@ async def test_async_scan_with_missing_hits_key( @pytest.fixture(scope="function") # type: ignore async def reindex_setup(async_client: Any) -> Any: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -856,7 +828,6 @@ class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client: Any, reindex_setup: Any ) -> None: - # pylint: 
disable=missing-function-docstring await actions.async_reindex( async_client, "test_index", @@ -878,7 +849,6 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( async def test_reindex_accepts_a_query( self, async_client: Any, reindex_setup: Any ) -> None: - # pylint: disable=missing-function-docstring await actions.async_reindex( async_client, "test_index", @@ -900,7 +870,6 @@ async def test_reindex_accepts_a_query( async def test_all_documents_get_moved( self, async_client: Any, reindex_setup: Any ) -> None: - # pylint: disable=missing-function-docstring await actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() @@ -923,7 +892,6 @@ async def test_all_documents_get_moved( @pytest.fixture(scope="function") # type: ignore async def parent_reindex_setup(async_client: Any) -> None: - # pylint: disable=missing-function-docstring body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -954,7 +922,6 @@ class TestParentChildReindex: async def test_children_are_reindexed_correctly( self, async_client: Any, parent_reindex_setup: Any ) -> None: - # pylint: disable=missing-function-docstring await actions.async_reindex(async_client, "test-index", "real-index") assert {"question_answer": "question"} == ( await async_client.get(index="real-index", id=42) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index 667c1a33..b0f396f2 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -13,7 +13,6 @@ async def create_flat_git_index(client: Any, index: Any) -> None: - # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -57,7 +56,6 @@ async def create_flat_git_index(client: Any, index: Any) -> None: async def create_git_index(client: Any, index: Any) -> None: - # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1080,7 +1078,6 @@ async def create_git_index(client: Any, index: Any) -> None: def flatten_doc(d: Any) -> Dict[str, Any]: - # pylint: disable=missing-function-docstring src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1090,7 +1087,6 @@ def flatten_doc(d: Any) -> Dict[str, Any]: def create_test_git_data(d: Any) -> Dict[str, Any]: - # pylint: disable=missing-function-docstring src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 0f452090..bf02161d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -117,7 +117,6 @@ class Index: async def test_serialization(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await SerializationDoc.init() await write_client.index( index="test-serialization", @@ -148,7 +147,6 @@ async def test_serialization(write_client: Any) -> None: async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring 
history_query = Q( "nested", path="comments.history", @@ -177,7 +175,6 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring s = PullRequest.search().query( "nested", inner_hits={}, @@ -193,7 +190,6 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> async def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -206,7 +202,6 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: async def test_update_object_field(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -227,7 +222,6 @@ async def test_update_object_field(write_client: Any) -> None: async def test_update_script(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -238,7 +232,6 @@ async def test_update_script(write_client: Any) -> None: async def test_update_retry_on_conflict(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -260,7 +253,6 @@ async def test_update_retry_on_conflict(write_client: Any) -> None: async def test_update_conflicting_version( write_client: Any, retry_on_conflict: bool ) -> None: - # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -278,7 +270,6 @@ async def test_update_conflicting_version( async def test_save_and_update_return_doc_meta(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -303,38 +294,32 @@ async def test_save_and_update_return_doc_meta(write_client: Any) -> None: async def test_init(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") async def test_get_raises_404_on_index_missing(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") async def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): await Repository.get("opensearch-dsl-php") async def test_get_returns_none_if_404_ignored(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert None is await Repository.get("opensearch-dsl-php", ignore=404) async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring assert None is await Repository.get("42", index="not-there", ignore=404) async def test_get(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -343,17 +328,14 @@ async def test_get(data_client: 
Any) -> None: async def test_exists_return_true(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert await Repository.exists("opensearch-py") async def test_exists_false(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert not await Repository.exists("opensearch-dsl-php") async def test_get_with_tz_date(data_client: Any) -> None: - # pylint: disable=missing-function-docstring first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -366,7 +348,6 @@ async def test_get_with_tz_date(data_client: Any) -> None: async def test_save_with_tz_date(data_client: Any) -> None: - # pylint: disable=missing-function-docstring tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -394,7 +375,6 @@ async def test_save_with_tz_date(data_client: Any) -> None: async def test_mget(data_client: Any) -> None: - # pylint: disable=missing-function-docstring commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -405,13 +385,11 @@ async def test_mget(data_client: Any) -> None: async def test_mget_raises_exception_when_missing_param_is_invalid( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") @@ -419,14 +397,12 @@ async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> async def test_mget_ignores_missing_docs_when_missing_param_is_skip( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" async def test_update_works_from_search_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -438,7 +414,6 @@ async def test_update_works_from_search_response(data_client: Any) -> None: async def test_update(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -463,7 +438,6 @@ async def test_update(data_client: Any) -> None: async def test_save_updates_existing_doc(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -479,7 +453,6 @@ async def test_save_updates_existing_doc(data_client: Any) -> None: async def test_save_automatically_uses_seq_no_and_primary_term( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -490,7 +463,6 @@ async def test_save_automatically_uses_seq_no_and_primary_term( async def test_delete_automatically_uses_seq_no_and_primary_term( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = await 
Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -499,14 +471,12 @@ async def test_delete_automatically_uses_seq_no_and_primary_term( async def assert_doc_equals(expected: Any, actual: Any) -> None: - # pylint: disable=missing-function-docstring async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] async def test_can_save_to_different_index(write_client: Any) -> None: - # pylint: disable=missing-function-docstring test_repo = Repository(description="testing", meta={"id": 42}) assert await test_repo.save(index="test-document") @@ -524,7 +494,6 @@ async def test_can_save_to_different_index(write_client: Any) -> None: async def test_save_without_skip_empty_will_include_empty_fields( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -540,7 +509,6 @@ async def test_save_without_skip_empty_will_include_empty_fields( async def test_delete(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await write_client.create( index="test-document", id="opensearch-py", @@ -562,12 +530,10 @@ async def test_delete(write_client: Any) -> None: async def test_search(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert await Repository.search().count() == 1 async def test_search_returns_proper_doc_classes(data_client: Any) -> None: - # pylint: disable=missing-function-docstring result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -577,7 +543,6 @@ async def test_search_returns_proper_doc_classes(data_client: Any) -> None: async def test_refresh_mapping(data_client: Any) -> None: - # pylint: disable=missing-function-docstring class Commit(AsyncDocument): _index: Any @@ -594,7 +559,6 @@ class Index: async def test_highlight_in_meta(data_client: Any) -> None: - # pylint: disable=missing-function-docstring commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 0a12e0a5..40e49daa 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -133,7 +133,6 @@ class PRSearch(AsyncFacetedSearch): async def test_facet_with_custom_metric(data_client: Any) -> None: - # pylint: disable=missing-function-docstring ms = MetricSearch() r = await ms.execute() @@ -143,7 +142,6 @@ async def test_facet_with_custom_metric(data_client: Any) -> None: async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring prs = pr_search_cls() r = await prs.execute() @@ -152,7 +150,6 @@ async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -165,7 +162,6 @@ async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) - async def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring rs = repo_search_cls() r = await rs.execute() @@ -174,7 +170,6 @@ async def 
test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> No async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring rs = repo_search_cls() r = await rs.execute() @@ -187,7 +182,6 @@ async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: async def test_empty_search_finds_everything( data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls() r = await cs.execute() assert r.hits.total.value == 52 @@ -234,7 +228,6 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = await cs.execute() @@ -281,7 +274,6 @@ async def test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"deletions": "better"}) r = await cs.execute() @@ -290,7 +282,6 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( async def test_pagination(data_client: Any, commit_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index 8c9e6f42..e2670e55 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -26,7 +26,6 @@ class Post(AsyncDocument): async def test_index_template_works(write_client: Any) -> None: - # pylint: disable=missing-function-docstring it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -48,7 +47,6 @@ async def test_index_template_works(write_client: Any) -> None: async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -64,7 +62,6 @@ async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: async def test_index_exists(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() @@ -72,7 +69,6 @@ async def test_index_exists(data_client: Any) -> None: async def test_index_can_be_created_with_settings_and_mappings( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -98,7 +94,6 @@ async def test_index_can_be_created_with_settings_and_mappings( async def test_delete(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -110,7 +105,6 @@ async def test_delete(write_client: Any) -> None: async def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: - # pylint: 
disable=missing-function-docstring i1: Any = AsyncIndex("test-index-1", using=write_client) i2: Any = AsyncIndex("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 5975ad20..0aa3c0b8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -21,7 +21,6 @@ async def test_mapping_saved_into_opensearch(write_client: Any) -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -44,7 +43,6 @@ async def test_mapping_saved_into_opensearch(write_client: Any) -> None: async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -70,7 +68,6 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring m = mapping.AsyncMapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -100,7 +97,6 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_with_anal async def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: - # pylint: disable=missing-function-docstring await write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 2f6d9c79..a42daf6c 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -43,7 +43,6 @@ class Index: async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: - # pylint: disable=missing-function-docstring has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -65,7 +64,6 @@ async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> N async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -82,7 +80,6 @@ async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = AsyncSearch(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -94,7 +91,6 @@ async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: async def test_scan_respects_doc_types(data_client: Any) -> None: - # pylint: disable=missing-function-docstring result = Repository.search().scan() repos = await get_result(result) @@ -104,7 +100,6 @@ async def test_scan_respects_doc_types(data_client: Any) -> None: async def test_scan_iterates_through_all_docs(data_client: Any) 
-> None: - # pylint: disable=missing-function-docstring s = AsyncSearch(index="flat-git") result = s.scan() commits = await get_result(result) @@ -114,7 +109,6 @@ async def test_scan_iterates_through_all_docs(data_client: Any) -> None: async def get_result(b: Any) -> Any: - # pylint: disable=missing-function-docstring a = [] async for i in b: a.append(i) @@ -122,7 +116,6 @@ async def get_result(b: Any) -> Any: async def test_multi_search(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = AsyncSearch(index="flat-git") @@ -140,7 +133,6 @@ async def test_multi_search(data_client: Any) -> None: async def test_multi_missing(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = AsyncSearch(index="flat-git") s3 = AsyncSearch(index="does_not_exist") @@ -164,7 +156,6 @@ async def test_multi_missing(data_client: Any) -> None: async def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = AsyncSearch(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) r = await s.execute() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 68e98e97..a26d046e 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -21,7 +21,6 @@ async def test_update_by_query_no_script( write_client: Any, setup_ubq_tests: Any ) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -43,7 +42,6 @@ async def test_update_by_query_no_script( async def test_update_by_query_with_script( write_client: Any, setup_ubq_tests: Any ) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -63,7 +61,6 @@ async def test_update_by_query_with_script( async def test_delete_by_query_with_script( write_client: Any, setup_ubq_tests: Any ) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 31596875..4e742c2f 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -28,7 +28,6 @@ class TestAlertingPlugin(AsyncOpenSearchTestCase): "Plugin not supported for opensearch version", ) async def test_create_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -45,7 +44,6 @@ async def test_create_destination(self) -> None: "Plugin not supported for opensearch version", ) async def test_get_destination(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy destination await self.test_create_destination() @@ -61,7 +59,6 @@ async def test_get_destination(self) -> None: "Plugin not supported for opensearch version", ) async def test_create_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy destination await self.test_create_destination() @@ -127,7 +124,6 @@ async def test_create_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_search_monitor(self) -> None: - 
# pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() @@ -146,7 +142,6 @@ async def test_search_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_get_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() @@ -171,7 +166,6 @@ async def test_get_monitor(self) -> None: "Plugin not supported for opensearch version", ) async def test_run_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy monitor await self.test_create_monitor() diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index 22d19490..0ffae69f 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -69,7 +69,6 @@ class TestIndexManagementPlugin(AsyncOpenSearchTestCase): } async def test_create_policy(self) -> None: - # pylint: disable=missing-function-docstring # Test to create policy response = await self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -79,7 +78,6 @@ async def test_create_policy(self) -> None: self.assertIn("_id", response) async def test_get_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() @@ -91,7 +89,6 @@ async def test_get_policy(self) -> None: self.assertEqual(response["_id"], self.POLICY_NAME) async def test_update_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() @@ -114,7 +111,6 @@ async def test_update_policy(self) -> None: self.assertIn("_id", response) async def test_delete_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy await self.test_create_policy() diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 91be3623..9e412d7d 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -215,7 +215,6 @@ async def _feature_enabled(self, name: str) -> Any: @pytest.fixture(scope="function") # type: ignore def async_runner(async_client: Any) -> AsyncYamlRunner: - # pylint: disable=missing-function-docstring return AsyncYamlRunner(async_client) @@ -223,7 +222,6 @@ def async_runner(async_client: Any) -> AsyncYamlRunner: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore async def test_rest_api_spec(test_spec: Any, async_runner: Any) -> None: - # pylint: disable=missing-function-docstring if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 415e10ee..926bade9 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -48,12 +48,11 @@ async def asyncSetUp(self) -> None: await add_connection("default", self.client) async def asyncTearDown(self) -> None: - # pylint: disable=missing-function-docstring, invalid-name + # pylint: disable=invalid-name
if self.client: await self.client.close() async def test_create_role(self) -> None: - # pylint: disable=missing-function-docstring # Test to create role response = await self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -63,7 +62,6 @@ async def test_create_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_create_role_with_body_param_empty(self) -> None: - # pylint: disable=missing-function-docstring try: await self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -72,7 +70,6 @@ async def test_create_role_with_body_param_empty(self) -> None: assert False async def test_get_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -83,7 +80,6 @@ async def test_get_role(self) -> None: self.assertIn(self.ROLE_NAME, response) async def test_update_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -99,7 +95,6 @@ async def test_update_role(self) -> None: self.assertEqual("OK", response.get("status")) async def test_delete_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role await self.test_create_role() @@ -113,7 +108,6 @@ async def test_delete_role(self) -> None: response = await self.client.security.get_role(self.ROLE_NAME) async def test_create_user(self) -> None: - # pylint: disable=missing-function-docstring # Test to create user response = await self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -123,7 +117,6 @@ async def test_create_user(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_create_user_with_body_param_empty(self) -> None: - # pylint: disable=missing-function-docstring try: await self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -132,7 +125,6 @@ async def test_create_user_with_body_param_empty(self) -> None: assert False async def test_create_user_with_role(self) -> None: - # pylint: disable=missing-function-docstring await self.test_create_role() # Test to create user @@ -148,7 +140,6 @@ async def test_create_user_with_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) async def test_get_user(self) -> None: - # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -159,7 +150,6 @@ async def test_get_user(self) -> None: self.assertIn(self.USER_NAME, response) async def test_update_user(self) -> None: - # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -175,7 +165,6 @@ async def test_update_user(self) -> None: self.assertEqual("OK", response.get("status")) async def test_delete_user(self) -> None: - # pylint: disable=missing-function-docstring # Create a user await self.test_create_user() @@ -189,13 +178,11 @@ async def test_delete_user(self) -> None: response = await self.client.security.get_user(self.USER_NAME) async def test_health_check(self) -> None: - # pylint: disable=missing-function-docstring response = await self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) async def test_health(self) -> None: - # pylint: disable=missing-function-docstring response = await self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -229,7 +216,6 @@ async def test_health(self) -> None: } async def 
test_update_audit_config(self) -> None: - # pylint: disable=missing-function-docstring response = await self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) @@ -237,7 +223,6 @@ async def test_update_audit_config(self) -> None: self.assertEqual("OK", response.get("status")) async def test_update_audit_configuration(self) -> None: - # pylint: disable=missing-function-docstring response = await self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 8500a231..664f6a95 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -18,7 +18,6 @@ class TestAsyncSigner: def mock_session(self) -> Mock: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -32,7 +31,6 @@ def mock_session(self) -> Mock: return dummy_session async def test_aws_signer_async_as_http_auth(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -44,7 +42,6 @@ async def test_aws_signer_async_as_http_auth(self) -> None: assert "X-Amz-Security-Token" in headers async def test_aws_signer_async_when_region_is_null(self) -> None: - # pylint: disable=missing-function-docstring session = self.mock_session() from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -58,7 +55,6 @@ async def test_aws_signer_async_when_region_is_null(self) -> None: assert str(e.value) == "Region cannot be empty" async def test_aws_signer_async_when_credentials_is_null(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-1" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -68,7 +64,6 @@ async def test_aws_signer_async_when_credentials_is_null(self) -> None: assert str(e.value) == "Credentials cannot be empty" async def test_aws_signer_async_when_service_is_specified(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" service = "aoss" @@ -84,7 +79,6 @@ async def test_aws_signer_async_when_service_is_specified(self) -> None: class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner): def mock_session(self, disable_get_frozen: bool = True) -> Mock: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -97,7 +91,6 @@ def mock_session(self, disable_get_frozen: bool = True) -> Mock: return dummy_session async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index 846103da..6efa4b6c 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -64,7 +64,6 @@ async def perform_request(self, *args: Any, **kwargs: Any) -> Any: return self.status, self.headers, self.data async def close(self) -> None: - # pylint: disable=missing-function-docstring if self.closed: raise RuntimeError("This connection is already closed") self.closed = True @@ -123,7 +122,6 @@ async def close(self) -> None: class TestTransport: async def test_single_connection_uses_dummy_connection_pool(self) -> None: - # pylint: 
disable=missing-function-docstring t1: Any = AsyncTransport([{}]) await t1._async_call() assert isinstance(t1.connection_pool, DummyConnectionPool) @@ -132,7 +130,6 @@ async def test_single_connection_uses_dummy_connection_pool(self) -> None: assert isinstance(t2.connection_pool, DummyConnectionPool) async def test_request_timeout_extracted_from_params_and_passed(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -145,7 +142,6 @@ async def test_request_timeout_extracted_from_params_and_passed(self) -> None: } == t.get_connection().calls[0][1] async def test_timeout_extracted_from_params_and_passed(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) @@ -158,7 +154,6 @@ async def test_timeout_extracted_from_params_and_passed(self) -> None: } == t.get_connection().calls[0][1] async def test_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], opaque_id="app-1", connection_class=DummyConnection ) @@ -183,7 +178,6 @@ async def test_opaque_id(self) -> None: } == t.get_connection().calls[1][1] async def test_request_with_custom_user_agent_header(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request( @@ -197,7 +191,6 @@ async def test_request_with_custom_user_agent_header(self) -> None: } == t.get_connection().calls[0][1] async def test_send_get_body_as_source(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -207,7 +200,6 @@ async def test_send_get_body_as_source(self) -> None: assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] async def test_send_get_body_as_post(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -217,7 +209,6 @@ async def test_send_get_body_as_post(self) -> None: assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] async def test_body_gets_encoded_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") @@ -230,7 +221,6 @@ async def test_body_gets_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_body_bytes_get_passed_untouched(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -239,7 +229,6 @@ async def test_body_bytes_get_passed_untouched(self) -> None: assert ("GET", "/", None, body) == t.get_connection().calls[0][0] async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好\uda6a") @@ -252,21 +241,18 @@ async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_kwargs_passed_on_to_connections(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{"host": "google.com"}], 
port=123) await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host async def test_kwargs_passed_on_to_connection_pool(self) -> None: - # pylint: disable=missing-function-docstring dt = object() t: Any = AsyncTransport([{}, {}], dead_timeout=dt) await t._async_call() assert dt is t.connection_pool.dead_timeout async def test_custom_connection_class(self) -> None: - # pylint: disable=missing-function-docstring class MyConnection(object): def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs @@ -277,7 +263,6 @@ def __init__(self, **kwargs: Any) -> None: assert isinstance(t.connection_pool.connections[0], MyConnection) async def test_add_connection(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) @@ -285,7 +270,6 @@ async def test_add_connection(self) -> None: assert "http://google.com:1234" == t.connection_pool.connections[1].host async def test_request_will_fail_after_x_retries(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -301,7 +285,6 @@ async def test_request_will_fail_after_x_retries(self) -> None: assert 4 == len(t.get_connection().calls) async def test_failed_connection_will_be_marked_as_dead(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, @@ -319,7 +302,6 @@ async def test_failed_connection_will_be_marked_as_dead(self) -> None: async def test_resurrected_connection_will_be_marked_as_live_on_success( self, ) -> None: - # pylint: disable=missing-function-docstring for method in ("GET", "HEAD"): t: Any = AsyncTransport([{}, {}], connection_class=DummyConnection) await t._async_call() @@ -333,7 +315,6 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success( assert 1 == len(t.connection_pool.dead_count) async def test_sniff_will_use_seed_connections(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection ) @@ -345,7 +326,6 @@ async def test_sniff_will_use_seed_connections(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -358,7 +338,6 @@ async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -373,7 +352,6 @@ async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: ].calls[0] async def test_sniff_uses_sniff_timeout(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -387,7 +365,6 @@ async def test_sniff_uses_sniff_timeout(self) -> None: ].calls[0] async def test_sniff_reuses_connection_instances_if_possible(self) -> None: - # pylint: disable=missing-function-docstring t: Any = 
AsyncTransport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, @@ -402,7 +379,6 @@ async def test_sniff_reuses_connection_instances_if_possible(self) -> None: assert connection is t.get_connection() async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"exception": ConnectionError(None, "abandon ship", Exception())}, @@ -431,7 +407,6 @@ async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: async def test_sniff_on_fail_failing_does_not_prevent_retires( self, sniff_hosts: Any ) -> None: - # pylint: disable=missing-function-docstring sniff_hosts.side_effect = [TransportError("sniff failed")] t: Any = AsyncTransport( [ @@ -453,7 +428,6 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires( assert 1 == len(conn_data.calls) async def test_sniff_after_n_seconds(self, event_loop: Any) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -493,7 +467,6 @@ async def test_sniff_7x_publish_host(self) -> None: } async def test_transport_close_closes_all_pool_connections(self) -> None: - # pylint: disable=missing-function-docstring t1: Any = AsyncTransport([{}], connection_class=DummyConnection) await t1._async_call() @@ -511,7 +484,6 @@ async def test_transport_close_closes_all_pool_connections(self) -> None: async def test_sniff_on_start_error_if_no_sniffed_hosts( self, event_loop: Any ) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"data": ""}, @@ -531,7 +503,6 @@ async def test_sniff_on_start_error_if_no_sniffed_hosts( async def test_sniff_on_start_waits_for_sniff_to_complete( self, event_loop: Any ) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"delay": 1, "data": ""}, @@ -570,7 +541,6 @@ async def test_sniff_on_start_waits_for_sniff_to_complete( async def test_sniff_on_start_close_unlocks_async_calls( self, event_loop: Any ) -> None: - # pylint: disable=missing-function-docstring t: Any = AsyncTransport( [ {"delay": 10, "data": CLUSTER_NODES}, @@ -598,7 +568,6 @@ async def test_sniff_on_start_close_unlocks_async_calls( assert duration < 1 async def test_init_connection_pool_with_many_hosts(self) -> None: - # pylint: disable=missing-function-docstring """ Check init of connection pool with multiple connections. @@ -617,7 +586,6 @@ async def test_init_connection_pool_with_many_hosts(self) -> None: await t._async_call() async def test_init_pool_with_connection_class_to_many_hosts(self) -> None: - # pylint: disable=missing-function-docstring """ Check init of connection pool with user specified connection_class. 
diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index 7652be71..a03f0e44 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -36,7 +36,6 @@ class DummyTransport(object): def __init__( self, hosts: Sequence[str], responses: Any = None, **kwargs: Any ) -> None: - # pylint: disable=missing-function-docstring self.hosts = hosts self.responses = responses self.call_count: int = 0 @@ -50,7 +49,6 @@ def perform_request( body: Optional[bytes] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: - # pylint: disable=missing-function-docstring resp: Any = (200, {}) if self.responses: resp = self.responses[self.call_count] @@ -61,16 +59,13 @@ def perform_request( class OpenSearchTestCase(TestCase): def setUp(self) -> None: - # pylint: disable=missing-function-docstring super(OpenSearchTestCase, self).setUp() self.client: Any = OpenSearch(transport_class=DummyTransport) # type: ignore def assert_call_count_equals(self, count: int) -> None: - # pylint: disable=missing-function-docstring self.assertEqual(count, self.client.transport.call_count) def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: - # pylint: disable=missing-function-docstring self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) @@ -79,15 +74,12 @@ def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: class TestOpenSearchTestCase(OpenSearchTestCase): def test_our_transport_used(self) -> None: - # pylint: disable=missing-function-docstring self.assertIsInstance(self.client.transport, DummyTransport) def test_start_with_0_call(self) -> None: - # pylint: disable=missing-function-docstring self.assert_call_count_equals(0) def test_each_call_is_recorded(self) -> None: - # pylint: disable=missing-function-docstring self.client.transport.perform_request("GET", "/") self.client.transport.perform_request("DELETE", "/42", params={}, body="body") self.assert_call_count_equals(2) diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index 6b4d683d..55fcd4a9 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -37,15 +37,12 @@ class TestNormalizeHosts(TestCase): def test_none_uses_defaults(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual([{}], _normalize_hosts(None)) def test_strings_are_used_as_hostnames(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"])) def test_strings_are_parsed_for_port_and_user(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( [ {"host": "elastic.co", "port": 42}, @@ -55,7 +52,6 @@ def test_strings_are_parsed_for_port_and_user(self) -> None: ) def test_strings_are_parsed_for_scheme(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( [ {"host": "elastic.co", "port": 42, "use_ssl": True}, @@ -73,26 +69,22 @@ def test_strings_are_parsed_for_scheme(self) -> None: ) def test_dicts_are_left_unchanged(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( [{"host": "local", "extra": 123}], _normalize_hosts([{"host": "local", "extra": 123}]), ) def test_single_string_is_wrapped_in_list(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co")) class 
TestClient(OpenSearchTestCase): def test_request_timeout_is_passed_through_unescaped(self) -> None: - # pylint: disable=missing-function-docstring self.client.ping(request_timeout=0.1) calls = self.assert_url_called("HEAD", "/") self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls) def test_params_is_copied_when(self) -> None: - # pylint: disable=missing-function-docstring rt = object() params = dict(request_timeout=rt) self.client.ping(params=params) @@ -105,7 +97,6 @@ def test_params_is_copied_when(self) -> None: self.assertFalse(calls[0][0] is calls[1][0]) def test_headers_is_copied_when(self) -> None: - # pylint: disable=missing-function-docstring hv = "value" headers = dict(Authentication=hv) self.client.ping(headers=headers) @@ -118,47 +109,39 @@ def test_headers_is_copied_when(self) -> None: self.assertFalse(calls[0][0] is calls[1][0]) def test_from_in_search(self) -> None: - # pylint: disable=missing-function-docstring self.client.search(index="i", from_=10) calls = self.assert_url_called("POST", "/i/_search") self.assertEqual([({"from": "10"}, {}, None)], calls) def test_repr_contains_hosts(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual("", repr(self.client)) def test_repr_subclass(self) -> None: - # pylint: disable=missing-function-docstring class OtherOpenSearch(OpenSearch): pass self.assertEqual("", repr(OtherOpenSearch())) def test_repr_contains_hosts_passed_in(self) -> None: - # pylint: disable=missing-function-docstring self.assertIn("opensearchpy.org", repr(OpenSearch(["opensearch.org:123"]))) def test_repr_truncates_host_to_5(self) -> None: - # pylint: disable=missing-function-docstring hosts = [{"host": "opensearch" + str(i)} for i in range(10)] client = OpenSearch(hosts) self.assertNotIn("opensearch5", repr(client)) self.assertIn("...", repr(client)) def test_index_uses_post_if_id_is_empty(self) -> None: - # pylint: disable=missing-function-docstring self.client.index(index="my-index", id="", body={}) self.assert_url_called("POST", "/my-index/_doc") def test_index_uses_put_if_id_is_not_empty(self) -> None: - # pylint: disable=missing-function-docstring self.client.index(index="my-index", id=0, body={}) self.assert_url_called("PUT", "/my-index/_doc/0") def test_tasks_get_without_task_id_deprecated(self) -> None: - # pylint: disable=missing-function-docstring warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get() @@ -173,7 +156,6 @@ def test_tasks_get_without_task_id_deprecated(self) -> None: ) def test_tasks_get_with_task_id_not_deprecated(self) -> None: - # pylint: disable=missing-function-docstring warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get("task-1") diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index 605eac4c..3e9ad987 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ b/test_opensearchpy/test_client/test_cluster.py @@ -30,12 +30,10 @@ class TestCluster(OpenSearchTestCase): def test_stats_without_node_id(self) -> None: - # pylint: disable=missing-function-docstring self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") def test_stats_with_node_id(self) -> None: - # pylint: disable=missing-function-docstring self.client.cluster.stats("node-1") self.assert_url_called("GET", "/_cluster/stats/nodes/node-1") @@ -43,7 +41,6 @@ def test_stats_with_node_id(self) -> None: 
self.assert_url_called("GET", "/_cluster/stats/nodes/node-2") def test_state_with_index_without_metric_defaults_to_all(self) -> None: - # pylint: disable=missing-function-docstring self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") diff --git a/test_opensearchpy/test_client/test_http.py b/test_opensearchpy/test_client/test_http.py index 3d230c88..145cbb6d 100644 --- a/test_opensearchpy/test_client/test_http.py +++ b/test_opensearchpy/test_client/test_http.py @@ -13,19 +13,16 @@ class TestHttp(OpenSearchTestCase): def test_http_get(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.get("/") self.assert_call_count_equals(1) self.assertEqual([(None, None, None)], self.assert_url_called("GET", "/", 1)) def test_http_head(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.head("/") self.assert_call_count_equals(1) self.assertEqual([(None, None, None)], self.assert_url_called("HEAD", "/", 1)) def test_http_put(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.put("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( @@ -33,7 +30,6 @@ def test_http_put(self) -> None: ) def test_http_post(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.post("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( @@ -41,7 +37,6 @@ def test_http_post(self) -> None: ) def test_http_post_with_params(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.post( "/xyz", headers={"X": "Y"}, params={"A": "B"}, body="body" ) @@ -52,7 +47,6 @@ def test_http_post_with_params(self) -> None: ) def test_http_delete(self) -> None: - # pylint: disable=missing-function-docstring self.client.http.delete("/xyz", headers={"X": "Y"}, body="body") self.assert_call_count_equals(1) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index bf4e0f71..d45405e5 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -30,22 +30,18 @@ class TestIndices(OpenSearchTestCase): def test_create_one_index(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.create("test-index") self.assert_url_called("PUT", "/test-index") def test_delete_multiple_indices(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.delete(["test-index", "second.index", "third/index"]) self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex") def test_exists_index(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.exists("second.index,third/index") self.assert_url_called("HEAD", "/second.index,third%2Findex") def test_passing_empty_value_for_required_param_raises_exception(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(ValueError, self.client.indices.exists, index=None) self.assertRaises(ValueError, self.client.indices.exists, index=[]) self.assertRaises(ValueError, self.client.indices.exists, index="") diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py index 9b69fc9f..160a8bdd 100644 --- a/test_opensearchpy/test_client/test_overrides.py +++ b/test_opensearchpy/test_client/test_overrides.py @@ -32,42 +32,34 @@ class TestOverriddenUrlTargets(OpenSearchTestCase): def test_create(self) -> None: - # pylint: 
disable=missing-function-docstring self.client.create(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_create/test-id") def test_delete(self) -> None: - # pylint: disable=missing-function-docstring self.client.delete(index="test-index", id="test-id") self.assert_url_called("DELETE", "/test-index/_doc/test-id") def test_exists(self) -> None: - # pylint: disable=missing-function-docstring self.client.exists(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_doc/test-id") def test_explain(self) -> None: - # pylint: disable=missing-function-docstring self.client.explain(index="test-index", id="test-id") self.assert_url_called("POST", "/test-index/_explain/test-id") def test_get(self) -> None: - # pylint: disable=missing-function-docstring self.client.get(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_doc/test-id") def test_get_source(self) -> None: - # pylint: disable=missing-function-docstring self.client.get_source(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_source/test-id") def test_exists_source(self) -> None: - # pylint: disable=missing-function-docstring self.client.exists_source(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_source/test-id") def test_index(self) -> None: - # pylint: disable=missing-function-docstring self.client.index(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_doc") @@ -75,7 +67,6 @@ def test_index(self) -> None: self.assert_url_called("PUT", "/test-index/_doc/test-id") def test_termvectors(self) -> None: - # pylint: disable=missing-function-docstring self.client.termvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_termvectors") @@ -83,17 +74,14 @@ def test_termvectors(self) -> None: self.assert_url_called("POST", "/test-index/_termvectors/test-id") def test_mtermvectors(self) -> None: - # pylint: disable=missing-function-docstring self.client.mtermvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_mtermvectors") def test_update(self) -> None: - # pylint: disable=missing-function-docstring self.client.update(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_update/test-id") def test_cluster_state(self) -> None: - # pylint: disable=missing-function-docstring self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") @@ -104,7 +92,6 @@ def test_cluster_state(self) -> None: self.assert_url_called("GET", "/_cluster/state/test-metric/test-index") def test_cluster_stats(self) -> None: - # pylint: disable=missing-function-docstring self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") @@ -112,7 +99,6 @@ def test_cluster_stats(self) -> None: self.assert_url_called("GET", "/_cluster/stats/nodes/test-node") def test_indices_put_mapping(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.put_mapping(body={}) self.assert_url_called("PUT", "/_all/_mapping") @@ -120,6 +106,5 @@ def test_indices_put_mapping(self) -> None: self.assert_url_called("PUT", "/test-index/_mapping") def test_tasks_get(self) -> None: - # pylint: disable=missing-function-docstring with pytest.warns(DeprecationWarning): self.client.tasks.get() diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index 7a9b3df8..f012ccbb 100644 --- 
a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -12,48 +12,40 @@ class TestAlerting(OpenSearchTestCase): def test_create_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Test Post Method self.client.alerting.create_monitor({}) self.assert_url_called("POST", "/_plugins/_alerting/monitors") def test_run_monitor(self) -> None: - # pylint: disable=missing-function-docstring self.client.alerting.run_monitor("...") self.assert_url_called("POST", "/_plugins/_alerting/monitors/.../_execute") def test_get_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Test Get Method self.client.alerting.get_monitor("...") self.assert_url_called("GET", "/_plugins/_alerting/monitors/...") def test_search_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Test Search Method self.client.alerting.search_monitor({}) self.assert_url_called("GET", "/_plugins/_alerting/monitors/_search") def test_update_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Test Update Method self.client.alerting.update_monitor("...") self.assert_url_called("PUT", "/_plugins/_alerting/monitors/...") def test_delete_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Test Delete Method self.client.alerting.delete_monitor("...") self.assert_url_called("DELETE", "/_plugins/_alerting/monitors/...") def test_create_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test Post Method self.client.alerting.create_destination({}) self.assert_url_called("POST", "/_plugins/_alerting/destinations") def test_get_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test Get Method # Get a specific destination @@ -65,24 +57,20 @@ def test_get_destination(self) -> None: self.assert_url_called("GET", "/_plugins/_alerting/destinations") def test_update_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test Update Method self.client.alerting.update_destination("...") self.assert_url_called("PUT", "/_plugins/_alerting/destinations/...") def test_delete_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test Delete Method self.client.alerting.delete_destination("...") self.assert_url_called("DELETE", "/_plugins/_alerting/destinations/...") def test_get_alerts(self) -> None: - # pylint: disable=missing-function-docstring self.client.alerting.get_alerts() self.assert_url_called("GET", "/_plugins/_alerting/monitors/alerts") def test_acknowledge_alerts(self) -> None: - # pylint: disable=missing-function-docstring self.client.alerting.acknowledge_alert("...") self.assert_url_called( "POST", "/_plugins/_alerting/monitors/.../_acknowledge/alerts" diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index 53a5cc16..a2052163 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -12,12 +12,10 @@ class TestIndexManagement(OpenSearchTestCase): def test_create_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.put_policy("...") self.assert_url_called("PUT", "/_plugins/_ism/policies/...") def test_update_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.put_policy( "...", params={"if_seq_no": 7, 
"if_primary_term": 1} ) @@ -27,32 +25,26 @@ def test_update_policy(self) -> None: ) def test_add_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.add_policy("...") self.assert_url_called("POST", "/_plugins/_ism/add/...") def test_get_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.get_policy("...") self.assert_url_called("GET", "/_plugins/_ism/policies/...") def test_remove_policy_from_index(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.remove_policy_from_index("...") self.assert_url_called("POST", "/_plugins/_ism/remove/...") def test_change_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.change_policy("...") self.assert_url_called("POST", "/_plugins/_ism/change_policy/...") def test_retry(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.retry("...") self.assert_url_called("POST", "/_plugins/_ism/retry/...") def test_explain_index(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.explain_index("...", show_policy=True) self.assertEqual( [({"show_policy": b"true"}, {}, None)], @@ -60,6 +52,5 @@ def test_explain_index(self) -> None: ) def test_delete_policy(self) -> None: - # pylint: disable=missing-function-docstring self.client.index_management.delete_policy("...") self.assert_url_called("DELETE", "/_plugins/_ism/policies/...") diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index 2146905c..1b794486 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -14,7 +14,6 @@ class TestPluginsClient(TestCase): def test_plugins_client(self) -> None: - # pylint: disable=missing-function-docstring with self.assertWarns(Warning) as w: client = OpenSearch() # double-init diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index cb9331f4..38a4b8cc 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -12,43 +12,35 @@ class TestPointInTime(OpenSearchTestCase): def test_create_one_point_in_time(self) -> None: - # pylint: disable=missing-function-docstring index_name = "test-index" self.client.create_point_in_time(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") def test_delete_one_point_in_time(self) -> None: - # pylint: disable=missing-function-docstring self.client.delete_point_in_time(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") def test_delete_all_point_in_time(self) -> None: - # pylint: disable=missing-function-docstring self.client.delete_point_in_time(all=True) self.assert_url_called("DELETE", "/_search/point_in_time/_all") def test_list_all_point_in_time(self) -> None: - # pylint: disable=missing-function-docstring self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") def test_create_pit(self) -> None: - # pylint: disable=missing-function-docstring index_name = "test-index" self.client.create_pit(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") def test_delete_pit(self) -> None: - # pylint: disable=missing-function-docstring 
self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") def test_delete_all_pits(self) -> None: - # pylint: disable=missing-function-docstring self.client.delete_all_pits() self.assert_url_called("DELETE", "/_search/point_in_time/_all") def test_get_all_pits(self) -> None: - # pylint: disable=missing-function-docstring self.client.get_all_pits() self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index 8d83d13d..faf91297 100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -11,6 +11,5 @@ class TestRemoteStore(OpenSearchTestCase): def test_remote_store_restore(self) -> None: - # pylint: disable=missing-function-docstring self.client.remote_store.restore(body=["index-1"]) self.assert_url_called("POST", "/_remotestore/_restore") diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index 53db865e..b3ac3d6f 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -14,7 +14,6 @@ class TestRequests(TestCase): def test_connection_class(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=RequestsHttpConnection) self.assertEqual(client.transport.pool_maxsize, None) self.assertEqual(client.transport.connection_class, RequestsHttpConnection) @@ -23,7 +22,6 @@ def test_connection_class(self) -> None: ) def test_pool_maxsize(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index 599f1cf2..d30c85e7 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -16,13 +16,11 @@ class TestUrlLib3(TestCase): def test_default(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch() self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertEqual(client.transport.pool_maxsize, None) def test_connection_class(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=Urllib3HttpConnection) self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertIsInstance( @@ -33,7 +31,6 @@ def test_connection_class(self) -> None: ) def test_pool_maxsize(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 44c106d0..efed662a 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -36,16 +36,13 @@ class TestQueryParams(TestCase): def setup_method(self, _: Any) -> None: - # pylint: disable=missing-function-docstring self.calls: Any = [] @query_params("simple_param") def func_to_wrap(self, *args: Any, **kwargs: Any) -> None: - # pylint: disable=missing-function-docstring self.calls.append((args, kwargs)) def 
test_handles_params(self) -> None: - # pylint: disable=missing-function-docstring self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, @@ -61,21 +58,18 @@ def test_handles_params(self) -> None: ) def test_handles_headers(self) -> None: - # pylint: disable=missing-function-docstring self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) def test_handles_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "request-id"}})] ) def test_handles_empty_none_and_normalization(self) -> None: - # pylint: disable=missing-function-docstring self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) @@ -92,7 +86,6 @@ def test_handles_empty_none_and_normalization(self) -> None: self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) def test_non_escaping_params(self) -> None: - # pylint: disable=missing-function-docstring # the query_params decorator doesn't validate "timeout" it simply avoids escaping as it did self.func_to_wrap(simple_param="x", timeout="4s") self.assertEqual( @@ -118,7 +111,6 @@ def test_non_escaping_params(self) -> None: ) def test_per_call_authentication(self) -> None: - # pylint: disable=missing-function-docstring self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], @@ -164,7 +156,6 @@ def test_per_call_authentication(self) -> None: class TestMakePath(TestCase): def test_handles_unicode(self) -> None: - # pylint: disable=missing-function-docstring id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) @@ -173,34 +164,28 @@ def test_handles_unicode(self) -> None: class TestEscape(TestCase): def test_handles_ascii(self) -> None: - # pylint: disable=missing-function-docstring string = "abc123" self.assertEqual(b"abc123", _escape(string)) def test_handles_unicode(self) -> None: - # pylint: disable=missing-function-docstring string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) def test_handles_bytestring(self) -> None: - # pylint: disable=missing-function-docstring string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): def test_proper_bulk_body_as_string_is_not_modified(self) -> None: - # pylint: disable=missing-function-docstring string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, _bulk_body(None, string_body)) def test_proper_bulk_body_as_bytestring_is_not_modified(self) -> None: - # pylint: disable=missing-function-docstring bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) def test_bulk_body_as_string_adds_trailing_newline(self) -> None: - # pylint: disable=missing-function-docstring string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', @@ -208,7 +193,6 @@ def test_bulk_body_as_string_adds_trailing_newline(self) -> None: ) def test_bulk_body_as_bytestring_adds_trailing_newline(self) -> None: - # pylint: disable=missing-function-docstring bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( 
b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index f599d7cf..6725849a 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -46,7 +46,6 @@ class TestBaseConnection(TestCase): def test_empty_warnings(self) -> None: - # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as w: con._raise_warnings(()) @@ -55,7 +54,6 @@ def test_empty_warnings(self) -> None: self.assertEqual(w, []) def test_raises_warnings(self) -> None: - # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as warn: @@ -78,7 +76,6 @@ def test_raises_warnings(self) -> None: ) def test_raises_warnings_when_folded(self) -> None: - # pylint: disable=missing-function-docstring con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings( @@ -91,7 +88,6 @@ def test_raises_warnings_when_folded(self) -> None: self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) def test_ipv6_host_and_port(self) -> None: - # pylint: disable=missing-function-docstring for kwargs, expected_host in [ ({"host": "::1"}, "http://[::1]:9200"), ({"host": "::1", "port": 443}, "http://[::1]:443"), @@ -103,7 +99,6 @@ def test_ipv6_host_and_port(self) -> None: assert conn.host == expected_host def test_compatibility_accept_header(self) -> None: - # pylint: disable=missing-function-docstring try: conn = Connection() assert "accept" not in conn.headers @@ -124,33 +119,28 @@ def test_compatibility_accept_header(self) -> None: os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") def test_ca_certs_ssl_cert_file(self) -> None: - # pylint: disable=missing-function-docstring cert = "/path/to/clientcert.pem" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_FILE", cert) assert Connection.default_ca_certs() == cert def test_ca_certs_ssl_cert_dir(self) -> None: - # pylint: disable=missing-function-docstring cert = "/path/to/clientcert/dir" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_DIR", cert) assert Connection.default_ca_certs() == cert def test_ca_certs_certifi(self) -> None: - # pylint: disable=missing-function-docstring import certifi assert Connection.default_ca_certs() == certifi.where() def test_no_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring with MonkeyPatch().context() as monkeypatch: monkeypatch.setitem(sys.modules, "certifi", None) assert Connection.default_ca_certs() is None def test_default_connection_is_returned_by_default(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() con, con2 = object(), object() @@ -161,7 +151,6 @@ def test_default_connection_is_returned_by_default(self) -> None: assert c.get_connection() is con def test_get_connection_created_connection_if_needed(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -177,7 +166,6 @@ def test_get_connection_created_connection_if_needed(self) -> None: assert [{"host": "localhost"}] == local.transport.hosts def test_configure_preserves_unchanged_connections(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( @@ -196,7 +184,6 @@ def 
test_configure_preserves_unchanged_connections(self) -> None: assert new_default is not default def test_remove_connection_removes_both_conn_and_conf(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() c.configure( @@ -213,7 +200,6 @@ def test_remove_connection_removes_both_conn_and_conf(self) -> None: c.get_connection("default") def test_create_connection_constructs_client(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) @@ -221,7 +207,6 @@ def test_create_connection_constructs_client(self) -> None: assert [{"host": "opensearch.com"}] == con.transport.hosts def test_create_connection_adds_our_serializer(self) -> None: - # pylint: disable=missing-function-docstring c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index ee648610..360cef2f 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -84,24 +84,20 @@ def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any: return args[0] def test_custom_http_auth_is_allowed(self) -> None: - # pylint: disable=missing-function-docstring auth = AuthBase() c = RequestsHttpConnection(http_auth=auth) self.assertEqual(auth, c.session.auth) def test_timeout_set(self) -> None: - # pylint: disable=missing-function-docstring con = RequestsHttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring con = RequestsHttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") def test_no_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() self.assertFalse(con.http_compress) @@ -114,7 +110,6 @@ def test_no_http_compression(self) -> None: self.assertNotIn("accept-encoding", req.headers) def test_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection( {"http_compress": True}, ) @@ -138,7 +133,6 @@ def test_http_compression(self) -> None: self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") def test_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( {"use_ssl": True, "url_prefix": "url", "verify_certs": False} @@ -157,23 +151,19 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: self.assertEqual(None, request.body) def test_uses_given_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring path = "/path/to/my/ca_certs.pem" c = RequestsHttpConnection(ca_certs=path) self.assertEqual(path, c.session.verify) def test_uses_default_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring c = RequestsHttpConnection() self.assertEqual(Connection.default_ca_certs(), c.session.verify) def test_uses_no_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring c = RequestsHttpConnection(verify_certs=False) self.assertFalse(c.session.verify) def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( 
{ @@ -192,7 +182,6 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: self.assertEqual(None, request.body) def test_merge_headers(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection( connection_params={"headers": {"h1": "v1", "h2": "v2"}} ) @@ -202,14 +191,12 @@ def test_merge_headers(self) -> None: self.assertEqual(req.headers["h3"], "v3") def test_default_headers(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() req = self._get_request(con, "GET", "/") self.assertEqual(req.headers["content-type"], "application/json") self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) def test_custom_headers(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() req = self._get_request( con, @@ -224,45 +211,37 @@ def test_custom_headers(self) -> None: self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") def test_http_auth(self) -> None: - # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth="username:secret") self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_tuple(self) -> None: - # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth=("username", "secret")) self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_list(self) -> None: - # pylint: disable=missing-function-docstring con = RequestsHttpConnection(http_auth=["username", "secret"]) self.assertEqual(("username", "secret"), con.session.auth) def test_repr(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) self.assertEqual( "<RequestsHttpConnection: http://opensearchpy.com:443>", repr(con) ) def test_conflict_error_is_returned_on_409(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") def test_not_found_error_is_returned_on_404(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") def test_request_error_is_returned_on_400(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=400) self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @@ -270,7 +249,6 @@ def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection( response_body=b'{"answer": 42}', response_code=500 ) @@ -299,7 +277,6 @@ def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: - # pylint: disable=missing-function-docstring con =
self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", "/", @@ -310,10 +287,7 @@ def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: # trace request self.assertEqual(1, tracer.info.call_count) - trace_curl_cmd = """ - curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' - -d '{\n "question": "what\\u0027s that?"\n}' - """ + trace_curl_cmd = "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n \"question\": \"what\\u0027s that?\"\n}'" # pylint: disable=line-too-long self.assertEqual( trace_curl_cmd, tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], @@ -343,7 +317,6 @@ def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_uncompressed_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -367,7 +340,6 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection() @@ -378,7 +350,6 @@ def test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_failure_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) with pytest.raises(NotFoundError) as e: con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -392,7 +363,6 @@ def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_failure_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection(response_code=404) @@ -404,7 +374,6 @@ def test_failure_body_not_logged(self, logger: Any) -> None: self.assertEqual(logger.debug.call_count, 0) def test_defaults(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request(con, "GET", "/") @@ -413,7 +382,6 @@ def test_defaults(self) -> None: self.assertEqual(None, request.body) def test_params_properly_encoded(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request( con, "GET", "/", params={"param": "value with spaces"} ) @@ -424,7 +392,6 @@ def test_params_properly_encoded(self) -> None: self.assertEqual(None, request.body) def test_body_attached(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() request = self._get_request(con, "GET", "/", body='{"answer": 42}') @@ -433,7 +400,6 @@ def test_body_attached(self) -> None: self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) def test_http_auth_attached(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection({"http_auth": "username:secret"}) request = self._get_request(con, "GET", "/") @@ -441,7 +407,6 @@ def test_http_auth_attached(self) -> None: @patch("opensearchpy.connection.base.tracer") def test_url_prefix(self, tracer: Any) -> None: - # pylint:
disable=missing-function-docstring con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 @@ -463,14 +428,12 @@ def test_url_prefix(self, tracer: Any) -> None: ) def test_surrogatepass_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip def test_recursion_error_reraised(self) -> None: - # pylint: disable=missing-function-docstring conn = RequestsHttpConnection() def send_raise(*_: Any, **__: Any) -> Any: @@ -483,7 +446,6 @@ def send_raise(*_: Any, **__: Any) -> Any: self.assertEqual(str(e.value), "Wasn't modified!") def mock_session(self) -> Any: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -496,7 +458,6 @@ def mock_session(self) -> Any: return dummy_session def test_aws_signer_as_http_auth(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" import requests @@ -515,7 +476,6 @@ def test_aws_signer_as_http_auth(self) -> None: self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) def test_aws_signer_when_service_is_specified(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -535,7 +495,6 @@ def test_aws_signer_when_service_is_specified(self) -> None: @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") def test_aws_signer_signs_with_query_string(self, mock_sign: Any) -> None: - # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -575,7 +534,6 @@ def teardown_class(cls) -> None: # allow_redirects = False def test_redirect_failure_when_allow_redirect_false(self) -> None: - # pylint: disable=missing-function-docstring conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) with pytest.raises(TransportError) as e: conn.perform_request("GET", "/redirect", allow_redirects=False) @@ -583,7 +541,6 @@ def test_redirect_failure_when_allow_redirect_false(self) -> None: # allow_redirects = True (Default) def test_redirect_success_when_allow_redirect_true(self) -> None: - # pylint: disable=missing-function-docstring conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) user_agent = conn._get_default_user_agent() status, headers, data = conn.perform_request("GET", "/redirect") @@ -601,7 +558,6 @@ def test_redirect_success_when_allow_redirect_true(self) -> None: class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): def mock_session(self) -> Any: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -616,7 +572,6 @@ def mock_session(self) -> Any: def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth( self, ) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" import requests diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 0479adf2..9da5d40b 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -66,7 +66,6 @@ def _dummy_urlopen(*args: Any, **kwargs: Any) -> Any: return con def 
test_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring try: context = ssl.create_default_context() except AttributeError: @@ -84,12 +83,10 @@ def test_ssl_context(self) -> None: self.assertTrue(con.use_ssl) def test_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") def test_no_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection() self.assertFalse(con.http_compress) self.assertNotIn("accept-encoding", con.headers) @@ -103,7 +100,6 @@ def test_no_http_compression(self) -> None: self.assertNotIn("content-encoding", kwargs["headers"]) def test_http_compression(self) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection({"http_compress": True}) self.assertTrue(con.http_compress) self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") @@ -131,7 +127,6 @@ def test_http_compression(self) -> None: self.assertNotIn("content-encoding", kwargs["headers"]) def test_default_user_agent(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), @@ -139,12 +134,10 @@ def test_default_user_agent(self) -> None: ) def test_timeout_set(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_keep_alive_is_on_by_default(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertEqual( { @@ -156,7 +149,6 @@ def test_keep_alive_is_on_by_default(self) -> None: ) def test_http_auth(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth="username:secret") self.assertEqual( { @@ -169,7 +161,6 @@ def test_http_auth(self) -> None: ) def test_http_auth_tuple(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth=("username", "secret")) self.assertEqual( { @@ -182,7 +173,6 @@ def test_http_auth_tuple(self) -> None: ) def test_http_auth_list(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection(http_auth=["username", "secret"]) self.assertEqual( { @@ -199,7 +189,6 @@ def test_http_auth_list(self) -> None: return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), ) def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: - # pylint: disable=missing-function-docstring from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") @@ -217,7 +206,6 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: self.assertIn("X-Amz-Content-SHA256", headers) def test_aws_signer_as_http_auth(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -230,7 +218,6 @@ def test_aws_signer_as_http_auth(self) -> None: self.assertIn("X-Amz-Content-SHA256", headers) def test_aws_signer_when_region_is_null(self) -> None: - # pylint: disable=missing-function-docstring session = self.mock_session() from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -244,7 +231,6 @@ def test_aws_signer_when_region_is_null(self) -> None: self.assertEqual(str(e.value), "Region cannot be empty") def test_aws_signer_when_credentials_is_null(self) -> None: - # pylint: 
disable=missing-function-docstring region = "us-west-1" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -258,7 +244,6 @@ def test_aws_signer_when_credentials_is_null(self) -> None: self.assertEqual(str(e.value), "Credentials cannot be empty") def test_aws_signer_when_service_is_specified(self) -> None: - # pylint: disable=missing-function-docstring region = "us-west-1" service = "aoss" @@ -272,7 +257,6 @@ def test_aws_signer_when_service_is_specified(self) -> None: self.assertIn("X-Amz-Security-Token", headers) def mock_session(self) -> Any: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -285,7 +269,6 @@ def mock_session(self) -> Any: return dummy_session def test_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) @@ -298,7 +281,6 @@ def test_uses_https_if_verify_certs_is_off(self) -> None: self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: - # pylint: disable=missing-function-docstring with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -308,19 +290,16 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_doesnt_use_https_if_not_specified(self) -> None: - # pylint: disable=missing-function-docstring con = Urllib3HttpConnection() self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) def test_no_warning_when_using_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: Urllib3HttpConnection(ssl_context=ctx) self.assertEqual(0, len(w)) def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: - # pylint: disable=missing-function-docstring kwargs: Any for kwargs in ( {"ssl_show_warn": False}, @@ -344,24 +323,20 @@ def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: ) def test_uses_given_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring path = "/path/to/my/ca_certs.pem" c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) self.assertEqual(path, c.pool.ca_certs) def test_uses_default_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring c = Urllib3HttpConnection(use_ssl=True) self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) def test_uses_no_ca_certs(self) -> None: - # pylint: disable=missing-function-docstring c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertIsNone(c.pool.ca_certs) @patch("opensearchpy.connection.base.logger") def test_uncompressed_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -373,7 +348,6 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection() @@ -384,7 +358,6 @@ def 
test_body_not_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_failure_body_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring con = self._get_mock_connection(response_code=404) with pytest.raises(NotFoundError) as e: con.perform_request("GET", "/invalid", body=b'{"example": "body"}') @@ -398,7 +371,6 @@ def test_failure_body_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) def test_failure_body_not_logged(self, logger: Any) -> None: - # pylint: disable=missing-function-docstring logger.isEnabledFor.return_value = False con = self._get_mock_connection(response_code=404) @@ -410,14 +382,12 @@ def test_failure_body_not_logged(self, logger: Any) -> None: self.assertEqual(logger.debug.call_count, 0) def test_surrogatepass_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip def test_recursion_error_reraised(self) -> None: - # pylint: disable=missing-function-docstring conn = Urllib3HttpConnection() def urlopen_raise(*_: Any, **__: Any) -> Any: @@ -432,7 +402,6 @@ def urlopen_raise(*_: Any, **__: Any) -> Any: class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): def mock_session(self) -> Any: - # pylint: disable=missing-function-docstring access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -447,7 +416,6 @@ def mock_session(self) -> Any: def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth( self, ) -> None: - # pylint: disable=missing-function-docstring region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 8aea61e9..5fa750c6 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -41,18 +41,15 @@ class TestConnectionPool(TestCase): def test_dummy_cp_raises_exception_on_more_connections(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, DummyConnectionPool, []) self.assertRaises( ImproperlyConfigured, DummyConnectionPool, [object(), object()] ) def test_raises_exception_when_no_connections_defined(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, ConnectionPool, []) def test_default_round_robin(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) connections = set() @@ -61,7 +58,6 @@ def test_default_round_robin(self) -> None: self.assertEqual(connections, set(range(100))) def test_disable_shuffling(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)], randomize_hosts=False) connections = [] @@ -70,7 +66,6 @@ def test_disable_shuffling(self) -> None: self.assertEqual(connections, list(range(100))) def test_selectors_have_access_to_connection_opts(self) -> None: - # pylint: disable=missing-function-docstring class MySelector(RoundRobinSelector): def select(self, connections: Any) -> Any: return self.connection_opts[ @@ -89,7 +84,6 @@ def select(self, connections: Any) -> Any: self.assertEqual(connections, [x * x for x in range(100)]) def 
test_dead_nodes_are_removed_from_active_connections(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -99,7 +93,6 @@ def test_dead_nodes_are_removed_from_active_connections(self) -> None: self.assertEqual((now + 60, 42), pool.dead.get()) def test_connection_is_skipped_when_dead(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(2)]) pool.mark_dead(0) @@ -109,7 +102,6 @@ def test_connection_is_skipped_when_dead(self) -> None: ) def test_new_connection_is_not_marked_dead(self) -> None: - # pylint: disable=missing-function-docstring # Create 10 connections pool = ConnectionPool([(Connection(), {}) for _ in range(10)]) @@ -123,7 +115,6 @@ def test_new_connection_is_not_marked_dead(self) -> None: def test_connection_is_forcibly_resurrected_when_no_live_ones_are_available( self, ) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(2)]) pool.dead_count[0] = 1 pool.mark_dead(0) # failed twice, longer timeout @@ -134,7 +125,6 @@ def test_connection_is_forcibly_resurrected_when_no_live_ones_are_available( self.assertEqual([1], pool.connections) def test_connection_is_resurrected_after_its_timeout(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -144,7 +134,6 @@ def test_connection_is_resurrected_after_its_timeout(self) -> None: self.assertEqual(100, len(pool.connections)) def test_force_resurrect_always_returns_a_connection(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(0, {})]) pool.connections = [] @@ -153,7 +142,6 @@ def test_force_resurrect_always_returns_a_connection(self) -> None: self.assertTrue(pool.dead.empty()) def test_already_failed_connection_has_longer_timeout(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 @@ -163,7 +151,6 @@ def test_already_failed_connection_has_longer_timeout(self) -> None: self.assertEqual((now + 4 * 60, 42), pool.dead.get()) def test_timeout_for_failed_connections_is_limitted(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 245 @@ -173,7 +160,6 @@ def test_timeout_for_failed_connections_is_limitted(self) -> None: self.assertEqual((now + 32 * 60, 42), pool.dead.get()) def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self) -> None: - # pylint: disable=missing-function-docstring pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 8f845382..a918e2b0 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -32,7 +32,6 @@ class TestTransformError(TestCase): def test_transform_error_parse_with_error_reason(self) -> None: - # pylint: disable=missing-function-docstring e = TransportError( 500, "InternalServerError", @@ -44,7 +43,6 @@ def test_transform_error_parse_with_error_reason(self) -> None: ) def test_transform_error_parse_with_error_string(self) -> None: - # pylint: disable=missing-function-docstring e = TransportError( 500, "InternalServerError", {"error": "something error message"} ) diff --git a/test_opensearchpy/test_helpers/conftest.py 
b/test_opensearchpy/test_helpers/conftest.py index d5ebd933..06355e24 100644 --- a/test_opensearchpy/test_helpers/conftest.py +++ b/test_opensearchpy/test_helpers/conftest.py @@ -35,7 +35,6 @@ @fixture # type: ignore def mock_client(dummy_response: Any) -> Any: - # pylint: disable=missing-function-docstring client = Mock() client.search.return_value = dummy_response add_connection("mock", client) @@ -46,7 +45,6 @@ def mock_client(dummy_response: Any) -> Any: @fixture # type: ignore def dummy_response() -> Any: - # pylint: disable=missing-function-docstring return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -96,7 +94,6 @@ def dummy_response() -> Any: @fixture # type: ignore def aggs_search() -> Any: - # pylint: disable=missing-function-docstring from opensearchpy import Search s = Search(index="flat-git") @@ -112,7 +109,6 @@ def aggs_search() -> Any: @fixture # type: ignore def aggs_data() -> Any: - # pylint: disable=missing-function-docstring return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 26d79861..68ce1027 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -62,7 +62,6 @@ class TestParallelBulk(TestCase): side_effect=mock_process_bulk_chunk, ) def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: - # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) @@ -70,7 +69,6 @@ def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: @mock.patch("opensearchpy.OpenSearch.bulk") def test_with_all_options(self, _bulk: Any) -> None: - # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) list( helpers.parallel_bulk( @@ -94,7 +92,6 @@ def test_with_all_options(self, _bulk: Any) -> None: def test_process_bulk_chunk_with_all_options( self, _process_bulk_chunk: Any ) -> None: - # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) client = OpenSearch() list( @@ -130,7 +127,6 @@ def test_process_bulk_chunk_with_all_options( ], ) def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> None: - # pylint: disable=missing-function-docstring actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -148,14 +144,12 @@ def setup_method(self, _: Any) -> None: ] def test_expand_action(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) ) def test_expand_action_actions(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( helpers.expand_action( {"_op_type": "delete", "_id": "id", "_index": "index"} @@ -187,7 +181,6 @@ def test_expand_action_actions(self) -> None: ) def test_expand_action_options(self) -> None: - # pylint: disable=missing-function-docstring for option in ( "_id", "_index", @@ -219,7 +212,6 @@ def test_expand_action_options(self) -> None: ) def test__source_metadata_or_source(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( helpers.expand_action({"_source": {"key": "val"}}), ({"index": {}}, {"key": "val"}), @@ -248,7 +240,6 @@ def test__source_metadata_or_source(self) -> None: ) def 
test_chunks_are_chopped_by_byte_size(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( 100, len( @@ -257,7 +248,6 @@ def test_chunks_are_chopped_by_byte_size(self) -> None: ) def test_chunks_are_chopped_by_chunk_size(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( 10, len( @@ -268,7 +258,6 @@ def test_chunks_are_chopped_by_chunk_size(self) -> None: ) def test_chunks_are_chopped_by_byte_size_properly(self) -> None: - # pylint: disable=missing-function-docstring max_byte_size = 170 chunks = list( helpers._chunk_actions( @@ -284,7 +273,6 @@ def test_chunks_are_chopped_by_byte_size_properly(self) -> None: class TestExpandActions(TestCase): def test_string_actions_are_marked_as_simple_inserts(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index ea01cf58..006edbe4 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -30,7 +30,6 @@ def test_repr() -> None: - # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -38,7 +37,6 @@ def test_repr() -> None: def test_meta() -> None: - # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -52,7 +50,6 @@ def test_meta() -> None: def test_meta_from_dict() -> None: - # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -62,7 +59,6 @@ def test_meta_from_dict() -> None: def test_aggs_creates_proper_agg() -> None: - # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) @@ -70,7 +66,6 @@ def test_aggs_creates_proper_agg() -> None: def test_aggs_handles_nested_aggs_properly() -> None: - # pylint: disable=missing-function-docstring max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -79,13 +74,11 @@ def test_aggs_handles_nested_aggs_properly() -> None: def test_aggs_passes_aggs_through() -> None: - # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") assert aggs.A(a) is a def test_aggs_from_dict() -> None: - # pylint: disable=missing-function-docstring d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -102,7 +95,6 @@ def test_aggs_from_dict() -> None: def test_aggs_fails_with_incorrect_dict() -> None: - # pylint: disable=missing-function-docstring correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -123,7 +115,6 @@ def test_aggs_fails_with_incorrect_dict() -> None: def test_aggs_fails_with_agg_and_params() -> None: - # pylint: disable=missing-function-docstring a = aggs.A("terms", field="tags") with raises(Exception): @@ -131,7 +122,6 @@ def test_aggs_fails_with_agg_and_params() -> None: def test_buckets_are_nestable() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -141,7 +131,6 @@ def test_buckets_are_nestable() -> None: def test_metric_inside_buckets() -> None: - # pylint: 
disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.metric("max_score", "max", field="score") @@ -151,7 +140,6 @@ def test_metric_inside_buckets() -> None: def test_buckets_equals_counts_subaggs() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") b = aggs.Terms(field="tags") @@ -160,7 +148,6 @@ def test_buckets_equals_counts_subaggs() -> None: def test_buckets_to_dict() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") @@ -179,7 +166,6 @@ def test_buckets_to_dict() -> None: def test_nested_buckets_are_reachable_as_getitem() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -188,7 +174,6 @@ def test_nested_buckets_are_reachable_as_getitem() -> None: def test_nested_buckets_are_settable_as_getitem() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") b = a["per_author"] = aggs.A("terms", field="author.raw") @@ -196,7 +181,6 @@ def test_nested_buckets_are_settable_as_getitem() -> None: def test_filter_can_be_instantiated_using_positional_args() -> None: - # pylint: disable=missing-function-docstring a = aggs.Filter(query.Q("term", f=42)) assert {"filter": {"term": {"f": 42}}} == a.to_dict() @@ -205,7 +189,6 @@ def test_filter_can_be_instantiated_using_positional_args() -> None: def test_filter_aggregation_as_nested_agg() -> None: - # pylint: disable=missing-function-docstring a = aggs.Terms(field="tags") a.bucket("filtered", "filter", query.Q("term", f=42)) @@ -216,7 +199,6 @@ def test_filter_aggregation_as_nested_agg() -> None: def test_filter_aggregation_with_nested_aggs() -> None: - # pylint: disable=missing-function-docstring a = aggs.Filter(query.Q("term", f=42)) a.bucket("testing", "terms", field="tags") @@ -227,7 +209,6 @@ def test_filter_aggregation_with_nested_aggs() -> None: def test_filters_correctly_identifies_the_hash() -> None: - # pylint: disable=missing-function-docstring a = aggs.A( "filters", filters={ @@ -248,7 +229,6 @@ def test_filters_correctly_identifies_the_hash() -> None: def test_bucket_sort_agg() -> None: - # pylint: disable=missing-function-docstring bucket_sort_agg = aggs.BucketSort(sort=[{"total_sales": {"order": "desc"}}], size=3) assert bucket_sort_agg.to_dict() == { "bucket_sort": {"sort": [{"total_sales": {"order": "desc"}}], "size": 3} @@ -274,7 +254,6 @@ def test_bucket_sort_agg() -> None: def test_bucket_sort_agg_only_trnunc() -> None: - # pylint: disable=missing-function-docstring bucket_sort_agg = aggs.BucketSort(**{"from": 1, "size": 1}) assert bucket_sort_agg.to_dict() == {"bucket_sort": {"from": 1, "size": 1}} @@ -287,28 +266,24 @@ def test_bucket_sort_agg_only_trnunc() -> None: def test_geohash_grid_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.GeohashGrid(**{"field": "centroid", "precision": 3}) assert {"geohash_grid": {"field": "centroid", "precision": 3}} == a.to_dict() def test_geotile_grid_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.GeotileGrid(**{"field": "centroid", "precision": 3}) assert {"geotile_grid": {"field": "centroid", "precision": 3}} == a.to_dict() def test_boxplot_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.Boxplot(field="load_time") assert {"boxplot": {"field": "load_time"}} == a.to_dict() def 
test_rare_terms_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.RareTerms(field="the-field") a.bucket("total_sales", "sum", field="price") a.bucket( @@ -330,20 +305,17 @@ def test_rare_terms_aggregation() -> None: def test_variable_width_histogram_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.VariableWidthHistogram(field="price", buckets=2) assert {"variable_width_histogram": {"buckets": 2, "field": "price"}} == a.to_dict() def test_median_absolute_deviation_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.MedianAbsoluteDeviation(field="rating") assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() def test_t_test_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.TTest( a={"field": "startup_time_before"}, b={"field": "startup_time_after"}, @@ -360,7 +332,6 @@ def test_t_test_aggregation() -> None: def test_inference_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.Inference(model_id="model-id", buckets_path={"agg_name": "agg_name"}) assert { "inference": {"buckets_path": {"agg_name": "agg_name"}, "model_id": "model-id"} @@ -368,7 +339,6 @@ def test_inference_aggregation() -> None: def test_moving_percentiles_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.DateHistogram() a.bucket("the_percentile", "percentiles", field="price", percents=[1.0, 99.0]) a.pipeline( @@ -389,7 +359,6 @@ def test_moving_percentiles_aggregation() -> None: def test_normalize_aggregation() -> None: - # pylint: disable=missing-function-docstring a = aggs.Normalize(buckets_path="normalized", method="percent_of_sum") assert { "normalize": {"buckets_path": "normalized", "method": "percent_of_sum"} diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 4c645df2..d335b565 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -30,14 +30,12 @@ def test_analyzer_serializes_as_name() -> None: - # pylint: disable=missing-function-docstring a = analysis.analyzer("my_analyzer") assert "my_analyzer" == a.to_dict() def test_analyzer_has_definition() -> None: - # pylint: disable=missing-function-docstring a = analysis.CustomAnalyzer( "my_analyzer", tokenizer="keyword", filter=["lowercase"] ) @@ -50,7 +48,6 @@ def test_analyzer_has_definition() -> None: def test_simple_multiplexer_filter() -> None: - # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -79,7 +76,6 @@ def test_simple_multiplexer_filter() -> None: def test_multiplexer_with_custom_filter() -> None: - # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -111,7 +107,6 @@ def test_multiplexer_with_custom_filter() -> None: def test_conditional_token_filter() -> None: - # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -149,7 +144,6 @@ def test_conditional_token_filter() -> None: def test_conflicting_nested_filters_cause_error() -> None: - # pylint: disable=missing-function-docstring a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -172,14 +166,12 @@ def test_conflicting_nested_filters_cause_error() -> None: def test_normalizer_serializes_as_name() -> None: - # pylint: disable=missing-function-docstring n = analysis.normalizer("my_normalizer") 
assert "my_normalizer" == n.to_dict() def test_normalizer_has_definition() -> None: - # pylint: disable=missing-function-docstring n = analysis.CustomNormalizer( "my_normalizer", filter=["lowercase", "asciifolding"], char_filter=["quote"] ) @@ -192,7 +184,6 @@ def test_normalizer_has_definition() -> None: def test_tokenizer() -> None: - # pylint: disable=missing-function-docstring t = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) assert t.to_dict() == "trigram" @@ -200,7 +191,6 @@ def test_tokenizer() -> None: def test_custom_analyzer_can_collect_custom_items() -> None: - # pylint: disable=missing-function-docstring trigram = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) my_stop = analysis.token_filter("my_stop", "stop", stopwords=["a", "b"]) umlauts = analysis.char_filter("umlauts", "pattern_replace", mappings=["ü=>ue"]) @@ -228,7 +218,6 @@ def test_custom_analyzer_can_collect_custom_items() -> None: def test_stemmer_analyzer_can_pass_name() -> None: - # pylint: disable=missing-function-docstring t = analysis.token_filter( "my_english_filter", name="minimal_english", type="stemmer" ) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 08351c0d..d60dd6b2 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -136,7 +136,6 @@ class Index: def test_range_serializes_properly() -> None: - # pylint: disable=missing-function-docstring class DocumentD(document.Document): lr = field.LongRange() @@ -150,7 +149,6 @@ class DocumentD(document.Document): def test_range_deserializes_properly() -> None: - # pylint: disable=missing-function-docstring class DocumentD(document.InnerDoc): lr = field.LongRange() @@ -161,14 +159,12 @@ class DocumentD(document.InnerDoc): def test_resolve_nested() -> None: - # pylint: disable=missing-function-docstring nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - # pylint: disable=missing-function-docstring class DocumentA(document.Document): name = field.Text() @@ -184,20 +180,17 @@ class DocumentB(document.Document): def test_ip_address_serializes_properly() -> None: - # pylint: disable=missing-function-docstring host: Any = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() def test_matches_uses_index() -> None: - # pylint: disable=missing-function-docstring assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) def test_matches_with_no_name_always_matches() -> None: - # pylint: disable=missing-function-docstring class DocumentD(document.Document): pass @@ -206,7 +199,6 @@ class DocumentD(document.Document): def test_matches_accepts_wildcards() -> None: - # pylint: disable=missing-function-docstring class MyDoc(document.Document): class Index: name = "my-*" @@ -216,7 +208,6 @@ class Index: def test_assigning_attrlist_to_field() -> None: - # pylint: disable=missing-function-docstring sc: Any = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -225,14 +216,12 @@ def test_assigning_attrlist_to_field() -> None: def test_optional_inner_objects_are_not_validated_if_missing() -> None: - # pylint: disable=missing-function-docstring d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None def 
test_custom_field() -> None: - # pylint: disable=missing-function-docstring s1: Any = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s1.to_dict() @@ -244,14 +233,12 @@ def test_custom_field() -> None: def test_custom_field_mapping() -> None: - # pylint: disable=missing-function-docstring assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() def test_custom_field_in_nested() -> None: - # pylint: disable=missing-function-docstring s: Any = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -260,7 +247,6 @@ def test_custom_field_in_nested() -> None: def test_multi_works_after_doc_has_been_saved() -> None: - # pylint: disable=missing-function-docstring c: Any = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -269,7 +255,6 @@ def test_multi_works_after_doc_has_been_saved() -> None: def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: - # pylint: disable=missing-function-docstring # Issue #359 c: Any = DocWithNested(comments=[Comment(title="First!")]) @@ -279,14 +264,12 @@ def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: def test_null_value_for_object() -> None: - # pylint: disable=missing-function-docstring d: Any = MyDoc(inner=None) assert d.inner is None def test_inherited_doc_types_can_override_index() -> None: - # pylint: disable=missing-function-docstring class MyDocDifferentIndex(MySubDoc): _index: Any @@ -321,7 +304,6 @@ class Index: def test_to_dict_with_meta() -> None: - # pylint: disable=missing-function-docstring d: Any = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -333,7 +315,6 @@ def test_to_dict_with_meta() -> None: def test_to_dict_with_meta_includes_custom_index() -> None: - # pylint: disable=missing-function-docstring d: Any = MySubDoc(title="hello") d.meta.index = "other-index" @@ -341,7 +322,6 @@ def test_to_dict_with_meta_includes_custom_index() -> None: def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: - # pylint: disable=missing-function-docstring d: Any = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() @@ -349,7 +329,6 @@ def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: def test_attribute_can_be_removed() -> None: - # pylint: disable=missing-function-docstring d: Any = MyDoc(title="hello") del d.title @@ -357,7 +336,6 @@ def test_attribute_can_be_removed() -> None: def test_doc_type_can_be_correctly_pickled() -> None: - # pylint: disable=missing-function-docstring d: Any = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -373,7 +351,6 @@ def test_doc_type_can_be_correctly_pickled() -> None: def test_meta_is_accessible_even_on_empty_doc() -> None: - # pylint: disable=missing-function-docstring d1: Any = MyDoc() assert d1.meta == {} @@ -382,7 +359,6 @@ def test_meta_is_accessible_even_on_empty_doc() -> None: def test_meta_field_mapping() -> None: - # pylint: disable=missing-function-docstring class User(document.Document): username = field.Text() @@ -402,7 +378,6 @@ class Meta: def test_multi_value_fields() -> None: - # pylint: disable=missing-function-docstring class Blog(document.Document): tags = field.Keyword(multi=True) @@ -414,7 +389,6 @@ class Blog(document.Document): def test_docs_with_properties() -> None: - # pylint: disable=missing-function-docstring class User(document.Document): pwd_hash: Any = field.Text() @@ -443,7 +417,6 @@ def password(self, pwd: Any) -> None: def test_nested_can_be_assigned_to() 
-> None: - # pylint: disable=missing-function-docstring d1: Any = DocWithNested(comments=[Comment(title="First!")]) d2: Any = DocWithNested() @@ -455,14 +428,12 @@ def test_nested_can_be_assigned_to() -> None: def test_nested_can_be_none() -> None: - # pylint: disable=missing-function-docstring d: Any = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() def test_nested_defaults_to_list_and_can_be_updated() -> None: - # pylint: disable=missing-function-docstring md: Any = DocWithNested() assert [] == md.comments @@ -472,7 +443,6 @@ def test_nested_defaults_to_list_and_can_be_updated() -> None: def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] @@ -485,14 +455,12 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: def test_to_dict_ignores_empty_collections() -> None: - # pylint: disable=missing-function-docstring md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() def test_declarative_mapping_definition() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MyDoc, document.Document) assert hasattr(MyDoc, "_doc_type") assert { @@ -506,7 +474,6 @@ def test_declarative_mapping_definition() -> None: def test_you_can_supply_own_mapping_instance() -> None: - # pylint: disable=missing-function-docstring class MyD(document.Document): title = field.Text() @@ -521,7 +488,6 @@ class Meta: def test_document_can_be_created_dynamically() -> None: - # pylint: disable=missing-function-docstring n = datetime.now() md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" 
@@ -543,7 +509,6 @@ def test_document_can_be_created_dynamically() -> None: def test_invalid_date_will_raise_exception() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -551,7 +516,6 @@ def test_invalid_date_will_raise_exception() -> None: def test_document_inheritance() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.Document) assert hasattr(MySubDoc, "_doc_type") @@ -566,7 +530,6 @@ def test_document_inheritance() -> None: def test_child_class_can_override_parent() -> None: - # pylint: disable=missing-function-docstring class DocumentA(document.Document): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -585,7 +548,6 @@ class DocumentB(DocumentA): def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: - # pylint: disable=missing-function-docstring md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -596,7 +558,6 @@ def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: def test_index_inheritance() -> None: - # pylint: disable=missing-function-docstring assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.Document) @@ -614,7 +575,6 @@ def test_index_inheritance() -> None: def test_meta_fields_can_be_set_directly_in_init() -> None: - # pylint: disable=missing-function-docstring p = object() md: Any = MyDoc(_id=p, title="Hello World!") @@ -622,28 +582,24 @@ def test_meta_fields_can_be_set_directly_in_init() -> None: def test_save_no_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): md.save(using="mock") def test_delete_no_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(ValidationException): md.delete(using="mock") def test_update_no_fields() -> None: - # pylint: disable=missing-function-docstring md: Any = MyDoc() with raises(IllegalOperation): md.update() def test_search_with_custom_alias_and_index(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -653,7 +609,6 @@ def test_search_with_custom_alias_and_index(mock_client: Any) -> None: def test_from_opensearch_respects_underscored_non_meta_fields() -> None: - # pylint: disable=missing-function-docstring doc = { "_index": "test-index", "_id": "opensearch", @@ -678,7 +633,6 @@ class Index: def test_nested_and_object_inner_doc() -> None: - # pylint: disable=missing-function-docstring class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) @@ -696,7 +650,6 @@ class MySubDocWithNested(MyDoc): def test_save_double(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring class MyDocumentWithDouble(MyDoc): a_double: Union[float, field.Double] = field.Double() diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index 922d6989..d1874541 100644 --- a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -50,7 +50,6 @@ class BlogSearch(FacetedSearch): def test_query_is_created_properly() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search") s = bs.build_search() @@ 
-74,7 +73,6 @@ def test_query_is_created_properly() -> None: def test_query_is_created_properly_with_sort_tuple() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -99,7 +97,6 @@ def test_query_is_created_properly_with_sort_tuple() -> None: def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -123,7 +120,6 @@ def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: def test_filters_are_applied_to_search_ant_relevant_facets() -> None: - # pylint: disable=missing-function-docstring bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -158,7 +154,6 @@ def test_filters_are_applied_to_search_ant_relevant_facets() -> None: def test_date_histogram_facet_with_1970_01_01_date() -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -192,7 +187,6 @@ def test_date_histogram_facet_with_1970_01_01_date() -> None: ], ) def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -205,7 +199,6 @@ def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> Non def test_date_histogram_no_interval_keyerror() -> None: - # pylint: disable=missing-function-docstring dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index 6afb7684..65dbab5a 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -37,7 +37,6 @@ def test_date_range_deserialization() -> None: - # pylint: disable=missing-function-docstring data = {"lt": "2018-01-01T00:30:10"} r = field.DateRange().deserialize(data) @@ -47,7 +46,6 @@ def test_date_range_deserialization() -> None: def test_boolean_deserialization() -> None: - # pylint: disable=missing-function-docstring bf = field.Boolean() assert not bf.deserialize("false") @@ -61,7 +59,6 @@ def test_boolean_deserialization() -> None: def test_date_field_can_have_default_tz() -> None: - # pylint: disable=missing-function-docstring f: Any = field.Date(default_timezone="UTC") now = datetime.now() @@ -77,7 +74,6 @@ def test_date_field_can_have_default_tz() -> None: def test_custom_field_car_wrap_other_field() -> None: - # pylint: disable=missing-function-docstring class MyField(field.CustomField): @property def builtin_type(self) -> Any: @@ -89,7 +85,6 @@ def builtin_type(self) -> Any: def test_field_from_dict() -> None: - # pylint: disable=missing-function-docstring f = field.construct_field({"type": "text", "index": "not_analyzed"}) assert isinstance(f, field.Text) @@ -97,7 +92,6 @@ def test_field_from_dict() -> None: def test_multi_fields_are_accepted_and_parsed() -> None: - # pylint: disable=missing-function-docstring f = field.construct_field( "text", fields={"raw": {"type": "keyword"}, "eng": field.Text(analyzer="english")}, @@ -114,7 +108,6 @@ def 
test_multi_fields_are_accepted_and_parsed() -> None: def test_nested_provides_direct_access_to_its_fields() -> None: - # pylint: disable=missing-function-docstring f = field.Nested(properties={"name": {"type": "text", "index": "not_analyzed"}}) assert "name" in f @@ -122,7 +115,6 @@ def test_nested_provides_direct_access_to_its_fields() -> None: def test_field_supports_multiple_analyzers() -> None: - # pylint: disable=missing-function-docstring f = field.Text(analyzer="snowball", search_analyzer="keyword") assert { "analyzer": "snowball", @@ -132,7 +124,6 @@ def test_field_supports_multiple_analyzers() -> None: def test_multifield_supports_multiple_analyzers() -> None: - # pylint: disable=missing-function-docstring f = field.Text( fields={ "f1": field.Text(search_analyzer="keyword", analyzer="snowball"), @@ -153,7 +144,6 @@ def test_multifield_supports_multiple_analyzers() -> None: def test_scaled_float() -> None: - # pylint: disable=missing-function-docstring with pytest.raises(TypeError): field.ScaledFloat() # type: ignore f: Any = field.ScaledFloat(scaling_factor=123) @@ -161,7 +151,6 @@ def test_scaled_float() -> None: def test_ipaddress() -> None: - # pylint: disable=missing-function-docstring f = field.Ip() assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1") assert f.deserialize("::1") == ip_address("::1") @@ -172,7 +161,6 @@ def test_ipaddress() -> None: def test_float() -> None: - # pylint: disable=missing-function-docstring f = field.Float() assert f.deserialize("42") == 42.0 assert f.deserialize(None) is None @@ -181,7 +169,6 @@ def test_float() -> None: def test_integer() -> None: - # pylint: disable=missing-function-docstring f = field.Integer() assert f.deserialize("42") == 42 assert f.deserialize(None) is None @@ -190,7 +177,6 @@ def test_integer() -> None: def test_binary() -> None: - # pylint: disable=missing-function-docstring f = field.Binary() assert f.deserialize(base64.b64encode(b"42")) == b"42" assert f.deserialize(f.serialize(b"42")) == b"42" @@ -198,32 +184,27 @@ def test_binary() -> None: def test_constant_keyword() -> None: - # pylint: disable=missing-function-docstring f = field.ConstantKeyword() assert f.to_dict() == {"type": "constant_keyword"} def test_rank_features() -> None: - # pylint: disable=missing-function-docstring f = field.RankFeatures() assert f.to_dict() == {"type": "rank_features"} def test_object_dynamic_values() -> None: - # pylint: disable=missing-function-docstring for dynamic in True, False, "strict": f = field.Object(dynamic=dynamic) assert f.to_dict()["dynamic"] == dynamic def test_object_disabled() -> None: - # pylint: disable=missing-function-docstring f = field.Object(enabled=False) assert f.to_dict() == {"type": "object", "enabled": False} def test_object_constructor() -> None: - # pylint: disable=missing-function-docstring expected = {"type": "object", "properties": {"inner_int": {"type": "integer"}}} class Inner(InnerDoc): diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index 11dbd418..eac720b4 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -39,7 +39,6 @@ class Post(Document): def test_multiple_doc_types_will_combine_mappings() -> None: - # pylint: disable=missing-function-docstring class User(Document): username = Text() @@ -58,7 +57,6 @@ class User(Document): def test_search_is_limited_to_index_name() -> None: - # pylint: disable=missing-function-docstring i = Index("my-index") s = i.search() @@ -66,7 
+64,6 @@ def test_search_is_limited_to_index_name() -> None: def test_cloned_index_has_copied_settings_and_using() -> None: - # pylint: disable=missing-function-docstring client = object() i: Any = Index("my-index", using=client) i.settings(number_of_shards=1) @@ -80,7 +77,6 @@ def test_cloned_index_has_copied_settings_and_using() -> None: def test_cloned_index_has_analysis_attribute() -> None: - # pylint: disable=missing-function-docstring """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. @@ -101,7 +97,6 @@ def test_cloned_index_has_analysis_attribute() -> None: def test_settings_are_saved() -> None: - # pylint: disable=missing-function-docstring i: Any = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -110,7 +105,6 @@ def test_settings_are_saved() -> None: def test_registered_doc_type_included_in_to_dict() -> None: - # pylint: disable=missing-function-docstring i: Any = Index("i", using="alias") i.document(Post) @@ -125,7 +119,6 @@ def test_registered_doc_type_included_in_to_dict() -> None: def test_registered_doc_type_included_in_search() -> None: - # pylint: disable=missing-function-docstring i: Any = Index("i", using="alias") i.document(Post) @@ -135,7 +128,6 @@ def test_registered_doc_type_included_in_search() -> None: def test_aliases_add_to_object() -> None: - # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -146,7 +138,6 @@ def test_aliases_add_to_object() -> None: def test_aliases_returned_from_to_dict() -> None: - # pylint: disable=missing-function-docstring random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict: Any = {random_alias: {}} @@ -157,7 +148,6 @@ def test_aliases_returned_from_to_dict() -> None: def test_analyzers_added_to_object() -> None: - # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -174,7 +164,6 @@ def test_analyzers_added_to_object() -> None: def test_analyzers_returned_from_to_dict() -> None: - # pylint: disable=missing-function-docstring random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -188,7 +177,6 @@ def test_analyzers_returned_from_to_dict() -> None: def test_conflicting_analyzer_raises_error() -> None: - # pylint: disable=missing-function-docstring i: Any = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -197,7 +185,6 @@ def test_conflicting_analyzer_raises_error() -> None: def test_index_template_can_have_order() -> None: - # pylint: disable=missing-function-docstring i: Any = Index("i-*") it = i.as_template("i", order=2) @@ -205,7 +192,6 @@ def test_index_template_can_have_order() -> None: def test_index_template_save_result(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring it: Any = IndexTemplate("test-template", "test-*") assert it.save(using="mock") == mock_client.indices.put_template() diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index 98ca7e42..6e4af163 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -31,7 +31,6 @@ def 
test_mapping_can_has_fields() -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("name", "text").field("tags", "keyword") @@ -41,7 +40,6 @@ def test_mapping_can_has_fields() -> None: def test_mapping_update_is_recursive() -> None: - # pylint: disable=missing-function-docstring m1 = mapping.Mapping() m1.field("title", "text") m1.field("author", "object") @@ -75,7 +73,6 @@ def test_mapping_update_is_recursive() -> None: def test_properties_can_iterate_over_all_the_fields() -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -86,7 +83,6 @@ def test_properties_can_iterate_over_all_the_fields() -> None: def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -160,7 +156,6 @@ def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: def test_mapping_can_collect_multiple_analyzers() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -207,7 +202,6 @@ def test_mapping_can_collect_multiple_analyzers() -> None: def test_even_non_custom_analyzers_can_have_params() -> None: - # pylint: disable=missing-function-docstring a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.Mapping() m.field("title", "text", analyzer=a1) @@ -218,7 +212,6 @@ def test_even_non_custom_analyzers_can_have_params() -> None: def test_resolve_field_can_resolve_multifields() -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("title", "text", fields={"keyword": Keyword()}) @@ -226,7 +219,6 @@ def test_resolve_field_can_resolve_multifields() -> None: def test_resolve_nested() -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 65d798a0..7fb2f3a0 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -32,7 +32,6 @@ def test_empty_query_is_match_all() -> None: - # pylint: disable=missing-function-docstring q = query.Q() assert isinstance(q, query.MatchAll) @@ -40,48 +39,40 @@ def test_empty_query_is_match_all() -> None: def test_match_to_dict() -> None: - # pylint: disable=missing-function-docstring assert {"match": {"f": "value"}} == query.Match(f="value").to_dict() def test_match_to_dict_extra() -> None: - # pylint: disable=missing-function-docstring assert {"match": {"f": "value", "boost": 2}} == query.Match( f="value", boost=2 ).to_dict() def test_fuzzy_to_dict() -> None: - # pylint: disable=missing-function-docstring assert {"fuzzy": {"f": "value"}} == query.Fuzzy(f="value").to_dict() def test_prefix_to_dict() -> None: - # pylint: disable=missing-function-docstring assert {"prefix": {"f": "value"}} == query.Prefix(f="value").to_dict() def test_term_to_dict() -> None: - # pylint: disable=missing-function-docstring assert {"term": {"_type": "article"}} == query.Term(_type="article").to_dict() def test_bool_to_dict() -> None: - # pylint: disable=missing-function-docstring bool = query.Bool(must=[query.Match(f="value")], should=[]) assert {"bool": {"must": 
[{"match": {"f": "value"}}]}} == bool.to_dict() def test_dismax_to_dict() -> None: - # pylint: disable=missing-function-docstring assert {"dis_max": {"queries": [{"term": {"_type": "article"}}]}} == query.DisMax( queries=[query.Term(_type="article")] ).to_dict() def test_bool_from_dict_issue_318() -> None: - # pylint: disable=missing-function-docstring d = {"bool": {"must_not": {"match": {"field": "value"}}}} q = query.Q(d) @@ -89,14 +80,12 @@ def test_bool_from_dict_issue_318() -> None: def test_repr() -> None: - # pylint: disable=missing-function-docstring bool = query.Bool(must=[query.Match(f="value")], should=[]) assert "Bool(must=[Match(f='value')])" == repr(bool) def test_query_clone() -> None: - # pylint: disable=missing-function-docstring bool = query.Bool( must=[query.Match(x=42)], should=[query.Match(g="v2")], @@ -109,7 +98,6 @@ def test_query_clone() -> None: def test_bool_converts_its_init_args_to_queries() -> None: - # pylint: disable=missing-function-docstring q = query.Bool(must=[{"match": {"f": "value"}}]) assert len(q.must) == 1 @@ -117,7 +105,6 @@ def test_bool_converts_its_init_args_to_queries() -> None: def test_two_queries_make_a_bool() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f="value1") q2 = query.Match(message={"query": "this is a test", "opeartor": "and"}) q = q1 & q2 @@ -127,7 +114,6 @@ def test_two_queries_make_a_bool() -> None: def test_other_and_bool_appends_other_to_must() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f="value1") qb = query.Bool() @@ -137,7 +123,6 @@ def test_other_and_bool_appends_other_to_must() -> None: def test_bool_and_other_appends_other_to_must() -> None: - # pylint: disable=missing-function-docstring q1: Any = query.Match(f="value1") qb: Any = query.Bool() @@ -147,7 +132,6 @@ def test_bool_and_other_appends_other_to_must() -> None: def test_bool_and_other_sets_min_should_match_if_needed() -> None: - # pylint: disable=missing-function-docstring q1 = query.Q("term", category=1) q2 = query.Q( "bool", should=[query.Q("term", name="aaa"), query.Q("term", name="bbb")] @@ -162,7 +146,6 @@ def test_bool_and_other_sets_min_should_match_if_needed() -> None: def test_bool_with_different_minimum_should_match_should_not_be_combined() -> None: - # pylint: disable=missing-function-docstring q1 = query.Q( "bool", minimum_should_match=2, @@ -202,12 +185,10 @@ def test_bool_with_different_minimum_should_match_should_not_be_combined() -> No def test_empty_bool_has_min_should_match_0() -> None: - # pylint: disable=missing-function-docstring assert 0 == query.Bool()._min_should_match def test_query_and_query_creates_bool() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.Match(g=47) @@ -217,7 +198,6 @@ def test_query_and_query_creates_bool() -> None: def test_match_all_and_query_equals_other() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchAll() @@ -226,28 +206,24 @@ def test_match_all_and_query_equals_other() -> None: def test_not_match_all_is_match_none() -> None: - # pylint: disable=missing-function-docstring q = query.MatchAll() assert ~q == query.MatchNone() def test_not_match_none_is_match_all() -> None: - # pylint: disable=missing-function-docstring q = query.MatchNone() assert ~q == query.MatchAll() def test_invert_empty_bool_is_match_none() -> None: - # pylint: disable=missing-function-docstring q = query.Bool() assert ~q == query.MatchNone() def test_match_none_or_query_equals_query() -> None: - # 
pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchNone() @@ -255,7 +231,6 @@ def test_match_none_or_query_equals_query() -> None: def test_match_none_and_query_equals_match_none() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.MatchNone() @@ -263,7 +238,6 @@ def test_match_none_and_query_equals_match_none() -> None: def test_bool_and_bool() -> None: - # pylint: disable=missing-function-docstring qt1, qt2, qt3 = query.Match(f=1), query.Match(f=2), query.Match(f=3) q1 = query.Bool(must=[qt1], should=[qt2]) @@ -280,7 +254,6 @@ def test_bool_and_bool() -> None: def test_bool_and_bool_with_min_should_match() -> None: - # pylint: disable=missing-function-docstring qt1, qt2 = query.Match(f=1), query.Match(f=2) q1 = query.Q("bool", minimum_should_match=1, should=[qt1]) q2 = query.Q("bool", minimum_should_match=1, should=[qt2]) @@ -289,21 +262,18 @@ def test_bool_and_bool_with_min_should_match() -> None: def test_inverted_query_becomes_bool_with_must_not() -> None: - # pylint: disable=missing-function-docstring q = query.Match(f=42) assert ~q == query.Bool(must_not=[query.Match(f=42)]) def test_inverted_query_with_must_not_become_should() -> None: - # pylint: disable=missing-function-docstring q = query.Q("bool", must_not=[query.Q("match", f=1), query.Q("match", f=2)]) assert ~q == query.Q("bool", should=[query.Q("match", f=1), query.Q("match", f=2)]) def test_inverted_query_with_must_and_must_not() -> None: - # pylint: disable=missing-function-docstring q = query.Q( "bool", must=[query.Q("match", f=3), query.Q("match", f=4)], @@ -324,14 +294,12 @@ def test_inverted_query_with_must_and_must_not() -> None: def test_double_invert_returns_original_query() -> None: - # pylint: disable=missing-function-docstring q = query.Match(f=42) assert q == ~~q def test_bool_query_gets_inverted_internally() -> None: - # pylint: disable=missing-function-docstring q = query.Bool(must_not=[query.Match(f=42)], must=[query.Match(g="v")]) assert ~q == query.Bool( @@ -345,7 +313,6 @@ def test_bool_query_gets_inverted_internally() -> None: def test_match_all_or_something_is_match_all() -> None: - # pylint: disable=missing-function-docstring q1 = query.MatchAll() q2 = query.Match(f=42) @@ -354,7 +321,6 @@ def test_match_all_or_something_is_match_all() -> None: def test_or_produces_bool_with_should() -> None: - # pylint: disable=missing-function-docstring q1 = query.Match(f=42) q2 = query.Match(g="v") @@ -363,7 +329,6 @@ def test_or_produces_bool_with_should() -> None: def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: - # pylint: disable=missing-function-docstring q = query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -372,7 +337,6 @@ def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: - # pylint: disable=missing-function-docstring q = ~query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -384,7 +348,6 @@ def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: def test_bool_will_append_another_query_with_or() -> None: - # pylint: disable=missing-function-docstring qb = query.Bool(should=[query.Match(f="v"), query.Match(f="v2")]) q = query.Match(g=42) @@ -392,7 +355,6 @@ def test_bool_will_append_another_query_with_or() -> None: def test_bool_queries_with_only_should_get_concatenated() -> None: - # pylint: disable=missing-function-docstring q1 = query.Bool(should=[query.Match(f=1), query.Match(f=2)]) q2 = query.Bool(should=[query.Match(f=3), 
query.Match(f=4)]) @@ -402,7 +364,6 @@ def test_bool_queries_with_only_should_get_concatenated() -> None: def test_two_bool_queries_append_one_to_should_if_possible() -> None: - # pylint: disable=missing-function-docstring q1 = query.Bool(should=[query.Match(f="v")]) q2 = query.Bool(must=[query.Match(f="v")]) @@ -415,13 +376,11 @@ def test_two_bool_queries_append_one_to_should_if_possible() -> None: def test_queries_are_registered() -> None: - # pylint: disable=missing-function-docstring assert "match" in query.Query._classes assert query.Query._classes["match"] is query.Match def test_defining_query_registers_it() -> None: - # pylint: disable=missing-function-docstring class MyQuery(query.Query): name = "my_query" @@ -430,14 +389,12 @@ class MyQuery(query.Query): def test_query_passes_query_through() -> None: - # pylint: disable=missing-function-docstring q = query.Match(f="value1") assert query.Q(q) is q def test_query_constructs_query_by_name() -> None: - # pylint: disable=missing-function-docstring q = query.Q("match", f="value") assert isinstance(q, query.Match) @@ -445,21 +402,18 @@ def test_query_constructs_query_by_name() -> None: def test_query_translates_double_underscore_to_dots_in_param_names() -> None: - # pylint: disable=missing-function-docstring q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params def test_query_doesn_translate_double_underscore_to_dots_in_param_names() -> None: - # pylint: disable=missing-function-docstring q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params def test_query_constructs_simple_query_from_dict() -> None: - # pylint: disable=missing-function-docstring q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) @@ -467,20 +421,17 @@ def test_query_constructs_simple_query_from_dict() -> None: def test_query_constructs_compound_query_from_dict() -> None: - # pylint: disable=missing-function-docstring q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) def test_query_raises_error_when_passed_in_dict_and_params() -> None: - # pylint: disable=missing-function-docstring with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") def test_query_raises_error_when_passed_in_query_and_params() -> None: - # pylint: disable=missing-function-docstring q = query.Match(f="value1") with raises(Exception): @@ -488,13 +439,11 @@ def test_query_raises_error_when_passed_in_query_and_params() -> None: def test_query_raises_error_on_unknown_query() -> None: - # pylint: disable=missing-function-docstring with raises(Exception): query.Q("not a query", f="value") def test_match_all_and_anything_is_anything() -> None: - # pylint: disable=missing-function-docstring q = query.MatchAll() s = query.Match(f=42) @@ -503,7 +452,6 @@ def test_match_all_and_anything_is_anything() -> None: def test_function_score_with_functions() -> None: - # pylint: disable=missing-function-docstring q = query.Q( "function_score", functions=[query.SF("script_score", script="doc['comment_count'] * _score")], @@ -517,7 +465,6 @@ def test_function_score_with_functions() -> None: def test_function_score_with_no_function_is_boost_factor() -> None: - # pylint: disable=missing-function-docstring q = query.Q( "function_score", functions=[query.SF({"weight": 20, "filter": query.Q("term", f=42)})], @@ -529,7 +476,6 @@ def test_function_score_with_no_function_is_boost_factor() -> None: def 
test_function_score_to_dict() -> None: - # pylint: disable=missing-function-docstring q = query.Q( "function_score", query=query.Q("match", title="python"), @@ -559,7 +505,6 @@ def test_function_score_to_dict() -> None: def test_function_score_with_single_function() -> None: - # pylint: disable=missing-function-docstring d = { "function_score": { "filter": {"term": {"tags": "python"}}, @@ -578,7 +523,6 @@ def test_function_score_with_single_function() -> None: def test_function_score_from_dict() -> None: - # pylint: disable=missing-function-docstring d = { "function_score": { "filter": {"term": {"tags": "python"}}, @@ -608,7 +552,6 @@ def test_function_score_from_dict() -> None: def test_script_score() -> None: - # pylint: disable=missing-function-docstring d = { "script_score": { "query": {"match_all": {}}, diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 9c21e618..fa535d57 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -47,7 +47,6 @@ def agg_response(aggs_search: Any, aggs_data: Any) -> Any: def test_agg_response_is_pickleable(agg_response: Any) -> None: - # pylint: disable=missing-function-docstring assert agg_response.hits == [] r = pickle.loads(pickle.dumps(agg_response)) @@ -57,7 +56,6 @@ def test_agg_response_is_pickleable(agg_response: Any) -> None: def test_response_is_pickleable(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert res.hits r = pickle.loads(pickle.dumps(res)) @@ -68,7 +66,6 @@ def test_response_is_pickleable(dummy_response: Any) -> None: def test_hit_is_pickleable(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = pickle.loads(pickle.dumps(res.hits)) @@ -77,7 +74,6 @@ def test_hit_is_pickleable(dummy_response: Any) -> None: def test_response_stores_search(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring s = Search() r = response.Response(s, dummy_response) @@ -85,7 +81,6 @@ def test_response_stores_search(dummy_response: Any) -> None: def test_interactive_helpers(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = res.hits h = hits[0] @@ -109,7 +104,6 @@ def test_interactive_helpers(dummy_response: Any) -> None: def test_empty_response_is_false(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring dummy_response["hits"]["hits"] = [] res = response.Response(Search(), dummy_response) @@ -117,13 +111,11 @@ def test_empty_response_is_false(dummy_response: Any) -> None: def test_len_response(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert len(res) == 4 def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = list(h for h in res) @@ -141,7 +133,6 @@ def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None: def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) hits = res.hits @@ -150,7 +141,6 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> No def 
test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) h = res.hits[0] @@ -167,7 +157,6 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> No def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None: - # pylint: disable=missing-function-docstring res = response.Response(Search(), dummy_response) assert res[0] is res.hits[0] @@ -175,18 +164,15 @@ def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None: def test_aggregation_base(agg_response: Any) -> None: - # pylint: disable=missing-function-docstring assert agg_response.aggs is agg_response.aggregations assert isinstance(agg_response.aggs, response.AggResponse) def test_metric_agg_works(agg_response: Any) -> None: - # pylint: disable=missing-function-docstring assert 25052.0 == agg_response.aggs.sum_lines.value def test_aggregations_can_be_iterated_over(agg_response: Any) -> None: - # pylint: disable=missing-function-docstring aggs = [a for a in agg_response.aggs] assert len(aggs) == 3 @@ -196,7 +182,6 @@ def test_aggregations_can_be_iterated_over(agg_response: Any) -> None: def test_aggregations_can_be_retrieved_by_name( agg_response: Any, aggs_search: Any ) -> None: - # pylint: disable=missing-function-docstring a = agg_response.aggs["popular_files"] assert isinstance(a, BucketData) @@ -205,7 +190,6 @@ def test_aggregations_can_be_retrieved_by_name( def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None: - # pylint: disable=missing-function-docstring popular_files = agg_response.aggregations.popular_files buckets = [b for b in popular_files] @@ -214,7 +198,6 @@ def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None: def test_bucket_keys_get_deserialized(aggs_data: Any, aggs_search: Any) -> None: - # pylint: disable=missing-function-docstring class Commit(Document): info = Object(properties={"committed_date": Date()}) diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index 5950cd05..c7fa20c5 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -35,14 +35,12 @@ def test_expand__to_dot_is_respected() -> None: - # pylint: disable=missing-function-docstring s = search.Search().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() def test_execute_uses_cache() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() r: Any = object() s._response = r @@ -51,7 +49,6 @@ def test_execute_uses_cache() -> None: def test_cache_can_be_ignored(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search(using="mock") r: Any = object() s._response = r @@ -61,7 +58,6 @@ def test_cache_can_be_ignored(mock_client: Any) -> None: def test_iter_iterates_over_hits() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() s._response = [1, 2, 3] @@ -69,7 +65,6 @@ def test_iter_iterates_over_hits() -> None: def test_cache_isnt_cloned() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() s._response = object() @@ -77,14 +72,12 @@ def test_cache_isnt_cloned() -> None: def test_search_starts_with_no_query() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() assert s.query._proxied is None def test_search_query_combines_query() -> None: - # 
pylint: disable=missing-function-docstring s: Any = search.Search() s2 = s.query("match", f=42) @@ -97,7 +90,6 @@ def test_search_query_combines_query() -> None: def test_query_can_be_assigned_to() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() q = Q("match", title="python") @@ -107,7 +99,6 @@ def test_query_can_be_assigned_to() -> None: def test_query_can_be_wrapped() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -123,7 +114,6 @@ def test_query_can_be_wrapped() -> None: def test_using() -> None: - # pylint: disable=missing-function-docstring o: Any = object() o2: Any = object() s: Any = search.Search(using=o) @@ -134,21 +124,18 @@ def test_using() -> None: def test_methods_are_proxied_to_the_query() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search().query("match_all") assert s.query.to_dict() == {"match_all": {}} def test_query_always_returns_search() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() assert isinstance(s.query("match", f=42), search.Search) def test_source_copied_on_clone() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -163,7 +150,6 @@ def test_source_copied_on_clone() -> None: def test_copy_clones() -> None: - # pylint: disable=missing-function-docstring from copy import copy s1: Any = search.Search().source(["some", "fields"]) @@ -174,7 +160,6 @@ def test_copy_clones() -> None: def test_aggs_allow_two_metric() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -185,7 +170,6 @@ def test_aggs_allow_two_metric() -> None: def test_aggs_get_copied_on_change() -> None: - # pylint: disable=missing-function-docstring s: Any = search.Search().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" @@ -218,7 +202,6 @@ def test_aggs_get_copied_on_change() -> None: def test_search_index() -> None: - # pylint: disable=missing-function-docstring s = search.Search(index="i") assert s._index == ["i"] s = s.index("i2") @@ -250,7 +233,6 @@ def test_search_index() -> None: def test_doc_type_document_class() -> None: - # pylint: disable=missing-function-docstring class MyDocument(Document): pass @@ -264,7 +246,6 @@ class MyDocument(Document): def test_sort() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s = s.sort("fielda", "-fieldb") @@ -277,7 +258,6 @@ def test_sort() -> None: def test_sort_by_score() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -288,7 +268,6 @@ def test_sort_by_score() -> None: def test_collapse() -> None: - # pylint: disable=missing-function-docstring s = search.Search() inner_hits = {"name": "most_recent", "size": 5, "sort": [{"@timestamp": "desc"}]} @@ -323,7 +302,6 @@ def test_collapse() -> None: def test_slice() -> None: - # pylint: disable=missing-function-docstring s = search.Search() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -333,13 +311,11 @@ def test_slice() -> None: def test_index() -> None: - # pylint: disable=missing-function-docstring s = search.Search() 
assert {"from": 3, "size": 1} == s[3].to_dict() def test_search_to_dict() -> None: - # pylint: disable=missing-function-docstring s = search.Search() assert {} == s.to_dict() @@ -369,7 +345,6 @@ def test_search_to_dict() -> None: def test_complex_example() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s = ( s.query("match", title="python") @@ -421,7 +396,6 @@ def test_complex_example() -> None: def test_reverse() -> None: - # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -472,14 +446,12 @@ def test_reverse() -> None: def test_from_dict_doesnt_need_query() -> None: - # pylint: disable=missing-function-docstring s = search.Search.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() def test_params_being_passed_to_search(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring s = search.Search(using="mock") s = s.params(routing="42") s.execute() @@ -488,7 +460,6 @@ def test_params_being_passed_to_search(mock_client: Any) -> None: def test_source() -> None: - # pylint: disable=missing-function-docstring assert {} == search.Search().source().to_dict() assert { @@ -503,7 +474,6 @@ def test_source() -> None: def test_source_on_clone() -> None: - # pylint: disable=missing-function-docstring assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -519,7 +489,6 @@ def test_source_on_clone() -> None: def test_source_on_clear() -> None: - # pylint: disable=missing-function-docstring assert ( {} == search.Search() @@ -530,7 +499,6 @@ def test_source_on_clear() -> None: def test_suggest_accepts_global_text() -> None: - # pylint: disable=missing-function-docstring s = search.Search.from_dict( { "suggest": { @@ -553,7 +521,6 @@ def test_suggest_accepts_global_text() -> None: def test_suggest() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -563,7 +530,6 @@ def test_suggest() -> None: def test_exclude() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s = s.exclude("match", title="python") @@ -577,7 +543,6 @@ def test_exclude() -> None: def test_delete_by_query(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring s = search.Search(using="mock").query("match", lang="java") s.delete() @@ -587,7 +552,6 @@ def test_delete_by_query(mock_client: Any) -> None: def test_update_from_dict() -> None: - # pylint: disable=missing-function-docstring s = search.Search() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -599,7 +563,6 @@ def test_update_from_dict() -> None: def test_rescore_query_to_dict() -> None: - # pylint: disable=missing-function-docstring s = search.Search(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index 8545782c..afcfabc9 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -32,14 +32,12 @@ def test_ubq_starts_with_no_query() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery() assert ubq.query._proxied is None def test_ubq_to_dict() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery() assert {} == ubq.to_dict() @@ -56,7 +54,6 @@ def test_ubq_to_dict() -> None: def 
test_complex_example() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -95,7 +92,6 @@ def test_complex_example() -> None: def test_exclude() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -109,7 +105,6 @@ def test_exclude() -> None: def test_reverse() -> None: - # pylint: disable=missing-function-docstring d = { "query": { "filtered": { @@ -146,14 +141,12 @@ def test_reverse() -> None: def test_from_dict_doesnt_need_query() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() def test_params_being_passed_to_search(mock_client: Any) -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery(using="mock") ubq = ubq.params(routing="42") ubq.execute() @@ -164,7 +157,6 @@ def test_params_being_passed_to_search(mock_client: Any) -> None: def test_overwrite_script() -> None: - # pylint: disable=missing-function-docstring ubq = UpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} @@ -181,7 +173,6 @@ def test_overwrite_script() -> None: def test_update_by_query_response_success() -> None: - # pylint: disable=missing-function-docstring ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index 8c45a23c..c67bcb8b 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -33,7 +33,6 @@ def test_attrdict_pickle() -> None: - # pylint: disable=missing-function-docstring ad = utils.AttrDict({}) pickled_ad = pickle.dumps(ad) @@ -41,7 +40,6 @@ def test_attrdict_pickle() -> None: def test_attrlist_pickle() -> None: - # pylint: disable=missing-function-docstring al = utils.AttrList([]) pickled_al = pickle.dumps(al) @@ -49,7 +47,6 @@ def test_attrlist_pickle() -> None: def test_attrlist_slice() -> None: - # pylint: disable=missing-function-docstring class MyAttrDict(utils.AttrDict): pass @@ -58,7 +55,6 @@ class MyAttrDict(utils.AttrDict): def test_merge() -> None: - # pylint: disable=missing-function-docstring a = utils.AttrDict({"a": {"b": 42, "c": 47}}) b = {"a": {"b": 123, "d": -12}, "e": [1, 2, 3]} @@ -68,7 +64,6 @@ def test_merge() -> None: def test_merge_conflict() -> None: - # pylint: disable=missing-function-docstring for d in ( {"a": 42}, {"a": {"b": 47}}, @@ -79,7 +74,6 @@ def test_merge_conflict() -> None: def test_attrdict_bool() -> None: - # pylint: disable=missing-function-docstring d = utils.AttrDict({}) assert not d @@ -88,7 +82,6 @@ def test_attrdict_bool() -> None: def test_attrlist_items_get_wrapped_during_iteration() -> None: - # pylint: disable=missing-function-docstring al = utils.AttrList([1, object(), [1], {}]) ls = list(iter(al)) @@ -98,7 +91,6 @@ def test_attrlist_items_get_wrapped_during_iteration() -> None: def test_serializer_deals_with_attr_versions() -> None: - # pylint: disable=missing-function-docstring d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( @@ -107,7 +99,6 @@ def test_serializer_deals_with_attr_versions() -> None: def test_serializer_deals_with_objects_with_to_dict() -> None: - # pylint: disable=missing-function-docstring class MyClass(object): def 
to_dict(self) -> int: return 42 @@ -116,14 +107,12 @@ def to_dict(self) -> int: def test_recursive_to_dict() -> None: - # pylint: disable=missing-function-docstring assert utils.recursive_to_dict({"k": [1, (1.0, {"v": Q("match", key="val")})]}) == { "k": [1, (1.0, {"v": {"match": {"key": "val"}}})] } def test_attrdict_get() -> None: - # pylint: disable=missing-function-docstring a = utils.AttrDict({"a": {"b": 42, "c": 47}}) assert a.get("a", {}).get("b", 0) == 42 assert a.get("a", {}).get("e", 0) == 0 diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index b076ed1e..67f2fe50 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -75,7 +75,6 @@ class Log(Document): def test_required_int_can_be_0() -> None: - # pylint: disable=missing-function-docstring class DT(Document): i = Integer(required=True) @@ -84,7 +83,6 @@ class DT(Document): def test_required_field_cannot_be_empty_list() -> None: - # pylint: disable=missing-function-docstring class DT(Document): i = Integer(required=True) @@ -94,7 +92,6 @@ class DT(Document): def test_validation_works_for_lists_of_values() -> None: - # pylint: disable=missing-function-docstring class DT(Document): i = Date(required=True) @@ -107,7 +104,6 @@ class DT(Document): def test_field_with_custom_clean() -> None: - # pylint: disable=missing-function-docstring ls = Log() ls.full_clean() @@ -115,7 +111,6 @@ def test_field_with_custom_clean() -> None: def test_empty_object() -> None: - # pylint: disable=missing-function-docstring d: Any = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) d.inner = {} @@ -123,7 +118,6 @@ def test_empty_object() -> None: def test_missing_required_field_raises_validation_exception() -> None: - # pylint: disable=missing-function-docstring d1: Any = BlogPost() with raises(ValidationException): d1.full_clean() @@ -139,7 +133,6 @@ def test_missing_required_field_raises_validation_exception() -> None: def test_boolean_doesnt_treat_false_as_empty() -> None: - # pylint: disable=missing-function-docstring d: Any = BlogPostWithStatus() with raises(ValidationException): d.full_clean() @@ -150,7 +143,6 @@ def test_boolean_doesnt_treat_false_as_empty() -> None: def test_custom_validation_on_nested_gets_run() -> None: - # pylint: disable=missing-function-docstring d: Any = BlogPost( authors=[Author(name="Guian", email="king@example.com")], created=None ) @@ -162,7 +154,6 @@ def test_custom_validation_on_nested_gets_run() -> None: def test_accessing_known_fields_returns_empty_value() -> None: - # pylint: disable=missing-function-docstring d: Any = BlogPost() assert [] == d.authors @@ -173,7 +164,6 @@ def test_accessing_known_fields_returns_empty_value() -> None: def test_empty_values_are_not_serialized() -> None: - # pylint: disable=missing-function-docstring d: Any = BlogPost( authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None ) diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index b53e7eb0..6826b83e 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -45,7 +45,6 @@ ], ) def test_range_contains(kwargs: Any, item: Any) -> None: - # pylint: disable=missing-function-docstring assert item in Range(**kwargs) @@ -60,7 +59,6 @@ def test_range_contains(kwargs: Any, item: Any) -> None: ], ) def test_range_not_contains(kwargs: Any, item: Any) -> 
None: - # pylint: disable=missing-function-docstring assert item not in Range(**kwargs) @@ -75,7 +73,6 @@ def test_range_not_contains(kwargs: Any, item: Any) -> None: ], ) def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> None: - # pylint: disable=missing-function-docstring with pytest.raises(ValueError): Range(*args, **kwargs) @@ -90,7 +87,6 @@ def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> Non ], ) def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: - # pylint: disable=missing-function-docstring assert (lower, inclusive) == range.lower @@ -104,5 +100,4 @@ def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: ], ) def test_range_upper(range: Any, upper: Any, inclusive: Any) -> None: - # pylint: disable=missing-function-docstring assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index dc1656fb..524887d0 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -49,21 +49,18 @@ def requires_numpy_and_pandas() -> None: - # pylint: disable=missing-function-docstring if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): def test_datetime_serialization(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) def test_decimal_serialization(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): @@ -71,7 +68,6 @@ def test_decimal_serialization(self) -> None: self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) def test_uuid_serialization(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( @@ -80,13 +76,11 @@ def test_uuid_serialization(self) -> None: ) def test_serializes_numpy_bool(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) def test_serializes_numpy_integers(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() ser = JSONSerializer() @@ -108,7 +102,6 @@ def test_serializes_numpy_integers(self) -> None: self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') def test_serializes_numpy_floats(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() ser = JSONSerializer() @@ -120,7 +113,6 @@ def test_serializes_numpy_floats(self) -> None: self.assertRegex(ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$') def test_serializes_numpy_datetime(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -129,7 +121,6 @@ def test_serializes_numpy_datetime(self) -> None: ) def test_serializes_numpy_ndarray(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -143,7 +134,6 @@ def test_serializes_numpy_ndarray(self) -> None: ) def test_serializes_numpy_nan_to_nan(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -152,7 +142,6 @@ def test_serializes_numpy_nan_to_nan(self) -> None: ) def test_serializes_pandas_timestamp(self) -> None: - # pylint: 
disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -161,7 +150,6 @@ def test_serializes_pandas_timestamp(self) -> None: ) def test_serializes_pandas_series(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() self.assertEqual( @@ -170,7 +158,6 @@ def test_serializes_pandas_series(self) -> None: ) def test_serializes_pandas_na(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 @@ -181,7 +168,6 @@ def test_serializes_pandas_na(self) -> None: ) def test_raises_serialization_error_pandas_nat(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() if not hasattr(pd, "NaT"): @@ -189,7 +175,6 @@ def test_raises_serialization_error_pandas_nat(self) -> None: self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) def test_serializes_pandas_category(self) -> None: - # pylint: disable=missing-function-docstring requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) @@ -205,41 +190,33 @@ def test_serializes_pandas_category(self) -> None: ) def test_raises_serialization_error_on_dump_error(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, JSONSerializer().dumps, object()) def test_raises_serialization_error_on_load_error(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") def test_strings_are_left_untouched(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): def test_strings_are_left_untouched(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual("你好", TextSerializer().dumps("你好")) def test_raises_serialization_error_on_dump_error(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, TextSerializer().dumps, {}) class TestDeserializer(TestCase): def setup_method(self, _: Any) -> None: - # pylint: disable=missing-function-docstring self.de = Deserializer(DEFAULT_SERIALIZERS) def test_deserializes_json_by_default(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}')) def test_deserializes_text_with_correct_ct(self) -> None: - # pylint: disable=missing-function-docstring self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain") ) @@ -249,11 +226,9 @@ def test_deserializes_text_with_correct_ct(self) -> None: ) def test_raises_serialization_error_on_unknown_mimetype(self) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(SerializationError, self.de.loads, "{}", "text/html") def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized( self, ) -> None: - # pylint: disable=missing-function-docstring self.assertRaises(ImproperlyConfigured, Deserializer, {}) diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index b3c17950..36b548b5 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -35,7 +35,6 @@ def get_client(**kwargs: Any) -> Any: - # pylint: disable=missing-function-docstring 
     global client
     if client is False:
         raise SkipTest("No client is available")
@@ -55,7 +54,6 @@ def get_client(**kwargs: Any) -> Any:
 
 
 def setup_module() -> None:
-    # pylint: disable=missing-function-docstring
     get_client()
 
 
diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py
index 063a55f0..e1f83e4b 100644
--- a/test_opensearchpy/test_server/conftest.py
+++ b/test_opensearchpy/test_server/conftest.py
@@ -45,7 +45,6 @@
 
 @pytest.fixture(scope="session")  # type: ignore
 def sync_client_factory() -> Any:
-    # pylint: disable=missing-function-docstring
     client = None
     try:
         # Configure the client optionally with an HTTP conn class
@@ -86,7 +85,6 @@ def sync_client_factory() -> Any:
 
 @pytest.fixture(scope="function")  # type: ignore
 def sync_client(sync_client_factory: Any) -> Any:
-    # pylint: disable=missing-function-docstring
     try:
         yield sync_client_factory
     finally:
diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py
index 4bc6170a..a77b0f37 100644
--- a/test_opensearchpy/test_server/test_clients.py
+++ b/test_opensearchpy/test_server/test_clients.py
@@ -32,13 +32,11 @@
 
 class TestUnicode(OpenSearchTestCase):
     def test_indices_analyze(self) -> None:
-        # pylint: disable=missing-function-docstring
         self.client.indices.analyze(body='{"text": "привет"}')
 
 
 class TestBulk(OpenSearchTestCase):
     def test_bulk_works_with_string_body(self) -> None:
-        # pylint: disable=missing-function-docstring
         docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}'
         response = self.client.bulk(body=docs)
 
@@ -46,7 +44,6 @@ def test_bulk_works_with_string_body(self) -> None:
         self.assertEqual(1, len(response["items"]))
 
     def test_bulk_works_with_bytestring_body(self) -> None:
-        # pylint: disable=missing-function-docstring
         docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}'
         response = self.client.bulk(body=docs)
 
@@ -56,13 +53,11 @@ class TestClose(OpenSearchTestCase):
     def test_close_doesnt_break_client(self) -> None:
-        # pylint: disable=missing-function-docstring
         self.client.cluster.health()
         self.client.close()
         self.client.cluster.health()
 
     def test_with_doesnt_break_client(self) -> None:
-        # pylint: disable=missing-function-docstring
         for _ in range(2):
             with self.client as client:
                 client.cluster.health()
 
 
diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py
index e35f9a60..967c320e 100644
--- a/test_opensearchpy/test_server/test_helpers/conftest.py
+++ b/test_opensearchpy/test_server/test_helpers/conftest.py
@@ -46,7 +46,6 @@
 
 @fixture(scope="session")  # type: ignore
 def client() -> Any:
-    # pylint: disable=missing-function-docstring
     client = get_test_client(verify_certs=False, http_auth=("admin", "admin"))
     add_connection("default", client)
     return client
@@ -60,7 +59,7 @@ def opensearch_version(client: Any) -> Any:
     """
     info: Any = client.info()
     print(info)
-    yield (int(x) for x in match_version(info))
+    yield tuple(int(x) for x in match_version(info))
 
 
 def match_version(info: Any) -> Any:
@@ -75,7 +74,6 @@ def match_version(info: Any) -> Any:
 
 @fixture  # type: ignore
 def write_client(client: Any) -> Any:
-    # pylint: disable=missing-function-docstring
     yield client
     client.indices.delete("test-*", ignore=404)
     client.indices.delete_template("test-template", ignore=404)
@@ -83,7 +81,6 @@
 
 @fixture(scope="session")  # type: ignore
def data_client(client: Any) -> Any: - # pylint: disable=missing-function-docstring # create mappings create_git_index(client, "git") create_flat_git_index(client, "flat-git") @@ -97,7 +94,6 @@ def data_client(client: Any) -> Any: @fixture # type: ignore def pull_request(write_client: Any) -> Any: - # pylint: disable=missing-function-docstring PullRequest.init() pr = PullRequest( _id=42, @@ -122,7 +118,6 @@ def pull_request(write_client: Any) -> Any: @fixture # type: ignore def setup_ubq_tests(client: Any) -> str: - # pylint: disable=missing-function-docstring # todo what's a ubq test? index = "test-git" create_git_index(client, index) diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index 32a17b50..17b60d6d 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -50,7 +50,6 @@ def __init__( self._fail_with = fail_with def bulk(self, *args: Any, **kwargs: Any) -> Any: - # pylint: disable=missing-function-docstring self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -59,7 +58,6 @@ def bulk(self, *args: Any, **kwargs: Any) -> Any: class TestStreamingBulk(OpenSearchTestCase): def test_actions_remain_unchanged(self) -> None: - # pylint: disable=missing-function-docstring actions = [{"_id": 1}, {"_id": 2}] for ok, item in helpers.streaming_bulk( self.client, actions, index="test-index" @@ -68,7 +66,6 @@ def test_actions_remain_unchanged(self) -> None: self.assertEqual([{"_id": 1}, {"_id": 2}], actions) def test_all_documents_get_inserted(self) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( self.client, docs, index="test-index", refresh=True @@ -81,7 +78,6 @@ def test_all_documents_get_inserted(self) -> None: ) def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -102,7 +98,6 @@ def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: assert False, "exception should have been raised" def test_different_op_types(self) -> Any: - # pylint: disable=missing-function-docstring if self.opensearch_version() < (0, 90, 1): raise SkipTest("update supported since 0.90.1") self.client.index(index="i", id=45, body={}) @@ -125,7 +120,6 @@ def test_different_op_types(self) -> Any: self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"]) def test_transport_error_can_becaught(self) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient(self.client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -162,7 +156,6 @@ def test_transport_error_can_becaught(self) -> None: ) def test_rejected_documents_are_retried(self) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -190,7 +183,6 @@ def test_rejected_documents_are_retried(self) -> None: self.assertEqual(4, failing_client._called) def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -219,7 +211,6 @@ def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: self.assertEqual(4, failing_client._called) 
def test_transport_error_is_raised_with_max_retries(self) -> None: - # pylint: disable=missing-function-docstring failing_client = FailingBulkClient( self.client, fail_at=(1, 2, 3, 4), @@ -244,7 +235,6 @@ def streaming_bulk() -> Any: class TestBulk(OpenSearchTestCase): def test_bulk_works_with_single_item(self) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": 42, "_id": 1}] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -258,7 +248,6 @@ def test_bulk_works_with_single_item(self) -> None: ) def test_all_documents_get_inserted(self) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -272,7 +261,6 @@ def test_all_documents_get_inserted(self) -> None: ) def test_stats_only_reports_numbers(self) -> None: - # pylint: disable=missing-function-docstring docs = [{"answer": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True, stats_only=True @@ -283,7 +271,6 @@ def test_stats_only_reports_numbers(self) -> None: self.assertEqual(100, self.client.count(index="test-index")["count"]) def test_errors_are_reported_correctly(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -311,7 +298,6 @@ def test_errors_are_reported_correctly(self) -> None: ) def test_error_is_raised(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -330,7 +316,6 @@ def test_error_is_raised(self) -> None: ) def test_ignore_error_if_raised(self) -> None: - # pylint: disable=missing-function-docstring # ignore the status code 400 in tuple helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -364,7 +349,6 @@ def test_ignore_error_if_raised(self) -> None: helpers.bulk(failing_client, [{"a": 42}], index="i", ignore_status=(599,)) def test_errors_are_collected_properly(self) -> None: - # pylint: disable=missing-function-docstring self.client.indices.create( "i", { @@ -404,7 +388,6 @@ def teardown_method(self, m: Any) -> None: super(TestScan, self).teardown_method(m) def test_order_can_be_preserved(self) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -425,7 +408,6 @@ def test_order_can_be_preserved(self) -> None: self.assertEqual(list(range(100)), list(d["_source"]["answer"] for d in docs)) def test_all_documents_are_read(self) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -439,7 +421,6 @@ def test_all_documents_are_read(self) -> None: self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) def test_scroll_error(self) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -475,7 +456,6 @@ def test_scroll_error(self) -> None: self.assertEqual(data[-1], {"scroll_data": 42}) def test_initial_search_error(self) -> None: - # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -502,7 +482,6 @@ def test_initial_search_error(self) -> None: client_mock.scroll.assert_not_called() def test_no_scroll_id_fast_route(self) -> None: - # pylint: 
disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = {"no": "_scroll_id"} data = list(helpers.scan(self.client, index="test_index")) @@ -512,7 +491,6 @@ def test_no_scroll_id_fast_route(self) -> None: client_mock.clear_scroll.assert_not_called() def test_scan_auth_kwargs_forwarded(self) -> None: - # pylint: disable=missing-function-docstring for key, val in { "api_key": ("name", "value"), "http_auth": ("username", "password"), @@ -547,7 +525,6 @@ def test_scan_auth_kwargs_forwarded(self) -> None: self.assertEqual(api_mock.call_args[1][key], val) def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: - # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", @@ -580,7 +557,6 @@ def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: @patch("opensearchpy.helpers.actions.logger") def test_logger(self, logger_mock: Any) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -616,7 +592,6 @@ def test_logger(self, logger_mock: Any) -> None: logger_mock.warning.assert_called() def test_clear_scroll(self) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) @@ -644,7 +619,6 @@ def test_clear_scroll(self) -> None: spy.assert_not_called() def test_shards_no_skipped_field(self) -> None: - # pylint: disable=missing-function-docstring with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -674,7 +648,6 @@ def test_shards_no_skipped_field(self) -> None: class TestReindex(OpenSearchTestCase): def setup_method(self, _: Any) -> None: - # pylint: disable=missing-function-docstring bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) @@ -688,7 +661,6 @@ def setup_method(self, _: Any) -> None: self.client.bulk(bulk, refresh=True) def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: - # pylint: disable=missing-function-docstring helpers.reindex( self.client, "test_index", @@ -708,7 +680,6 @@ def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: ) def test_reindex_accepts_a_query(self) -> None: - # pylint: disable=missing-function-docstring helpers.reindex( self.client, "test_index", @@ -728,7 +699,6 @@ def test_reindex_accepts_a_query(self) -> None: ) def test_all_documents_get_moved(self) -> None: - # pylint: disable=missing-function-docstring helpers.reindex(self.client, "test_index", "prod_index") self.client.indices.refresh() @@ -748,7 +718,6 @@ def test_all_documents_get_moved(self) -> None: class TestParentChildReindex(OpenSearchTestCase): def setup_method(self, _: Any) -> None: - # pylint: disable=missing-function-docstring body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -775,7 +744,6 @@ def setup_method(self, _: Any) -> None: self.client.indices.refresh(index="test-index") def test_children_are_reindexed_correctly(self) -> None: - # pylint: disable=missing-function-docstring helpers.reindex(self.client, "test-index", "real-index") self.assertEqual( diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index b4502551..9f8c69f6 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ 
b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -30,7 +30,6 @@ def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: - # pylint: disable=missing-function-docstring a = analyzer("my-analyzer", tokenizer="keyword") tokens = a.simulate("Hello World!", using=client).tokens @@ -39,7 +38,6 @@ def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: def test_simulate_complex(client: Any) -> None: - # pylint: disable=missing-function-docstring a = analyzer( "my-analyzer", tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"), @@ -53,7 +51,6 @@ def test_simulate_complex(client: Any) -> None: def test_simulate_builtin(client: Any) -> None: - # pylint: disable=missing-function-docstring a = analyzer("my-analyzer", "english") tokens = a.simulate("fixes running").tokens diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index 62264090..20b43608 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -30,13 +30,11 @@ def test_count_all(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Search(using=data_client).index("git") assert 53 == s.count() def test_count_prefetch(data_client: Any, mocker: Any) -> None: - # pylint: disable=missing-function-docstring mocker.spy(data_client, "count") search = Search(using=data_client).index("git") @@ -50,7 +48,6 @@ def test_count_prefetch(data_client: Any, mocker: Any) -> None: def test_count_filter(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas")) # initial commit + repo document assert 2 == s.count() diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index b1c27be8..9118ffed 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -30,7 +30,6 @@ def create_flat_git_index(client: Any, index: Any) -> None: - # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -74,7 +73,6 @@ def create_flat_git_index(client: Any, index: Any) -> None: def create_git_index(client: Any, index: Any) -> None: - # pylint: disable=missing-function-docstring # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1097,7 +1095,6 @@ def create_git_index(client: Any, index: Any) -> None: def flatten_doc(d: Any) -> Dict[str, Any]: - # pylint: disable=missing-function-docstring src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1107,7 +1104,6 @@ def flatten_doc(d: Any) -> Dict[str, Any]: def create_test_git_data(d: Any) -> Dict[str, Any]: - # pylint: disable=missing-function-docstring src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 47b0fd4e..53e24173 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -132,7 +132,6 @@ class Index: def test_serialization(write_client: Any) -> None: - # 
pylint: disable=missing-function-docstring SerializationDoc.init() write_client.index( index="test-serialization", @@ -163,7 +162,6 @@ def test_serialization(write_client: Any) -> None: def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring history_query = Q( "nested", path="comments.history", @@ -192,7 +190,6 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring s = PullRequest.search().query( "nested", inner_hits={}, @@ -208,7 +205,6 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: - # pylint: disable=missing-function-docstring s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -221,7 +217,6 @@ def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: def test_update_object_field(write_client: Any) -> None: - # pylint: disable=missing-function-docstring Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -242,7 +237,6 @@ def test_update_object_field(write_client: Any) -> None: def test_update_script(write_client: Any) -> None: - # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -253,7 +247,6 @@ def test_update_script(write_client: Any) -> None: def test_update_retry_on_conflict(write_client: Any) -> None: - # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -269,7 +262,6 @@ def test_update_retry_on_conflict(write_client: Any) -> None: @pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) -> None: - # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -287,7 +279,6 @@ def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) - def test_save_and_update_return_doc_meta(write_client: Any) -> None: - # pylint: disable=missing-function-docstring Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = w.save(return_doc_meta=True) @@ -312,38 +303,32 @@ def test_save_and_update_return_doc_meta(write_client: Any) -> None: def test_init(write_client: Any) -> None: - # pylint: disable=missing-function-docstring Repository.init(index="test-git") assert write_client.indices.exists(index="test-git") def test_get_raises_404_on_index_missing(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): Repository.get("opensearch-dsl-php", index="not-there") def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): Repository.get("opensearch-dsl-php") def test_get_returns_none_if_404_ignored(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert None is Repository.get("opensearch-dsl-php", ignore=404) def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( data_client: Any, ) -> None: - # pylint: disable=missing-function-docstring assert None is Repository.get("42", index="not-there", ignore=404) def test_get(data_client: Any) -> 
None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -352,17 +337,14 @@ def test_get(data_client: Any) -> None: def test_exists_return_true(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert Repository.exists("opensearch-py") def test_exists_false(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert not Repository.exists("opensearch-dsl-php") def test_get_with_tz_date(data_client: Any) -> None: - # pylint: disable=missing-function-docstring first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -375,7 +357,6 @@ def test_get_with_tz_date(data_client: Any) -> None: def test_save_with_tz_date(data_client: Any) -> None: - # pylint: disable=missing-function-docstring tzinfo = timezone("Europe/Prague") first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -403,7 +384,6 @@ def test_save_with_tz_date(data_client: Any) -> None: def test_mget(data_client: Any) -> None: - # pylint: disable=missing-function-docstring commits = Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -412,26 +392,22 @@ def test_mget(data_client: Any) -> None: def test_mget_raises_exception_when_missing_param_is_invalid(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(ValueError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: - # pylint: disable=missing-function-docstring with raises(NotFoundError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client: Any) -> None: - # pylint: disable=missing-function-docstring commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" def test_update_works_from_search_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.search().execute()[0] opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -443,7 +419,6 @@ def test_update_works_from_search_response(data_client: Any) -> None: def test_update(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -468,7 +443,6 @@ def test_update(data_client: Any) -> None: def test_save_updates_existing_doc(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -482,7 +456,6 @@ def test_save_updates_existing_doc(data_client: Any) -> None: def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -491,7 +464,6 @@ def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> No def test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: - # pylint: disable=missing-function-docstring opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -500,14 +472,12 @@ def 
test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> def assert_doc_equals(expected: Any, actual: Any) -> None: - # pylint: disable=missing-function-docstring for f in expected: assert f in actual assert actual[f] == expected[f] def test_can_save_to_different_index(write_client: Any) -> None: - # pylint: disable=missing-function-docstring test_repo = Repository(description="testing", meta={"id": 42}) assert test_repo.save(index="test-document") @@ -523,7 +493,6 @@ def test_can_save_to_different_index(write_client: Any) -> None: def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> None: - # pylint: disable=missing-function-docstring test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert test_repo.save(index="test-document", skip_empty=False) @@ -539,7 +508,6 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> def test_delete(write_client: Any) -> None: - # pylint: disable=missing-function-docstring write_client.create( index="test-document", id="opensearch-py", @@ -561,12 +529,10 @@ def test_delete(write_client: Any) -> None: def test_search(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert Repository.search().count() == 1 def test_search_returns_proper_doc_classes(data_client: Any) -> None: - # pylint: disable=missing-function-docstring result = Repository.search().execute() opensearch_repo = result.hits[0] @@ -576,7 +542,6 @@ def test_search_returns_proper_doc_classes(data_client: Any) -> None: def test_refresh_mapping(data_client: Any) -> None: - # pylint: disable=missing-function-docstring class Commit(Document): class Index: name = "git" @@ -593,7 +558,6 @@ class Index: def test_highlight_in_meta(data_client: Any) -> None: - # pylint: disable=missing-function-docstring commit = ( Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 522b6e05..54e49c9d 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -68,7 +68,6 @@ class MetricSearch(FacetedSearch): @pytest.fixture(scope="session") # type: ignore def commit_search_cls(opensearch_version: Any) -> Any: - # pylint: disable=missing-function-docstring interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(FacetedSearch): @@ -94,7 +93,6 @@ class CommitSearch(FacetedSearch): @pytest.fixture(scope="session") # type: ignore def repo_search_cls(opensearch_version: Any) -> Any: - # pylint: disable=missing-function-docstring interval_type = "calendar_interval" class RepoSearch(FacetedSearch): @@ -116,7 +114,6 @@ def search(self) -> Any: @pytest.fixture(scope="session") # type: ignore def pr_search_cls(opensearch_version: Any) -> Any: - # pylint: disable=missing-function-docstring interval_type = "calendar_interval" class PRSearch(FacetedSearch): @@ -135,7 +132,6 @@ class PRSearch(FacetedSearch): def test_facet_with_custom_metric(data_client: Any) -> None: - # pylint: disable=missing-function-docstring ms = MetricSearch() r = ms.execute() @@ -145,7 +141,6 @@ def test_facet_with_custom_metric(data_client: Any) -> None: def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring prs = pr_search_cls() r = prs.execute() @@ -154,7 +149,6 @@ def test_nested_facet(pull_request: Any, 
pr_search_cls: Any) -> None: def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = prs.execute() @@ -167,7 +161,6 @@ def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring rs = repo_search_cls() r = rs.execute() @@ -176,7 +169,6 @@ def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring rs = repo_search_cls() r = rs.execute() @@ -189,7 +181,6 @@ def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: def test_empty_search_finds_everything( data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls() r = cs.execute() assert r.hits.total.value == 52 @@ -236,7 +227,6 @@ def test_empty_search_finds_everything( def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = cs.execute() @@ -283,7 +273,6 @@ def test_term_filters_are_shown_as_selected_and_data_is_filtered( def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client: Any, commit_search_cls: Any ) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls(filters={"deletions": "better"}) r = cs.execute() @@ -292,7 +281,6 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered( def test_pagination(data_client: Any, commit_search_cls: Any) -> None: - # pylint: disable=missing-function-docstring cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 8a7566e0..5b8250b4 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -36,7 +36,6 @@ class Post(Document): def test_index_template_works(write_client: Any) -> None: - # pylint: disable=missing-function-docstring it = IndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -58,7 +57,6 @@ def test_index_template_works(write_client: Any) -> None: def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: - # pylint: disable=missing-function-docstring i = Index("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) i.save() @@ -71,13 +69,11 @@ def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: def test_index_exists(data_client: Any) -> None: - # pylint: disable=missing-function-docstring assert Index("git").exists() assert not Index("not-there").exists() def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> None: - # pylint: disable=missing-function-docstring i = Index("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -103,7 +99,6 @@ def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> N def test_delete(write_client: Any) -> None: - # pylint: disable=missing-function-docstring 
write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -115,7 +110,6 @@ def test_delete(write_client: Any) -> None: def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: - # pylint: disable=missing-function-docstring i1 = Index("test-index-1", using=write_client) i2 = Index("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index 675b645a..62d608a6 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -33,7 +33,6 @@ def test_mapping_saved_into_opensearch(write_client: Any) -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -56,7 +55,6 @@ def test_mapping_saved_into_opensearch(write_client: Any) -> None: def test_mapping_saved_into_opensearch_when_index_already_exists_closed( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -82,7 +80,6 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_closed( def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( write_client: Any, ) -> None: - # pylint: disable=missing-function-docstring m = mapping.Mapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -112,7 +109,6 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: - # pylint: disable=missing-function-docstring write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index 1f37471d..bbad6cc3 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -64,7 +64,6 @@ class Index: def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: - # pylint: disable=missing-function-docstring has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -86,7 +85,6 @@ def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -103,7 +101,6 @@ def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Search(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -115,7 +112,6 @@ def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: def test_scan_respects_doc_types(data_client: Any) -> None: - # pylint: disable=missing-function-docstring repos = list(Repository.search().scan()) assert 1 == len(repos) @@ -124,7 +120,6 @@ def test_scan_respects_doc_types(data_client: Any) -> None: def 
test_scan_iterates_through_all_docs(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Search(index="flat-git") commits = list(s.scan()) @@ -134,7 +129,6 @@ def test_scan_iterates_through_all_docs(data_client: Any) -> None: def test_response_is_cached(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Repository.search() repos = list(s) @@ -143,7 +137,6 @@ def test_response_is_cached(data_client: Any) -> None: def test_multi_search(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = Search(index="flat-git") @@ -161,7 +154,6 @@ def test_multi_search(data_client: Any) -> None: def test_multi_missing(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s1 = Repository.search() s2 = Search(index="flat-git") s3 = Search(index="does_not_exist") @@ -185,7 +177,6 @@ def test_multi_missing(data_client: Any) -> None: def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: - # pylint: disable=missing-function-docstring s = Search(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index c1d30a6e..07d592c7 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -31,7 +31,6 @@ def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -51,7 +50,6 @@ def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> N def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( @@ -69,7 +67,6 @@ def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> def test_delete_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: - # pylint: disable=missing-function-docstring index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index 0bf05c54..6ecac372 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -23,7 +23,6 @@ class TestAlertingPlugin(OpenSearchTestCase): "Plugin not supported for opensearch version", ) def test_create_destination(self) -> None: - # pylint: disable=missing-function-docstring # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -40,7 +39,6 @@ def test_create_destination(self) -> None: "Plugin not supported for opensearch version", ) def test_get_destination(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy destination self.test_create_destination() @@ -56,7 +54,6 @@ def test_get_destination(self) -> None: "Plugin not supported for opensearch version", ) def test_create_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy destination self.test_create_destination() @@ -122,7 +119,6 @@ def test_create_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_search_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() @@ -141,7 +137,6 
@@ def test_search_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_get_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() @@ -166,7 +161,6 @@ def test_get_monitor(self) -> None: "Plugin not supported for opensearch version", ) def test_run_monitor(self) -> None: - # pylint: disable=missing-function-docstring # Create a dummy monitor self.test_create_monitor() diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index 77fabee5..1d2b696f 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -64,7 +64,6 @@ class TestIndexManagementPlugin(OpenSearchTestCase): } def test_create_policy(self) -> None: - # pylint: disable=missing-function-docstring # Test to create policy response = self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -74,7 +73,6 @@ def test_create_policy(self) -> None: self.assertIn("_id", response) def test_get_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() @@ -86,7 +84,6 @@ def test_get_policy(self) -> None: self.assertEqual(response["_id"], self.POLICY_NAME) def test_update_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() @@ -109,7 +106,6 @@ def test_update_policy(self) -> None: self.assertIn("_id", response) def test_delete_policy(self) -> None: - # pylint: disable=missing-function-docstring # Create a policy self.test_create_policy() diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 612fd6b2..cb03cb76 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -143,7 +143,6 @@ class YamlRunner: def __init__(self, client: Any) -> None: - # pylint: disable=missing-function-docstring self.client = client self.last_response: Any = None @@ -153,7 +152,6 @@ def __init__(self, client: Any) -> None: self._state: Any = {} def use_spec(self, test_spec: Any) -> None: - # pylint: disable=missing-function-docstring self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) @@ -177,13 +175,11 @@ def setup(self) -> Any: self.run_code(self._setup_code) def teardown(self) -> Any: - # pylint: disable=missing-function-docstring if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) def opensearch_version(self) -> Any: - # pylint: disable=missing-function-docstring global OPENSEARCH_VERSION if OPENSEARCH_VERSION is None: version_string = (self.client.info())["version"]["number"] @@ -194,11 +190,9 @@ def opensearch_version(self) -> Any: return OPENSEARCH_VERSION def section(self, name: str) -> None: - # pylint: disable=missing-function-docstring print(("=" * 10) + " " + name + " " + ("=" * 10)) def run(self) -> Any: - # pylint: disable=missing-function-docstring try: self.setup() self.section("test") @@ -222,7 +216,6 @@ def run_code(self, test: Any) -> Any: raise RuntimeError("Invalid action type %r" % (action_type,)) def run_do(self, action: Any) -> Any: - # pylint: disable=missing-function-docstring api = self.client headers = action.pop("headers", None) catch = 
action.pop("catch", None) @@ -289,7 +282,6 @@ def run_do(self, action: Any) -> Any: ) def run_catch(self, catch: Any, exception: Any) -> None: - # pylint: disable=missing-function-docstring if catch == "param": assert isinstance(exception, TypeError) return @@ -305,7 +297,6 @@ def run_catch(self, catch: Any, exception: Any) -> None: self.last_response = exception.info def run_skip(self, skip: Any) -> Any: - # pylint: disable=missing-function-docstring global IMPLEMENTED_FEATURES if "features" in skip: @@ -328,37 +319,31 @@ def run_skip(self, skip: Any) -> Any: pytest.skip(reason) def run_gt(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value def run_gte(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value def run_lt(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value def run_lte(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value def run_set(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) def run_is_false(self, action: Any) -> None: - # pylint: disable=missing-function-docstring try: value = self._lookup(action) except AssertionError: @@ -367,19 +352,16 @@ def run_is_false(self, action: Any) -> None: assert value in FALSEY_VALUES def run_is_true(self, action: Any) -> None: - # pylint: disable=missing-function-docstring value = self._lookup(action) assert value not in FALSEY_VALUES def run_length(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) def run_match(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) @@ -398,7 +380,6 @@ def run_match(self, action: Any) -> None: self._assert_match_equals(value, expected) def run_contains(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] @@ -407,7 +388,6 @@ def run_contains(self, action: Any) -> None: raise AssertionError("%s is not contained by %s" % (expected, value)) def run_transform_and_set(self, action: Any) -> None: - # pylint: disable=missing-function-docstring for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: @@ -483,7 +463,6 @@ def _assert_match_equals(self, a: Any, b: Any) -> None: @pytest.fixture(scope="function") # type: ignore def sync_runner(sync_client: Any) -> Any: - # pylint: disable=missing-function-docstring return YamlRunner(sync_client) @@ -563,7 +542,6 @@ def load_rest_api_tests() -> None: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore def test_rest_api_spec(test_spec: Any, sync_runner: Any) -> None: - # pylint: disable=missing-function-docstring if test_spec.get("skip", 
False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py index 52f33ccc..c885b58d 100644 --- a/test_opensearchpy/test_server_secured/test_clients.py +++ b/test_opensearchpy/test_server_secured/test_clients.py @@ -15,7 +15,6 @@ class TestSecurity(TestCase): def test_security(self) -> None: - # pylint: disable=missing-function-docstring client = OpenSearch( OPENSEARCH_URL, http_auth=("admin", "admin"), diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 2196eb73..8f8c6937 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -44,7 +44,6 @@ def tearDown(self) -> None: self.client.close() def test_create_role(self) -> None: - # pylint: disable=missing-function-docstring # Test to create role response = self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -54,7 +53,6 @@ def test_create_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_create_role_with_body_param_empty(self) -> None: - # pylint: disable=missing-function-docstring try: self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -63,7 +61,6 @@ def test_create_role_with_body_param_empty(self) -> None: assert False def test_get_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role self.test_create_role() @@ -74,7 +71,6 @@ def test_get_role(self) -> None: self.assertIn(self.ROLE_NAME, response) def test_update_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role self.test_create_role() @@ -88,7 +84,6 @@ def test_update_role(self) -> None: self.assertEqual("OK", response.get("status")) def test_delete_role(self) -> None: - # pylint: disable=missing-function-docstring # Create a role self.test_create_role() @@ -102,7 +97,6 @@ def test_delete_role(self) -> None: response = self.client.security.get_role(self.ROLE_NAME) def test_create_user(self) -> None: - # pylint: disable=missing-function-docstring # Test to create user response = self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -112,7 +106,6 @@ def test_create_user(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_create_user_with_body_param_empty(self) -> None: - # pylint: disable=missing-function-docstring try: self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -121,7 +114,6 @@ def test_create_user_with_body_param_empty(self) -> None: assert False def test_create_user_with_role(self) -> None: - # pylint: disable=missing-function-docstring self.test_create_role() # Test to create user @@ -137,7 +129,6 @@ def test_create_user_with_role(self) -> None: self.assertIn(response.get("status"), ["CREATED", "OK"]) def test_get_user(self) -> None: - # pylint: disable=missing-function-docstring # Create a user self.test_create_user() @@ -148,7 +139,6 @@ def test_get_user(self) -> None: self.assertIn(self.USER_NAME, response) def test_update_user(self) -> None: - # pylint: disable=missing-function-docstring # Create a user self.test_create_user() @@ -162,7 +152,6 @@ def test_update_user(self) -> None: self.assertEqual("OK", response.get("status")) def test_delete_user(self) -> None: - # pylint: 
disable=missing-function-docstring # Create a user self.test_create_user() @@ -176,13 +165,11 @@ def test_delete_user(self) -> None: response = self.client.security.get_user(self.USER_NAME) def test_health_check(self) -> None: - # pylint: disable=missing-function-docstring response = self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) def test_health(self) -> None: - # pylint: disable=missing-function-docstring response = self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -216,7 +203,6 @@ def test_health(self) -> None: } def test_update_audit_config(self) -> None: - # pylint: disable=missing-function-docstring response = self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) @@ -224,7 +210,6 @@ def test_update_audit_config(self) -> None: self.assertEqual("OK", response.get("status")) def test_update_audit_configuration(self) -> None: - # pylint: disable=missing-function-docstring response = self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 93e12fdc..6a09b83b 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -109,7 +109,6 @@ def perform_request(self, *args: Any, **kwargs: Any) -> Any: class TestHostsInfoCallback(TestCase): def test_cluster_manager_only_nodes_are_ignored(self) -> None: - # pylint: disable=missing-function-docstring nodes = [ {"roles": ["cluster_manager"]}, {"roles": ["cluster_manager", "data", "ingest"]}, @@ -127,14 +126,12 @@ def test_cluster_manager_only_nodes_are_ignored(self) -> None: class TestTransport(TestCase): def test_single_connection_uses_dummy_connection_pool(self) -> None: - # pylint: disable=missing-function-docstring t1: Any = Transport([{}]) self.assertIsInstance(t1.connection_pool, DummyConnectionPool) t2: Any = Transport([{"host": "localhost"}]) self.assertIsInstance(t2.connection_pool, DummyConnectionPool) def test_request_timeout_extracted_from_params_and_passed(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -146,7 +143,6 @@ def test_request_timeout_extracted_from_params_and_passed(self) -> None: ) def test_timeout_extracted_from_params_and_passed(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"timeout": 84}) @@ -158,7 +154,6 @@ def test_timeout_extracted_from_params_and_passed(self) -> None: ) def test_opaque_id(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) t.perform_request("GET", "/") @@ -179,7 +174,6 @@ def test_opaque_id(self) -> None: ) def test_request_with_custom_user_agent_header(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) @@ -194,7 +188,6 @@ def test_request_with_custom_user_agent_header(self) -> None: ) def test_send_get_body_as_source(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -206,7 +199,6 @@ def 
test_send_get_body_as_source(self) -> None: ) def test_send_get_body_as_post(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -216,7 +208,6 @@ def test_send_get_body_as_post(self) -> None: self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) def test_body_gets_encoded_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好") @@ -227,7 +218,6 @@ def test_body_gets_encoded_into_bytes(self) -> None: ) def test_body_bytes_get_passed_untouched(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -236,7 +226,6 @@ def test_body_bytes_get_passed_untouched(self) -> None: self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好\uda6a") @@ -247,19 +236,16 @@ def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) def test_kwargs_passed_on_to_connections(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{"host": "google.com"}], port=123) self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) def test_kwargs_passed_on_to_connection_pool(self) -> None: - # pylint: disable=missing-function-docstring dt = object() t: Any = Transport([{}, {}], dead_timeout=dt) self.assertIs(dt, t.connection_pool.dead_timeout) def test_custom_connection_class(self) -> None: - # pylint: disable=missing-function-docstring class MyConnection(Connection): def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs @@ -269,7 +255,6 @@ def __init__(self, **kwargs: Any) -> None: self.assertIsInstance(t.connection_pool.connections[0], MyConnection) def test_add_connection(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) @@ -279,7 +264,6 @@ def test_add_connection(self) -> None: ) def test_request_will_fail_after_x_retries(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -289,7 +273,6 @@ def test_request_will_fail_after_x_retries(self) -> None: self.assertEqual(4, len(t.get_connection().calls)) def test_failed_connection_will_be_marked_as_dead(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, @@ -299,7 +282,6 @@ def test_failed_connection_will_be_marked_as_dead(self) -> None: self.assertEqual(0, len(t.connection_pool.connections)) def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: - # pylint: disable=missing-function-docstring for method in ("GET", "HEAD"): t: Any = Transport([{}, {}], connection_class=DummyConnection) con1 = t.connection_pool.get_connection() @@ -312,7 +294,6 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: self.assertEqual(1, len(t.connection_pool.dead_count)) def 
test_sniff_will_use_seed_connections(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t.set_connections([{"data": "invalid"}]) @@ -321,7 +302,6 @@ def test_sniff_will_use_seed_connections(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -331,7 +311,6 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_ignores_sniff_timeout(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -344,7 +323,6 @@ def test_sniff_on_start_ignores_sniff_timeout(self) -> None: ) def test_sniff_uses_sniff_timeout(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -357,7 +335,6 @@ def test_sniff_uses_sniff_timeout(self) -> None: ) def test_sniff_reuses_connection_instances_if_possible(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, @@ -370,7 +347,6 @@ def test_sniff_reuses_connection_instances_if_possible(self) -> None: self.assertIs(connection, t.get_connection()) def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [ {"exception": ConnectionError(None, "abandon ship", Exception())}, @@ -390,7 +366,6 @@ def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: def test_sniff_on_fail_failing_does_not_prevent_retires( self, sniff_hosts: Any ) -> None: - # pylint: disable=missing-function-docstring sniff_hosts.side_effect = [TransportError("sniff failed")] t: Any = Transport( [ @@ -411,7 +386,6 @@ def test_sniff_on_fail_failing_does_not_prevent_retires( self.assertEqual(1, len(conn_data.calls)) def test_sniff_after_n_seconds(self) -> None: - # pylint: disable=missing-function-docstring t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -430,7 +404,6 @@ def test_sniff_after_n_seconds(self) -> None: self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01) def test_sniff_7x_publish_host(self) -> None: - # pylint: disable=missing-function-docstring # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t: Any = Transport( diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 6f315517..d4469600 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -60,7 +60,6 @@ def wipe_cluster(client: Any) -> None: def wipe_cluster_settings(client: Any) -> None: - # pylint: disable=missing-function-docstring settings = client.cluster.get_settings() new_settings: Any = {} for name, value in settings.items(): @@ -98,7 +97,6 @@ def wipe_snapshots(client: Any) -> None: def wipe_data_streams(client: Any) -> None: - # pylint: disable=missing-function-docstring try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: @@ -106,7 +104,6 @@ def wipe_data_streams(client: Any) -> None: def wipe_indices(client: Any) -> None: - # pylint: disable=missing-function-docstring client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", @@ -115,7 +112,6 @@ def wipe_indices(client: Any) -> None: def wipe_searchable_snapshot_indices(client: Any) -> None: - # pylint: disable=missing-function-docstring cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", @@ -126,19 +122,16 @@ def wipe_searchable_snapshot_indices(client: Any) -> None: def wipe_slm_policies(client: Any) -> None: - # pylint: disable=missing-function-docstring for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) def wipe_auto_follow_patterns(client: Any) -> None: - # pylint: disable=missing-function-docstring for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: client.ccr.delete_auto_follow_pattern(name=pattern["name"]) def wipe_node_shutdown_metadata(client: Any) -> None: - # pylint: disable=missing-function-docstring shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. 
@@ -151,7 +144,6 @@ def wipe_node_shutdown_metadata(client: Any) -> None: def wipe_tasks(client: Any) -> None: - # pylint: disable=missing-function-docstring tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): @@ -159,7 +151,6 @@ def wipe_tasks(client: Any) -> None: def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: - # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True).split("\n") @@ -168,7 +159,6 @@ def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: - # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: if ( @@ -182,7 +172,6 @@ def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: def wait_for_cluster_state_updates_to_finish(client: Any, timeout: int = 30) -> None: - # pylint: disable=missing-function-docstring end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): diff --git a/utils/build_dists.py b/utils/build_dists.py index 5f3ee283..bb9d537e 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -125,7 +125,7 @@ def test_dist(dist: Any) -> None: run( venv_python, "-c", - f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", # pylint: disable=line-too-long + f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", ) # Install aiohttp and see that async is now available @@ -134,7 +134,7 @@ def test_dist(dist: Any) -> None: run( venv_python, "-c", - f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", # pylint: disable=line-too-long + f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", ) # Only need to test 'async_types' for non-aliased package diff --git a/utils/disable_pylint_check.py b/utils/disable_pylint_check.py deleted file mode 100644 index be24c8ef..00000000 --- a/utils/disable_pylint_check.py +++ /dev/null @@ -1,60 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -import os -import re -from typing import Generator - -import black - - -def test_files() -> Generator[str, None, None]: - """ - generator to yield full paths to files that look like unit tests - """ - # TODO should support tests in __init__.py files - test_source_files_re = re.compile(r".*test_[^/]+\.py$") - include_dirs = ["test_opensearchpy", "samples", "benchmarks"] - for top in include_dirs: - for root, dirs, files in os.walk(top, topdown=True): - for name in files: - full_path = os.path.join(root, name) - if test_source_files_re.match(full_path): - yield full_path - - -if __name__ == "__main__": - """ - adds a disable instruction for test_ methods for missing-function-docstring. - test methods typically have good names and can go without docstring for - comments. this is destructive so use git as part of the process. 
- """ - MISSING_FUNCTION_DOCSTRING_DISABLE = "# pylint: disable=missing-function-docstring" - test_method_re = re.compile( - r"(?P[^\S\r\n]*)(?Pasync)*" - r"(?P\s*def\stest_.*:)" - r"(?P(\n|.)*?)(?P\s*#\spylint.*)*", - flags=re.MULTILINE, - ) - for file in test_files(): - new_file_contents = "" # pylint: disable=C0103 - with open(file, encoding="utf-8") as test_file: - print(f"Working on {file}") - full_file = test_file.read() - # TODO multiline function declarations are not supported - new_file_contents = re.sub( - test_method_re, - r"\g\g\g\n\g\g" # pylint: disable=line-too-long - + MISSING_FUNCTION_DOCSTRING_DISABLE, - full_file, - ) - new_file_contents = black.format_str( - new_file_contents, mode=black.FileMode() - ) - with open(f"{file}", "w", encoding="utf-8") as new_file: - new_file.write(new_file_contents) From 2265ad356a7084f642363dc7b1873fda95f97c34 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Mon, 8 Jan 2024 17:09:43 -0500 Subject: [PATCH 11/14] added pylint.extensions.docparams plugin updated some docstrings to correct parameters removed pylint from setup.cfg Signed-off-by: Mark Cohen --- noxfile.py | 7 +++++++ opensearchpy/compat.py | 6 ++++++ setup.cfg | 3 --- .../test_async/test_server/test_helpers/conftest.py | 6 ++++++ .../test_server/test_helpers/test_faceted_search.py | 5 ----- test_opensearchpy/test_server/test_helpers/conftest.py | 2 +- 6 files changed, 20 insertions(+), 9 deletions(-) diff --git a/noxfile.py b/noxfile.py index 8b88a2df..123ee7ff 100644 --- a/noxfile.py +++ b/noxfile.py @@ -130,12 +130,15 @@ def lint_per_folder(session: Any) -> None: # TODO determine how to separate generated code from generator code for linting exclude_path_from_linting = ["opensearchpy/"] + default_enable = [ "line-too-long", "invalid-name", "pointless-statement", "unspecified-encoding", "missing-function-docstring", + "missing-param-doc", + "differing-param-doc", ] override_enable = { "test_opensearchpy/": [ @@ -143,6 +146,8 @@ def lint_per_folder(session: Any) -> None: # "invalid-name", lots of short functions with one or two character names "pointless-statement", "unspecified-encoding", + "missing-param-doc", + "differing-param-doc", # "missing-function-docstring", test names usually are, self describing ] } @@ -155,6 +160,8 @@ def lint_per_folder(session: Any) -> None: "--disable=all", "--max-line-length=240", "--good-names-rgxs=^[_a-z][_a-z0-9]?$", + "--load-plugins", + "pylint.extensions.docparams", ] if source_file in override_enable: args.append(f"--enable={','.join(override_enable[source_file])}") diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index 977edf37..72355bb9 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -35,6 +35,12 @@ def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: + """ + + :param y: + :param encoding: + :return: + """ if not isinstance(x, str): return x.decode(encoding) return x diff --git a/setup.cfg b/setup.cfg index 62150074..5bfc6492 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,7 +22,4 @@ target-version = 'py33' [mypy] ignore_missing_imports=True -[pylint] -max-line-length = 240 -good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 50aca89d..44d90e7b 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -49,6 +49,12 @@ async def 
opensearch_version(client: Any) -> Any: :return: """ info = client.info() + """ + yields the version of the OpenSearch cluster + :param client: client connection to OpenSearch + :return: yields major version number + """ + info = client.info() print(info) yield (int(x) async for x in match_version(info)) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 40e49daa..49685e32 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -85,11 +85,6 @@ class CommitSearch(AsyncFacetedSearch): @pytest.fixture(scope="function") # type: ignore def repo_search_cls(opensearch_version: Any) -> Any: - """ - creates a mock AsyncFacetedSearch instance to search a git repo - :param opensearch_version: - :return: - """ interval_type = "calendar_interval" class RepoSearch(AsyncFacetedSearch): diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 967c320e..4f9a8d6f 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -65,7 +65,7 @@ def opensearch_version(client: Any) -> Any: def match_version(info: Any) -> Any: """ matches the major version from the given client info - :param info: + :param info: part of the response from OpenSearch """ match = re.match(r"^([0-9.]+)", info["version"]["number"]) assert match is not None From d9c087470f392ce3c9f41237c70321293084112d Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Wed, 17 Jan 2024 16:25:04 -0500 Subject: [PATCH 12/14] added four lints for opensearchpy/ Signed-off-by: Mark Cohen --- noxfile.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/noxfile.py b/noxfile.py index 123ee7ff..a2834692 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,7 +25,7 @@ # under the License. 
-from typing import Any +from typing import Any, List import nox @@ -128,9 +128,10 @@ def lint_per_folder(session: Any) -> None: :param session: the current nox session """ - # TODO determine how to separate generated code from generator code for linting - exclude_path_from_linting = ["opensearchpy/"] + # any paths that should not be run through pylint + exclude_path_from_linting: List[str] = [] + # all paths not referenced in override_enable will run these lints default_enable = [ "line-too-long", "invalid-name", @@ -149,7 +150,13 @@ def lint_per_folder(session: Any) -> None: "missing-param-doc", "differing-param-doc", # "missing-function-docstring", test names usually are, self describing - ] + ], + "opensearchpy/": [ + "line-too-long", + "invalid-name", + "pointless-statement", + "unspecified-encoding", + ], } for source_file in SOURCE_FILES: From ffa80b36c23102b94fd99dedfd5e5358a0e3aae3 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Wed, 17 Jan 2024 17:48:35 -0500 Subject: [PATCH 13/14] adding await back to client.info() call Signed-off-by: Mark Cohen --- .../test_async/test_server/test_helpers/conftest.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 44d90e7b..172a6a39 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -43,18 +43,12 @@ async def client() -> Any: @fixture(scope="function") # type: ignore async def opensearch_version(client: Any) -> Any: - """ - yields the version of the OpenSearch cluster - :param client: - :return: - """ - info = client.info() """ yields the version of the OpenSearch cluster :param client: client connection to OpenSearch :return: yields major version number """ - info = client.info() + info = await client.info() print(info) yield (int(x) async for x in match_version(info)) From 107469c6205df592fd3049c497a3489067833d15 Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Thu, 18 Jan 2024 20:56:31 +0000 Subject: [PATCH 14/14] updated TODOs as requested renamed test_opensearchpy.test_async.test_server.test_helpers.conftest.setup_ubq_tests to setup_update_by_query_tests added OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/stats/50_noop_update[0] to skip tests list run_tests.py catches a CalledProcessError when the git repo already exists and the command to add the origin fails in fetch_opensearch_repo() Signed-off-by: Mark Cohen --- opensearchpy/compat.py | 8 +++---- test_opensearchpy/run_tests.py | 21 +++++++++++++------ .../test_server/test_helpers/conftest.py | 4 ++-- .../test_server/test_helpers/test_actions.py | 2 +- .../test_helpers/test_faceted_search.py | 2 +- .../test_helpers/test_update_by_query.py | 12 +++++------ .../test_server/test_rest_api_spec.py | 21 ++++++++++--------- utils/generate_api.py | 2 +- 8 files changed, 41 insertions(+), 31 deletions(-) diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index 72355bb9..1bf6df94 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -36,10 +36,10 @@ def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: """ - - :param y: - :param encoding: - :return: + returns x as a string encoded in "encoding" if it is not already a string + :param x: the value to convert to a str + :param encoding: the encoding to convert to - see 
https://docs.python.org/3/library/codecs.html#standard-encodings + :return: an encoded str """ if not isinstance(x, str): return x.decode(encoding) diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index b54218a0..2aa94a76 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -37,6 +37,7 @@ import sys from os import environ from os.path import abspath, dirname, exists, join, pardir +from subprocess import CalledProcessError from typing import Any @@ -82,12 +83,20 @@ def fetch_opensearch_repo() -> None: # make a new blank repository in the test directory subprocess.check_call("cd %s && git init" % repo_path, shell=True) - # add a remote - subprocess.check_call( - "cd %s && git remote add origin https://github.com/opensearch-project/opensearch.git" - % repo_path, - shell=True, - ) # TODO this fails when the remote already exists; should clean up or ignore? + try: + # add a remote + subprocess.check_call( + "cd %s && git remote add origin https://github.com/opensearch-project/opensearch.git" + % repo_path, + shell=True, + ) + except CalledProcessError as e: + # if the run is interrupted from a previous run, it doesn't clean up, and the git add origin command + # errors out; this allows the test to continue + remote_origin_already_exists = 3 + print(e) + if e.returncode != remote_origin_already_exists: + sys.exit(1) # fetch the sha commit, version from info() print("Fetching opensearch repo...") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 172a6a39..983ae711 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -115,9 +115,9 @@ async def pull_request(write_client: Any) -> Any: @fixture # type: ignore -async def setup_ubq_tests(client: Any) -> str: +async def setup_update_by_query_tests(client: Any) -> str: """ - #todo what's a ubq test? this is ignored. should it be deleted? + sets up update by query tests :param client: :return: an index name """ diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index c1a5dc7a..29fbb5a3 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -61,7 +61,7 @@ def __init__( async def bulk(self, *args: Any, **kwargs: Any) -> Any: """ - increments number of times called and, when it equals fail_at, raises self.fail_with when + increments number of times called and, when it equals fail_at, raises self.fail_with """ self._called += 1 if self._called in self._fail_at: diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 49685e32..90899d62 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -57,7 +57,7 @@ class MetricSearch(AsyncFacetedSearch): @pytest.fixture(scope="function") # type: ignore def commit_search_cls(opensearch_version: Any) -> Any: """ - :param opensearch_version: not used. #todo should be removed? 
+ :param opensearch_version the semver version of OpenSearch :return: an AsyncFacetedSearch for git commits """ interval_kwargs = {"fixed_interval": "1d"} diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index a26d046e..9301e3e6 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -19,9 +19,9 @@ async def test_update_by_query_no_script( - write_client: Any, setup_ubq_tests: Any + write_client: Any, setup_update_by_query_tests: Any ) -> None: - index = setup_ubq_tests + index = setup_update_by_query_tests ubq = ( AsyncUpdateByQuery(using=write_client) @@ -40,9 +40,9 @@ async def test_update_by_query_no_script( async def test_update_by_query_with_script( - write_client: Any, setup_ubq_tests: Any + write_client: Any, setup_update_by_query_tests: Any ) -> None: - index = setup_ubq_tests + index = setup_update_by_query_tests ubq = ( AsyncUpdateByQuery(using=write_client) @@ -59,9 +59,9 @@ async def test_update_by_query_with_script( async def test_delete_by_query_with_script( - write_client: Any, setup_ubq_tests: Any + write_client: Any, setup_update_by_query_tests: Any ) -> None: - index = setup_ubq_tests + index = setup_update_by_query_tests ubq = ( AsyncUpdateByQuery(using=write_client) diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index cb03cb76..8f965501 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -74,21 +74,22 @@ # Warning about date_histogram.interval deprecation is raised randomly "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipeline/10_basic", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/pit/10_basic", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/clone/40_wait_for_completion[0]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/forcemerge/20_wait_for_completion[0]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/open/30_wait_for_completion[0]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/shrink/50_wait_for_completion[0]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/split/40_wait_for_completion[0]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/clone/40_wait_for_completion[0]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/forcemerge/20_wait_for_completion[0]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/open/30_wait_for_completion[0]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/shrink/50_wait_for_completion[0]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/split/40_wait_for_completion[0]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/nodes/10_basic[1]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/nodeattrs/10_basic[1]", - 
"OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[2]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[3]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[2]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/put_settings/10_basic[3]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cat/indices/10_basic[2]", - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/10_basic[6]", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/20_request_timeout", # pylint: disable=line-too-long - "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search/aggregation/20_terms[4]", # pylint: disable=line-too-long + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/10_basic[6]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/cluster/health/20_request_timeout", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/search/aggregation/20_terms[4]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/tasks/list/10_basic[0]", "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/index/90_unsigned_long[1]", + "OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/indices/stats/50_noop_update[0]", "search/aggregation/250_moving_fn[1]", # body: null "indices/simulate_index_template/10_basic[2]", diff --git a/utils/generate_api.py b/utils/generate_api.py index 86868f65..b10a9056 100644 --- a/utils/generate_api.py +++ b/utils/generate_api.py @@ -418,7 +418,7 @@ def method(self) -> Any: """ To adhere to the HTTP RFC we shouldn't send bodies in GET requests. - :return: + :return: an updated HTTP method to use to communicate with the OpenSearch API """ default_method = self.path["methods"][0]