From 80262a10e2f47c40687d5fda645167fbedd1cb31 Mon Sep 17 00:00:00 2001
From: Helen Lin
Date: Thu, 18 Jul 2024 13:26:39 -0700
Subject: [PATCH 1/3] feat: do not convert docdb records to DataAssetRecord

---
 src/aind_data_access_api/document_db.py     | 187 ++++++++++++
 src/aind_data_access_api/document_db_ssh.py |   3 +
 src/aind_data_access_api/document_store.py  |   2 +
 src/aind_data_access_api/models.py          |   1 +
 tests/test_document_db.py                   | 314 ++++++++++++++++++++
 5 files changed, 507 insertions(+)

diff --git a/src/aind_data_access_api/document_db.py b/src/aind_data_access_api/document_db.py
index 988b51c..beba463 100644
--- a/src/aind_data_access_api/document_db.py
+++ b/src/aind_data_access_api/document_db.py
@@ -13,6 +13,7 @@
 from requests import Response
 
 from aind_data_access_api.models import DataAssetRecord
+from aind_data_access_api.utils import is_dict_corrupt
 
 
 class Client:
@@ -226,6 +227,96 @@ def _bulk_write(self, operations: List[dict]) -> Response:
 class MetadataDbClient(Client):
     """Class to manage reading and writing to metadata db"""
 
+    def retrieve_docdb_records(
+        self,
+        filter_query: Optional[dict] = None,
+        projection: Optional[dict] = None,
+        sort: Optional[dict] = None,
+        limit: int = 0,
+        paginate: bool = True,
+        paginate_batch_size: int = 10,
+        paginate_max_iterations: int = 20000,
+    ) -> List[dict]:
+        """
+        Retrieve raw JSON records from the DocDB API Gateway as a list
+        of dicts.
+
+        Parameters
+        ----------
+        filter_query : Optional[dict]
+            Filter to apply to the records being returned. Default is None.
+        projection : Optional[dict]
+            Subset of document fields to return. Default is None.
+        sort : Optional[dict]
+            Sort records when returned. Default is None.
+        limit : int
+            Return a smaller set of records. 0 for all records. Default is 0.
+        paginate : bool
+            If set to True, will batch the queries to the API Gateway. It
+            may be faster to set to False if the number of records expected
+            to be returned is small. Default is True.
+        paginate_batch_size : int
+            Number of records to return at a time. Default is 10.
+        paginate_max_iterations : int
+            Max number of iterations to run to prevent indefinite calls to
+            the API Gateway. Default is 20000.
+
+        Returns
+        -------
+        List[dict]
+
+        """
+        if paginate is False:
+            records = self._get_records(
+                filter_query=filter_query,
+                projection=projection,
+                sort=sort,
+                limit=limit,
+            )
+        else:
+            # Get record count
+            record_counts = self._count_records(filter_query)
+            total_record_count = record_counts["total_record_count"]
+            filtered_record_count = record_counts["filtered_record_count"]
+            if filtered_record_count <= paginate_batch_size:
+                records = self._get_records(
+                    filter_query=filter_query, projection=projection, sort=sort
+                )
+            else:
+                records = []
+                errors = []
+                num_of_records_collected = 0
+                limit = filtered_record_count if limit == 0 else limit
+                skip = 0
+                iter_count = 0
+                while (
+                    skip < total_record_count
+                    and num_of_records_collected
+                    < min(filtered_record_count, limit)
+                    and iter_count < paginate_max_iterations
+                ):
+                    try:
+                        batched_records = self._get_records(
+                            filter_query=filter_query,
+                            projection=projection,
+                            sort=sort,
+                            limit=paginate_batch_size,
+                            skip=skip,
+                        )
+                        num_of_records_collected += len(batched_records)
+                        records.extend(batched_records)
+                    except Exception as e:
+                        errors.append(repr(e))
+                    skip = skip + paginate_batch_size
+                    iter_count += 1
+                # TODO: Add optional progress bar?
+                records = records[0:limit]
+                if len(errors) > 0:
+                    logging.error(
+                        f"There were errors retrieving records. {errors}"
+                    )
+        return records
+
+    # TODO: deprecate this method
     def retrieve_data_asset_records(
         self,
         filter_query: Optional[dict] = None,
@@ -318,6 +409,19 @@ def retrieve_data_asset_records(
             data_asset_records.append(DataAssetRecord(**record))
         return data_asset_records
 
+    def upsert_one_docdb_record(self, record: dict) -> Response:
+        """Upsert one record if the record is not corrupt"""
+        if record.get("_id") is None:
+            raise ValueError("Record does not have an _id field.")
+        if is_dict_corrupt(record):
+            raise ValueError("Record is corrupt and cannot be upserted.")
+        response = self._upsert_one_record(
+            record_filter={"_id": record["_id"]},
+            update={"$set": json.loads(json.dumps(record, default=str))},
+        )
+        return response
+
+    # TODO: deprecate this method
     def upsert_one_record(
         self, data_asset_record: DataAssetRecord
     ) -> Response:
@@ -362,6 +466,89 @@ def _record_to_operation(record: str, record_id: str) -> dict:
             }
         }
 
+    def upsert_list_of_docdb_records(
+        self,
+        records: List[dict],
+        max_payload_size: int = 2e6,
+    ) -> List[Response]:
+        """
+        Upsert a list of records. There's a limit to the size of the
+        request that can be sent, so we chunk the requests.
+
+        Parameters
+        ----------
+        records : List[dict]
+            List of records to upsert into the DocDB database
+        max_payload_size : int
+            Chunk requests into smaller lists no bigger than this value in
+            bytes. If a single record is larger than this value in bytes,
+            an attempt will be made to upsert the record but will most
+            likely receive a 413 status code. Default is 2e6 bytes. The max
+            payload for the API Gateway, including headers, is 10MB.
+
+        Returns
+        -------
+        List[Response]
+            A list of responses from the API Gateway.
+
+        """
+        if len(records) == 0:
+            return []
+        else:
+            # Check that no record is corrupt or missing an _id
+            for record in records:
+                if record.get("_id") is None:
+                    raise ValueError("A record does not have an _id field.")
+                if is_dict_corrupt(record):
+                    raise ValueError(
+                        "A record is corrupt and cannot be upserted."
+                    )
+            # Chunk records
+            first_index = 0
+            end_index = len(records)
+            second_index = 1
+            responses = []
+            record_json = json.dumps(records[first_index], default=str)
+            total_size = getsizeof(record_json)
+            operations = [
+                self._record_to_operation(
+                    record=record_json,
+                    record_id=records[first_index].get("_id"),
+                )
+            ]
+            while second_index < end_index + 1:
+                if second_index == end_index:
+                    response = self._bulk_write(operations)
+                    responses.append(response)
+                else:
+                    record_json = json.dumps(
+                        records[second_index], default=str
+                    )
+                    record_size = getsizeof(record_json)
+                    if total_size + record_size > max_payload_size:
+                        response = self._bulk_write(operations)
+                        responses.append(response)
+                        first_index = second_index
+                        operations = [
+                            self._record_to_operation(
+                                record=record_json,
+                                record_id=records[first_index].get("_id"),
+                            )
+                        ]
+                        total_size = record_size
+                    else:
+                        operations.append(
+                            self._record_to_operation(
+                                record=record_json,
+                                record_id=records[second_index].get("_id"),
+                            )
+                        )
+                        total_size += record_size
+                second_index = second_index + 1
+            return responses
+
+    # TODO: deprecate this method
     def upsert_list_of_records(
         self,
         data_asset_records: List[DataAssetRecord],
diff --git a/src/aind_data_access_api/document_db_ssh.py b/src/aind_data_access_api/document_db_ssh.py
index b41bdd2..44943b8 100644
--- a/src/aind_data_access_api/document_db_ssh.py
+++ b/src/aind_data_access_api/document_db_ssh.py
@@ -69,6 +69,9 @@ def from_secrets_manager(
 class DocumentDbSSHClient:
     """Class to establish a Document Store client with SSH tunneling."""
 
+    # TODO: add retrieve_docdb_records, upsert_one_docdb_record,
+    # and upsert_list_of_docdb_records methods
+
     def __init__(self, credentials: DocumentDbSSHCredentials):
         """
         Construct a client to interface with a Document Database.
diff --git a/src/aind_data_access_api/document_store.py b/src/aind_data_access_api/document_store.py
index 1eb58a6..8271a19 100644
--- a/src/aind_data_access_api/document_store.py
+++ b/src/aind_data_access_api/document_store.py
@@ -12,6 +12,7 @@
 from aind_data_access_api.models import DataAssetRecord
 
 
+# TODO: deprecate this class
 class DocumentStoreCredentials(CoreCredentials):
     """Document Store credentials"""
 
@@ -31,6 +32,7 @@ class DocumentStoreCredentials(CoreCredentials):
     database: str = Field(...)
 
 
+# TODO: deprecate this client
 class Client:
     """Class to establish a document store client."""
 
diff --git a/src/aind_data_access_api/models.py b/src/aind_data_access_api/models.py
index de58c48..fd7abcf 100644
--- a/src/aind_data_access_api/models.py
+++ b/src/aind_data_access_api/models.py
@@ -5,6 +5,7 @@
 from pydantic import BaseModel, Extra, Field
 
 
+# TODO: deprecate this model
 class DataAssetRecord(BaseModel):
     """The records in the Data Asset Collection needs to contain certain
     fields to easily query and index the data."""
diff --git a/tests/test_document_db.py b/tests/test_document_db.py
index b3bf619..0f28ad6 100644
--- a/tests/test_document_db.py
+++ b/tests/test_document_db.py
@@ -245,6 +245,85 @@ class TestMetadataDbClient(unittest.TestCase):
         "collection": "data_assets",
     }
 
+    @patch("aind_data_access_api.document_db.Client._get_records")
+    @patch("aind_data_access_api.document_db.Client._count_records")
+    def test_retrieve_docdb_records(
+        self,
+        mock_count_record_response: MagicMock,
+        mock_get_record_response: MagicMock,
+    ):
+        """Tests retrieving docdb records"""
+
+        client = MetadataDbClient(**self.example_client_args)
+        expected_response = [
+            {
+                "_id": "abc-123",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "location": "some_url",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            }
+        ]
+        mock_get_record_response.return_value = expected_response
+        mock_count_record_response.return_value = {
+            "total_record_count": 1,
+            "filtered_record_count": 1,
+        }
+        records = client.retrieve_docdb_records()
+        paginate_records = client.retrieve_docdb_records(paginate=False)
+        self.assertEqual(expected_response, records)
+        self.assertEqual(expected_response, paginate_records)
+
+    @patch("aind_data_access_api.document_db.Client._get_records")
+    @patch("aind_data_access_api.document_db.Client._count_records")
+    @patch("logging.error")
+    def test_retrieve_many_docdb_records(
+        self,
+        mock_log_error: MagicMock,
+        mock_count_record_response: MagicMock,
+        mock_get_record_response: MagicMock,
+    ):
+        """Tests retrieving many docdb records"""
+
+        client = MetadataDbClient(**self.example_client_args)
+        mocked_record_list = [
+            {
+                "_id": f"{id_num}",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "location": "some_url",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            }
+            for id_num in range(0, 10)
+        ]
+        mock_get_record_response.side_effect = [
+            mocked_record_list[0:2],
+            Exception("Test"),
+            mocked_record_list[4:6],
+            mocked_record_list[6:8],
+            mocked_record_list[8:10],
+        ]
+        mock_count_record_response.return_value = {
+            "total_record_count": len(mocked_record_list),
+            "filtered_record_count": len(mocked_record_list),
+        }
+        expected_response = [
+            {
+                "_id": f"{id_num}",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "location": "some_url",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            }
+            for id_num in [0, 1, 4, 5, 6, 7, 8, 9]
+        ]
+        records = client.retrieve_docdb_records(paginate_batch_size=2)
+        mock_log_error.assert_called_once_with(
+            "There were errors retrieving records. "
[\"Exception('Test')\"]" + ) + self.assertEqual(expected_response, records) + + # TODO: Deprecate this test @patch("aind_data_access_api.document_db.Client._get_records") @patch("aind_data_access_api.document_db.Client._count_records") def test_retrieve_data_asset_records( @@ -282,6 +361,7 @@ def test_retrieve_data_asset_records( self.assertEqual(expected_response, list(records)) self.assertEqual(expected_response, list(paginate_records)) + # TODO: Deprecate this test @patch("aind_data_access_api.document_db.Client._get_records") @patch("aind_data_access_api.document_db.Client._count_records") @patch("logging.error") @@ -331,6 +411,55 @@ def test_retrieve_many_data_asset_records( ) self.assertEqual(expected_response, list(records)) + @patch("aind_data_access_api.document_db.Client._upsert_one_record") + def test_upsert_one_docdb_record(self, mock_upsert: MagicMock): + """Tests upserting one docdb record""" + client = MetadataDbClient(**self.example_client_args) + mock_upsert.return_value = {"message": "success"} + record = { + "_id": "abc-123", + "name": "modal_00000_2000-10-10_10-10-10", + "created": datetime(2000, 10, 10, 10, 10, 10), + "location": "some_url", + "subject": {"subject_id": "00000", "sex": "Female"}, + } + response = client.upsert_one_docdb_record(record) + self.assertEqual({"message": "success"}, response) + mock_upsert.assert_called_once_with( + record_filter={"_id": "abc-123"}, + update={"$set": json.loads(json.dumps(record, default=str))}, + ) + + @patch("aind_data_access_api.document_db.Client._upsert_one_record") + def test_upsert_one_docdb_record_invalid_corrupt( + self, mock_upsert: MagicMock + ): + """Tests upserting one docdb record if record is invalid or corrupt""" + client = MetadataDbClient(**self.example_client_args) + record_no__id = { + "id": "abc-123", + "name": "modal_00000_2000-10-10_10-10-10", + "created": datetime(2000, 10, 10, 10, 10, 10), + "location": "some_url", + "subject": {"subject_id": "00000", "sex": "Female"}, + } + record_corrupt = { + "_id": "abc-123", + "name.corrupt": "modal_00000_2000-10-10_10-10-10", + } + with self.assertRaises(ValueError) as e: + client.upsert_one_docdb_record(record_no__id) + self.assertEqual( + "Record does not have an _id field.", str(e.exception) + ) + with self.assertRaises(ValueError) as e: + client.upsert_one_docdb_record(record_corrupt) + self.assertEqual( + "Record is corrupt and cannot be upserted.", str(e.exception) + ) + mock_upsert.assert_not_called() + + # TODO: Deprecate this test @patch("aind_data_access_api.document_db.Client._upsert_one_record") def test_upsert_one_record(self, mock_upsert: MagicMock): """Tests upserting one data asset record""" @@ -354,6 +483,189 @@ def test_upsert_one_record(self, mock_upsert: MagicMock): }, ) + @patch("aind_data_access_api.document_db.Client._bulk_write") + def test_upsert_list_of_docdb_records(self, mock_bulk_write: MagicMock): + """Tests upserting a list of docdb records""" + + client = MetadataDbClient(**self.example_client_args) + mock_bulk_write.return_value = {"message": "success"} + records = [ + { + "_id": "abc-123", + "name": "modal_00000_2000-10-10_10-10-10", + "created": datetime(2000, 10, 10, 10, 10, 10), + "location": "some_url", + "subject": {"subject_id": "00000", "sex": "Female"}, + }, + { + "_id": "abc-125", + "name": "modal_00001_2000-10-10_10-10-10", + "created": datetime(2000, 10, 10, 10, 10, 10), + "location": "some_url", + "subject": {"subject_id": "00000", "sex": "Male"}, + }, + ] + response = client.upsert_list_of_docdb_records(records) + 
+        self.assertEqual([{"message": "success"}], response)
+        mock_bulk_write.assert_called_once_with(
+            [
+                {
+                    "UpdateOne": {
+                        "filter": {"_id": "abc-123"},
+                        "update": {
+                            "$set": json.loads(
+                                json.dumps(records[0], default=str)
+                            )
+                        },
+                        "upsert": "True",
+                    }
+                },
+                {
+                    "UpdateOne": {
+                        "filter": {"_id": "abc-125"},
+                        "update": {
+                            "$set": json.loads(
+                                json.dumps(records[1], default=str)
+                            )
+                        },
+                        "upsert": "True",
+                    }
+                },
+            ]
+        )
+
+    @patch("aind_data_access_api.document_db.Client._bulk_write")
+    def test_upsert_empty_list_of_docdb_records(
+        self, mock_bulk_write: MagicMock
+    ):
+        """Tests upserting an empty list of docdb records"""
+
+        client = MetadataDbClient(**self.example_client_args)
+        records = []
+
+        response = client.upsert_list_of_docdb_records(records)
+        self.assertEqual([], response)
+        mock_bulk_write.assert_not_called()
+
+    @patch("aind_data_access_api.document_db.Client._bulk_write")
+    def test_upsert_chunked_list_of_docdb_records(
+        self, mock_bulk_write: MagicMock
+    ):
+        """Tests upserting a list of docdb records in chunks"""
+
+        client = MetadataDbClient(**self.example_client_args)
+        mock_bulk_write.return_value = {"message": "success"}
+        records = [
+            {
+                "_id": "abc-123",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            },
+            {
+                "_id": "abc-125",
+                "name": "modal_00001_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Male"},
+            },
+        ]
+
+        response = client.upsert_list_of_docdb_records(
+            records, max_payload_size=1
+        )
+        self.assertEqual(
+            [{"message": "success"}, {"message": "success"}], response
+        )
+        mock_bulk_write.assert_has_calls(
+            [
+                call(
+                    [
+                        {
+                            "UpdateOne": {
+                                "filter": {"_id": "abc-123"},
+                                "update": {
+                                    "$set": json.loads(
+                                        json.dumps(records[0], default=str)
+                                    )
+                                },
+                                "upsert": "True",
+                            }
+                        }
+                    ]
+                ),
+                call(
+                    [
+                        {
+                            "UpdateOne": {
+                                "filter": {"_id": "abc-125"},
+                                "update": {
+                                    "$set": json.loads(
+                                        json.dumps(records[1], default=str)
+                                    )
+                                },
+                                "upsert": "True",
+                            }
+                        }
+                    ]
+                ),
+            ]
+        )
+
+    @patch("aind_data_access_api.document_db.Client._bulk_write")
+    def test_upsert_list_of_docdb_records_invalid_corrupt(
+        self, mock_bulk_write: MagicMock
+    ):
+        """Tests upserting a list of docdb records if a record is invalid or
+        corrupt"""
+
+        client = MetadataDbClient(**self.example_client_args)
+        records_no__id = [
+            {
+                "_id": "abc-123",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            },
+            {
+                "id": "abc-125",
+                "name": "modal_00001_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Male"},
+            },
+        ]
+        records_corrupt = [
+            {
+                "_id": "abc-123",
+                "name": "modal_00000_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Female"},
+            },
+            {
+                "_id": "abc-125",
+                "name.corrupt": "modal_00001_2000-10-10_10-10-10",
+                "created": datetime(2000, 10, 10, 10, 10, 10),
+                "location": "some_url",
+                "subject": {"subject_id": "00000", "sex": "Male"},
+            },
+        ]
+        with self.assertRaises(ValueError) as e:
+            client.upsert_list_of_docdb_records(records_no__id)
+        self.assertEqual(
+            "A record does not have an _id field.", str(e.exception)
+        )
+        with self.assertRaises(ValueError) as e:
+            client.upsert_list_of_docdb_records(records_corrupt)
+        self.assertEqual(
+            "A record is corrupt and cannot be upserted.", str(e.exception)
+        )
+        mock_bulk_write.assert_not_called()
+
+    # TODO: Deprecate this test
     @patch("aind_data_access_api.document_db.Client._bulk_write")
     def test_upsert_list_of_records(self, mock_bulk_write: MagicMock):
         """Tests upserting a list of data asset records"""
@@ -416,6 +728,7 @@ def test_upsert_list_of_records(self, mock_bulk_write: MagicMock):
         ]
     )
 
+    # TODO: Deprecate this test
     @patch("aind_data_access_api.document_db.Client._bulk_write")
     def test_upsert_empty_list_of_records(self, mock_bulk_write: MagicMock):
         """Tests upserting an empty list of data asset records"""
@@ -427,6 +740,7 @@ def test_upsert_empty_list_of_records(self, mock_bulk_write: MagicMock):
         self.assertEqual([], response)
         mock_bulk_write.assert_not_called()
 
+    # TODO: Deprecate this test
     @patch("aind_data_access_api.document_db.Client._bulk_write")
     def test_upsert_chunked_list_of_records(self, mock_bulk_write: MagicMock):
         """Tests upserting a list of data asset records in chunks"""

From 5916ddd7d1c07540d8f32876f65ae7a0487046ea Mon Sep 17 00:00:00 2001
From: Helen Lin
Date: Thu, 18 Jul 2024 13:46:44 -0700
Subject: [PATCH 2/3] feat: add deprecation warnings

---
 src/aind_data_access_api/document_db.py | 40 ++++++++++++++++++++++---
 src/aind_data_access_api/models.py      |  2 +-
 tests/test_document_db.py               | 12 ++++----
 3 files changed, 43 insertions(+), 11 deletions(-)

diff --git a/src/aind_data_access_api/document_db.py b/src/aind_data_access_api/document_db.py
index beba463..d508be6 100644
--- a/src/aind_data_access_api/document_db.py
+++ b/src/aind_data_access_api/document_db.py
@@ -2,6 +2,7 @@
 
 import json
 import logging
+import warnings
 from functools import cached_property
 from sys import getsizeof
 from typing import List, Optional, Tuple
@@ -316,7 +317,7 @@ def retrieve_docdb_records(
         )
         return records
 
-    # TODO: deprecate this method
+    # TODO: remove this method
     def retrieve_data_asset_records(
         self,
         filter_query: Optional[dict] = None,
@@ -328,6 +329,9 @@ def retrieve_data_asset_records(
         paginate_max_iterations: int = 20000,
     ) -> List[DataAssetRecord]:
         """
+        DEPRECATED: This method is deprecated. Use `retrieve_docdb_records`
+        instead.
+
         Retrieve data asset records
 
         Parameters
@@ -355,6 +359,13 @@ def retrieve_data_asset_records(
         List[DataAssetRecord]
 
         """
+        warnings.warn(
+            "retrieve_data_asset_records is deprecated. "
+            "Use retrieve_docdb_records instead."
+            "",
+            DeprecationWarning,
+            stacklevel=2,
+        )
         if paginate is False:
             records = self._get_records(
                 filter_query=filter_query,
@@ -421,12 +432,23 @@ def upsert_one_docdb_record(self, record: dict) -> Response:
         )
         return response
 
-    # TODO: deprecate this method
+    # TODO: remove this method
     def upsert_one_record(
         self, data_asset_record: DataAssetRecord
     ) -> Response:
-        """Upsert one record"""
+        """
+        DEPRECATED: This method is deprecated. Use `upsert_one_docdb_record`
+        instead.
+
+        Upsert one record
+        """
+        warnings.warn(
+            "upsert_one_record is deprecated. "
+            "Use upsert_one_docdb_record instead."
+ "", + DeprecationWarning, + stacklevel=2, + ) response = self._upsert_one_record( record_filter={"_id": data_asset_record.id}, update={ @@ -548,13 +570,16 @@ def upsert_list_of_docdb_records( second_index = second_index + 1 return responses - # TODO: deprecate this method + # TODO: remove this method def upsert_list_of_records( self, data_asset_records: List[DataAssetRecord], max_payload_size: int = 2e6, ) -> List[Response]: """ + DEPRECATED: This method is deprecated. Use + `upsert_list_of_docdb_records` instead. + Upsert a list of records. There's a limit to the size of the request that can be sent, so we chunk the requests. @@ -576,6 +601,13 @@ def upsert_list_of_records( A list of responses from the API Gateway. """ + warnings.warn( + "upsert_list_of_records is deprecated. " + "Use upsert_list_of_docdb_records instead." + "", + DeprecationWarning, + stacklevel=2, + ) if len(data_asset_records) == 0: return [] else: diff --git a/src/aind_data_access_api/models.py b/src/aind_data_access_api/models.py index fd7abcf..ecb9163 100644 --- a/src/aind_data_access_api/models.py +++ b/src/aind_data_access_api/models.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, Extra, Field -# TODO: deprecate this model +# TODO: remove this model class DataAssetRecord(BaseModel): """The records in the Data Asset Collection needs to contain certain fields to easily query and index the data.""" diff --git a/tests/test_document_db.py b/tests/test_document_db.py index 0f28ad6..e787e06 100644 --- a/tests/test_document_db.py +++ b/tests/test_document_db.py @@ -323,7 +323,7 @@ def test_retrieve_many_docdb_records( ) self.assertEqual(expected_response, records) - # TODO: Deprecate this test + # TODO: remove this test @patch("aind_data_access_api.document_db.Client._get_records") @patch("aind_data_access_api.document_db.Client._count_records") def test_retrieve_data_asset_records( @@ -361,7 +361,7 @@ def test_retrieve_data_asset_records( self.assertEqual(expected_response, list(records)) self.assertEqual(expected_response, list(paginate_records)) - # TODO: Deprecate this test + # TODO: remove this test @patch("aind_data_access_api.document_db.Client._get_records") @patch("aind_data_access_api.document_db.Client._count_records") @patch("logging.error") @@ -459,7 +459,7 @@ def test_upsert_one_docdb_record_invalid_corrupt( ) mock_upsert.assert_not_called() - # TODO: Deprecate this test + # TODO: remove this test @patch("aind_data_access_api.document_db.Client._upsert_one_record") def test_upsert_one_record(self, mock_upsert: MagicMock): """Tests upserting one data asset record""" @@ -665,7 +665,7 @@ def test_upsert_list_of_docdb_records_invalid_corrupt( ) mock_bulk_write.assert_not_called() - # TODO: Deprecate this test + # TODO: remove this test @patch("aind_data_access_api.document_db.Client._bulk_write") def test_upsert_list_of_records(self, mock_bulk_write: MagicMock): """Tests upserting a list of data asset records""" @@ -728,7 +728,7 @@ def test_upsert_list_of_records(self, mock_bulk_write: MagicMock): ] ) - # TODO: Deprecate this test + # TODO: remove this test @patch("aind_data_access_api.document_db.Client._bulk_write") def test_upsert_empty_list_of_records(self, mock_bulk_write: MagicMock): """Tests upserting an empty list of data asset records""" @@ -740,7 +740,7 @@ def test_upsert_empty_list_of_records(self, mock_bulk_write: MagicMock): self.assertEqual([], response) mock_bulk_write.assert_not_called() - # TODO: Deprecate this test + # TODO: remove this test 
     @patch("aind_data_access_api.document_db.Client._bulk_write")
     def test_upsert_chunked_list_of_records(self, mock_bulk_write: MagicMock):
         """Tests upserting a list of data asset records in chunks"""

From 093ca3e98f0d8c09ac670069c223aa785dd64389 Mon Sep 17 00:00:00 2001
From: Helen Lin
Date: Thu, 18 Jul 2024 13:50:00 -0700
Subject: [PATCH 3/3] docs: update UserGuide for readonly rest api

---
 docs/source/UserGuide.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/UserGuide.rst b/docs/source/UserGuide.rst
index c214801..6563e06 100644
--- a/docs/source/UserGuide.rst
+++ b/docs/source/UserGuide.rst
@@ -66,7 +66,7 @@ REST API (Read-Only)
     filter = {"subject.subject_id": "123456"}
     limit = 1000
     paginate_batch_size = 100
-    response = docdb_api_client.retrieve_data_asset_records(
+    response = docdb_api_client.retrieve_docdb_records(
         filter_query=filter,
         limit=limit,
         paginate_batch_size=paginate_batch_size
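
Patch 1 imports is_dict_corrupt from aind_data_access_api.utils, but the
helper itself is outside this diff. Judging from how the tests exercise it
(a "name.corrupt" key makes a record unwritable), a minimal sketch
consistent with that behavior could look like the code below; the
recursion into nested dicts and the "$" check are assumptions, based on
DocumentDB following MongoDB in rejecting field names that contain "." or
"$":

    def is_dict_corrupt(input_dict: dict) -> bool:
        """Sketch: True if any key at any depth contains '.' or '$'."""
        if not isinstance(input_dict, dict):
            return False
        for key, value in input_dict.items():
            # MongoDB-compatible stores reject these characters in field
            # names, so a record containing them cannot be upserted.
            if "." in key or "$" in key:
                return True
            # Assumed: nested documents such as record["subject"] are
            # checked recursively.
            if isinstance(value, dict) and is_dict_corrupt(value):
                return True
        return False

Note that this check applies only to documents being written; filter
queries such as {"subject.subject_id": "123456"} in the UserGuide use
dotted paths legitimately.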
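The chunking loop in the new upsert_list_of_docdb_records is a greedy
accumulator: it keeps a running getsizeof total of the JSON-serialized
records and issues one _bulk_write per chunk, starting a new chunk
whenever the next record would push the total past max_payload_size. The
same idea in isolation, with a hypothetical flush callback standing in
for _bulk_write:

    import json
    from sys import getsizeof
    from typing import Callable, List

    def chunk_by_payload_size(
        records: List[dict],
        max_payload_size: int,
        flush: Callable[[List[str]], None],
    ) -> None:
        """Greedy chunking sketch: flush a chunk before it would exceed
        max_payload_size; an oversized record still gets its own chunk."""
        chunk: List[str] = []
        total_size = 0
        for record in records:
            record_json = json.dumps(record, default=str)
            record_size = getsizeof(record_json)
            if chunk and total_size + record_size > max_payload_size:
                flush(chunk)
                chunk, total_size = [], 0
            chunk.append(record_json)
            total_size += record_size
        if chunk:
            flush(chunk)

    # With max_payload_size=1 every record is oversized on its own, so
    # each chunk holds exactly one record -- which is why
    # test_upsert_chunked_list_of_docdb_records expects two _bulk_write
    # calls for two records.
    chunk_by_payload_size(
        [{"_id": "abc-123"}, {"_id": "abc-125"}],
        max_payload_size=1,
        flush=lambda chunk: print(f"{len(chunk)} record(s) in chunk"),
    )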
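The warnings added in patch 2 pass stacklevel=2 so that the
DeprecationWarning points at the caller of the deprecated method rather
than at document_db.py itself. One caveat: Python filters
DeprecationWarning out by default except in code run directly in
__main__ (test runners such as pytest re-enable it), so callers of the
old methods may need to opt in to see the message:

    import warnings

    # Surface deprecation warnings while migrating off
    # retrieve_data_asset_records, upsert_one_record, and
    # upsert_list_of_records.
    warnings.simplefilter("always", DeprecationWarning)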
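Patch 3 updates the UserGuide's read path; together with the new write
methods, client usage looks roughly like the sketch below. The
constructor arguments mirror the test fixture's example_client_args, but
the host and database values here are placeholders, and the upsert calls
assume a gateway that accepts writes (the UserGuide section touched by
this patch documents the read-only API):

    from aind_data_access_api.document_db import MetadataDbClient

    # Placeholder connection values; use your deployment's actual API
    # Gateway host, database, and collection.
    docdb_api_client = MetadataDbClient(
        host="example-api-gateway-host",
        database="metadata",
        collection="data_assets",
    )

    # Paginated read, as in the updated UserGuide snippet.
    records = docdb_api_client.retrieve_docdb_records(
        filter_query={"subject.subject_id": "123456"},
        limit=1000,
        paginate_batch_size=100,
    )

    # Writes validate locally first: a record missing "_id", or one whose
    # field names contain "." or "$", raises ValueError before any request
    # is sent.
    for record in records:
        record["processing_status"] = "complete"  # hypothetical field
    docdb_api_client.upsert_list_of_docdb_records(records)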