Skip to content

Commit

Permalink
Merge pull request #66 from quantori/feature/bulkexporterror
Browse files Browse the repository at this point in the history
increase timeout for get_content from 60 up to 600 seconds
  • Loading branch information
quantori-pokidovea authored Feb 23, 2023
2 parents f6a62e8 + dc0d98c commit 4b8fe63
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 19 deletions.
8 changes: 8 additions & 0 deletions src/signals_notebook/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,11 @@ def __str__(self) -> str:

class EIDError(PydanticValueError):
    """Validation error raised for a malformed entity ID (EID).

    ``msg_template`` follows the pydantic (v1) custom-error convention:
    the ``{value}`` placeholder is filled in with the offending input.
    """

    msg_template = 'incorrect EID value: "{value}"'


class BulkExportJobAlreadyRunningError(Exception):
    """Raised when a Bulk Export job is requested while another is in progress.

    The Signals Notebook API processes only one Bulk Export job at a time;
    the server signals this with an HTTP 409 response (see the handling in
    ``get_content``, which raises this exception on status ``'409'``).
    """

    # Default text kept identical to the previous hard-coded __str__ value
    # for backward compatibility.
    DEFAULT_MESSAGE = 'Only one Bulk Export job can be processed at a time'

    def __init__(self, message=None):
        # Fix: the original accepted ``message`` but ``__str__`` always
        # discarded it.  Store the message (falling back to the default) so a
        # caller-supplied message is no longer silently lost.
        super().__init__(message or self.DEFAULT_MESSAGE)

    def __str__(self) -> str:
        # Exception.args[0] is whatever __init__ stored above.
        return self.args[0]
44 changes: 25 additions & 19 deletions src/signals_notebook/materials/library.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from signals_notebook.materials.batch import Batch
from signals_notebook.materials.field import AssetConfig, BatchConfig
from signals_notebook.utils.fs_handler import FSHandler
from signals_notebook.exceptions import SignalsNotebookError, BulkExportJobAlreadyRunningError

MAX_MATERIAL_FILE_SIZE = 52428800
EXPORT_ERROR_LIBRARY_EMPTY = 'Nothing to export.'
Expand Down Expand Up @@ -394,7 +395,7 @@ def _download_file(self, file_id: str) -> requests.Response:
path=(self._get_endpoint(), 'bulkExport', 'download', file_id),
)

def get_content(self, timeout: int = 30, period: int = 5) -> File:
def get_content(self, timeout: int = 600, period: int = 5) -> File:
"""Get library content.
Compounds/Reagents (SNB) will be exported to SD file, others will be exported to CSV file.
Expand All @@ -405,13 +406,23 @@ def get_content(self, timeout: int = 30, period: int = 5) -> File:
Returns:
File
"""
bulk_export_response = None
api = SignalsNotebookApi.get_default_api()
log.debug('Get content for: %s| %s', self.__class__.__name__, self.eid)

bulk_export_response = api.call(
method='POST',
path=(self._get_endpoint(), self.name, 'bulkExport'),
)
try:
bulk_export_response = api.call(
method='POST',
path=(self._get_endpoint(), self.name, 'bulkExport'),
)
except SignalsNotebookError as e:
error = e.parsed_response.errors[0]
if error.status == '409':
raise BulkExportJobAlreadyRunningError()
else:
raise
except Exception as e:
raise e

file_id, report_id = bulk_export_response.json()['data']['attributes'].values()

Expand Down Expand Up @@ -559,20 +570,15 @@ def dump(self, base_path: str, fs_handler: FSHandler, alias: Optional[List[str]]
metadata = {
**{k: v for k, v in self.dict().items() if k in ('library_name', 'asset_type_id', 'eid', 'name')},
}
try:
content = self.get_content(timeout=60)
metadata['file_name'] = content.name
file_name = content.name
data = content.content
fs_handler.write(
fs_handler.join_path(base_path, self.eid, file_name),
data,
base_alias=alias + [metadata['name'], file_name] if alias else None,
)
except FileNotFoundError:
metadata['error'] = 'Library is empty'
except TimeoutError:
metadata['error'] = 'Time is over to dump library'
content = self.get_content(timeout=600)
metadata['file_name'] = content.name
file_name = content.name
data = content.content
fs_handler.write(
fs_handler.join_path(base_path, self.eid, file_name),
data,
base_alias=alias + [metadata['name'], file_name] if alias else None,
)

fs_handler.write(
fs_handler.join_path(base_path, self.eid, 'metadata.json'),
Expand Down

0 comments on commit 4b8fe63

Please sign in to comment.