diff --git a/miqa/core/conversion/import_export_csvs.py b/miqa/core/conversion/import_export_csvs.py
index 4bc1bdf9..51d696ef 100644
--- a/miqa/core/conversion/import_export_csvs.py
+++ b/miqa/core/conversion/import_export_csvs.py
@@ -1,8 +1,10 @@
+from datetime import datetime
 from pathlib import Path
 from typing import List, Optional as TypingOptional
 
+import pandas
 from rest_framework.exceptions import APIException
-from schema import And, Optional, Or, Schema, SchemaError, Use
+from schema import Optional, Or, Schema, SchemaError, Use
 
 from miqa.core.models import GlobalSettings, Project
 
@@ -58,14 +60,24 @@ def validate_import_dict(import_dict, project: TypingOptional[Project]):
                     Optional('notes'): Optional(str, None),
                     'scans': {
                         Optional(Use(str)): {
-                            'type': And(Use(str)),
+                            'type': Use(str),
                             Optional('subject_id'): Or(str, None),
                             Optional('session_id'): Or(str, None),
                             Optional('scan_link'): Or(str, None),
-                            'frames': {And(Use(int)): {'file_location': And(str)}},
+                            'frames': {Use(int): {'file_location': Use(str)}},
+                            Optional('decisions'): [
+                                {
+                                    'decision': Use(str),
+                                    'creator': Or(str, None),
+                                    'note': Or(str, None),
+                                    'created': Or(str, None),
+                                    'user_identified_artifacts': Or(str, None),
+                                    'location': Or(str, None),
+                                },
+                            ],
                             Optional('last_decision'): Or(
                                 {
-                                    'decision': And(str),
+                                    'decision': Use(str),
                                     'creator': Or(str, None),
                                     'note': Or(str, None),
                                     'created': Or(str, None),
@@ -133,6 +145,7 @@ def import_dataframe_to_dict(df, project):
                             }
                             for row in scan_df.iterrows()
                         },
+                        'decisions': [],
                     }
                 except ValueError as e:
                     raise APIException(
@@ -158,9 +171,7 @@ def import_dataframe_to_dict(df, project):
                        'location': scan_df['location_of_interest'].iloc[0] or None,
                    }
                    decision_dict = {k: (v or None) for k, v in decision_dict.items()}
-                   scan_dict['last_decision'] = decision_dict
-                else:
-                   scan_dict['last_decision'] = None
+                   scan_dict['decisions'].append(decision_dict)
 
                # added for BIDS import
                if 'subject_ID' in scan_df.columns:
@@ -173,3 +184,47 @@ def import_dataframe_to_dict(df, project):
             project_dict['experiments'][experiment_name] = experiment_dict
         ingest_dict['projects'][project_name] = project_dict
     return ingest_dict
+
+
+def import_dict_to_dataframe(data):
+    row_data = []
+    for project_name, project_data in data.get('projects', {}).items():
+        for experiment_name, experiment_data in project_data.get('experiments', {}).items():
+            for scan_name, scan_data in experiment_data.get('scans', {}).items():
+                for frame_number, frame_data in scan_data.get('frames', {}).items():
+                    row = [
+                        project_name,
+                        experiment_name,
+                        scan_name,
+                        scan_data.get('type', ''),
+                        frame_number,
+                        frame_data.get('file_location', ''),
+                        experiment_data.get('notes', ''),
+                        scan_data.get('subject_id', ''),
+                        scan_data.get('session_id', ''),
+                        scan_data.get('scan_link', ''),
+                    ]
+                    sorted_decisions = sorted(
+                        scan_data.get('decisions', []),
+                        key=lambda d: datetime.strptime(
+                            d['created'].split('+')[0], '%Y-%m-%d %H:%M:%S'
+                        ),
+                        reverse=True,
+                    )
+                    if len(sorted_decisions) > 0:
+                        last_decision_data = sorted_decisions[0]
+                        if last_decision_data:
+                            row += [
+                                last_decision_data.get('decision', ''),
+                                last_decision_data.get('creator', ''),
+                                last_decision_data.get('note', ''),
+                                last_decision_data.get('created', ''),
+                                last_decision_data.get('user_identified_artifacts', ''),
+                                last_decision_data.get('location', ''),
+                            ]
+                        else:
+                            row += ['' for i in range(6)]
+                    else:
+                        row += ['' for i in range(6)]
+                    row_data.append(row)
+    return pandas.DataFrame(row_data, columns=IMPORT_CSV_COLUMNS)
diff --git a/miqa/core/rest/scan_decision.py b/miqa/core/rest/scan_decision.py
index 997cf116..c02607a3 100644
--- a/miqa/core/rest/scan_decision.py
+++ b/miqa/core/rest/scan_decision.py
@@ -28,7 +28,7 @@ class Meta:
         ref_name = 'scan_decision'
 
     creator = UserSerializer()
-    created = serializers.DateTimeField(format='%d-%m-%Y')
+    created = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S')
 
 
 class ScanDecisionViewSet(
diff --git a/miqa/core/tasks.py b/miqa/core/tasks.py
index 895fa12b..dbf7447a 100644
--- a/miqa/core/tasks.py
+++ b/miqa/core/tasks.py
@@ -16,8 +16,8 @@
 from rest_framework.exceptions import APIException
 
 from miqa.core.conversion.import_export_csvs import (
-    IMPORT_CSV_COLUMNS,
     import_dataframe_to_dict,
+    import_dict_to_dataframe,
     validate_import_dict,
 )
 from miqa.core.conversion.nifti_to_zarr_ngff import nifti_to_zarr_ngff
@@ -210,61 +210,53 @@ def perform_import(import_dict):
                     session_id=session_id,
                     scan_link=scan_link,
                 )
-
-                if 'last_decision' in scan_data:
-                    last_decision_dict = scan_data['last_decision']
-                    if (
-                        last_decision_dict
-                        and 'decision' in last_decision_dict
-                        and len(last_decision_dict['decision']) > 0
-                    ):
-                        try:
-                            creator = User.objects.get(email=last_decision_dict['creator'])
-                        except User.DoesNotExist:
-                            creator = None
-                        note = ''
-                        created = (
-                            datetime.now().strftime('%Y-%m-%d %H:%M')
-                            if settings.REPLACE_NULL_CREATION_DATETIMES
-                            else None
+                if 'last_decision' in scan_data and scan_data['last_decision']:
+                    scan_data['decisions'] = [scan_data['last_decision']]
+                for decision_data in scan_data.get('decisions', []):
+                    try:
+                        creator = User.objects.get(email=decision_data.get('creator', ''))
+                    except User.DoesNotExist:
+                        creator = None
+                    note = ''
+                    created = (
+                        datetime.now().strftime('%Y-%m-%d %H:%M')
+                        if settings.REPLACE_NULL_CREATION_DATETIMES
+                        else None
+                    )
+                    location = {}
+                    note = decision_data.get('note', '')
+                    if decision_data['created']:
+                        valid_dt = dateparser.parse(decision_data['created'])
+                        if valid_dt:
+                            created = valid_dt.strftime('%Y-%m-%d %H:%M')
+                    if decision_data['location'] and decision_data['location'] != '':
+                        slices = [
+                            axis.split('=')[1] for axis in decision_data['location'].split(';')
+                        ]
+                        location = {
+                            'i': slices[0],
+                            'j': slices[1],
+                            'k': slices[2],
+                        }
+                    if decision_data['decision'] in [dec[0] for dec in DECISION_CHOICES]:
+                        decision = ScanDecision(
+                            decision=decision_data['decision'],
+                            creator=creator,
+                            created=created,
+                            note=note or '',
+                            user_identified_artifacts={
+                                artifact_name: (
+                                    1
+                                    if decision_data['user_identified_artifacts']
+                                    and artifact_name in decision_data['user_identified_artifacts']
+                                    else 0
+                                )
+                                for artifact_name in default_identified_artifacts().keys()
+                            },
+                            location=location,
+                            scan=scan_object,
                         )
-                        location = {}
-                        if last_decision_dict['note']:
-                            note = last_decision_dict['note'].replace(';', ',')
-                        if last_decision_dict['created']:
-                            valid_dt = dateparser.parse(last_decision_dict['created'])
-                            if valid_dt:
-                                created = valid_dt.strftime('%Y-%m-%d %H:%M')
-                        if last_decision_dict['location'] and last_decision_dict['location'] != '':
-                            slices = [
-                                axis.split('=')[1]
-                                for axis in last_decision_dict['location'].split(';')
-                            ]
-                            location = {
-                                'i': slices[0],
-                                'j': slices[1],
-                                'k': slices[2],
-                            }
-                        if last_decision_dict['decision'] in [dec[0] for dec in DECISION_CHOICES]:
-                            last_decision = ScanDecision(
-                                decision=last_decision_dict['decision'],
-                                creator=creator,
-                                created=created,
-                                note=note,
-                                user_identified_artifacts={
-                                    artifact_name: (
-                                        1
-                                        if last_decision_dict['user_identified_artifacts']
-                                        and artifact_name
-                                        in last_decision_dict['user_identified_artifacts']
-                                        else 0
-                                    )
-                                    for artifact_name in default_identified_artifacts().keys()
-                                },
-                                location=location,
-                                scan=scan_object,
-                            )
-                            new_scan_decisions.append(last_decision)
+                        new_scan_decisions.append(decision)
                 new_scans.append(scan_object)
 
                 for frame_number, frame_data in scan_data['frames'].items():
@@ -322,7 +314,7 @@ def export_data(project_id: Optional[str]):
 
 @shared_task
 def perform_export(project_id: Optional[str]):
-    data: List[List[Optional[str]]] = []
+    data = {'projects': {}}
     export_warnings = []
 
     if project_id is None:
@@ -337,73 +329,65 @@ def perform_export(project_id: Optional[str]):
         export_path = project.export_path
 
     for project_object in projects:
-        project_frames = Frame.objects.filter(scan__experiment__project=project_object)
-        if project_frames.count() == 0:
-            data.append([project_object.name])
-        for frame_object in project_frames:
-            if frame_object.storage_mode == StorageMode.LOCAL_PATH:
-                row_data = [
-                    project_object.name,
-                    frame_object.scan.experiment.name,
-                    frame_object.scan.name,
-                    frame_object.scan.scan_type,
-                    str(frame_object.frame_number),
-                    frame_object.raw_path,
-                    frame_object.scan.experiment.note,
-                    frame_object.scan.subject_id,
-                    frame_object.scan.session_id,
-                    frame_object.scan.scan_link,
-                ]
-                last_decision = (
-                    frame_object.scan.decisions.exclude(created__isnull=True)
-                    .order_by('created')
-                    .last()
-                )
-                if not last_decision:
-                    last_decision = frame_object.scan.decisions.order_by('created').last()
-                if last_decision:
-                    location = ''
-                    if last_decision.location:
+        project_data = {'experiments': {}}
+        for experiment_object in project_object.experiments.all():
+            experiment_data = {'scans': {}, 'notes': experiment_object.note}
+            for scan_object in experiment_object.scans.all():
+                scan_data = {
+                    'frames': {},
+                    'decisions': [],
+                    'type': scan_object.scan_type,
+                    'subject_id': scan_object.subject_id,
+                    'session_id': scan_object.session_id,
+                    'scan_link': scan_object.scan_link,
+                }
+                for frame_object in scan_object.frames.all():
+                    scan_data['frames'][frame_object.frame_number] = {
+                        'file_location': frame_object.raw_path
+                    }
+                for decision_object in scan_object.decisions.all():
+                    location = None
+                    if decision_object.location:
                         location = (
-                            f'i={last_decision.location["i"]};'
-                            f'j={last_decision.location["j"]};'
-                            f'k={last_decision.location["k"]}'
+                            f'i={decision_object.location["i"]};'
+                            f'j={decision_object.location["j"]};'
+                            f'k={decision_object.location["k"]}'
                         )
-                    artifacts = [
-                        artifact
-                        for artifact, value in last_decision.user_identified_artifacts.items()
-                        if value == 1
-                    ]
-                    creator = ''
-                    if last_decision.creator:
-                        creator = last_decision.creator.email
-                    created = None
-                    if last_decision.created:
-                        created = str(last_decision.created)
-                    row_data += [
-                        last_decision.decision,
-                        creator,
-                        last_decision.note.replace(',', ';'),
-                        created,
-                        ';'.join(artifacts),
-                        location,
-                    ]
-                else:
-                    row_data += ['' for i in range(6)]
-                data.append(row_data)
-            else:
-                export_warnings.append(
-                    f'{frame_object.scan.name} not exported; this scan was uploaded, not imported.'
-                )
-    export_df = pandas.DataFrame(data, columns=IMPORT_CSV_COLUMNS)
+                    artifacts = ';'.join(
+                        [
+                            artifact
+                            for artifact, value in decision_object.user_identified_artifacts.items()
+                            if value == 1
+                        ]
+                    )
+                    scan_data['decisions'].append(
+                        {
+                            'decision': decision_object.decision,
+                            'creator': decision_object.creator.username
+                            if decision_object.creator
+                            else None,
+                            'note': decision_object.note,
+                            'created': datetime.strftime(
+                                decision_object.created, '%Y-%m-%d %H:%M:%S'
+                            )
+                            if decision_object.created
+                            else None,
+                            'user_identified_artifacts': artifacts if len(artifacts) > 0 else None,
+                            'location': location,
+                        }
+                    )
+                experiment_data['scans'][scan_object.name] = scan_data
+            project_data['experiments'][experiment_object.name] = experiment_data
+        data['projects'][project_object.name] = project_data
+    data, export_warnings = validate_import_dict(data, project)
 
     try:
         if export_path.endswith('csv'):
+            export_df = import_dict_to_dataframe(data)
             export_df.to_csv(export_path, index=False)
         elif export_path.endswith('json'):
-            json_contents = import_dataframe_to_dict(export_df, project)
             with open(export_path, 'w') as fd:
-                json.dump(json_contents, fd)
+                json.dump(data, fd)
         else:
             raise APIException(
                 f'Unknown format for export path {export_path}. Expected csv or json.'
diff --git a/miqa/core/tests/data/test_import.json b/miqa/core/tests/data/test_import.json
index 08ae25d7..0f6514f0 100644
--- a/miqa/core/tests/data/test_import.json
+++ b/miqa/core/tests/data/test_import.json
@@ -14,14 +14,16 @@
               "subject_id": null,
               "session_id": null,
               "scan_link": null,
-              "last_decision": {
-                "decision": "Q?",
-                "creator": null,
-                "note": "MP(2022-09-16): Damaged Phantom",
-                "created": null,
-                "user_identified_artifacts": null,
-                "location": null
-              }
+              "decisions": [
+                {
+                  "decision": "Q?",
+                  "creator": null,
+                  "note": "MP(2022-09-16): Damaged Phantom",
+                  "created": null,
+                  "user_identified_artifacts": null,
+                  "location": null
+                }
+              ]
             }
           },
           "notes": ""
diff --git a/miqa/settings.py b/miqa/settings.py
index ee18b7fa..e0fed07c 100644
--- a/miqa/settings.py
+++ b/miqa/settings.py
@@ -3,6 +3,7 @@
 from pathlib import Path
 
+from celery.schedules import crontab
 from composed_configuration import (
     ComposedConfiguration,
     ConfigMixin,
@@ -16,7 +17,6 @@
 )
 from composed_configuration._configuration import _BaseConfiguration
 from configurations import values
-from celery.schedules import crontab
 
 
 class MiqaMixin(ConfigMixin):
@@ -48,7 +48,6 @@ class MiqaMixin(ConfigMixin):
 
     CELERY_BEAT_SCHEDULE = {}
 
-
     @staticmethod
     def before_binding(configuration: ComposedConfiguration) -> None:
         # Install local apps first, to ensure any overridden resources are found first
@@ -90,12 +89,14 @@ def before_binding(configuration: ComposedConfiguration) -> None:
         ] = 'miqa.core.rest.exceptions.custom_exception_handler'
 
         if configuration.DEMO_MODE:
-            configuration.CELERY_BEAT_SCHEDULE.update({
-                'reset-demo': {
-                    'task': 'miqa.core.tasks.reset_demo',
-                    'schedule': crontab(minute=0, hour=0),  # daily at midnight
+            configuration.CELERY_BEAT_SCHEDULE.update(
+                {
+                    'reset-demo': {
+                        'task': 'miqa.core.tasks.reset_demo',
+                        'schedule': crontab(minute=0, hour=0),  # daily at midnight
+                    }
                 }
-            })
+            )
 
 
 class DevelopmentConfiguration(MiqaMixin, DevelopmentBaseConfiguration):
diff --git a/web_client/src/components/ExperimentsView.vue b/web_client/src/components/ExperimentsView.vue
index 926e8768..844b5e58 100644
--- a/web_client/src/components/ExperimentsView.vue
+++ b/web_client/src/components/ExperimentsView.vue
@@ -101,7 +101,7 @@ export default {
     },
     decisionToRating(decisions) {
      if (decisions.length === 0) return {};
-      const rating = _.first(_.sortBy(decisions, (dec) => dec.created)).decision;
+      const rating = _.last(_.sortBy(decisions, (dec) => dec.created)).decision;
      let color = 'grey--text';
      if (rating === 'U') {
        color = 'green--text';
diff --git a/web_client/src/components/ScanDecision.vue b/web_client/src/components/ScanDecision.vue
index e3c46664..3cab3c71 100644
--- a/web_client/src/components/ScanDecision.vue
+++ b/web_client/src/components/ScanDecision.vue
@@ -115,7 +115,7 @@ export default {