If data has already been processed, return the data
oliverkinch committed Jul 5, 2024
1 parent 2052cc2 commit 13e4df0
Showing 2 changed files with 8 additions and 4 deletions.
4 changes: 2 additions & 2 deletions src/doms_databasen/_utils.py
@@ -1,7 +1,7 @@
 """Utility function for the doms_databasen package."""
 
 import json
-from typing import Dict, List
+from typing import List
 
 import jsonlines
 
@@ -19,7 +19,7 @@ def save_dict_to_json(dict_, file_path) -> None:
         json.dump(dict_, f, indent=4)
 
 
-def read_json(file_path) -> Dict[str, str]:
+def read_json(file_path) -> dict:
     """Reads a json file.
 
     Args:
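
For reference, the updated helper now looks roughly like this; the signature and docstring header come from the diff above, while the function body and the remaining docstring text are assumptions sketched in for illustration, not copied from the repository:

```python
import json


def read_json(file_path) -> dict:
    """Reads a json file.

    Args:
        file_path:
            Path to the json file.

    Returns:
        dict:
            Contents of the json file.
    """
    with open(file_path, "r") as f:
        return json.load(f)
```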
8 changes: 6 additions & 2 deletions src/doms_databasen/processor.py
@@ -68,6 +68,8 @@ def process(self, case_id: str) -> Dict[str, Union[str, Dict[str, str]]]:
             processed_data (dict):
                 Processed data (only returned for testing purposes)
         """
+        processed_data: Dict[str, Union[str, Dict[str, str]]] = {}
+
         case_id = str(case_id)
         if case_id in self.blacklist:
             logger.info(f"{case_id} is blacklisted.")
@@ -88,7 +90,10 @@ def process(self, case_id: str) -> Dict[str, Union[str, Dict[str, str]]]:
             logger.info(
                 f"Case {case_id} has already been processed. Use --force to overwrite."
             )
-            return {}
+            processed_data = read_json(
+                file_path=case_dir_processed / self.config.file_names.processed_data
+            )
+            return processed_data
 
         # Process data for the case.
         logger.info(f"Processing case {case_id}...")
@@ -99,7 +104,6 @@ def process(self, case_id: str) -> Dict[str, Union[str, Dict[str, str]]]:
             case_dir_raw / self.config.file_names.tabular_data
         )
 
-        processed_data: Dict[str, Union[str, Dict[str, str]]] = {}
         processed_data["case_id"] = case_id
         processed_data["tabular_data"] = tabular_data
 
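
To make the behavioural change concrete, here is a minimal, standalone sketch of the pattern this commit introduces: if a processed file already exists and processing is not forced, load and return it instead of returning an empty dict. The directory layout, file name, and `force` parameter here are illustrative assumptions, not the package's actual configuration:

```python
import json
from pathlib import Path


def process(case_id: str, processed_dir: Path, force: bool = False) -> dict:
    """Return cached processed data if present; otherwise process and cache it."""
    # Hypothetical location of the cached result for this case.
    processed_file = processed_dir / str(case_id) / "processed_data.json"

    if processed_file.exists() and not force:
        # Behaviour after this commit: return the cached result
        # instead of an empty dict.
        with open(processed_file, "r") as f:
            return json.load(f)

    # Placeholder for the real processing step.
    processed_data: dict = {"case_id": str(case_id), "tabular_data": {}}
    processed_file.parent.mkdir(parents=True, exist_ok=True)
    with open(processed_file, "w") as f:
        json.dump(processed_data, f, indent=4)
    return processed_data
```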
