diff --git a/README.md b/README.md
index 5902959e5..ae2f95e5e 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,10 @@
 # DaCapo ![DaCapo](docs/source/_static/icon_dacapo.png)
 
-[![tests](https://github.com/funkelab/dacapo/actions/workflows/tests.yaml/badge.svg)](https://github.com/funkelab/dacapo/actions/workflows/tests.yaml)
-[![black](https://github.com/funkelab/dacapo/actions/workflows/black.yaml/badge.svg)](https://github.com/funkelab/dacapo/actions/workflows/black.yaml)
-[![mypy](https://github.com/funkelab/dacapo/actions/workflows/mypy.yaml/badge.svg)](https://github.com/funkelab/dacapo/actions/workflows/mypy.yaml)
-[![docs](https://github.com/funkelab/dacapo/actions/workflows/docs.yaml/badge.svg)](https://funkelab.github.io/dacapo/)
+[![tests](https://github.com/janelia-cellmap/dacapo/actions/workflows/tests.yaml/badge.svg)](https://github.com/janelia-cellmap/dacapo/actions/workflows/tests.yaml)
+[![black](https://github.com/janelia-cellmap/dacapo/actions/workflows/black.yaml/badge.svg)](https://github.com/janelia-cellmap/dacapo/actions/workflows/black.yaml)
+[![mypy](https://github.com/janelia-cellmap/dacapo/actions/workflows/mypy.yaml/badge.svg)](https://github.com/janelia-cellmap/dacapo/actions/workflows/mypy.yaml)
+[![docs](https://github.com/janelia-cellmap/dacapo/actions/workflows/docs.yaml/badge.svg)](https://janelia-cellmap.github.io/dacapo/)
+[![codecov](https://codecov.io/gh/janelia-cellmap/dacapo/branch/main/graph/badge.svg)](https://codecov.io/gh/janelia-cellmap/dacapo)
 
 A framework for easy application of established machine learning techniques on large, multi-dimensional images.
 
diff --git a/dacapo/store/file_config_store.py b/dacapo/store/file_config_store.py
index 725c4a46f..98bfd6562 100644
--- a/dacapo/store/file_config_store.py
+++ b/dacapo/store/file_config_store.py
@@ -98,10 +98,12 @@ def __save_insert(self, collection, data, ignore=None):
 
         file_store = collection / name
         if not file_store.exists():
-            pickle.dump(dict(data), file_store.open("wb"))
+            with file_store.open("wb") as fd:
+                pickle.dump(dict(data), fd)
 
         else:
-            existing = pickle.load(file_store.open("rb"))
+            with file_store.open("rb") as fd:
+                existing = pickle.load(fd)
 
             if not self.__same_doc(existing, data, ignore):
                 raise DuplicateNameError(
@@ -113,7 +115,8 @@ def __load(self, collection, name):
         file_store = collection / name
         if file_store.exists():
-            return pickle.load(file_store.open("rb"))
+            with file_store.open("rb") as fd:
+                return pickle.load(fd)
         else:
             raise ValueError(f"No config with name: {name} in collection: {collection}")
 
diff --git a/dacapo/store/file_stats_store.py b/dacapo/store/file_stats_store.py
index 8a299bcf7..b3ce77f37 100644
--- a/dacapo/store/file_stats_store.py
+++ b/dacapo/store/file_stats_store.py
@@ -88,12 +88,14 @@ def __store_training_stats(self, stats, begin, end, run_name):
 
         if docs:
             file_store = self.training_stats / run_name
-            pickle.dump(docs, file_store.open("wb"))
+            with file_store.open("wb") as fd:
+                pickle.dump(docs, fd)
 
     def __read_training_stats(self, run_name):
         file_store = self.training_stats / run_name
         if file_store.exists():
-            docs = pickle.load(file_store.open("rb"))
+            with file_store.open("rb") as fd:
+                docs = pickle.load(fd)
         else:
             docs = []
         stats = TrainingStats(converter.structure(docs, List[TrainingIterationStats]))
@@ -117,12 +119,14 @@ def __store_validation_iteration_scores(
 
         if docs:
             file_store = self.validation_scores / run_name
-            pickle.dump(docs, file_store.open("wb"))
+            with file_store.open("wb") as fd:
+                pickle.dump(docs, fd)
 
     def __read_validation_iteration_scores(self, run_name):
         file_store = self.validation_scores / run_name
         if file_store.exists():
-            docs = pickle.load(file_store.open("rb"))
+            with file_store.open("rb") as fd:
+                docs = pickle.load(fd)
         else:
             docs = []
         scores = converter.structure(docs, List[ValidationIterationScores])
diff --git a/dacapo/store/local_weights_store.py b/dacapo/store/local_weights_store.py
index c5f0ba5ff..0935c945b 100644
--- a/dacapo/store/local_weights_store.py
+++ b/dacapo/store/local_weights_store.py
@@ -103,11 +103,10 @@ def store_best(self, run: str, iteration: int, dataset: str, criterion: str):
 
     def retrieve_best(self, run: str, dataset: str, criterion: str) -> int:
         logger.info("Retrieving weights for run %s, criterion %s", run, criterion)
 
-        weights_info = json.loads(
-            (self.__get_weights_dir(run) / criterion / f"{dataset}.json")
-            .open("r")
-            .read()
-        )
+        with (self.__get_weights_dir(run) / criterion / f"{dataset}.json").open(
+            "r"
+        ) as fd:
+            weights_info = json.load(fd)
 
         return weights_info["iteration"]
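
Every hunk above applies the same refactor: bare pickle.dump(obj, path.open("wb")) / pickle.load(path.open("rb")) calls (and the json.loads(path.open("r").read()) variant in local_weights_store.py) are wrapped in with blocks so the file handle is flushed and closed deterministically instead of whenever the orphaned file object happens to be garbage collected. Below is a minimal standalone sketch of that pattern; the paths and payloads are illustrative only and not part of the DaCapo API.

import json
import pickle
from pathlib import Path

stats_path = Path("stats.pkl")          # hypothetical file, for illustration
docs = [{"iteration": 0, "loss": 1.0}]  # hypothetical payload

# The context manager flushes and closes the handle when the block exits,
# even if pickling raises, unlike pickle.dump(docs, stats_path.open("wb")).
with stats_path.open("wb") as fd:
    pickle.dump(docs, fd)

with stats_path.open("rb") as fd:
    loaded = pickle.load(fd)

# Same idea for the JSON case: json.load(fd) on an open handle replaces
# json.loads(path.open("r").read()), which never closed the file explicitly.
weights_path = Path("weights.json")     # hypothetical file, for illustration
weights_path.write_text(json.dumps({"iteration": 42}))
with weights_path.open("r") as fd:
    weights_info = json.load(fd)

assert loaded == docs and weights_info["iteration"] == 42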