From 2b59d1ca4c17cffb1d896e445a3e50e6c4b0073a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 14:37:41 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 scripts/save_concurrent_batches.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/scripts/save_concurrent_batches.py b/scripts/save_concurrent_batches.py
index 55bee9ee..7ee2e568 100644
--- a/scripts/save_concurrent_batches.py
+++ b/scripts/save_concurrent_batches.py
@@ -2,9 +2,9 @@
 Constructs batches where each batch includes all GSPs and only a single timestamp.
 
 Currently a slightly hacky implementation due to the way the configs are done. This script will use
-the same config file currently set to train the model. In the datamodule config file it is possible 
-to set the batch_output_dir and number of train/val batches, they can also be overriden in the command as 
-shown in the example below. 
+the same config file currently set to train the model. In the datamodule config file it is possible
+to set the batch_output_dir and number of train/val batches, they can also be overriden in the command as
+shown in the example below.
 
 use:
 ```
@@ -164,7 +164,9 @@ def main(config: DictConfig):
     with open(f"{config_dm.batch_output_dir}/datamodule.yaml", "w") as f:
         f.write(OmegaConf.to_yaml(config.datamodule))
 
-    shutil.copyfile(config_dm.configuration, f"{config_dm.batch_output_dir}/data_configuration.yaml")
+    shutil.copyfile(
+        config_dm.configuration, f"{config_dm.batch_output_dir}/data_configuration.yaml"
+    )
 
     dataloader_kwargs = dict(
         shuffle=False,