Commit

MLLAMDatastore -> MDPDatastore
leifdenby committed Sep 12, 2024
1 parent ac10d7d commit bf8172a
Showing 3 changed files with 13 additions and 7 deletions.
4 changes: 2 additions & 2 deletions neural_lam/datastore/__init__.py
@@ -1,9 +1,9 @@
 # Local
-from .mllam import MLLAMDatastore  # noqa
+from .mdp import MDPDatastore  # noqa
 from .npyfiles import NpyFilesDatastore  # noqa

 DATASTORES = dict(
-    mllam=MLLAMDatastore,
+    mdp=MDPDatastore,
     npyfiles=NpyFilesDatastore,
 )

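The `DATASTORES` registry above maps a short kind name to its datastore class, so the rest of the code can select a datastore by string. As a rough illustration of how such a registry is used (a sketch only; the implementation of `init_datastore` is not part of this diff, and the config path is a placeholder):

    # Sketch: selecting a datastore class through the DATASTORES registry.
    # "mdp" and MDPDatastore come from the diff above; the config path is
    # a placeholder, not a file shipped with the repository.
    from neural_lam.datastore import DATASTORES

    DatastoreClass = DATASTORES["mdp"]  # resolves to MDPDatastore
    datastore = DatastoreClass(config_path="path/to/config.yaml")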
12 changes: 9 additions & 3 deletions neural_lam/datastore/mllam.py → neural_lam/datastore/mdp.py
@@ -15,12 +15,18 @@
 from .base import BaseCartesianDatastore, CartesianGridShape


-class MLLAMDatastore(BaseCartesianDatastore):
-    """Datastore class for the MLLAM dataset."""
+class MDPDatastore(BaseCartesianDatastore):
+    """
+    Datastore class for datasets made with the mllam_data_prep library
+    (https://github.com/mllam/mllam-data-prep). This class wraps the
+    `mllam_data_prep` library to do the necessary transforms to create the
+    different categories (state/forcing/static) of data, with the actual
+    transform to do being specified in the configuration file.
+    """

     def __init__(self, config_path, n_boundary_points=30, reuse_existing=True):
         """
-        Construct a new MLLAMDatastore from the configuration file at
+        Construct a new MDPDatastore from the configuration file at
         `config_path`. A boundary mask is created with `n_boundary_points`
         boundary points. If `reuse_existing` is True, the dataset is loaded
         from a zarr file if it exists (unless the config has been modified
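For illustration, constructing the renamed class directly, using only the `__init__` signature shown above (the configuration path is a placeholder, and the keyword values are simply the defaults from the diff):

    # Minimal sketch based on the __init__ signature in the diff above.
    # The config path is a placeholder for an mllam_data_prep config file.
    from neural_lam.datastore import MDPDatastore

    datastore = MDPDatastore(
        config_path="path/to/config.yaml",
        n_boundary_points=30,   # default shown in the diff
        reuse_existing=True,    # reload from the existing zarr file if present
    )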
4 changes: 2 additions & 2 deletions neural_lam/train_model.py
@@ -11,7 +11,7 @@

 # Local
 from . import utils
-from .datastore import init_datastore
+from .datastore import DATASTORES, init_datastore
 from .models import GraphLAM, HiLAM, HiLAMParallel
 from .weather_dataset import WeatherDataModule

@@ -30,7 +30,7 @@ def main(input_args=None):
     parser.add_argument(
         "datastore_kind",
         type=str,
-        choices=["npyfiles", "mllam"],
+        choices=DATASTORES.keys(),
         help="Kind of datastore to use",
     )
     parser.add_argument(
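Deriving `choices` from `DATASTORES.keys()` means the command line accepts exactly the kinds registered in `neural_lam/datastore/__init__.py`, so a newly registered datastore becomes selectable without touching `train_model.py`. A self-contained sketch of the same argparse pattern (with a stand-in registry; the real parser has further arguments not shown here):

    # Illustration of choices=DATASTORES.keys(); the registry below is a
    # stand-in for the one defined in neural_lam.datastore.
    import argparse

    DATASTORES = {"mdp": object, "npyfiles": object}

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "datastore_kind",
        type=str,
        choices=DATASTORES.keys(),
        help="Kind of datastore to use",
    )

    print(parser.parse_args(["mdp"]).datastore_kind)  # -> mdp
    # parser.parse_args(["other"]) would exit with an "invalid choice" error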
