From d796b3fad3e9e10c8051dadb6367b747df51bd99 Mon Sep 17 00:00:00 2001 From: Tyler Sutterley Date: Mon, 15 Jul 2024 17:15:06 -0700 Subject: [PATCH] feat: add JSON definition file parsing for #310 (#312) * docs: bump `docutils` to latest --- doc/environment.yml | 2 +- pyTMD/io/model.py | 198 +++++++++++++- setup.cfg | 1 + test/def_to_json.py | 81 ++++++ test/model_CATS2008.json | 1 + test/model_FES2012.json | 1 + test/model_FES2014.json | 1 + test/model_FES2014_currents.json | 1 + test/model_GOT4.10.json | 1 + test/model_TPXO9-atlas-v5.json | 1 + test/model_TPXO9-atlas-v5_currents.json | 1 + test/test_model.py | 341 ++++++++++++++++-------- 12 files changed, 519 insertions(+), 111 deletions(-) create mode 100644 test/def_to_json.py create mode 100644 test/model_CATS2008.json create mode 100644 test/model_FES2012.json create mode 100644 test/model_FES2014.json create mode 100644 test/model_FES2014_currents.json create mode 100644 test/model_GOT4.10.json create mode 100644 test/model_TPXO9-atlas-v5.json create mode 100644 test/model_TPXO9-atlas-v5_currents.json diff --git a/doc/environment.yml b/doc/environment.yml index 0a3c51ae..09eab61c 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -2,7 +2,7 @@ name: pytmd-docs channels: - conda-forge dependencies: - - docutils<0.18 + - docutils - fontconfig - freetype - graphviz diff --git a/pyTMD/io/model.py b/pyTMD/io/model.py index d6e4a6d0..3d6d49f5 100644 --- a/pyTMD/io/model.py +++ b/pyTMD/io/model.py @@ -7,6 +7,7 @@ UPDATE HISTORY: Updated 07/2024: added new FES2022 and FES2022_load to list of models + added JSON format for model definition files Updated 05/2024: make subscriptable and allow item assignment Updated 04/2024: append v-components of velocity only to netcdf format Updated 11/2023: revert TPXO9-atlas currents changes to separate dicts @@ -44,6 +45,7 @@ import re import io import copy +import json import pathlib class model: @@ -1337,7 +1339,10 @@ def pathfinder(self, model_file: str | pathlib.Path | list): # return the complete output path return output_file - def from_file(self, definition_file: str | pathlib.Path | io.IOBase): + def from_file(self, + definition_file: str | pathlib.Path | io.IOBase, + format: str = 'ascii' + ): """ Create a model object from an input definition file @@ -1345,23 +1350,45 @@ def from_file(self, definition_file: str | pathlib.Path | io.IOBase): ---------- definition_file: str, pathlib.Path or io.IOBase model definition file for creating model object + format: str + format of the input definition file + + - ``'ascii'`` for tab-delimited definition file + - ``'json'`` for JSON formatted definition file """ - # variable with parameter definitions - parameters = {} # Opening definition file and assigning file ID number if isinstance(definition_file, io.IOBase): fid = copy.copy(definition_file) else: definition_file = pathlib.Path(definition_file).expanduser() fid = definition_file.open(mode="r", encoding='utf8') + # load and parse definition file type + if (format.lower() == 'ascii'): + self.from_ascii(fid) + elif (format.lower() == 'json'): + self.from_json(fid) + # close the definition file + fid.close() + # return the model object + return self + + def from_ascii(self, fid: io.IOBase): + """ + Load and parse tab-delimited definition file + + Parameters + ---------- + fid: io.IOBase + open definition file object + """ + # variable with parameter definitions + parameters = {} # for each line in the file will extract the parameter (name and value) for fileline in fid: # Splitting the input 
line between parameter name and value part = fileline.rstrip().split(maxsplit=1) # filling the parameter definition variable parameters[part[0]] = part[1] - # close the parameter file - fid.close() # convert from dictionary to model variable temp = self.from_dict(parameters) # verify model name, format and type @@ -1539,6 +1566,167 @@ def from_file(self, definition_file: str | pathlib.Path | io.IOBase): # return the model parameters return temp + def from_json(self, fid: io.IOBase): + """ + Load and parse JSON definition file + + Parameters + ---------- + fid: io.IOBase + open definition file object + """ + # load JSON file + parameters = json.load(fid) + # convert from dictionary to model variable + temp = self.from_dict(parameters) + # verify model name, format and type + assert temp.name + assert temp.format in ('OTIS','ATLAS','TMD3','netcdf','GOT','FES') + assert temp.type + assert temp.model_file + # split model file into list if an ATLAS, GOT or FES file + # model files can be comma, tab or space delimited + # extract full path to tide model files + # extract full path to tide grid file + if temp.format in ('OTIS','ATLAS','TMD3'): + assert temp.grid_file + # check if grid file is relative + if (temp.directory is not None): + temp.grid_file = temp.directory.joinpath(temp.grid_file).resolve() + else: + temp.grid_file = pathlib.Path(temp.grid_file).expanduser() + # extract model files + if (temp.type == ['u','v']) and (temp.directory is not None): + # use glob strings to find files in directory + for key, glob_string in temp.model_file.items(): + temp.model_file[key] = list(temp.directory.glob(glob_string)) + # attempt to extract model directory + try: + temp.model_directory = temp.model_file['u'][0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string}' + raise FileNotFoundError(message) from exc + elif (temp.type == 'z') and (temp.directory is not None): + # use glob strings to find files in directory + glob_string = copy.copy(temp.model_file) + + temp.model_file = list(temp.directory.glob(glob_string)) + # attempt to extract model directory + try: + temp.model_directory = temp.model_file[0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string}' + raise FileNotFoundError(message) from exc + elif (temp.type == ['u','v']) and isinstance(temp.model_file, dict): + # resolve paths to model files for each direction + for key, model_file in temp.model_file.items(): + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + model_file] + # copy directory dictionaries + temp.model_directory = temp.model_file['u'][0].parent + elif (temp.type == 'z') and isinstance(temp.model_file, list): + # resolve paths to model files + temp.model_file = [pathlib.Path(f).expanduser() for f in + temp.model_file] + temp.model_directory = temp.model_file[0].parent + else: + # fully defined single file case + temp.model_file = pathlib.Path(temp.model_file).expanduser() + temp.model_directory = temp.model_file.parent + elif temp.format in ('netcdf',): + assert temp.grid_file + # check if grid file is relative + if (temp.directory is not None): + temp.grid_file = temp.directory.joinpath(temp.grid_file).resolve() + else: + temp.grid_file = pathlib.Path(temp.grid_file).expanduser() + # extract model files + if (temp.type == ['u','v']) and (temp.directory is not None): + # use glob strings to find files in directory + for key, glob_string in temp.model_file.items(): + temp.model_file[key] = 
list(temp.directory.glob(glob_string)) + # attempt to extract model directory + try: + temp.model_directory = temp.model_file['u'][0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string}' + raise FileNotFoundError(message) from exc + elif (temp.type == 'z') and (temp.directory is not None): + # use glob strings to find files in directory + glob_string = copy.copy(temp.model_file) + temp.model_file = list(temp.directory.glob(glob_string)) + # attempt to extract model directory + try: + temp.model_directory = temp.model_file[0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string}' + raise FileNotFoundError(message) from exc + elif (temp.type == ['u','v']): + # resolve paths to model files for each direction + for key, model_file in temp.model_file.items(): + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + model_file] + # copy to directory dictionaries + temp.model_directory = temp.model_file['u'][0].parent + elif (temp.type == 'z'): + # resolve paths to model files + temp.model_file = [pathlib.Path(f).expanduser() for f in + temp.model_file] + temp.model_directory = temp.model_file[0].parent + elif temp.format in ('FES','GOT'): + # extract model files + if (temp.type == ['u','v']) and (temp.directory is not None): + # use glob strings to find files in directory + for key, glob_string in temp.model_file.items(): + temp.model_file[key] = list(temp.directory.glob(glob_string)) + # build model directory dictionaries + temp.model_directory = {} + for key, val in temp.model_file.items(): + # attempt to extract model directory + try: + temp.model_directory[key] = val[0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string[key]}' + raise FileNotFoundError(message) from exc + elif (temp.type == 'z') and (temp.directory is not None): + # use glob strings to find files in directory + glob_string = copy.copy(temp.model_file) + + temp.model_file = list(temp.directory.glob(glob_string)) + # attempt to extract model directory + try: + temp.model_directory = temp.model_file[0].parent + except (IndexError, AttributeError) as exc: + message = f'No model files found with {glob_string}' + elif (temp.type == ['u','v']): + # resolve paths to model files for each direction + for key, model_file in temp.model_file.items(): + temp.model_file[key] = [pathlib.Path(f).expanduser() for f in + model_file] + # build model directory dictionaries + temp.model_directory = {} + for key, val in temp.model_file.items(): + temp.model_directory[key] = val[0].parent + elif (temp.type == 'z'): + # resolve paths to model files + temp.model_file = [pathlib.Path(f).expanduser() for f in + temp.model_file] + temp.model_directory = temp.model_file[0].parent + # verify that projection attribute exists for projected models + if temp.format in ('OTIS','ATLAS','TMD3'): + assert temp.projection + # convert scale from string to float + if temp.format in ('netcdf','GOT','FES'): + assert temp.scale + # assert that FES model has a version + # get model constituents from constituent files + if temp.format in ('FES',): + assert temp.version + if (temp.constituents is None): + temp.parse_constituents() + # return the model parameters + return temp + def parse_constituents(self) -> list: """ Parses tide model files for a list of model constituents diff --git a/setup.cfg b/setup.cfg index bc2cde0c..9a449e3b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,6 +20,7 @@ omit = 
pyTMD/convert_ll_xy.py pyTMD/load_constituent.py pyTMD/load_nodal_corrections.py + test/def_to_json.py [coverage:report] show_missing = true diff --git a/test/def_to_json.py b/test/def_to_json.py new file mode 100644 index 00000000..480fb4c5 --- /dev/null +++ b/test/def_to_json.py @@ -0,0 +1,81 @@ +""" +def_to_json.py (07/2024) +Converts a definition file to a json file +""" +import re +import json +import pathlib +import argparse + +def read_definition_file(definition_file): + parameters = {} + fid = open(definition_file, 'r') + for fileline in fid: + # Splitting the input line between parameter name and value + part = fileline.rstrip().split(maxsplit=1) + # filling the parameter definition variable + parameters[part[0]] = part[1] + fid.close() + return parameters + +# PURPOSE: create argument parser +def arguments(): + parser = argparse.ArgumentParser( + description="""Converts a definition file to a json file" + """, + fromfile_prefix_chars="@" + ) + # command line parameters + parser.add_argument('infile', + type=pathlib.Path, nargs='+', + help='Definition file to convert') + parser.add_argument('--pretty', '-p', + action='store_true', + help='Pretty print the json file') + parser.add_argument('--verbose', '-v', + action='store_true', + help='Verbose output') + parser.add_argument('--cleanup', '-c', + action='store_true', + help='Remove original definition files') + return parser + +def main(): + # Read the system arguments listed after the program + parser = arguments() + args,_ = parser.parse_known_args() + # iterate over each input file + for definition_file in args.infile: + print(f'{definition_file} -->') if args.verbose else None + # Reading each definition file + parameters = read_definition_file(definition_file) + if re.search(r';', parameters['model_file']): + # split model into list of files for each direction + model_file_u, model_file_v = parameters['model_file'].split(';') + parameters['model_file'] = dict( + u=re.split(r'[\s\,]+', model_file_u), + v=re.split(r'[\s\,]+', model_file_v) + ) + elif re.search(r',', parameters['model_file']): + # split model into list of files + parameters['model_file'] = re.split(r'[\s\,]+', parameters['model_file']) + if 'constituents' in parameters and re.search(r',', parameters['constituents']): + parameters['constituents'] = re.split(r'[\s\,]+', parameters['constituents']) + if 'type' in parameters and re.search(r',', parameters['type']): + parameters['type'] = re.split(r'[\s\,]+', parameters['type']) + if 'compressed' in parameters: + parameters['compressed'] = eval(parameters['compressed']) + if 'scale' in parameters: + parameters['scale'] = float(parameters['scale']) + # Writing the parameters to a json file + json_file = definition_file.with_suffix('.json') + print(f'\t{json_file}') if args.verbose else None + with open(json_file, 'w') as fid: + indent = 4 if args.pretty else None + json.dump(parameters, fid, indent=indent) + # Removing the definition file + if args.cleanup: + definition_file.unlink() + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/test/model_CATS2008.json b/test/model_CATS2008.json new file mode 100644 index 00000000..2373e6e4 --- /dev/null +++ b/test/model_CATS2008.json @@ -0,0 +1 @@ +{"format": "OTIS", "name": "CATS2008", "model_file": "CATS2008/hf.CATS2008.out", "grid_file": "CATS2008/grid_CATS2008", "projection": "CATS2008", "type": "z", "variable": "tide_ocean", "reference": "https://doi.org/10.15784/601235"} \ No newline at end of file diff --git a/test/model_FES2012.json 
b/test/model_FES2012.json new file mode 100644 index 00000000..7e827b1b --- /dev/null +++ b/test/model_FES2012.json @@ -0,0 +1 @@ +{"format": "FES", "name": "FES2012", "model_file": "fes2012/*_FES2012_SLEV.nc.gz", "type": "z", "version": "FES2012", "variable": "tide_ocean", "scale": 0.01, "compressed": true, "reference": "https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes.html"} \ No newline at end of file diff --git a/test/model_FES2014.json b/test/model_FES2014.json new file mode 100644 index 00000000..0e7e0370 --- /dev/null +++ b/test/model_FES2014.json @@ -0,0 +1 @@ +{"format": "FES", "name": "FES2014", "model_file": ["fes2014/ocean_tide/2n2.nc.gz", "fes2014/ocean_tide/eps2.nc.gz", "fes2014/ocean_tide/j1.nc.gz", "fes2014/ocean_tide/k1.nc.gz", "fes2014/ocean_tide/k2.nc.gz", "fes2014/ocean_tide/l2.nc.gz", "fes2014/ocean_tide/la2.nc.gz", "fes2014/ocean_tide/m2.nc.gz", "fes2014/ocean_tide/m3.nc.gz", "fes2014/ocean_tide/m4.nc.gz", "fes2014/ocean_tide/m6.nc.gz", "fes2014/ocean_tide/m8.nc.gz", "fes2014/ocean_tide/mf.nc.gz", "fes2014/ocean_tide/mks2.nc.gz", "fes2014/ocean_tide/mm.nc.gz", "fes2014/ocean_tide/mn4.nc.gz", "fes2014/ocean_tide/ms4.nc.gz", "fes2014/ocean_tide/msf.nc.gz", "fes2014/ocean_tide/msqm.nc.gz", "fes2014/ocean_tide/mtm.nc.gz", "fes2014/ocean_tide/mu2.nc.gz", "fes2014/ocean_tide/n2.nc.gz", "fes2014/ocean_tide/n4.nc.gz", "fes2014/ocean_tide/nu2.nc.gz", "fes2014/ocean_tide/o1.nc.gz", "fes2014/ocean_tide/p1.nc.gz", "fes2014/ocean_tide/q1.nc.gz", "fes2014/ocean_tide/r2.nc.gz", "fes2014/ocean_tide/s1.nc.gz", "fes2014/ocean_tide/s2.nc.gz", "fes2014/ocean_tide/s4.nc.gz", "fes2014/ocean_tide/sa.nc.gz", "fes2014/ocean_tide/ssa.nc.gz", "fes2014/ocean_tide/t2.nc.gz"], "constituents": ["2n2", "eps2", "j1", "k1", "k2", "l2", "lambda2", "m2", "m3", "m4", "m6", "m8", "mf", "mks2", "mm", "mn4", "ms4", "msf", "msqm", "mtm", "mu2", "n2", "n4", "nu2", "o1", "p1", "q1", "r2", "s1", "s2", "s4", "sa", "ssa", "t2"], "type": "z", "version": "FES2014", "variable": "tide_ocean", "scale": 0.01, "compressed": true, "reference": "https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes.html"} \ No newline at end of file diff --git a/test/model_FES2014_currents.json b/test/model_FES2014_currents.json new file mode 100644 index 00000000..21852d52 --- /dev/null +++ b/test/model_FES2014_currents.json @@ -0,0 +1 @@ +{"format": "FES", "name": "FES2014", "model_file": {"u": ["fes2014/eastward_velocity/2n2.nc.gz", "fes2014/eastward_velocity/eps2.nc.gz", "fes2014/eastward_velocity/j1.nc.gz", "fes2014/eastward_velocity/k1.nc.gz", "fes2014/eastward_velocity/k2.nc.gz", "fes2014/eastward_velocity/l2.nc.gz", "fes2014/eastward_velocity/la2.nc.gz", "fes2014/eastward_velocity/m2.nc.gz", "fes2014/eastward_velocity/m3.nc.gz", "fes2014/eastward_velocity/m4.nc.gz", "fes2014/eastward_velocity/m6.nc.gz", "fes2014/eastward_velocity/m8.nc.gz", "fes2014/eastward_velocity/mf.nc.gz", "fes2014/eastward_velocity/mks2.nc.gz", "fes2014/eastward_velocity/mm.nc.gz", "fes2014/eastward_velocity/mn4.nc.gz", "fes2014/eastward_velocity/ms4.nc.gz", "fes2014/eastward_velocity/msf.nc.gz", "fes2014/eastward_velocity/msqm.nc.gz", "fes2014/eastward_velocity/mtm.nc.gz", "fes2014/eastward_velocity/mu2.nc.gz", "fes2014/eastward_velocity/n2.nc.gz", "fes2014/eastward_velocity/n4.nc.gz", "fes2014/eastward_velocity/nu2.nc.gz", "fes2014/eastward_velocity/o1.nc.gz", "fes2014/eastward_velocity/p1.nc.gz", "fes2014/eastward_velocity/q1.nc.gz", "fes2014/eastward_velocity/r2.nc.gz", 
"fes2014/eastward_velocity/s1.nc.gz", "fes2014/eastward_velocity/s2.nc.gz", "fes2014/eastward_velocity/s4.nc.gz", "fes2014/eastward_velocity/sa.nc.gz", "fes2014/eastward_velocity/ssa.nc.gz", "fes2014/eastward_velocity/t2.nc.gz"], "v": ["fes2014/northward_velocity/2n2.nc.gz", "fes2014/northward_velocity/eps2.nc.gz", "fes2014/northward_velocity/j1.nc.gz", "fes2014/northward_velocity/k1.nc.gz", "fes2014/northward_velocity/k2.nc.gz", "fes2014/northward_velocity/l2.nc.gz", "fes2014/northward_velocity/la2.nc.gz", "fes2014/northward_velocity/m2.nc.gz", "fes2014/northward_velocity/m3.nc.gz", "fes2014/northward_velocity/m4.nc.gz", "fes2014/northward_velocity/m6.nc.gz", "fes2014/northward_velocity/m8.nc.gz", "fes2014/northward_velocity/mf.nc.gz", "fes2014/northward_velocity/mks2.nc.gz", "fes2014/northward_velocity/mm.nc.gz", "fes2014/northward_velocity/mn4.nc.gz", "fes2014/northward_velocity/ms4.nc.gz", "fes2014/northward_velocity/msf.nc.gz", "fes2014/northward_velocity/msqm.nc.gz", "fes2014/northward_velocity/mtm.nc.gz", "fes2014/northward_velocity/mu2.nc.gz", "fes2014/northward_velocity/n2.nc.gz", "fes2014/northward_velocity/n4.nc.gz", "fes2014/northward_velocity/nu2.nc.gz", "fes2014/northward_velocity/o1.nc.gz", "fes2014/northward_velocity/p1.nc.gz", "fes2014/northward_velocity/q1.nc.gz", "fes2014/northward_velocity/r2.nc.gz", "fes2014/northward_velocity/s1.nc.gz", "fes2014/northward_velocity/s2.nc.gz", "fes2014/northward_velocity/s4.nc.gz", "fes2014/northward_velocity/sa.nc.gz", "fes2014/northward_velocity/ssa.nc.gz", "fes2014/northward_velocity/t2.nc.gz"]}, "constituents": ["2n2", "eps2", "j1", "k1", "k2", "l2", "lambda2", "m2", "m3", "m4", "m6", "m8", "mf", "mks2", "mm", "mn4", "ms4", "msf", "msqm", "mtm", "mu2", "n2", "n4", "nu2", "o1", "p1", "q1", "r2", "s1", "s2", "s4", "sa", "ssa", "t2"], "type": ["u", "v"], "version": "FES2014", "scale": 1.0, "compressed": true, "reference": "https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes.html"} \ No newline at end of file diff --git a/test/model_GOT4.10.json b/test/model_GOT4.10.json new file mode 100644 index 00000000..07ac9f0b --- /dev/null +++ b/test/model_GOT4.10.json @@ -0,0 +1 @@ +{"format": "GOT", "name": "GOT4.10", "model_file": ["GOT4.10c/grids_loadtide/k1load.d.gz", "GOT4.10c/grids_loadtide/k2load.d.gz", "GOT4.10c/grids_loadtide/m2load.d.gz", "GOT4.10c/grids_loadtide/m4load.d.gz", "GOT4.10c/grids_loadtide/n2load.d.gz", "GOT4.10c/grids_loadtide/o1load.d.gz", "GOT4.10c/grids_loadtide/p1load.d.gz", "GOT4.10c/grids_loadtide/q1load.d.gz", "GOT4.10c/grids_loadtide/s1load.d.gz", "GOT4.10c/grids_loadtide/s2load.d.gz"], "type": "z", "variable": "tide_load", "version": "4.10", "scale": 0.001, "compressed": true, "reference": "https://ntrs.nasa.gov/citations/19990089548"} \ No newline at end of file diff --git a/test/model_TPXO9-atlas-v5.json b/test/model_TPXO9-atlas-v5.json new file mode 100644 index 00000000..7dcf755e --- /dev/null +++ b/test/model_TPXO9-atlas-v5.json @@ -0,0 +1 @@ +{"format": "netcdf", "name": "TPXO9-atlas-v5", "model_file": ["TPXO9_atlas_v5/h_2n2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_k1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_k2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_m2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_m4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_mf_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_mm_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_mn4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_ms4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_n2_tpxo9_atlas_30_v5.nc", 
"TPXO9_atlas_v5/h_o1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_p1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_q1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_s1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/h_s2_tpxo9_atlas_30_v5.nc"], "grid_file": "TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc", "type": "z", "variable": "tide_ocean", "version": "v5", "scale": 0.01, "compressed": false, "reference": "https://www.tpxo.net/global/tpxo9-atlas"} \ No newline at end of file diff --git a/test/model_TPXO9-atlas-v5_currents.json b/test/model_TPXO9-atlas-v5_currents.json new file mode 100644 index 00000000..04bdaa03 --- /dev/null +++ b/test/model_TPXO9-atlas-v5_currents.json @@ -0,0 +1 @@ +{"format": "netcdf", "name": "TPXO9-atlas-v5", "model_file": {"u": ["TPXO9_atlas_v5/u_2n2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_k1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_k2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_m2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_m4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mf_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mm_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mn4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_ms4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_n2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_o1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_p1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_q1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_s1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_s2_tpxo9_atlas_30_v5.nc"], "v": ["TPXO9_atlas_v5/u_2n2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_k1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_k2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_m2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_m4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mf_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mm_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_mn4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_ms4_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_n2_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_o1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_p1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_q1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_s1_tpxo9_atlas_30_v5.nc", "TPXO9_atlas_v5/u_s2_tpxo9_atlas_30_v5.nc"]}, "grid_file": "TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc", "type": ["u", "v"], "version": "v5", "scale": 0.01, "compressed": false, "reference": "https://www.tpxo.net/global/tpxo9-atlas"} \ No newline at end of file diff --git a/test/test_model.py b/test/test_model.py index b6a83a56..1952a900 100644 --- a/test/test_model.py +++ b/test/test_model.py @@ -5,6 +5,7 @@ from __future__ import annotations import io +import json import pytest import shutil import inspect @@ -15,10 +16,17 @@ filename = inspect.getframeinfo(inspect.currentframe()).filename filepath = pathlib.Path(filename).absolute().parent -def test_definition_CATS2008(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_CATS2008(file_format): """Tests the reading of the CATS2008 model definition file """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_CATS2008.def')) + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_CATS2008.def' + definition_file['json'] = 'model_CATS2008.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) # test read variables assert m.format == 'OTIS' assert m.name == 'CATS2008' @@ -37,13 +45,18 @@ def test_definition_CATS2008(): assert m.gla12 == 'd_ocElv' assert m.long_name == 'ocean_tide_elevation' -def test_definition_FES(): +@pytest.mark.parametrize("file_format", ['ascii','json']) 
+def test_definition_FES(file_format): """Tests the reading of the FES2014 model definition file """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_FES2014.def')) - # test read variables - assert m.format == 'FES' - assert m.name == 'FES2014' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_FES2014.def' + definition_file['json'] = 'model_FES2014.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files and constituents model_files = ['fes2014/ocean_tide/2n2.nc.gz', 'fes2014/ocean_tide/eps2.nc.gz', 'fes2014/ocean_tide/j1.nc.gz', 'fes2014/ocean_tide/k1.nc.gz', 'fes2014/ocean_tide/k2.nc.gz', @@ -62,14 +75,17 @@ def test_definition_FES(): 'fes2014/ocean_tide/s2.nc.gz', 'fes2014/ocean_tide/s4.nc.gz', 'fes2014/ocean_tide/sa.nc.gz', 'fes2014/ocean_tide/ssa.nc.gz', 'fes2014/ocean_tide/t2.nc.gz'] - # assert that all model files are in the model definition - for f in model_files: - assert pathlib.Path(f) in m.model_file - # assert that all constituents are in the model definition constituents = ['2n2','eps2','j1','k1','k2','l2', 'lambda2','m2','m3','m4','m6','m8','mf','mks2','mm', 'mn4','ms4','msf','msqm','mtm','mu2','n2','n4','nu2', 'o1','p1','q1','r2','s1','s2','s4','sa','ssa','t2'] + # test read variables + assert m.format == 'FES' + assert m.name == 'FES2014' + # assert that all model files are in the model definition + for f in model_files: + assert pathlib.Path(f) in m.model_file + # assert that all constituents are in the model definition assert m.constituents == constituents assert m.type == 'z' assert m.scale == 1.0/100.0 @@ -89,12 +105,19 @@ def test_definition_FES(): assert m.long_name == 'ocean_tide_elevation' # PURPOSE: test glob file functionality -def test_definition_FES_glob(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_FES_glob(file_format): """Tests the reading of the FES2014 model definition file with glob file searching """ - # get model parameters - m = pyTMD.io.model().from_file(filepath.joinpath('model_FES2014.def')) + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_FES2014.def' + definition_file['json'] = 'model_FES2014.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files model_files = ['fes2014/ocean_tide/2n2.nc.gz', 'fes2014/ocean_tide/eps2.nc.gz', 'fes2014/ocean_tide/j1.nc.gz', 'fes2014/ocean_tide/k1.nc.gz', 'fes2014/ocean_tide/k2.nc.gz', @@ -120,19 +143,27 @@ def test_definition_FES_glob(): local.touch(exist_ok=True) # create model definition file fid = io.StringIO() - attrs = ['name','format','compressed','type','scale','version'] - for attr in attrs: - val = getattr(m,attr) - if isinstance(val,list): - fid.write('{0}\t{1}\n'.format(attr,','.join(val))) - else: - fid.write('{0}\t{1}\n'.format(attr,val)) - # append glob strings for model file glob_string = r'fes2014/ocean_tide/*.nc.gz' - fid.write('{0}\t{1}\n'.format('model_file',glob_string)) + attrs = ['name','format','compressed','type','scale','version'] + if (file_format == 'ascii'): + # create tab-delimited definition file + for attr in attrs: + val = getattr(m,attr) + if isinstance(val,list): + fid.write('{0}\t{1}\n'.format(attr,','.join(val))) + else: + fid.write('{0}\t{1}\n'.format(attr,val)) + # append glob strings for model file + 
fid.write(f'model_file\t{glob_string}\n') + elif (file_format == 'json'): + # create JSON definition file + d = {attr:getattr(m,attr) for attr in attrs} + d['model_file'] = glob_string + json.dump(d, fid) + # rewind the glob definition file fid.seek(0) # use model definition file as input - model = pyTMD.io.model(directory=filepath).from_file(fid) + model = pyTMD.io.model(directory=filepath).from_file(fid, format=file_format) for attr in attrs: assert getattr(model,attr) == getattr(m,attr) # verify that the model files and constituents match @@ -146,13 +177,18 @@ def test_definition_FES_glob(): # clean up model shutil.rmtree(filepath.joinpath('fes2014')) -def test_definition_FES_currents(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_FES_currents(file_format): """Tests the reading of the FES2014 model definition file for currents """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_FES2014_currents.def')) - # test read variables - assert m.format == 'FES' - assert m.name == 'FES2014' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_FES2014_currents.def' + definition_file['json'] = 'model_FES2014_currents.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files and constituents model_files = {} model_files['u'] = ['fes2014/eastward_velocity/2n2.nc.gz', 'fes2014/eastward_velocity/eps2.nc.gz', 'fes2014/eastward_velocity/j1.nc.gz', @@ -190,15 +226,18 @@ def test_definition_FES_currents(): 'fes2014/northward_velocity/s2.nc.gz', 'fes2014/northward_velocity/s4.nc.gz', 'fes2014/northward_velocity/sa.nc.gz', 'fes2014/northward_velocity/ssa.nc.gz', 'fes2014/northward_velocity/t2.nc.gz'] + constituents = ['2n2','eps2','j1','k1','k2','l2', + 'lambda2','m2','m3','m4','m6','m8','mf','mks2','mm', + 'mn4','ms4','msf','msqm','mtm','mu2','n2','n4','nu2', + 'o1','p1','q1','r2','s1','s2','s4','sa','ssa','t2'] + # test read variables + assert m.format == 'FES' + assert m.name == 'FES2014' # assert that all model files are in the model definition for t in ['u','v']: for f in model_files[t]: assert pathlib.Path(f) in m.model_file[t] # assert that all constituents are in the model definition - constituents = ['2n2','eps2','j1','k1','k2','l2', - 'lambda2','m2','m3','m4','m6','m8','mf','mks2','mm', - 'mn4','ms4','msf','msqm','mtm','mu2','n2','n4','nu2', - 'o1','p1','q1','r2','s1','s2','s4','sa','ssa','t2'] assert m.constituents == constituents assert m.type == ['u','v'] assert m.scale == 1.0 @@ -211,12 +250,19 @@ def test_definition_FES_currents(): assert m.long_name['v'] == 'meridional_tidal_current' # PURPOSE: test glob file functionality -def test_definition_FES_currents_glob(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_FES_currents_glob(file_format): """Tests the reading of the FES2014 model definition file with glob file searching for currents """ - # get model parameters - m = pyTMD.io.model().from_file(filepath.joinpath('model_FES2014_currents.def')) + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_FES2014_currents.def' + definition_file['json'] = 'model_FES2014_currents.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files for each component model_files = {} model_files['u'] = 
['fes2014/eastward_velocity/2n2.nc.gz', 'fes2014/eastward_velocity/eps2.nc.gz', 'fes2014/eastward_velocity/j1.nc.gz', @@ -263,19 +309,27 @@ def test_definition_FES_currents_glob(): # create model definition file fid = io.StringIO() attrs = ['name','format','compressed','type','scale','version'] - for attr in attrs: - val = getattr(m,attr) - if isinstance(val,list): - fid.write('{0}\t{1}\n'.format(attr,','.join(val))) - else: - fid.write('{0}\t{1}\n'.format(attr,val)) - # append glob strings for model file - eastward = r'fes2014/eastward_velocity/*.nc.gz' - northward = r'fes2014/northward_velocity/*.nc.gz' - fid.write('{0}\t{1};{2}\n'.format('model_file',eastward,northward)) + glob_string_u = r'fes2014/eastward_velocity/*.nc.gz' + glob_string_v = r'fes2014/northward_velocity/*.nc.gz' + if (file_format == 'ascii'): + # create tab-delimited definition file + for attr in attrs: + val = getattr(m,attr) + if isinstance(val,list): + fid.write('{0}\t{1}\n'.format(attr,','.join(val))) + else: + fid.write('{0}\t{1}\n'.format(attr,val)) + # append glob strings for model file + fid.write(f'model_file\t{glob_string_u};{glob_string_v}\n') + elif (file_format == 'json'): + # create JSON definition file + d = {attr:getattr(m,attr) for attr in attrs} + d['model_file'] = {'u':glob_string_u,'v':glob_string_v} + json.dump(d, fid) + # rewind the glob definition file fid.seek(0) # use model definition file as input - model = pyTMD.io.model(directory=filepath).from_file(fid) + model = pyTMD.io.model(directory=filepath).from_file(fid, format=file_format) for attr in attrs: assert getattr(model,attr) == getattr(m,attr) # verify that the model files and constituents match @@ -290,13 +344,18 @@ def test_definition_FES_currents_glob(): # clean up model shutil.rmtree(filepath.joinpath('fes2014')) -def test_definition_GOT(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_GOT(file_format): """Tests the reading of the GOT4.10 model definition file """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_GOT4.10.def')) - # test read variables - assert m.format == 'GOT' - assert m.name == 'GOT4.10' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_GOT4.10.def' + definition_file['json'] = 'model_GOT4.10.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files model_files = ['GOT4.10c/grids_loadtide/k1load.d.gz', 'GOT4.10c/grids_loadtide/k2load.d.gz', 'GOT4.10c/grids_loadtide/m2load.d.gz', @@ -307,6 +366,9 @@ def test_definition_GOT(): 'GOT4.10c/grids_loadtide/q1load.d.gz', 'GOT4.10c/grids_loadtide/s1load.d.gz', 'GOT4.10c/grids_loadtide/s2load.d.gz'] + # test read variables + assert m.format == 'GOT' + assert m.name == 'GOT4.10' # assert that all model files are in the model definition for f in model_files: assert pathlib.Path(f) in m.model_file @@ -325,12 +387,19 @@ def test_definition_GOT(): assert m.long_name == 'load_tide_elevation' # PURPOSE: test glob file functionality -def test_definition_GOT_glob(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_GOT_glob(file_format): """Tests the reading of the GOT4.10 model definition file with glob file searching """ - # get model parameters - m = pyTMD.io.model().from_file(filepath.joinpath('model_GOT4.10.def')) + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_GOT4.10.def' + definition_file['json'] = 
'model_GOT4.10.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files model_files = ['GOT4.10c/grids_loadtide/k1load.d.gz', 'GOT4.10c/grids_loadtide/k2load.d.gz', 'GOT4.10c/grids_loadtide/m2load.d.gz', @@ -349,18 +418,26 @@ def test_definition_GOT_glob(): # create model definition file fid = io.StringIO() attrs = ['name','format','compressed','type','scale'] - for attr in attrs: - val = getattr(m,attr) - if isinstance(val,list): - fid.write('{0}\t{1}\n'.format(attr,','.join(val))) - else: - fid.write('{0}\t{1}\n'.format(attr,val)) - # append glob strings for model file glob_string = r'GOT4.10c/grids_loadtide/*.d.gz' - fid.write('{0}\t{1}\n'.format('model_file',glob_string)) + if (file_format == 'ascii'): + # create tab-delimited definition file + for attr in attrs: + val = getattr(m,attr) + if isinstance(val,list): + fid.write('{0}\t{1}\n'.format(attr,','.join(val))) + else: + fid.write('{0}\t{1}\n'.format(attr,val)) + # append glob strings for model file + fid.write(f'model_file\t{glob_string}\n') + elif (file_format == 'json'): + # create JSON definition file + d = {attr:getattr(m,attr) for attr in attrs} + d['model_file'] = glob_string + json.dump(d, fid) + # rewind the glob definition file fid.seek(0) # use model definition file as input - model = pyTMD.io.model(directory=filepath).from_file(fid) + model = pyTMD.io.model(directory=filepath).from_file(fid, format=file_format) for attr in attrs: assert getattr(model,attr) == getattr(m,attr) # verify that the model files match @@ -372,13 +449,18 @@ def test_definition_GOT_glob(): # clean up model shutil.rmtree(filepath.joinpath('GOT4.10c')) -def test_definition_TPXO9(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_TPXO9(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_TPXO9-atlas-v5.def')) - # test read variables - assert m.format == 'netcdf' - assert m.name == 'TPXO9-atlas-v5' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_TPXO9-atlas-v5.def' + definition_file['json'] = 'model_TPXO9-atlas-v5.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files model_files = ['TPXO9_atlas_v5/h_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k2_tpxo9_atlas_30_v5.nc', @@ -394,7 +476,11 @@ def test_definition_TPXO9(): 'TPXO9_atlas_v5/h_q1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_s1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_s2_tpxo9_atlas_30_v5.nc'] - assert m.grid_file == pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') + grid_file = pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') + # test read variables + assert m.format == 'netcdf' + assert m.name == 'TPXO9-atlas-v5' + assert m.grid_file == grid_file # assert that all model files are in the model definition for f in model_files: assert pathlib.Path(f) in m.model_file @@ -413,14 +499,19 @@ def test_definition_TPXO9(): assert m.long_name == 'ocean_tide_elevation' # PURPOSE: test glob file functionality -def test_definition_TPXO9_glob(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_TPXO9_glob(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file with glob file searching """ - m = 
pyTMD.io.model().from_file(filepath.joinpath('model_TPXO9-atlas-v5.def')) - # test read variables - assert m.format == 'netcdf' - assert m.name == 'TPXO9-atlas-v5' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_TPXO9-atlas-v5.def' + definition_file['json'] = 'model_TPXO9-atlas-v5.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files model_files = ['TPXO9_atlas_v5/h_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_k2_tpxo9_atlas_30_v5.nc', @@ -436,30 +527,43 @@ def test_definition_TPXO9_glob(): 'TPXO9_atlas_v5/h_q1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_s1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/h_s2_tpxo9_atlas_30_v5.nc'] + grid_file = pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') # create temporary files for testing glob functionality for model_file in model_files: local = filepath.joinpath(model_file) local.parent.mkdir(parents=True, exist_ok=True) local.touch(exist_ok=True) # create temporary grid file - local = filepath.joinpath(m.grid_file) + local = filepath.joinpath(grid_file) local.touch(exist_ok=True) + # test read variables + assert m.format == 'netcdf' + assert m.name == 'TPXO9-atlas-v5' # create model definition file fid = io.StringIO() attrs = ['name','format','compressed','type','scale'] - for attr in attrs: - val = getattr(m,attr) - if isinstance(val,list): - fid.write('{0}\t{1}\n'.format(attr,','.join(val))) - else: - fid.write('{0}\t{1}\n'.format(attr,val)) - # append glob strings for model file glob_string = r'TPXO9_atlas_v5/h*.nc' - fid.write('{0}\t{1}\n'.format('model_file',glob_string)) - fid.write('{0}\t{1}\n'.format('grid_file',m.grid_file)) + if (file_format == 'ascii'): + # create tab-delimited definition file + for attr in attrs: + val = getattr(m,attr) + if isinstance(val,list): + fid.write('{0}\t{1}\n'.format(attr,','.join(val))) + else: + fid.write('{0}\t{1}\n'.format(attr,val)) + # append glob strings for model file + fid.write(f'model_file\t{glob_string}\n') + fid.write(f'grid_file\t{grid_file}\n') + elif (file_format == 'json'): + # create JSON definition file + d = {attr:getattr(m,attr) for attr in attrs} + d['model_file'] = glob_string + d['grid_file'] = str(grid_file) + json.dump(d, fid) + # rewind the glob definition file fid.seek(0) # use model definition file as input - model = pyTMD.io.model(directory=filepath).from_file(fid) + model = pyTMD.io.model(directory=filepath).from_file(fid, format=file_format) for attr in attrs: assert getattr(model,attr) == getattr(m,attr) # verify that the model files match @@ -471,13 +575,18 @@ def test_definition_TPXO9_glob(): # clean up model shutil.rmtree(filepath.joinpath('TPXO9_atlas_v5')) -def test_definition_TPXO9_currents(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_TPXO9_currents(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file for currents """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_TPXO9-atlas-v5_currents.def')) - # test read variables - assert m.format == 'netcdf' - assert m.name == 'TPXO9-atlas-v5' + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_TPXO9-atlas-v5_currents.def' + definition_file['json'] = 'model_TPXO9-atlas-v5_currents.json' + val = definition_file[file_format] + # read model definition file for format + m = 
pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files for each component model_files = {} model_files['u'] = ['TPXO9_atlas_v5/u_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_k1_tpxo9_atlas_30_v5.nc', @@ -509,7 +618,11 @@ def test_definition_TPXO9_currents(): 'TPXO9_atlas_v5/u_q1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_s1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_s2_tpxo9_atlas_30_v5.nc'] - assert m.grid_file == pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') + grid_file = pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') + # test read variables + assert m.format == 'netcdf' + assert m.name == 'TPXO9-atlas-v5' + assert m.grid_file == grid_file for t in ['u','v']: assert sorted(m.model_file[t]) == [pathlib.Path(f) for f in model_files[t]] assert m.type == ['u', 'v'] @@ -520,11 +633,19 @@ def test_definition_TPXO9_currents(): assert m.long_name['v'] == 'meridional_tidal_current' # PURPOSE: test glob file functionality -def test_definition_TPXO9_currents_glob(): +@pytest.mark.parametrize("file_format", ['ascii','json']) +def test_definition_TPXO9_currents_glob(file_format): """Tests the reading of the TPXO9-atlas-v5 model definition file for currents with glob file searching """ - m = pyTMD.io.model().from_file(filepath.joinpath('model_TPXO9-atlas-v5_currents.def')) + # definition files of each format + definition_file = {} + definition_file['ascii'] = 'model_TPXO9-atlas-v5_currents.def' + definition_file['json'] = 'model_TPXO9-atlas-v5_currents.json' + val = definition_file[file_format] + # read model definition file for format + m = pyTMD.io.model().from_file(filepath.joinpath(val), format=file_format) + # model files for each component model_files = {} model_files['u'] = ['TPXO9_atlas_v5/u_2n2_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_k1_tpxo9_atlas_30_v5.nc', @@ -556,6 +677,7 @@ def test_definition_TPXO9_currents_glob(): 'TPXO9_atlas_v5/u_q1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_s1_tpxo9_atlas_30_v5.nc', 'TPXO9_atlas_v5/u_s2_tpxo9_atlas_30_v5.nc'] + grid_file = pathlib.Path('TPXO9_atlas_v5/grid_tpxo9_atlas_30_v5.nc') # create temporary files for testing glob functionality for t in ['u','v']: for model_file in model_files[t]: @@ -563,25 +685,34 @@ def test_definition_TPXO9_currents_glob(): local.parent.mkdir(parents=True, exist_ok=True) local.touch(exist_ok=True) # create temporary grid file - local = filepath.joinpath(m.grid_file) + local = filepath.joinpath(grid_file) local.touch(exist_ok=True) # create model definition file fid = io.StringIO() attrs = ['name','format','compressed','type','scale'] - for attr in attrs: - val = getattr(m,attr) - if isinstance(val,list): - fid.write('{0}\t{1}\n'.format(attr,','.join(val))) - else: - fid.write('{0}\t{1}\n'.format(attr,val)) - # append glob strings for model file glob_string_u = r'TPXO9_atlas_v5/u*.nc' glob_string_v = r'TPXO9_atlas_v5/u*.nc' - fid.write('{0}\t{1};{2}\n'.format('model_file',glob_string_u,glob_string_v)) - fid.write('{0}\t{1}\n'.format('grid_file',m.grid_file)) + if (file_format == 'ascii'): + # create tab-delimited definition file + for attr in attrs: + val = getattr(m,attr) + if isinstance(val,list): + fid.write('{0}\t{1}\n'.format(attr,','.join(val))) + else: + fid.write('{0}\t{1}\n'.format(attr,val)) + # append glob strings for model file + fid.write(f'model_file\t{glob_string_u};{glob_string_v}\n') + fid.write(f'grid_file\t{grid_file}\n') + elif (file_format == 'json'): + # create JSON definition file + d = {attr:getattr(m,attr) for attr in attrs} + d['model_file'] = 
{'u':glob_string_u,'v':glob_string_v}
+        d['grid_file'] = str(grid_file)
+        json.dump(d, fid)
+    # rewind the glob definition file
    fid.seek(0)
    # use model definition file as input
-    model = pyTMD.io.model(directory=filepath).from_file(fid)
+    model = pyTMD.io.model(directory=filepath).from_file(fid, format=file_format)
    for attr in attrs:
        assert getattr(model,attr) == getattr(m,attr)
    # verify that the model files match
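
A minimal usage sketch (not part of the patch itself) of the JSON definition-file path added above: it writes a small JSON definition modeled on the test/model_GOT4.10.json fixture and reads it back through the new format keyword of pyTMD.io.model.from_file. The output filename model_GOT4.10_subset.json and the truncated two-constituent file list are placeholders chosen for illustration; the keys match the fixtures shipped with this patch.

import json
import pathlib
import pyTMD

# JSON definition equivalent to a tab-delimited .def file
# (keys follow test/model_GOT4.10.json; the two-file list is a placeholder)
definition = {
    "format": "GOT",
    "name": "GOT4.10",
    "model_file": [
        "GOT4.10c/grids_loadtide/m2load.d.gz",
        "GOT4.10c/grids_loadtide/s2load.d.gz"
    ],
    "type": "z",
    "variable": "tide_load",
    "version": "4.10",
    "scale": 0.001,
    "compressed": True
}
definition_file = pathlib.Path("model_GOT4.10_subset.json")
with definition_file.open(mode="w", encoding="utf8") as fid:
    json.dump(definition, fid, indent=4)

# parse the JSON definition with the new keyword (the default remains 'ascii')
m = pyTMD.io.model().from_file(definition_file, format="json")
print(m.name, m.format, m.scale)

# existing tab-delimited definition files can be migrated with the converter
# added in test/def_to_json.py, e.g.
#     python test/def_to_json.py --pretty --verbose model_CATS2008.def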