diff --git a/.gitignore b/.gitignore
index 12c33075..6142ce7c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,4 @@ temp
build
dist
PyConform.egg-info
+.idea/
diff --git a/.project b/.project
deleted file mode 100644
index f1ecf7c5..00000000
--- a/.project
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
- PyConform
-
-
- ASAPPyTools
-
-
-
- org.python.pydev.PyDevBuilder
-
-
-
-
-
- org.python.pydev.pythonNature
-
-
diff --git a/.pydevproject b/.pydevproject
deleted file mode 100644
index 212594a6..00000000
--- a/.pydevproject
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-python 2.7
-Miniconda3 [conform]
-
-/${PROJECT_DIR_NAME}/source
-
-
diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644
index 5fae9c26..00000000
--- a/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,2 +0,0 @@
-eclipse.preferences.version=1
-encoding//docs/sphinx/conf.py=utf-8
diff --git a/scripts/iconform b/scripts/iconform
index 822f1bd2..cd0e2ba3 100755
--- a/scripts/iconform
+++ b/scripts/iconform
@@ -17,7 +17,8 @@ from dreqPy import dreq
import uuid
-version = 'v'+str(datetime.datetime.now().year)+str(datetime.datetime.now().month).zfill(2)+str(datetime.datetime.now().day).zfill(2)
+#version = 'v'+str(datetime.datetime.now().year)+str(datetime.datetime.now().month).zfill(2)+str(datetime.datetime.now().day).zfill(2)
+version = 'v20190309'
# Map netcdf types to python types
#data_types = {'char': 'char', 'byte': 'int8', 'short': 'int16', 'int': 'int32',
@@ -191,30 +192,54 @@ def fill_missing_glob_attributes(attr, table, v, grids):
attr.pop("external_variables")
if "branch_method" in attr.keys():
- if "none" not in attr["branch_method"]:
+ if "no parent" not in attr["branch_method"]:
if "branch_time_in_child" in attr.keys():
if len(attr["branch_time_in_child"])>0:
- attr["branch_time_in_child"] = float(attr["branch_time_in_child"].split('D')[0])
+ try:
+ attr["branch_time_in_child"] = float(attr["branch_time_in_child"].split('D')[0])
+ except ValueError as e:
+ attr["branch_time_in_child"] = attr["branch_time_in_child"].split('D')[0]
if "branch_time_in_parent" in attr.keys():
if len(attr["branch_time_in_parent"])>0:
- attr["branch_time_in_parent"] = float(attr["branch_time_in_parent"].split('D')[0])
- if "parent_mip_era" in attr.keys():
+ try:
+ attr["branch_time_in_parent"] = float(attr["branch_time_in_parent"].split('D')[0])
+ except ValueError as e:
+ attr["branch_time_in_parent"] = attr["branch_time_in_parent"].split('D')[0]
+ if "parent_mip_era" in attr.keys() and len(attr["parent_activity_id"]) > 2:
attr["parent_mip_era"] = attr["mip_era"]
- if "parent_source_id" in attr.keys():
+ else:
+ attr["parent_mip_era"] = "no parent"
+ if "parent_source_id" in attr.keys() and len(attr["parent_activity_id"]) > 2:
attr["parent_source_id"] = attr["source_id"]
- if "parent_time_units" in attr.keys():
+ else:
+ attr["parent_source_id"] = ""
+ if "parent_time_units" in attr.keys() and len(attr["parent_activity_id"]) > 2:
attr["parent_time_units"] = "days since 0001-01-01 00:00:00"
+ else:
+ attr["parent_time_units"] = "none"
+ else:
+ if "branch_time_in_child" in attr.keys():
+ attr["branch_time_in_child"] = float(attr["branch_time_in_child"].split('D')[0])
+ if "branch_time_in_parent" in attr.keys():
+ attr["branch_time_in_parent"] = 0.0
+ if "parent_mip_era" in attr.keys():
+ attr["parent_mip_era"] = "no parent"
+ if "parent_source_id" in attr.keys():
+ attr["parent_source_id"] = "no parent"
+ if "parent_time_units" in attr.keys():
+ attr["parent_time_units"] = "no parent"
+
else:
if "branch_time_in_child" in attr.keys():
- attr["branch_time_in_child"] = "none"
+ attr["branch_time_in_child"] = "no parent"
if "branch_time_in_parent" in attr.keys():
- attr["branch_time_in_parent"] = "none"
+ attr["branch_time_in_parent"] = "no parent"
if "parent_mip_era" in attr.keys():
- attr["parent_mip_era"] = "none"
+ attr["parent_mip_era"] = "no parent"
if "parent_source_id" in attr.keys():
- attr["parent_source_id"] = "none"
+ attr["parent_source_id"] = "no parent"
if "parent_time_units" in attr.keys():
- attr["parent_time_units"] = "none"
+ attr["parent_time_units"] = "no parent"
if "variant_label" in attr.keys():
pre = attr["variant_label"].split('r')[1]
@@ -349,10 +374,12 @@ def defineVar(v, varName, attr, table_info, definition, ig, experiment, out_dir)
dst = date_strings[v["frequency"]]
else:
dst = ''
+ vid = v['variable_id']
+
f_name = ("{0}/{1}/{2}/{3}/{4}/{5}/{6}/{7}/{8}/{9}/{10}/{11}_{12}_{13}_{14}_{15}_{16}{17}.nc".format(
out_dir, mip_era, activity_id, institution_id, source_id, experiment, ripf, mipTable,
- varName, grid, version,
- varName, mipTable, source_id, experiment, ripf, grid, dst))
+ vid, grid, version,
+ vid, mipTable, source_id, experiment, ripf, grid, dst))
var = {}
# put together the dictionary entry for this variable
@@ -363,6 +390,7 @@ def defineVar(v, varName, attr, table_info, definition, ig, experiment, out_dir)
var["file"] = {}
var["file"]["attributes"] = attributes
var["file"]["attributes"]["variant_label"] = ripf
+ var["attributes"]["comment"] = definition
var["file"]["filename"] = f_name
var["file"]["format"] = f_format
if compression is not None:
@@ -496,21 +524,25 @@ def create_output(exp_dict, definitions, input_glob, attributes, output_path, ar
ts_key = None
mip = d['mipTable']
if mip in definitions.keys():
- ig = ""
- if v in definitions[mip].keys():
- if "N/A" in definitions[mip][v].upper():
- v_def = ""
- else:
- v_def = definitions[mip][v]
- ig = input_glob[mip][v]
- else:
- v_def = ""
+ ig = ""
+ if v in definitions[mip].keys():
+ v_def = definitions[mip][v]
+ ig = input_glob[mip][v]
+# if v in definitions[mip].keys():
+# if "N/A" in definitions[mip][v].upper():
+# v_def = ""
+# else:
+# v_def = definitions[mip][v]
+# ig = input_glob[mip][v]
+# else:
+# v_def = ""
var_list[v] = defineVar(d, v, attributes, table_info, v_def, ig, experiment, out_dir)
realm = d["realm"].replace(' ','_')
ts_key = var_list[v]["file"]["attributes"]["activity_id"]+'_'+var_list[v]["attributes"]["mipTable"]+'_'+realm
if ts_key not in TableSpec.keys():
TableSpec[ts_key] = {}
- TableSpec[ts_key][v] = var_list[v]
+ #TableSpec[ts_key][v] = var_list[v]
+ TableSpec[ts_key][var_list[v]["file"]["attributes"]["variable_id"]] = var_list[v]
t_realm = "NoRealm"
for k1,v1 in definitions.iteritems():
if 'Coords' in k1:
@@ -528,6 +560,8 @@ def create_output(exp_dict, definitions, input_glob, attributes, output_path, ar
else:
if 'definition' not in TableSpec[ts_key][dim].keys():
print "MISSING "+dim+" in "+'Coords_'+t_realm+" (for variable "+v+")"
+ else:
+ print 'missing:',v
else:
AllMissing[t].append(v)
@@ -642,6 +676,7 @@ def main(argv=None):
for gaFile in args.globalAttrFile.split(","):
if os.path.isfile(gaFile):
if "json" in gaFile:
+ print 'opening ',gaFile
with open(gaFile) as gaF:
ga = json.load(gaF)
for k in ga.keys():
diff --git a/source/pyconform/datasets.py b/source/pyconform/datasets.py
index d8915f2f..7b7b176e 100644
--- a/source/pyconform/datasets.py
+++ b/source/pyconform/datasets.py
@@ -380,7 +380,8 @@ class FileDesc(object):
file, a dict of DimensionDesc objects, and a dict of VariableDesc objects.
"""
- def __init__(self, name, format='NETCDF4_CLASSIC', deflate=2, variables=(), attributes={}): # @ReservedAssignment
+ def __init__(self, name, format='NETCDF4_CLASSIC', deflate=2, variables=(), attributes={},
+ autoparse_time_variable=None): # @ReservedAssignment
"""
Initializer
@@ -392,6 +393,8 @@ def __init__(self, name, format='NETCDF4_CLASSIC', deflate=2, variables=(), attr
deflate (int): Level of lossless compression to use in all variables within the file (0-9)
variables (tuple): Tuple of VariableDesc objects describing the file variables
attributes (dict): Dict of global attributes in the file
+ autoparse_time_variable (str): The name of an output variable that should be used
+ to represent the 'time' when autoparsing the output filename
"""
self._name = name
@@ -431,6 +434,13 @@ def __init__(self, name, format='NETCDF4_CLASSIC', deflate=2, variables=(), attr
raise TypeError(err_msg)
self._attributes = deepcopy(attributes)
+ if autoparse_time_variable:
+ if autoparse_time_variable not in self._variables:
+            err_msg = ('Variable {!r} does not exist in described file {!r}, but is required '
+ 'for autoparsing the filename'.format(autoparse_time_variable, name))
+ raise ValueError(err_msg)
+ self.autoparse_time_variable = autoparse_time_variable
+
@property
def name(self):
"""Name of the file"""
@@ -773,6 +783,9 @@ def __init__(self, name='output', dsdict=OrderedDict()):
if 'deflate' in fdict:
files[fname]['deflate'] = fdict['deflate']
+ if 'autoparse_time_variable' in fdict:
+ files[fname]['autoparse_time_variable'] = fdict['autoparse_time_variable']
+
if 'attributes' in fdict:
files[fname]['attributes'] = fdict['attributes']
@@ -794,7 +807,7 @@ def __init__(self, name='output', dsdict=OrderedDict()):
# Get the variable descriptors for each variable required to be in the file
vlist = OrderedDict([(vname, variables[vname]) for vname in fdict['variables']])
- # Get the unique list of dimension names for required by these variables
+ # Get the unique list of dimension names required by these variables
fdims = set()
for vname in vlist:
vdesc = vlist[vname]
diff --git a/source/pyconform/flownodes.py b/source/pyconform/flownodes.py
index 4337da73..5d51d47d 100644
--- a/source/pyconform/flownodes.py
+++ b/source/pyconform/flownodes.py
@@ -637,6 +637,11 @@ def __init__(self, filedesc, inputs=()):
raise TypeError(('WriteNode {!r} cannot accept input from type {}, must be a '
'ValidateNode').format(filedesc.name, type(inp)))
+ # Extract hidden variables (names starting with '_') from list of input nodes
+ hidden_labels = [inp.label for inp in inputs if inp.label[0] == '_']
+ self._hidden_inputs = [inp for inp in inputs if inp.label in hidden_labels]
+ inputs = [inp for inp in inputs if inp.label not in hidden_labels]
+
# Call base class (label is filename)
super(WriteNode, self).__init__(filedesc.name, *inputs)
@@ -647,7 +652,7 @@ def __init__(self, filedesc, inputs=()):
for inp in inputs:
if inp.label not in self._filedesc.variables:
raise ValueError(('WriteNode {!r} takes input from variable {!r} that is not '
- 'contained in the descibed file').format(filedesc.name, inp.label))
+ 'contained in the described file').format(filedesc.name, inp.label))
# Construct the proper filename
fname = self._autoparse_filename_(self.label)
@@ -678,28 +683,31 @@ def _autoparse_filename_(self, fname):
if '{' in fname:
possible_tvars = []
- for var in self._filedesc.variables:
- vdesc = self._filedesc.variables[var]
- if var in ('time', 'time1', 'time2', 'time3'):
- possible_tvars.append(var)
- elif vdesc.cfunits().is_time_reference() and len(vdesc.dimensions) == 1:
- possible_tvars.append(var)
- elif 'standard_name' in vdesc.attributes and vdesc.attributes['standard_name'] == 'time':
- possible_tvars.append(var)
- elif 'axis' in vdesc.attributes and vdesc.attributes['axis'] == 'T':
- possible_tvars.append(var)
+ possible_inputs = list(self.inputs)
+ if self._filedesc.autoparse_time_variable:
+ possible_tvars.append(self._filedesc.autoparse_time_variable)
+ possible_inputs += self._hidden_inputs
+ else:
+ for var in self._filedesc.variables:
+ vdesc = self._filedesc.variables[var]
+ if var in ('time', 'time1', 'time2', 'time3'):
+ possible_tvars.append(var)
+ elif vdesc.cfunits().is_time_reference() and len(vdesc.dimensions) == 1:
+ possible_tvars.append(var)
+ elif 'standard_name' in vdesc.attributes and vdesc.attributes['standard_name'] == 'time':
+ possible_tvars.append(var)
+ elif 'axis' in vdesc.attributes and vdesc.attributes['axis'] == 'T':
+ possible_tvars.append(var)
if len(possible_tvars) == 0:
- msg = 'Could not identify a time variable to autoparse filename {!r}'.format(
- fname)
+ msg = 'Could not identify a time variable to autoparse filename {!r}'.format(fname)
warn(msg, DateTimeAutoParseWarning)
return fname
+ possible_tnodes = {vnode.label:vnode for vnode in possible_inputs
+ if vnode.label in possible_tvars}
+ if len(possible_tnodes) == 0:
+ raise ValueError('Time variable input missing for file {!r}'.format(fname))
+ tnode = possible_tnodes['time'] if 'time' in possible_tnodes else possible_tnodes.values()[0]
- tvar = 'time' if 'time' in possible_tvars else possible_tvars[0]
- tnodes = [vnode for vnode in self.inputs if vnode.label == tvar]
- if len(tnodes) == 0:
- raise ValueError(
- 'Time variable input missing for file {!r}'.format(fname))
- tnode = tnodes[0]
t1 = tnode[0:1]
t2 = tnode[-1:]
@@ -750,8 +758,11 @@ def _open_(self, deflate=None):
try:
makedirs(fdir)
except:
- raise IOError(
- 'Failed to create directory for output file {!r}'.format(fname))
+ if exists(fdir):
+ print('Already created directory for output file {!r}'.format(fname))
+ else:
+ raise IOError(
+ 'Failed to create directory for output file {!r}'.format(fname))
# Try to open the output file for writing
try:
@@ -806,8 +817,7 @@ def _open_(self, deflate=None):
for vnode in self.inputs:
vname = vnode.label
vdesc = self._filedesc.variables[vname]
- vattrs = OrderedDict((k, v)
- for k, v in vnode.attributes.iteritems())
+ vattrs = OrderedDict((k, v) for k, v in vnode.attributes.iteritems())
vdtype = vdesc.dtype
fillval = vattrs.get('_FillValue', None)
@@ -824,8 +834,8 @@ def _open_(self, deflate=None):
'Override deflate value range from 0 to 9')
zlib = deflate > 0
clev = deflate if zlib else 1
- ncvar = self._file.createVariable(
- vname, vdtype, vdims, fill_value=fillval, zlib=zlib, complevel=clev)
+ ncvar = self._file.createVariable(vname, vdtype, vdims, fill_value=fillval,
+ zlib=zlib, complevel=clev)
for aname in vattrs:
if aname not in self._unwritten_attributes:
@@ -926,19 +936,16 @@ def execute(self, chunks={}, deflate=None):
# Open the file and write the header information
self._open_(deflate=deflate)
- # Create data structure to keep track of which variable chunks we have
- # written
+ # Create data structure to keep track of which variable chunks we have written
vchunks = {vnode.label: set() for vnode in self.inputs}
- # Compute the Global Dimension Sizes dictionary from the input variable
- # nodes
+ # Compute the Global Dimension Sizes dictionary from the input variable nodes
inputdims = []
for vnode in self.inputs:
for d in self._filedesc.variables[vnode.label].dimensions:
if d not in inputdims:
inputdims.append(d)
- gdims = OrderedDict(
- (d, self._filedesc.dimensions[d].size) for d in inputdims)
+ gdims = OrderedDict((d, self._filedesc.dimensions[d].size) for d in inputdims)
# Iterate over the global dimension space
for chunk in WriteNode._chunk_iter_(gdims, chunks=chunks):
diff --git a/source/pyconform/miptableparser.py b/source/pyconform/miptableparser.py
index ad4d9811..c1ccae72 100644
--- a/source/pyconform/miptableparser.py
+++ b/source/pyconform/miptableparser.py
@@ -377,11 +377,15 @@ def parse_table(self,exp,mips,tables,v_list,table_var_fields,table_axes_fields,t
print sorted(dq.inx.experiment.label.keys()),len(dq.inx.experiment.label.keys())
return {}
activity_id = dq.inx.uid[e_id[0]].mip
- e_vars = dq.inx.iref_by_sect[e_id[0]].a
- if len(e_vars['requestItem']) == 0:
- e_vars = dq.inx.iref_by_sect[dq.inx.uid[e_id[0]].egid].a
+
+ e_vars =[]
+ e_vars.append(dq.inx.iref_by_sect[e_id[0]].a)
+ #if len(e_vars['requestItem']) == 0:
+ e_vars.append(dq.inx.iref_by_sect[dq.inx.uid[e_id[0]].egid].a)
+ e_vars.append(dq.inx.iref_by_sect[activity_id].a)
total_request = {}
- for ri in e_vars['requestItem']:
+ for e_var in e_vars:
+ for ri in e_var['requestItem']:
table_info = {}
dr = dq.inx.uid[ri]
@@ -434,10 +438,10 @@ def parse_table(self,exp,mips,tables,v_list,table_var_fields,table_axes_fields,t
if hasattr(c_var,'label'):
var['id']= c_var.label
var['out_name'] = c_var.label
- var['variable_id'] = c_var.label
+ var['variable_id'] = dq.inx.uid[c_var.vid].label
l = dq.inx.var.label[c_var.label]
- if len(l)>0:
- var['standard_name'] = dq.inx.var.uid[l[0]].sn
+ #if len(l)>0:
+ var['standard_name'] = dq.inx.uid[c_var.vid].sn
if hasattr(c_var,'modeling_realm'):
var['realm']= c_var.modeling_realm
#if hasattr(c_var,'ok_min_mean_abs'):
@@ -459,8 +463,8 @@ def parse_table(self,exp,mips,tables,v_list,table_var_fields,table_axes_fields,t
if hasattr(c_var,'title'):
var['title']= c_var.title
var['long_name']= c_var.title
- if hasattr(c_var,'description'):
- var['comment']= c_var.description
+ #if hasattr(c_var,'description'):
+ # var['comment']= c_var.description
if hasattr(c_var,'type'):
var['type']= c_var.type
if hasattr(c_var,'valid_max'):
@@ -537,6 +541,8 @@ def parse_table(self,exp,mips,tables,v_list,table_var_fields,table_axes_fields,t
# Set what we can from the variable section
if hasattr(c_var, 'vid'):
v_var = dq.inx.uid[c_var.vid]
+ if hasattr(v_var,'standard_name'):
+ var['standard_name']= v_var.sn
if hasattr(v_var,'cf_standard_name'):
var['cf_standard_name']= v_var.sn
if hasattr(v_var,'long_name'):
diff --git a/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py b/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
index 40b5a5d4..ca605186 100644
--- a/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
+++ b/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
@@ -142,26 +142,27 @@ def __getitem__(self, index):
# Check for valid pfts and compute weighted average
if pft_indx.size > 0:
- if 'grass' in vegType:
- pfts1d_wtlunit_grass = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
- dum = GPP[:,pft_indx]
- weights = pfts1d_wtlunit_grass / np.sum(pfts1d_wtlunit_grass)
- if np.absolute(1.-np.sum(weights)) > eps:
- print("Weights do not sum to 1, exiting")
- sys.exit(-1)
- varo_vegType[:,jxy,ixy] = np.sum(dum * weights)
-
- elif 'shrub' in vegType:
- pfts1d_wtlunit_shrub = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
- dum = GPP[:,pft_indx]
- weights = pfts1d_wtlunit_shrub / np.sum(pfts1d_wtlunit_shrub)
- varo_vegType[:,jxy,ixy] = np.sum(dum * weights)
-
- elif 'tree' in vegType:
- pfts1d_wtlunit_tree = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
- dum = GPP[:,pft_indx]
- weights = pfts1d_wtlunit_tree / np.sum(pfts1d_wtlunit_tree)
- varo_vegType[:,jxy,ixy] = np.sum(dum * weights)
+ for t in range(len(time)):
+ if 'grass' in vegType:
+ pfts1d_wtlunit_grass = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
+ dum = GPP[t,pft_indx]
+ weights = pfts1d_wtlunit_grass / np.sum(pfts1d_wtlunit_grass)
+ if np.absolute(1.-np.sum(weights)) > eps:
+ print("Weights do not sum to 1, exiting")
+ sys.exit(-1)
+ varo_vegType[t,jxy,ixy] = np.sum(dum * weights)
+
+ elif 'shrub' in vegType:
+ pfts1d_wtlunit_shrub = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
+ dum = GPP[t,pft_indx]
+ weights = pfts1d_wtlunit_shrub / np.sum(pfts1d_wtlunit_shrub)
+ varo_vegType[t,jxy,ixy] = np.sum(dum * weights)
+
+ elif 'tree' in vegType:
+ pfts1d_wtlunit_tree = (pfts1d_wtlunit[pft_indx]).astype(np.float32)
+ dum = GPP[t,pft_indx]
+ weights = pfts1d_wtlunit_tree / np.sum(pfts1d_wtlunit_tree)
+ varo_vegType[t,jxy,ixy] = np.sum(dum * weights)
else:
varo_vegType[:,jxy,ixy] = 1e+20
diff --git a/source/pyconform/modules/atm_funcs.py b/source/pyconform/modules/atm_funcs.py
new file mode 100644
index 00000000..d77ab897
--- /dev/null
+++ b/source/pyconform/modules/atm_funcs.py
@@ -0,0 +1,152 @@
+#=========================================================================
+# delpFunction
+#=========================================================================
+
+class delpFunction(Function):
+ key = 'delp'
+
+ def __init__(self, p_PO, p_PS, p_hyai, p_hybi):
+ super(delpFunction, self).__init__(p_PO, p_PS, p_hyai, p_hybi)
+
+ def __getitem__(self, index):
+ p_PO = self.arguments[0][index]
+ p_PS = self.arguments[1][index]
+ p_hyai = self.arguments[2][index]
+ p_hybi = self.arguments[3][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0, 0, 0, 0)), dimensions=[p_PS.dimensions[0], p_hybi.dimensions[0], p_PS.dimensions[1], p_PS.dimensions[2]])
+
+ PO = p_PO.data
+ PS = p_PS.data
+ hyai = p_hyai.data
+ hybi = p_hybi.data
+
+ p = (hyai*PO)+(hybi*PS)
+ l = len(hybi)
+ delp = np.ma.zeros((p_PS.dimensions[0], p_hybi.dimensions[0], p_PS.dimensions[1], p_PS.dimensions[2]))
+ for i in range(0,l-1):
+ delp[:,i,:,:] = p[:,i+1,:,:]-p[:,i,:,:]
+
+
+ new_name = 'delp({}{}{}{})'.format(
+ p_PO.name, p_PS.name, p_hyai.name, p_hybi.name)
+
+ return PhysArray(delp, name=new_name, units="Pa")
+
+
+#=========================================================================
+# rhoFunction
+#=========================================================================
+
+class rhoFunction(Function):
+ key = 'rho'
+
+ def __init__(self, p_PO, p_PS, p_hyam, p_hybm, p_T):
+ super(rhoFunction, self).__init__(p_PO, p_PS, p_hyam, p_hybm, p_T)
+
+ def __getitem__(self, index):
+ p_PO = self.arguments[0][index]
+ p_PS = self.arguments[1][index]
+ p_hyam = self.arguments[2][index]
+ p_hybm = self.arguments[3][index]
+ p_T = self.arguments[4][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0, 0, 0, 0)), dimensions=[p_T.dimensions[0], p_T.dimensions[1], p_T.dimensions[2], p_T.dimensions[3]])
+
+ PO = p_PO.data
+ PS = p_PS.data
+ hyam = p_hyam.data
+ hybm = p_hybm.data
+ t = p_T.data
+
+ p = (hyam*PO)+(hybm*PS)
+        rho = p/(287.04*t)
+
+ new_name = 'rho({}{}{}{}{})'.format(
+ p_PO.name, p_PS.name, p_hyam.name, p_hybm.name, p_T.name)
+
+ return PhysArray(rho, name=new_name, units="cm-3")
+
+
+#=========================================================================
+# pm25Function
+#=========================================================================
+
+class pm25Function(Function):
+ key = 'pm25'
+
+ def __init__(self, p_PO, p_PS, p_hyam, p_hybm, p_T, p_PM25_o):
+ super(pm25Function, self).__init__(p_PO, p_PS, p_hyam, p_hybm, p_T, p_PM25_o)
+
+ def __getitem__(self, index):
+ p_PO = self.arguments[0][index]
+ p_PS = self.arguments[1][index]
+ p_hyam = self.arguments[2][index]
+ p_hybm = self.arguments[3][index]
+ p_T = self.arguments[4][index]
+ p_PM25_o = self.arguments[5][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0, 0, 0, 0)), dimensions=[p_T.dimensions[0], p_T.dimensions[1], p_T.dimensions[2], p_T.dimensions[3]])
+
+ PO = p_PO.data
+ PS = p_PS.data
+ hyam = p_hyam.data
+ hybm = p_hybm.data
+ t = p_T.data
+ PM25_o = p_PM25_o.data
+
+ p = (hyam*PO)+(hybm*PS)
+        pm25 = PM25_o * 287. * t / p
+
+ new_name = 'pm25({}{}{}{}{}{})'.format(
+            p_PO.name, p_PS.name, p_hyam.name, p_hybm.name, p_T.name, p_PM25_o.name)
+
+ return PhysArray(pm25, name=new_name, units="kg/kg")
+
+
+
+#=========================================================================
+# tozFunction
+#=========================================================================
+
+class tozFunction(Function):
+ key = 'toz'
+
+ def __init__(self, p_PO, p_PS, p_hyam, p_hybm, p_indat3a):
+ super(tozFunction, self).__init__(p_PO, p_PS, p_hyam, p_hybm, p_indat3a)
+
+ def __getitem__(self, index):
+ p_PO = self.arguments[0][index]
+ p_PS = self.arguments[1][index]
+ p_hyam = self.arguments[2][index]
+ p_hybm = self.arguments[3][index]
+ p_indat3a = self.arguments[4][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0, 0, 0)), dimensions=[p_indat3a.dimensions[0], p_indat3a.dimensions[1], p_indat3a.dimensions[2]])
+
+ PO = p_PO.data
+ PS = p_PS.data
+ hyam = p_hyam.data
+ hybm = p_hybm.data
+        indat3a = p_indat3a.data
+
+ p = (hyam*PO)+(hybm*PS)
+ delp = np.ma.zeros((p_indat3a.dimensions[0], p_indat3a.dimensions[1], p_indat3a.dimensions[2]))
+        for i in range(0,len(hybm)-1):
+            delp[:,i,:,:] = p[:,i+1,:,:]-p[:,i,:,:]
+ work3da = indat3a*delp*1.e-02
+ cmordat2d = sum(work3da,dim=3)
+ cmordat2d = cmordat2d * 2.1e+22 / 2.69e16
+
+ new_name = 'toz({}{}{}{}{})'.format(
+            p_PO.name, p_PS.name, p_hyam.name, p_hybm.name, p_indat3a.name)
+
+ return PhysArray(cmordat2d, name=new_name, units='m')
+
+
+
+
diff --git a/source/pyconform/modules/commonfunctions.py b/source/pyconform/modules/commonfunctions.py
index bc75d559..7019dfaf 100644
--- a/source/pyconform/modules/commonfunctions.py
+++ b/source/pyconform/modules/commonfunctions.py
@@ -27,6 +27,227 @@ def __getitem__(self, index):
# return mean(data, axis=3)
+#=========================================================================
+# OclimFunction
+#=========================================================================
+class OclimFunction(Function):
+ key = 'oclim'
+
+ def __init__(self, data):
+ super(OclimFunction, self).__init__(data)
+ data_info = data if is_constant(data) else data[None]
+ if not isinstance(data_info, PhysArray):
+ raise TypeError('oclim: Data must be a PhysArray')
+
+ def __getitem__(self, index):
+
+ data = self.arguments[0][index]
+ new_name = 'oclim({})'.format(data.name)
+
+ if index is None:
+ if len(data.dimensions) == 3:
+ return PhysArray(np.zeros((0, 0, 0)), dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2]], units=data.units)
+ elif len(data.dimensions) == 4:
+ return PhysArray(np.zeros((0, 0, 0, 0)), units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2], data.dimensions[3]])
+
+ if len(data.dimensions) == 3:
+ a = np.ma.zeros((12,data.data.shape[1],data.data.shape[2]))
+ elif len(data.dimensions) == 4:
+ a = np.ma.zeros((12,data.data.shape[1],data.data.shape[2],data.data.shape[3]))
+
+ dim_count = len(data.dimensions)
+ time = data.data.shape[0]
+ dataD = data.data
+
+ for i in range(12):
+ a[i,...] = np.ma.mean(dataD[i::12,...],axis=0)
+
+ a[a>=1e+16] = 1e+20
+ a = np.ma.masked_values(a, 1e+20)
+
+ if dim_count == 3:
+ a = PhysArray(a, name = new_name, units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2]])
+ elif dim_count == 4:
+ a = PhysArray(a, name = new_name, units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2], data.dimensions[3]])
+
+ return a
+
+
+#=========================================================================
+# oclim_timeFunction
+#=========================================================================
+
+
+class oclim_timeFunction(Function):
+ key = 'oclim_time'
+
+ def __init__(self, time_bnds):
+ super(oclim_timeFunction, self).__init__(time_bnds)
+
+ def __getitem__(self, index):
+ p_time_bnds = self.arguments[0][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0)), dimensions=[p_time_bnds.dimensions[0]], units=p_time_bnds.units, calendar='noleap')
+
+ time_bnds = p_time_bnds.data
+
+ b = np.zeros((12))
+ for i in range(12):
+ b[i] = (time_bnds[i][0]+time_bnds[i][1])/2
+
+ new_name = 'oclim_time({})'.format(p_time_bnds.name)
+
+ return PhysArray(b, name = new_name, dimensions=[p_time_bnds.dimensions[0]], units=p_time_bnds.units, calendar='noleap')
+
+
+#=========================================================================
+# oclim_timebndsFunction
+#=========================================================================
+
+
+class oclim_timebndsFunction(Function):
+ key = 'oclim_timebnds'
+
+ def __init__(self, time, bdim='bnds'):
+ super(oclim_timebndsFunction, self).__init__(time, bdim='d2')
+
+ def __getitem__(self, index):
+ p_time = self.arguments[0][index]
+ bdim = self.keywords['bdim']
+
+ bnds = PhysArray([1, 1], dimensions=(bdim,))
+
+ if index is None:
+ return PhysArray(np.zeros((12,2)), dimensions=[p_time.dimensions[0], bnds.dimensions[0]], units=p_time.units, calendar='noleap')
+
+ b = np.zeros((12,2))
+ time = p_time.data
+
+ monLens = [31.0, 28.0, 31.0, 30.0, 31.0,
+ 30.0, 31.0, 31.0, 30.0, 31.0, 30.0, 31.0]
+
+ for i in range(11,-1,-1):
+ b[i][0] = time[i] - monLens[i]
+ b[i][1] = time[-(12-i)]
+ new_name = 'oclim_timebnds({})'.format(p_time.name)
+
+ return PhysArray(b, name = new_name, dimensions=[p_time.dimensions[0], bnds.dimensions[0]], units=p_time.units, calendar='noleap')
+
+
+#=========================================================================
+# monthtoyear_noleapFunction
+#=========================================================================
+class monthtoyear_noleapFunction(Function):
+ key = 'monthtoyear_noleap'
+
+ def __init__(self, data):
+ super(monthtoyear_noleapFunction, self).__init__(data)
+ data_info = data if is_constant(data) else data[None]
+ if not isinstance(data_info, PhysArray):
+ raise TypeError('monthtoyear_noleap: Data must be a PhysArray')
+
+ def __getitem__(self, index):
+
+ data = self.arguments[0][index]
+ new_name = 'monthtoyear_noleap({})'.format(data.name)
+
+ if index is None:
+ if len(data.dimensions) == 3:
+ return PhysArray(np.zeros((0, 0, 0)), dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2]], units=data.units)
+ elif len(data.dimensions) == 4:
+ return PhysArray(np.zeros((0, 0, 0, 0)), units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2], data.dimensions[3]])
+
+ if len(data.dimensions) == 3:
+ a = np.ma.zeros((data.data.shape[0]/12,data.data.shape[1],data.data.shape[2]))
+ elif len(data.dimensions) == 4:
+ a = np.ma.zeros((data.data.shape[0]/12,data.data.shape[1],data.data.shape[2],data.data.shape[3]))
+
+ dim_count = len(data.dimensions)
+ time = data.data.shape[0]
+ dataD = data.data
+
+ days_in_month = np.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
+
+ for i in range(time/12):
+ start = i*12
+ end = (i*12)+11
+ #a[i,...] = np.ma.mean(dataD[start:end+1,...],axis=0)
+ a[i,...] = np.ma.average(dataD[start:start+12,...],axis=0,weights=days_in_month)
+
+ a[a>=1e+16] = 1e+20
+ a = np.ma.masked_values(a, 1e+20)
+
+ if dim_count == 3:
+ a1 = PhysArray(a, name = new_name, units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2]])
+ elif dim_count == 4:
+ a1 = PhysArray(a, name = new_name, units=data.units, dimensions=[data.dimensions[0], data.dimensions[1], data.dimensions[2], data.dimensions[3]])
+
+ return a1
+
+
+#=========================================================================
+# monthtoyear_timeFunction
+#=========================================================================
+
+
+class monthtoyear_noleap_timeFunction(Function):
+ key = 'monthtoyear_noleap_time'
+
+ def __init__(self, time_bnds):
+ super(monthtoyear_noleap_timeFunction, self).__init__(time_bnds)
+
+ def __getitem__(self, index):
+ p_time_bnds = self.arguments[0][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0)), dimensions=[p_time_bnds.dimensions[0]], units=p_time_bnds.units, calendar='noleap')
+
+ time_bnds = p_time_bnds.data
+
+ b = np.zeros((time_bnds.shape[0]/12))
+ time = time_bnds.shape[0]
+ for i in range(time/12):
+ start = i*12
+ end = (i*12)+11
+ b[i] = ((time_bnds[start][0]+time_bnds[end][1])/2)-365
+
+ new_name = 'monthtoyear_noleap_time({})'.format(p_time_bnds.name)
+
+ return PhysArray(b, name = new_name, dimensions=[p_time_bnds.dimensions[0]], units=p_time_bnds.units, calendar='noleap')
+
+
+#=========================================================================
+# monthtoyear_timebndsFunction
+#=========================================================================
+
+
+class monthtoyear_noleap_timebndsFunction(Function):
+ key = 'monthtoyear_noleap_timebnds'
+
+ def __init__(self, time_bound, bdim='bnds'):
+ super(monthtoyear_noleap_timebndsFunction, self).__init__(time_bound, bdim='d2')
+
+ def __getitem__(self, index):
+ p_time_bound = self.arguments[0][index]
+ bdim = self.keywords['bdim']
+
+ bnds = PhysArray([1, 1], dimensions=(bdim,))
+
+ if index is None:
+ return PhysArray(np.zeros((12,2)), dimensions=[p_time_bound.dimensions[0], bnds.dimensions[0]], units=p_time_bound.units, calendar='noleap')
+
+ time_bound = p_time_bound.data
+ b = np.zeros((time_bound.shape[0]/12,2))
+
+ for i in range(len(time_bound)/12):
+ b[i][0] = time_bound[i*12][0]-365
+ b[i][1] = time_bound[(i*12)+11][1]-365
+ new_name = 'monthtoyear_noleap_timebnds({})'.format(p_time_bound.name)
+
+ return PhysArray(b, name = new_name, dimensions=[p_time_bound.dimensions[0], bnds.dimensions[0]], units=p_time_bound.units, calendar='noleap')
+
+
#=========================================================================
# BoundsFunction
#=========================================================================
@@ -64,7 +285,7 @@ def __getitem__(self, index):
location = self.keywords['location']
bnds = PhysArray([1, 1], dimensions=(bdim,))
- new_data = PhysArray(data * bnds, name='bounds({})'.format(data.name))
+ new_data = PhysArray(data * bnds, name='bounds({})'.format(data.name), units=data.units)
if index is None:
return new_data
@@ -335,8 +556,8 @@ def __getitem__(self, index):
class diff_axis1_ind0bczero_4dFunction(Function):
key = 'diff_axis1_ind0bczero_4d'
- def __init__(self, data):
- super(diff_axis1_ind0bczero_4dFunction, self).__init__(data)
+ def __init__(self, KMT, new_coord_var, data):
+ super(diff_axis1_ind0bczero_4dFunction, self).__init__(KMT, new_coord_var, data)
data_info = data if is_constant(data) else data[None]
if not isinstance(data_info, PhysArray):
raise TypeError('diff_axis1_ind0bczero_4d: data must be a PhysArray')
@@ -344,20 +565,163 @@ def __init__(self, data):
raise DimensionsError('diff_axis1_ind0bczero_4d: data can only be 4D')
def __getitem__(self, index):
- p_data = self.arguments[0][index]
+ p_KMT = self.arguments[0][index]
+ p_new_coord_var = self.arguments[1][index]
+ p_data = self.arguments[2][index]
if index is None:
a = np.zeros((0, 0, 0, 0))
+ fv = 1e+20
else:
+ KMT = p_KMT.data
data = p_data.data
a = np.empty((p_data.shape))
a[:, 0, :, :] = data[:, 0, :, :]
a[:, 1:, :, :] = np.diff(data, axis=1)
- new_name = '{}({})'.format(self.key, p_data.name)
+ #fv = data.fill_value
+ fv = 1e+20
+ for t in range(p_data.shape[0]):
+ for k in range(p_data.shape[1]):
+ a[t, k, :, :] = np.where(k < KMT, a[t, k, :, :], fv)
+
+ ma_a = np.ma.masked_values(a, fv)
+ new_name = '{}({}{}{})'.format(self.key, p_KMT.name, p_new_coord_var.name, p_data.name)
new_units = p_data.units
- new_dims = p_data.dimensions
+ new_dims = [p_data.dimensions[0], p_new_coord_var.dimensions[0],
+ p_data.dimensions[2], p_data.dimensions[3]]
+ return PhysArray(ma_a, name=new_name, units=new_units, dimensions=new_dims)
+
+
+#=========================================================================
+# rsdoabsorbFunction
+#=========================================================================
+class rsdoabsorbFunction(Function):
+ key = 'rsdoabsorb'
+
+ def __init__(self, KMT, z_t, QSW_3D):
+ super(rsdoabsorbFunction, self).__init__(KMT, z_t, QSW_3D)
+ QSW_3D_info = QSW_3D if is_constant(QSW_3D) else QSW_3D[None]
+ if not isinstance(QSW_3D_info, PhysArray):
+ raise TypeError('rsdoabsorb: QSW_3D must be a PhysArray')
+ if len(QSW_3D_info.dimensions) != 4:
+ raise DimensionsError('rsdoabsorb: QSW_3D can only be 4D')
+
+ def __getitem__(self, index):
+ p_KMT = self.arguments[0][index]
+ p_z_t = self.arguments[1][index]
+ p_QSW_3D = self.arguments[2][index]
+
+ if index is None:
+ a = np.zeros((0, 0, 0, 0))
+ fv = 1e+20
+ else:
+ KMT = p_KMT.data
+ QSW_3D = p_QSW_3D.data
+
+ a = np.empty((p_QSW_3D.shape))
+
+ nlev = p_QSW_3D.shape[1]
+ #fv = QSW_3D.fill_value
+ fv = 1e+20
+ for t in range(p_QSW_3D.shape[0]):
+ for k in range(p_QSW_3D.shape[1]):
+ if k < nlev-1:
+ a[t, k, :, :] = np.where(
+ k < KMT-1, QSW_3D[t, k, :, :] - QSW_3D[t, k+1, :, :],
+ QSW_3D[t, k, :, :])
+ else:
+ a[t, k, :, :] = QSW_3D[t, k, :, :]
+ a[t, k, :, :] = np.where(k < KMT, a[t, k, :, :], fv)
+
+ ma_a = np.ma.masked_values(a, fv)
+ new_name = '{}({}{}{})'.format(self.key, p_KMT.name, p_z_t.name, p_QSW_3D.name)
+ new_units = p_QSW_3D.units
+ new_dims = [p_QSW_3D.dimensions[0], p_z_t.dimensions[0],
+ p_QSW_3D.dimensions[2], p_QSW_3D.dimensions[3]]
+ return PhysArray(ma_a, name=new_name, units=new_units, dimensions=new_dims)
+
+
+#=========================================================================
+# POP_surf_meanFunction
+#=========================================================================
+class POP_surf_meanFunction(Function):
+ key = 'POP_surf_mean'
+
+ def __init__(self, KMT, TAREA, FIELD):
+ super(POP_surf_meanFunction, self).__init__(KMT, TAREA, FIELD)
+ FIELD_info = FIELD if is_constant(FIELD) else FIELD[None]
+ if not isinstance(FIELD_info, PhysArray):
+ raise TypeError('POP_surf_mean: FIELD must be a PhysArray')
+ if len(FIELD_info.dimensions) != 3:
+ raise DimensionsError('POP_surf_mean: FIELD can only be 3D')
+
+ def __getitem__(self, index):
+ p_KMT = self.arguments[0][index]
+ p_TAREA = self.arguments[1][index]
+ p_FIELD = self.arguments[2][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0,)), dimensions=[p_FIELD.dimensions[0]])
+
+ KMT = p_KMT.data
+ TAREA = p_TAREA.data
+ FIELD = p_FIELD.data
+
+ a = np.empty((p_FIELD.shape[0],))
+ for t in range(p_FIELD.shape[0]):
+ a[t] = np.sum(np.where(KMT > 0, TAREA * FIELD[t, :, :], 0.0))
+ denom = np.sum(np.where(KMT > 0, TAREA, 0.0))
+ a[:] *= 1.0 / denom
+
+ new_name = '{}({}{}{})'.format(self.key, p_KMT.name, p_TAREA.name, p_FIELD.name)
+ new_units = p_FIELD.units
+ new_dims = [p_FIELD.dimensions[0]]
+ return PhysArray(a, name=new_name, units=new_units, dimensions=new_dims)
+
+
+#=========================================================================
+# POP_3D_meanFunction
+#=========================================================================
+class POP_3D_meanFunction(Function):
+ key = 'POP_3D_mean'
+
+ def __init__(self, KMT, dz, TAREA, FIELD):
+ super(POP_3D_meanFunction, self).__init__(KMT, dz, TAREA, FIELD)
+ FIELD_info = FIELD if is_constant(FIELD) else FIELD[None]
+ if not isinstance(FIELD_info, PhysArray):
+ raise TypeError('POP_3D_mean: FIELD must be a PhysArray')
+ if len(FIELD_info.dimensions) != 4:
+ raise DimensionsError('POP_3D_mean: FIELD can only be 4D')
+
+ def __getitem__(self, index):
+ p_KMT = self.arguments[0][index]
+ p_dz = self.arguments[1][index]
+ p_TAREA = self.arguments[2][index]
+ p_FIELD = self.arguments[3][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0,)), dimensions=[p_FIELD.dimensions[0]])
+
+ KMT = p_KMT.data
+ dz = p_dz.data
+ TAREA = p_TAREA.data
+ FIELD = p_FIELD.data
+
+ a = np.empty((p_FIELD.shape[0],))
+ for t in range(p_FIELD.shape[0]):
+ a[t] = 0.0
+ for k in range(p_FIELD.shape[1]):
+ a[t] += dz[k] * np.sum(np.where(k < KMT, TAREA * FIELD[t, k, :, :], 0.0))
+ denom = 0.0
+ for k in range(p_FIELD.shape[1]):
+ denom += dz[k] * np.sum(np.where(k < KMT, TAREA, 0.0))
+ a[:] *= 1.0 / denom
+
+ new_name = '{}({}{}{}{})'.format(self.key, p_KMT.name, p_dz.name, p_TAREA.name, p_FIELD.name)
+ new_units = p_FIELD.units
+ new_dims = [p_FIELD.dimensions[0]]
return PhysArray(a, name=new_name, units=new_units, dimensions=new_dims)
@@ -429,6 +793,45 @@ def __getitem__(self, index):
new_units = p_data1.units * p_data2.units
return PhysArray(a2, name=new_name, dimensions=[p_data2.dimensions[0]], units=new_units)
+#=========================================================================
+# POP_layer_sum_multFunction
+#=========================================================================
+class POP_layer_sum_multFunction(Function):
+ key = 'POP_layer_sum_mult'
+
+ def __init__(self, KMT, data1, data2):
+ super(POP_layer_sum_multFunction,
+ self).__init__(KMT, data1, data2)
+
+ def __getitem__(self, index):
+ p_KMT = self.arguments[0][index]
+ p_data1 = self.arguments[1][index]
+ p_data2 = self.arguments[2][index]
+
+ data1 = p_data1.data
+ data2 = p_data2.data
+ KMT = p_KMT.data
+
+ a1 = np.zeros((p_data2.shape[0], p_data2.shape[2], p_data2.shape[3]))
+
+ #fv = data2.fill_value
+ fv = 1e+20
+
+ for t in range(p_data2.shape[0]):
+ for j in range(KMT.shape[0]):
+ for i in range(KMT.shape[1]):
+ if KMT[j, i] > 0:
+ a1[t, j, i] = 0.0
+ for k in range(min(KMT[j, i], p_data2.shape[1])):
+ a1[t, j, i] += data1[k] * data2[t, k, j, i]
+ else:
+ a1[t, j, i] = fv
+
+ ma_a1 = np.ma.masked_values(a1, fv)
+ new_name = 'POP_layer_sum_mult({}{}{})'.format(
+ p_KMT.name, p_data1.name, p_data2.name)
+ new_units = p_data1.units * p_data2.units
+ return PhysArray(ma_a1, name=new_name, dimensions=[p_data2.dimensions[0], p_data2.dimensions[2], p_data2.dimensions[3]], units=new_units)
#=========================================================================
# masked_invalidFunction
@@ -621,6 +1024,45 @@ def __getitem__(self, index):
return PhysArray(a, name=new_name, dimensions=[p_aice.dimensions[0], p_siline.dimensions[0]], units=p_uvel.units)
+#=========================================================================
+# burntFractionFunction
+#=========================================================================
+class burntFractionFunction(Function):
+ key = 'burntFraction'
+
+ def __init__(self, data):
+ super(burntFractionFunction, self).__init__(data)
+
+ def __getitem__(self, index):
+ p_data = self.arguments[0][index]
+
+ if index is None:
+ return PhysArray(np.zeros((0, 0, 0)), dimensions=[p_data.dimensions[0], p_data.dimensions[1], p_data.dimensions[2]])
+
+ data = p_data.data
+
+ ml = [31.0, 28.0, 31.0, 30.0, 31.0,
+ 30.0, 31.0, 31.0, 30.0, 31.0, 30.0, 31.0]
+
+ a = np.ma.zeros((data.shape[0], data.shape[1], data.shape[2]))
+
+ i = 0
+ for t in range(0,data.shape[0]):
+ for x in range(0,data.shape[1]):
+ for y in range(0,data.shape[2]):
+ if data[t,x,y]<1e+16:
+ a[t,x,y] = data[t,x,y]*ml[i]*86400*100
+ else:
+ a[t,x,y] = 1e+20
+ i+=1
+ if i==12:
+ i=0
+ a[a >= 1e+16] = 1e+20
+ new_name = 'burntFraction({})'.format(p_data.name)
+
+ return PhysArray(a, name=new_name, units=p_data.units)
+
+
#=========================================================================
# reduce_luFunction
#=========================================================================
@@ -700,16 +1142,18 @@ def __getitem__(self, index):
class get_nonwoodyvegFunction(Function):
key = 'get_nonwoodyveg'
- def __init__(self, p_pct_nat_pft, p_landfrac, p_landUse):
+ def __init__(self, p_pct_nat_pft, p_pct_crop, p_landfrac, p_landUse):
super(get_nonwoodyvegFunction, self).__init__(
- p_pct_nat_pft,p_landfrac,p_landUse)
+ p_pct_nat_pft, p_pct_crop, p_landfrac,p_landUse)
def __getitem__(self, index):
p_pct_nat_pft = self.arguments[0][index]
- p_landfrac = self.arguments[1][index]
- p_landUse = self.arguments[2][index]
+ p_pct_crop = self.arguments[1][index]
+ p_landfrac = self.arguments[2][index]
+ p_landUse = self.arguments[3][index]
pct_nat_pft = p_pct_nat_pft.data
+ pct_crop = p_pct_crop.data
landfrac = p_landfrac.data
landUse = p_landUse.data
@@ -718,16 +1162,20 @@ def __getitem__(self, index):
return data
data[:, 0, :, :] = pct_nat_pft[:,12,:,:]+pct_nat_pft[:,13,:,:]+pct_nat_pft[:,14,:,:]
- for i in range(p_pct_nat_pft.shape[2]):
- for j in range(p_pct_nat_pft.shape[3]):
- if landfrac[i,j] <= 1.0:
- data[:, 1, i, j] = 1.0
- data[:, 2, i, j] = 0.0
- data[:, 3, i, j] = 0.0
- else:
- data[:, 1, i, j] = 1e+20
- data[:, 2, i, j] = 1e+20
- data[:, 3, i, j] = 1e+20
+ for t in range(p_pct_nat_pft.shape[0]):
+ for i in range(p_pct_nat_pft.shape[2]):
+ for j in range(p_pct_nat_pft.shape[3]):
+ if landfrac[i,j] <= 1.0:
+ data[t, 1, i, j] = 0.0
+ if pct_crop[t,1,i,j] > 0.0:
+ data[t, 2, i, j] = 100.0
+ else:
+ data[t, 2, i, j] = 0.0
+ data[t, 3, i, j] = 0.0
+ else:
+ data[t, 1, i, j] = 1e+20
+ data[t, 2, i, j] = 1e+20
+ data[t, 3, i, j] = 1e+20
data[data >= 1e+16] = 1e+20
data = np.ma.masked_values(data, 1e+20)
diff --git a/source/pyconform/version.py b/source/pyconform/version.py
index 0f306be1..574b7766 100644
--- a/source/pyconform/version.py
+++ b/source/pyconform/version.py
@@ -1,2 +1,2 @@
# Single place for version information
-__version__ = '0.2.7'
+__version__ = '0.2.8'
diff --git a/source/test/dataflowTests.py b/source/test/dataflowTests.py
index ac18d4a5..ca20dc05 100644
--- a/source/test/dataflowTests.py
+++ b/source/test/dataflowTests.py
@@ -42,7 +42,8 @@ def setUp(self):
('u1', ('time', 'lat', 'lon')),
('u2', ('time', 'lat', 'lon')),
('u3', ('time', 'lat', 'lon')),
- ('tyears', ('time',))])
+ ('tyears', ('time',)),
+ ('p0', tuple())])
self.vattrs = OrderedDict([('lat', {'units': 'degrees_north',
'standard_name': 'latitude'}),
('lon', {'units': 'degrees_east',
@@ -61,8 +62,9 @@ def setUp(self):
'standard_name': 'u variable 3',
'positive': 'down'}),
('tyears', {'units': 'years since 1979-01-01',
- 'calendar': 'noleap'})])
- self.dtypes = {'lat': 'd', 'lon': 'd', 'time': 'd', 'tyears': 'd',
+ 'calendar': 'noleap'}),
+ ('p0', {'units': 10})])
+ self.dtypes = {'lat': 'd', 'lon': 'd', 'time': 'd', 'tyears': 'd', 'p0': 'd',
'time_bnds': 'd', 'cat': 'c', 'u1': 'f', 'u2': 'f', 'u3': 'f'}
ulen = reduce(lambda x, y: x * y,
@@ -77,7 +79,8 @@ def setUp(self):
'u1': numpy.linspace(0, ulen, num=ulen, dtype=self.dtypes['u1']).reshape(ushape),
'u2': numpy.linspace(0, ulen, num=ulen, dtype=self.dtypes['u2']).reshape(ushape),
'u3': numpy.linspace(0, ulen, num=ulen, dtype=self.dtypes['u3']).reshape(ushape),
- 'tyears': tdata / 365.0}
+ 'tyears': tdata / 365.0,
+ 'p0': 0.1}
for vname in self.filenames:
fname = self.filenames[vname]
@@ -116,6 +119,14 @@ def setUp(self):
vattribs['axis'] = 'Z'
vdicts['L']['attributes'] = vattribs
+ vdicts['P'] = OrderedDict()
+ vdicts['P']['datatype'] = 'double'
+ vdicts['P']['dimensions'] = tuple()
+ vdicts['P']['definition'] = 'p0'
+ vattribs = OrderedDict()
+ vattribs['units'] = '1'
+ vdicts['P']['attributes'] = vattribs
+
vdicts['C'] = OrderedDict()
vdicts['C']['datatype'] = 'char'
vdicts['C']['dimensions'] = ('c', 'n')
@@ -184,12 +195,23 @@ def setUp(self):
vattribs['calendar'] = 'noleap'
vdicts['T2']['attributes'] = vattribs
+ vdicts['_T'] = OrderedDict()
+ vdicts['_T']['datatype'] = 'double'
+ vdicts['_T']['dimensions'] = ('t',)
+ vdicts['_T']['definition'] = 'chunits(rmunits(tyears) * 365 + 10, units="days since 1979-01-01", calendar="noleap")'
+ vattribs = OrderedDict()
+ vattribs['standard_name'] = 'time_hidden'
+ vattribs['units'] = 'days since 1979-01-01 00:00:00'
+ vattribs['calendar'] = 'noleap'
+ vdicts['_T']['attributes'] = vattribs
+
vdicts['V1'] = OrderedDict()
vdicts['V1']['datatype'] = 'double'
vdicts['V1']['dimensions'] = ('t', 'y', 'x')
vdicts['V1']['definition'] = '0.5*(u1 + u2)'
fdict = OrderedDict()
fdict['filename'] = 'var1_{%Y%m%d-%Y%m%d}.nc'
+ fdict['autoparse_time_variable'] = '_T'
fdict['attributes'] = {'variable': 'V1'}
fdict['metavars'] = ['L', 'C']
vdicts['V1']['file'] = fdict
@@ -219,7 +241,7 @@ def setUp(self):
fdict = OrderedDict()
fdict['filename'] = 'var3_{%Y%m%d-%Y%m%d}.nc'
fdict['attributes'] = {'variable': 'V3'}
- fdict['metavars'] = ['L']
+ fdict['metavars'] = ['L', 'P']
vdicts['V3']['file'] = fdict
vattribs = OrderedDict()
vattribs['standard_name'] = 'originally u2'
@@ -310,6 +332,7 @@ def setUp(self):
self.outfiles = dict((vname, vdict['file']['filename'].replace('{%Y%m%d-%Y%m%d}', '19790101-19790104'))
for vname, vdict in vdicts.iteritems() if 'file' in vdict)
+ self.outfiles['V1'] = 'var1_19790111-19790114.nc'
self.cleanOutputFiles()
def cleanInputFiles(self):
diff --git a/source/test/datasetsTests.py b/source/test/datasetsTests.py
index 77fbd0c0..67707df2 100644
--- a/source/test/datasetsTests.py
+++ b/source/test/datasetsTests.py
@@ -579,6 +579,24 @@ def test_variables_same_dims(self):
print_test_message('FileDesc.dimensions', actual=actual, expected=expected)
self.assertEqual(actual, expected, 'FileDesc.dimensions failed')
+ def test_autoparse_time_variable_missing(self):
+ indata = (VariableDesc('a'), VariableDesc('b'))
+ with self.assertRaises(ValueError):
+ FileDesc('test.nc', variables=indata, autoparse_time_variable='t')
+
+ def test_autoparse_time_variable_hidden(self):
+ indata = (VariableDesc('a'), VariableDesc('b'), VariableDesc('_t'))
+ fdesc = FileDesc('test.nc', variables=indata, autoparse_time_variable='_t')
+ actual = fdesc.variables
+ expected = OrderedDict((d.name, d) for d in indata)
+ print_test_message('FileDesc.variables', input=indata, actual=actual, expected=expected)
+ self.assertEqual(actual, expected, 'FileDesc.variables failed')
+ actual = fdesc.dimensions
+ expected = OrderedDict()
+ print_test_message('FileDesc.dimensions', input=indata, actual=actual, expected=expected)
+ self.assertEqual(actual, expected, 'FileDesc.dimensions failed')
+
+
#=========================================================================
# DatasetDescTests - Tests for the datasets module
diff --git a/source/test/flownodesTests.py b/source/test/flownodesTests.py
index 0e793a2a..097a277d 100644
--- a/source/test/flownodesTests.py
+++ b/source/test/flownodesTests.py
@@ -619,32 +619,35 @@ class WriteNodeTests(BaseTests):
"""
def setUp(self):
- NX = 15
- X0 = -5
+ NX = 2
+ X0 = -1
xdata = PhysArray(numpy.arange(X0, X0 + NX, dtype='d'), name='X', units='m', dimensions=('x',))
- NY = 8
+ NY = 3
Y0 = 0
ydata = PhysArray(numpy.arange(Y0, Y0 + NY, dtype='d'), name='Y', units='m', dimensions=('y',))
- NT = 3
+ NT = 4
tunits = Unit('days since 2000-01-01', calendar='noleap')
tdata = PhysArray(numpy.arange(0, NT, dtype='d'), name='T', units=tunits, dimensions=('t',))
+ t2data = PhysArray(numpy.arange(0, NT, dtype='d') + 2, name='_T', units=tunits, dimensions=('t',))
vdata = PhysArray(numpy.arange(0, NX * NY * NT, dtype='f').reshape(NX, NY, NT), name='V', units='K',
dimensions=('x', 'y', 't'))
- self.data = {'X': xdata, 'Y': ydata, 'T': tdata, 'V': vdata}
+ self.data = {'X': xdata, 'Y': ydata, 'T': tdata, '_T': t2data, 'V': vdata}
self.atts = {'X': {'xa1': 'x attribute 1', 'xa2': 'x attribute 2', 'axis': 'X', 'units': str(xdata.units)},
'Y': {'ya1': 'y attribute 1', 'ya2': 'y attribute 2', 'axis': 'Y',
'direction': 'decreasing', 'units': str(ydata.units)},
'T': {'axis': 'T', 'ta1': 'time attribute', 'units': str(tdata.units),
'calendar': tdata.units.calendar},
+ '_T': {'ta1': 'hidden time attribute', 'units': str(t2data.units),
+ 'calendar': t2data.units.calendar},
'V': {'va1': 'v attribute 1', 'va2': 'v attribute 2', 'units': str(vdata.units)}}
dimdescs = {n: DimensionDesc(n, s) for x in self.data.itervalues() for n, s in zip(x.dimensions, x.shape)}
vardescs = {n: VariableDesc(n, datatype=self.data[n].dtype, attributes=self.atts[n],
- dimensions=[dimdescs[d] for d in self.data[n].dimensions]) for n in self.data}
+ dimensions=tuple(dimdescs[d] for d in self.data[n].dimensions)) for n in self.data}
self.vardescs = vardescs
self.nodes = {n: ValidateNode(self.vardescs[n], DataNode(self.data[n])) for n in self.data}
@@ -753,7 +756,7 @@ def test_execute_simple_autoparse(self):
N = WriteNode(filedesc, inputs=self.nodes.values())
N.enable_history()
N.execute()
- newfname = 'v.20000101-20000103.nc'
+ newfname = 'v.20000101-20000104.nc'
actual = exists(newfname)
expected = True
print_test_message(testname, actual=actual, expected=expected)
@@ -762,18 +765,28 @@ def test_execute_simple_autoparse(self):
def test_execute_simple_autoparse_fail(self):
filename = 'v.{%Y%m%d-%Y%m%d}.nc'
- testname = 'WriteNode({}).execute()'.format(filename)
vdescs = {n: self.vardescs[n] for n in self.vardescs if n != 'T'}
filedesc = FileDesc(filename, variables=vdescs.values(), attributes={'ga': 'global attribute'})
vnodes = {n: self.nodes[n] for n in self.nodes if n != 'T'}
+ with self.assertRaises(ValueError):
+ WriteNode(filedesc, inputs=vnodes.values())
+
+ def test_execute_simple_autoparse_hidden_time(self):
+ filename = 'v.{%Y%m%d-%Y%m%d}.nc'
+ testname = 'WriteNode({}).execute()'.format(filename)
+ vdescs = {n: self.vardescs[n] for n in self.vardescs if n != 'T'}
+ filedesc = FileDesc(filename, variables=vdescs.values(), attributes={'ga': 'global attribute'},
+ autoparse_time_variable='_T')
+ vnodes = {n: self.nodes[n] for n in self.nodes if n != 'T'}
N = WriteNode(filedesc, inputs=vnodes.values())
N.enable_history()
N.execute()
- actual = exists(filename)
+ newfname = 'v.20000103-20000106.nc'
+ actual = exists(newfname)
expected = True
print_test_message(testname, actual=actual, expected=expected)
self.assertEqual(actual, expected, '{} failed'.format(testname))
- print_ncfile(filename)
+ print_ncfile(newfname)
def test_execute_simple_nc3(self):
filename = 'v_x_y_simple_nc3.nc'