diff --git a/scripts/iconform b/scripts/iconform
index 5fdc7cfe..70325a0f 100755
--- a/scripts/iconform
+++ b/scripts/iconform
@@ -166,10 +166,12 @@ def fill_missing_glob_attributes(attr, table, v, grids):
         attr["variable_id"] = v["variable_id"]
     if "branch_method" in attr.keys():
         if "none" not in attr["branch_method"]:
-            #if "branch_time_in_child" in attr.keys():
-            #    attr["branch_time_in_child"] = "Get correct date format"
-            #if "branch_time_in_parent" in attr.keys():
-            #    attr["branch_time_in_parent"] = "Get correct date format"
+            if "branch_time_in_child" in attr.keys():
+                if len(attr["branch_time_in_child"])>0:
+                    attr["branch_time_in_child"] = float(attr["branch_time_in_child"].split('D')[0])
+            if "branch_time_in_parent" in attr.keys():
+                if len(attr["branch_time_in_parent"])>0:
+                    attr["branch_time_in_parent"] = float(attr["branch_time_in_parent"].split('D')[0])
     if "parent_mip_era" in attr.keys():
         attr["parent_mip_era"] = attr["mip_era"]
     if "parent_source_id" in attr.keys():
@@ -190,13 +192,13 @@ def fill_missing_glob_attributes(attr, table, v, grids):
 
     if "variant_label" in attr.keys():
         pre = attr["variant_label"].split('r')[1]
-        attr["realization_index"] = (pre.split('i')[0])
+        attr["realization_index"] = int(pre.split('i')[0])
         pre = pre.split('i')[1]
-        attr["initialization_index"] = (pre.split('p')[0])
+        attr["initialization_index"] = int(pre.split('p')[0])
         pre = pre.split('p')[1]
-        attr["physics_index"] = (pre.split('f')[0])
+        attr["physics_index"] = int(pre.split('f')[0])
         pre = int(pre.split('f')[1])
-        attr["forcing_index"] = str(pre)
+        attr["forcing_index"] = int(pre)
 
     if "further_info_url" in attr.keys():
         if "__FILL__" in attr["further_info_url"]:
@@ -225,7 +227,7 @@ def fill_missing_glob_attributes(attr, table, v, grids):
             else:
                 ripf = ''
             info_url = "{0}.{1}.{2}.{3}.{4}.{5}".format(mip_era, institution_id, source_id, experiment_id, sub_experiment_id, ripf)
-            attr['further_info_url'] = "http://furtherinfo.es-doc.org/" + info_url
+            attr['further_info_url'] = "https://furtherinfo.es-doc.org/" + info_url
     if "grid" in attr.keys():
         if len(attr["realm"])>0:
             attr["grid"] = grids[attr["realm"].split()[0]]
@@ -260,10 +262,10 @@ def defineVar(v, varName, attr, table_info, definition, ig, experiment, out_dir)
     # Get variables needed to piece together the filename
     ripf_list = ['realization_index','initialization_index','physics_index','forcing_index']
     if all (ripf in attributes for ripf in ripf_list):
-        ripf = ("r{0}i{1}p{2}f{3}".format(attributes['realization_index'],
-                                          attributes['initialization_index'],
-                                          attributes['physics_index'],
-                                          attributes['forcing_index']))
+        ripf = ("r{0}i{1}p{2}f{3}".format(str(attributes['realization_index']),
+                                          str(attributes['initialization_index']),
+                                          str(attributes['physics_index']),
+                                          str(attributes['forcing_index'])))
     else:
         ripf = ''
 
@@ -467,6 +469,7 @@ def create_output(exp_dict, definitions, input_glob, attributes, output_path, ar
         ts_key = None
         mip = d['mipTable']
         if mip in definitions.keys():
+            ig = ""
             if v in definitions[mip].keys():
                 if "N/A" in definitions[mip][v].upper():
                     v_def = ""
@@ -514,15 +517,17 @@ def create_output(exp_dict, definitions, input_glob, attributes, output_path, ar
             with open(f, 'w') as outfile:
                 json.dump(t, outfile, sort_keys=True, indent=4)
     else:
+        ignore = ['latitude','longitude','olevel','plev19','time','time1','alevhalf','ygre','xgre','vegtype','spectband','areacellg','alevel','xant','yant','rho','tau','plev3','gridlatitude','plev39','plev4','plev27','plev3','plev7h','plev7c','plev8','plev7h','sdepth','siline','basin','olevel','site','soilpools','snowdepth','snowband','vegtype']
         for n,t in TableSpec.iteritems():
             for vn,var in t.iteritems():
-                varD={}
-                varD[vn]=var
-                for d in var["dimensions"]:
-                    varD[d]=t[d]
-                f = output_path + "/" + experiment + '_' + n + '_' + vn + '_spec.json'
-                with open(f, 'w') as outfile:
-                    json.dump(varD, outfile, sort_keys=True, indent=4)
+                if vn not in ignore:
+                    varD={}
+                    varD[vn]=var
+                    for d in var["dimensions"]:
+                        varD[d]=t[d]
+                    f = output_path + "/" + experiment + '_' + n + '_' + vn + '_spec.json'
+                    with open(f, 'w') as outfile:
+                        json.dump(varD, outfile, sort_keys=True, indent=4)
 
     #f1 = output_path + '/MISSING_DEFS.json'
     #with open(f1, 'w') as outfile:
@@ -612,6 +617,9 @@ def main(argv=None):
         if "json" in gaFile:
             with open(gaFile) as gaF:
                 ga = json.load(gaF)
+                for k in ga.keys():
+                    if ga[k] is None:
+                        ga[k] = ""
             attributes.update(ga)
         else:
             with open(gaFile) as y_attributes:
diff --git a/source/pyconform/miptableparser.py b/source/pyconform/miptableparser.py
index 11359ef3..57df9745 100644
--- a/source/pyconform/miptableparser.py
+++ b/source/pyconform/miptableparser.py
@@ -421,6 +421,7 @@ def parse_table(self,exp,mips,tables,v_list,table_var_fields,table_axes_fields,t
                     var['mipTable']=c_var.mipTable
                     if c_var.mipTable in tables or '--ALL--' in tables:
                         var["_FillValue"] = "1e+20"
+                        #var["missing_value"] = "1e+20"
                         if hasattr(c_var,'deflate'):
                             var['deflate']= c_var.deflate
                         if hasattr(c_var,'deflate_level'):
diff --git a/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py b/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
index e62e8f9d..73875501 100644
--- a/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
+++ b/source/pyconform/modules/CLM_pft_to_CMIP6_vegtype.py
@@ -251,6 +251,3 @@ def main(argv=None):
 
 if __name__ == '__main__':
     main()
-
-
-
diff --git a/source/pyconform/modules/commonfunctions.py b/source/pyconform/modules/commonfunctions.py
index 029d3566..51693e85 100644
--- a/source/pyconform/modules/commonfunctions.py
+++ b/source/pyconform/modules/commonfunctions.py
@@ -259,6 +259,75 @@ def __getitem__(self, index):
         return PhysArray(a, name = new_name, units=p_data.units)
 
 
+
+#===================================================================================================
+# sftofFunction
+#===================================================================================================
+class sftofFunction(Function):
+    key = 'sftof'
+
+    def __init__(self, KMT):
+        super(sftofFunction, self).__init__(KMT)
+
+    def __getitem__(self, index):
+        p_KMT = self.arguments[0][index]
+
+        if index is None:
+            return PhysArray(np.zeros((0,0)), dimensions=[p_KMT.dimensions[0],p_KMT.dimensions[1]])
+
+        KMT = p_KMT.data
+
+        a = np.zeros((KMT.shape[0],KMT.shape[1]))
+
+        for j in range(KMT.shape[0]):
+            for i in range(KMT.shape[1]):
+                if KMT[j,i] > 0:
+                    a[j,i] = 1
+
+        new_name = 'sftof({})'.format( p_KMT.name)
+
+        return PhysArray(a, name = new_name, dimensions=[p_KMT.dimensions[0],p_KMT.dimensions[1]], units=p_KMT.units)
+
+
+
+#===================================================================================================
+# POP_bottom_layer_multaddFunction
+#===================================================================================================
+class POP_bottom_layer_multaddFunction(Function):
+    key = 'POP_bottom_layer_multadd'
+
+    def __init__(self, KMT, data1, data2):
+        super(POP_bottom_layer_multaddFunction, self).__init__(KMT, data1, data2)
+
+    def __getitem__(self, index):
+        p_KMT = self.arguments[0][index]
+        p_data1 = self.arguments[1][index]
+        p_data2 = self.arguments[2][index]
+
+        data1 = p_data1.data
+        data2 = p_data2.data
+        KMT = p_KMT.data
+
+        a1 = np.zeros((p_data2.shape[0],p_data2.shape[2],p_data2.shape[3]))
+        a2 = np.zeros((p_data2.shape[0]))
+
+        for t in range(p_data2.shape[0]):
+            for j in range(KMT.shape[0]):
+                for i in range(KMT.shape[1]):
+                    k = KMT[j,i]-1
+                    if data2[t,k,j,i] < 1e+16:
+                        a1[t,j,i] = data1[k]*data2[t,k,j,i]
+                        #print a1[t,j,i],data1[k],data2[t,k,j,i]
+        for t in range(p_data2.shape[0]):
+            a2[t] = np.ma.sum(a1[t,:,:])
+            #print a2[t]
+
+        new_name = 'POP_bottom_layer_multadd({}{}{})'.format(p_KMT.name, p_data1.name, p_data2.name)
+        return PhysArray(a2, name=new_name, dimensions=[p_data2.dimensions[0]], units=p_data2.units)
+
+
+
+
 #===================================================================================================
 # masked_invalidFunction
 #===================================================================================================
@@ -354,7 +423,200 @@ def __getitem__(self, index):
             a[t,:,:] = np.ma.where(a1[t,:,:] < a2, var, value)
         elif '>' in condition:
             a[t,:,:] = np.ma.where(a1[t,:,:] > a2, var, value)
-        return PhysArray(a, dimensions=[a1.dimensions[0],a1.dimensions[1],a1.dimensions[2]], units=var.units)
+        new_name = 'cice_where()'.format()
+        return PhysArray(a, name=new_name, dimensions=[a1.dimensions[0],a1.dimensions[1],a1.dimensions[2]], units=var.units)
+
+
+#===================================================================================================
+# cice_regions
+#===================================================================================================
+class cice_regionsFunction(Function):
+    key = 'cice_regions'
+
+    def __init__(self, p_aice, p_uvel, p_vvel, p_HTE, p_HTN, p_siline, multiple):
+        super(cice_regionsFunction, self).__init__(p_aice, p_uvel, p_vvel, p_HTE, p_HTN, p_siline, multiple)
+
+    def __getitem__(self, index):
+        p_aice = self.arguments[0][index]
+        p_uvel = self.arguments[1][index]
+        p_vvel = self.arguments[2][index]
+        p_HTE = self.arguments[3][index]
+        p_HTN = self.arguments[4][index]
+        p_siline = self.arguments[5][index]
+        multiple = self.arguments[6]
+
+        aice=p_aice
+        uvel=p_uvel
+        vvel=p_vvel
+        HTE=p_HTE
+        HTN=p_HTN
+        siline=p_siline
+        a = np.ma.zeros((aice.data.shape[0],siline.data.shape[0]))
+
+        for t in range(aice.data.shape[0]):
+            #1
+            i = 92
+            for j in range(370, 381):
+                if aice[t,j,i] < 1e+16 and aice[t,j,i+1] < 1e+16 and HTE[j,i] < 1e+16 and uvel[t,j,i] < 1e+16 and uvel[t,j-1,i] < 1e+16:
+                    a[t,0] += 0.5*(aice[t,j,i]+aice[t,j,i+1])*0.5*(HTE[j,i]*uvel[t,j,i]+HTE[j,i]*uvel[t,j-1,i])
+            #2
+            i = 214
+            for j in range(375,377):
+                if aice[t,j,i] < 1e+16 and aice[t,j,i+1] < 1e+16 and HTE[j,i] < 1e+16 and uvel[t,j,i] < 1e+16 and uvel[t,j-1,i] < 1e+16 and aice[t,j+1,i] < 1e+16 and HTN[j,i] < 1e+16 and vvel[t,j,i] < 1e+16 and vvel[t,j,i-1] < 1e+16:
+                    a[t,1] += 0.5*(aice[t,j,i]+aice[t,j,i+1])*0.5*(HTE[j,i]*uvel[t,j,i]+HTE[j,i]*uvel[t,j-1,i]) + 0.5*(aice[t,j,i]+aice[t,j+1,i])*0.5*(HTN[j,i]*vvel[t,j,i]+HTN[j,i]*vvel[t,j,i-1])
+            j = 366
+            for i in range(240,244):
+                if aice[t,j,i] < 1e+16 and aice[t,j,i+1] < 1e+16 and HTE[j,i] < 1e+16 and uvel[t,j,i] < 1e+16 and uvel[t,j-1,i] < 1e+16 and aice[t,j+1,i] < 1e+16 and HTN[j,i] < 1e+16 and vvel[t,j,i] < 1e+16 and vvel[t,j,i-1] < 1e+16:
+                    a[t,1] += 0.5*(aice[t,j,i]+aice[t,j,i+1])*0.5*(HTE[j,i]*uvel[t,j,i]+HTE[j,i]*uvel[t,j-1,i]) + 0.5*(aice[t,j,i]+aice[t,j+1,i])*0.5*(HTN[j,i]*vvel[t,j,i]+HTN[j,i]*vvel[t,j,i-1])
+            #3
+            i = 85
+            for j in range(344,366):
+                if aice[t,j,i] < 1e+16 and aice[t,j,i+1] < 1e+16 and HTE[j,i] < 1e+16 and uvel[t,j,i] < 1e+16 and uvel[t,j-1,i] < 1e+16:
+                    a[t,2] += 0.5*(aice[t,j,i]+aice[t,j,i+1])*0.5*(HTE[j,i]*uvel[t,j,i]+HTE[j,i]*uvel[t,j-1,i])
+            #4
+            j = 333
+            for i in range(198,201):
+                if aice[t,j,i] < 1e+16 and aice[t,j+1,i] < 1e+16 and HTN[j,i] < 1e+16 and vvel[t,j,i] < 1e+16 and vvel[t,j,i-1] < 1e+16:
+                    a[t,3] += 0.5*(aice[t,j,i]+aice[t,j+1,i])*0.5*(HTN[j,i]*vvel[t,j,i]+HTN[j,i]*vvel[t,j,i-1])
+
+        a = a*multiple
+
+        new_name = 'cice_regions()'.format()
+        return PhysArray(a, name=new_name, dimensions=[p_aice.dimensions[0],p_siline.dimensions[0]], units=uvel.units)
+
+
+#===================================================================================================
+# reduce_luFunction
+#===================================================================================================
+class reduce_luFunction(Function):
+    key = 'reduce_lu'
+
+    # np.where(x < 5, x, -1)
+
+    def __init__(self, p_data,p_lu):
+        super(reduce_luFunction, self).__init__(p_data,p_lu)
+
+    def __getitem__(self, index):
+        p_data = self.arguments[0][index]
+        p_lu = self.arguments[1][index]
+
+        #if index is None:
+        #    return PhysArray(p_data.data, dimensions=[p_data.dimensions[0],p_lu.dimensions[0],p_data.dimensions[2],p_data.dimensions[3]])
+
+        data = p_data.data
+        lu = p_lu.data
+
+        data2 = np.ma.zeros((data.shape[0],4,data.shape[2],data.shape[3]))
+
+        for t in range(data.shape[0]):
+            for x in range(data.shape[2]):
+                for y in range(data.shape[3]):
+                    data2[t,0,x,y] = data[t,0,x,y]
+                    data2[t,1,x,y] = 0
+                    data2[t,2,x,y] = data[t,1,x,y]
+                    data2[t,3,x,y] = data[t,6,x,y]+data[t,7,x,y]+data[t,8,x,y]
+        data2[data2>=1e+16] = 1e+20
+
+        new_name = 'reduce_lu({}{})'.format(p_data.name,p_lu.name)
+        return PhysArray(data2, name=new_name, dimensions=[p_data.dimensions[0],p_lu.dimensions[0],p_data.dimensions[2],p_data.dimensions[3]], units=p_data.units)
+
+
+#===================================================================================================
+# soilpoolsFunction
+#===================================================================================================
+class get_soilpoolsFunction(Function):
+    key = 'get_soilpools'
+
+    def __init__(self, p_data1,p_data2,p_data3,p_soilpool):
+        super(get_soilpoolsFunction, self).__init__(p_data1,p_data2,p_data3,p_soilpool)
+
+    def __getitem__(self, index):
+        p_data1 = self.arguments[0][index]
+        p_data2 = self.arguments[1][index]
+        p_data3 = self.arguments[2][index]
+        p_soilpool = self.arguments[3][index]
+
+        data1 = p_data1.data
+        data2 = p_data2.data
+        data3 = p_data3.data
+        soilpool = p_soilpool.data
+
+        data = np.ma.zeros((data1.shape[0],3,data1.shape[1],data1.shape[2]))
+
+        data[:,0,:,:] = data1
+        data[:,1,:,:] = data2
+        data[:,2,:,:] = data3
+
+        data[data>=1e+16] = 1e+20
+        data = np.ma.masked_values(data, 1e+20)
+
+        new_name = 'soilpools({}{}{}{})'.format(p_data1.name,p_data2.name,p_data3.name,p_soilpool.name)
+        return PhysArray(data, name=new_name, dimensions=[p_data1.dimensions[0],p_soilpool.dimensions[0],p_data1.dimensions[1],p_data1.dimensions[2]], units=p_data1.units)
+
+
+#===================================================================================================
+# expand_latlonFunction
+#===================================================================================================
+class expand_latlonFunction(Function):
+    key = 'expand_latlon'
+
+    def __init__(self, p_data1,p_lat,p_lon):
+        super(expand_latlonFunction, self).__init__(p_data1,p_lat,p_lon)
+
+    def __getitem__(self, index):
+        p_data1 = self.arguments[0][index]
+        p_lat = self.arguments[1][index]
+        p_lon = self.arguments[2][index]
+
+        data1 = p_data1.data
+        lat = p_lat.data
+        lon = p_lon.data
+
+        data = np.ma.zeros((data1.shape[0],lat.shape[0],lon.shape[0]))
+
+        for x in range(lat.shape[0]):
+            for y in range(lon.shape[0]):
+                data[:,x,y] = data1
+
+        data[data>=1e+16] = 1e+20
+        data = np.ma.masked_values(data, 1e+20)
+
+        new_name = 'expand_latlon({}{}{})'.format( p_data1.name, p_lat.name, p_lon.name)
+        return PhysArray(data, name=new_name, dimensions=[p_data1.dimensions[0],p_lat.dimensions[0],p_lon.dimensions[0]], units=p_data1.units)
+
+
+#===================================================================================================
+# ocean_basinFunction
+#===================================================================================================
+class ocean_basinFunction(Function):
+    key = 'ocean_basin'
+
+    def __init__(self, p_data1, p_comp, p_basin):
+        super(ocean_basinFunction, self).__init__(p_data1, p_comp, p_basin)
+
+    def __getitem__(self, index):
+        p_data1 = self.arguments[0][index]
+        p_comp = self.arguments[1]
+        p_basin = self.arguments[2][index]
+
+        data1 = p_data1.data
+        comp = int(p_comp)
+        basin = p_basin.data
+
+        data = np.ma.zeros((data1.shape[0],data1.shape[4],data1.shape[3],basin.shape[0]))
+
+        for t in range(data1.shape[0]):
+            for x in range(data1.shape[4]):
+                for y in range(data1.shape[3]):
+                    data[t,x,y,0] = data1[t,1,comp,y,x]
+                    data[t,x,y,1] = data1[t,0,comp,y,x]-data1[t,1,comp,y,x]
+                    data[t,x,y,2] = data1[t,0,comp,y,x]
+
+        data[data>=1e+16] = 1e+20
+        data = np.ma.masked_values(data, 1e+20)
+
+        new_name = 'ocean_basin({}{})'.format(p_data1.name, p_basin.name)
+        return PhysArray(data, name=new_name, dimensions=[p_data1.dimensions[0],p_data1.dimensions[4],p_data1.dimensions[3],p_basin.dimensions[0]], units=p_data1.units)
diff --git a/source/pyconform/version.py b/source/pyconform/version.py
index ce5f98f0..d0c9d782 100644
--- a/source/pyconform/version.py
+++ b/source/pyconform/version.py
@@ -1,2 +1,2 @@
 # Single place for version information
-__version__ = '0.2.5'
+__version__ = '0.2.6'