diff --git a/bav_lib.py b/bav_lib.py
index 0b4c468..a936cd1 100644
--- a/bav_lib.py
+++ b/bav_lib.py
@@ -22,6 +22,43 @@
 months = mdates.MonthLocator()  # every month
 years_fmt = mdates.DateFormatter('%Y')
+#%%
+from math import radians, cos, sin, asin, sqrt
+def haversine(lat1, lon1, lat2, lon2, to_radians=True, earth_radius=6371):
+    """
+    Slightly modified version of http://stackoverflow.com/a/29546836/2901002
+
+    Calculate the great-circle distance between two points on the earth
+    (specified in decimal degrees or in radians).
+
+    All (lat, lon) coordinates must have numeric dtypes and be of equal length.
+    """
+    if to_radians:
+        lat1, lon1, lat2, lon2 = np.radians([lat1, lon1, lat2, lon2])
+
+    a = np.sin((lat2-lat1)/2.0)**2 + \
+        np.cos(lat1) * np.cos(lat2) * np.sin((lon2-lon1)/2.0)**2
+
+    return earth_radius * 2 * np.arcsin(np.sqrt(a))
+
+# %%
+def stat_title(x, y, ax):
+    ind = np.logical_and(pd.notnull(x), pd.notnull(y))
+    x = x[ind]
+    y = y[ind]
+    x = x.values.reshape(-1, 1)
+    y = y.values.reshape(-1, 1)
+
+    lr = linear_model.LinearRegression()
+    lr.fit(x, y)
+    # print('Coefficients: \n', lr.coef_)
+    preds = lr.predict(x)
+    # take the square root so the reported value really is the RMSE
+    ax.set_title('R2=%.3f\nRMSE=%.2f\nN=%.0f' % (r2_score(y, preds),
+                                                 np.sqrt(mean_squared_error(y, preds)),
+                                                 len(x)))
+    return ax
+
 #%%
 def multi_plot(data_out, sites = ['KAN_M', 'KAN_U'], sp1 = 4, sp2 = 2,
                title = '', OutputFolder='figures/',
@@ -267,5 +304,36 @@ def mosaic_albedo_fit(df, Rad_in):
             ax[i, j].set_ylim([0.01, 1.05])
             ax[i, j].grid(True)
     fig.savefig('./output/linear_'+Rad_in+'oa.png', bbox_inches='tight')
+#%%
+def overall_axis_label(fig, xlab, ylab):
+    ax0 = fig.add_subplot(111)  # the big subplot
+
+    # Turn off axis lines and ticks of the big subplot
+    ax0.spines['top'].set_color('none')
+    ax0.spines['bottom'].set_color('none')
+    ax0.spines['left'].set_color('none')
+    ax0.spines['right'].set_color('none')
+    ax0.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
+    ax0.patch.set_visible(False)
+
+    # Set common labels
+    ax0.set_xlabel(xlab)
+
+    if isinstance(ylab, str):
+        ax0.set_ylabel(ylab)
+        ax0_2 = []
+    else:
+        ax0.set_ylabel(ylab[0])
+        ax0_2 = ax0.twinx()
+        ax0_2.set_ylabel(ylab[1])
+
+        # only style the twin axis when it exists (ylab given as a pair)
+        ax0_2.spines['top'].set_color('none')
+        ax0_2.spines['bottom'].set_color('none')
+        ax0_2.spines['left'].set_color('none')
+        ax0_2.spines['right'].set_color('none')
+        ax0_2.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
+        ax0_2.patch.set_visible(False)
+    return ax0, ax0_2
diff --git a/bav_lib.py.bak b/bav_lib.py.bak
deleted file mode 100644
index e0111a8..0000000
--- a/bav_lib.py.bak
+++ /dev/null
@@ -1,227 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Oct 17 14:03:28 2019
-
-@author: bav
-"""
-
-#%matplotlib qt
-
-import numpy as np
-import plotly.graph_objects as go
-import matplotlib.pyplot as plt
-from matplotlib import cm
-import matplotlib as mpl
-from mpl_toolkits.axes_grid1 import make_axes_locatable
-import pandas as pd
-
-def OutlookRaster(var,title):
-    l,b,r,t = var.bounds
-    res = var.res
-    x = np.arange(l,r, res[0])
-    y = np.arange(t,b, -res[0])
-    z=var.read(1)
-    nan_col = ~np.all(np.isnan(z), axis=0)
-    z=z[:,nan_col]
-    x=x[nan_col]
-
-    nan_row = ~np.all(np.isnan(z), axis=1)
-    z=z[nan_row,:]
-    y=y[nan_row]
-
-    fig = go.Figure(
-            data=go.Heatmap(x=x,
-                            y=y,
-                            z=z,
-                            type = 'heatmap',
-                            colorscale='Jet',
-                            colorbar=dict(title=title),
-                            showscale=True))
-    fig.update_layout(
-            autosize=False,
-            width=500,
-            height=500)
-    fig.show()
-#    fig.write_image(title+".jpeg")
-    return x,y,z
-
-#%%
-def heatmap(var, title='',
col_lim=(np.nan, np.nan) ,cmap_in='gnuplot'): - if np.isnan(col_lim[0]): - col_lim=(np.nanmin(var), np.nanmax(var)) - z=var - nan_col = ~np.all(np.isnan(z), axis=0) - z=z[:,nan_col] - - nan_row = ~np.all(np.isnan(z), axis=1) - z=z[nan_row,:] - - cmap = plt.get_cmap(cmap_in) - cmap.set_bad(color='gray') - -# fig,ax = plt.subplots() - im = plt.imshow(z, cmap=cmap, interpolation='nearest',vmin=col_lim[0], vmax=col_lim[1]) - cb= plt.colorbar(im) - cb.ax.set_ylabel(title) -# fig.show() -# fig.write_image(title+".jpeg") - return z -#%% -def heatmap_discrete(var, title='', col_lim=(np.nan, np.nan) ,cmap_in='tab20_r'): - if np.isnan(col_lim[0]): - col_lim=(np.nanmin(var), np.nanmax(var)) - z=var - nan_col = ~np.all(np.isnan(z), axis=0) - z=z[:,nan_col] - - nan_row = ~np.all(np.isnan(z), axis=1) - z=z[nan_row,:] - - cmap = plt.get_cmap(cmap_in) - cmap.set_bad(color='white') - - bounds = np.unique(var)[np.logical_not(np.isnan(np.unique(var)))] - bounds = np.append(bounds, bounds[len(bounds)-1]+1) - norm = mpl.colors.BoundaryNorm(bounds, cmap.N+1) - - fig,ax = plt.subplots(figsize=(10,15)) - im = ax.imshow(z+1e-6, cmap=cmap, norm = norm, interpolation='Nearest') - divider = make_axes_locatable(ax) - cax = divider.append_axes("right", size="5%" , pad= 0.1) - cb = mpl.colorbar.ColorbarBase(ax=cax, cmap=cmap, norm=norm) - - tic_locs =bounds[0:len(bounds)-1] - (bounds[0:len(bounds)-1]-bounds[1:len(bounds)])/2 - cb.set_ticks(tic_locs) - cb.ax.set_yticklabels(bounds[0:len(bounds)-1]) - cb.ax.set_ylabel(title) - - fig.show() -# fig.write_image(title+".jpeg") - return fig,ax - -#%% Plot OLCI bands -def plot_OLCI_bands(ax): -# pots olci bands in the background - - olci_bands = pd.read_excel (r'misc/olci_bands.xlsx',header=0,thousands=' ') - #Out[409]: Index(['Band', 'λ centre (nm)', 'Width (nm)', 'Function'], dtype='object') - - ax.set_xlabel('Wavelength (nm)') - #ax.autoscale(enable=True, tight=True) - ax.bar(olci_bands['λ centre (nm)'], ax.get_ylim()[1], olci_bands['Width (nm)'], alpha=0.5, edgecolor ='darkblue', label='OLCI bands') - - ax.set_xlim(350, 1050) - - height_text = np.array((2, 2.3, 2.3, 2.3, 2.3, 2.3, - 2.3, 2, 2.4 , - 1.7,2, 1.6, 2, 2.5, - 1.25, 1.7, 2, 1.7,2,2, 2))/3 *ax.get_ylim()[1] - for i in range(1,22): - ax.annotate(str(i), - (olci_bands['λ centre (nm)'][i-1], height_text[i-1]), - ha='center', - bbox=dict(boxstyle="circle", fc="w"), - fontsize =13) - return ax - -#%% Density scatter - -def density_scatter(x,y,ax,ss): - if isinstance(x, pd.coxe.series.Series)==False: - x = pd.DataFrame(x) - x = x[0] - if isinstance(y, pd.core.series.Series)==False: - y = pd.DataFrame(y) - y=y[y.columns[0]] - - y=y[~np.isnan(x)] - x=x[~np.isnan(x)] - x=x[~np.isnan(y)] - y=y[~np.isnan(y)] - - xy = np.vstack([x,y]) - z = gaussian_kde(xy)(xy) - # Sort the points by density, so that the densest points are plotted last - idx = z.argsort() - x, y, z = x.iloc[idx], y.iloc[idx], z[idx] - ax.scatter(x, y, c=z, s=ss) - return ax -# ============================================================================= -# -# ============================================================================= - -#%% Plotting top of atmosphere refletance -#%matplotlib qt -#%matplotlib inline -def mosaic_albedo_fit(df, Rad_in): - fig, ax = plt.subplots(7,3, sharex='col', sharey='row',figsize=(15, 15)) - fig.subplots_adjust(hspace=0, wspace=0) - if Rad_in=='Rt': - fig.text(0.5, 0.05, 'Top of atmosphere OLCI reflectance', ha='center', size = 20) - elif Rad_in=='Rb': - fig.text(0.5, 0.05, 'Bottom of atmosphere OLCI reflectance', 
ha='center', size = 20) - fig.text(0.05, 0.5, 'PROMICE albedo', va='center', rotation='vertical', size = 20) - - # axes are in a two-dimensional array, indexed by [row, col] - count=0 - ss=5 - - for i in range(7): - for j in range(3): - # Calculate the point density - count = count+1 - R=df[Rad_in+str(count)] - alb = df['PROMICE alb'] - alb=alb[~np.isnan(R)] - R=R[~np.isnan(R)] - alb = alb[~(R>1.1)] - R = R[~(R>1.1)] - - input_name= Rad_in+str(count) - - density_scatter(R,alb,ax[i, j],ss) - - R=R.values - alb=alb.values - R=R.reshape(-1,1) - alb=alb.reshape(-1,1) - lr = linear_model.LinearRegression() - lr.fit(R, alb) - print(Rad_in+str(count)) - print('Coefficients: \n', lr.coef_) - - preds = lr.predict(R) - - tmp = np.array([np.min(R), np.max(R)]) - tmp = tmp.reshape(-1,1) - ax[i, j].plot(tmp, lr.predict(tmp)) -# x_ticks = np.linspace(0.25,1,4) -# ax[i, j].set_xticklabels(x_ticks,fontsize=20) -# ax[i, j].set_yticklabels(x_ticks,fontsize=20) - - ax[i, j].text(0.08, 0.93, - '%s' % input_name, - fontsize=18, - color='black', - fontweight='bold', - horizontalalignment='left', - verticalalignment='top', - bbox=dict(boxstyle="square", - ec='lightskyblue', - alpha=0.8)) - ax[i, j].text(0.7, 0.5, - 'R2=%.3f\nRMSE=%.2f\nN=%.0f' % (r2_score(alb,preds), mean_squared_error(alb,preds),len(R)), - fontsize=13, - color='white', - fontweight='bold', - horizontalalignment='left', - verticalalignment='top', - bbox=dict(boxstyle="square", - ec='lightskyblue', - alpha=0.8)) - ax[i, j].set_xlim([0.01, 1.05]) - ax[i, j].set_ylim([0.01, 1.05]) - ax[i, j].grid(True) - fig.savefig('./output/linear_'+Rad_in+'oa.png',bbox_inches='tight') - - diff --git a/misc/astmg173.xls b/misc/astmg173.xls deleted file mode 100644 index 832735f..0000000 Binary files a/misc/astmg173.xls and /dev/null differ diff --git a/misc/gains_olci.csv b/misc/gains_olci.csv new file mode 100644 index 0000000..fddc2b1 --- /dev/null +++ b/misc/gains_olci.csv @@ -0,0 +1,22 @@ +platform,band,wavelength,average_gain,standard_deviation +A,1,400,0.975458,0.005544 +A,2,412.5,0.974061,0.005897 +A,3,442.5,0.974919,0.005435 +A,4,490,0.968897,0.005645 +A,5,510,0.971844,0.004139 +A,6,560,0.975705,0.003086 +A,7,620,0.980013,0.002107 +A,8,665,0.978339,0.001412 +A,9,673.75,0.978597,0.002128 +A,10,681.25,0.979083,0.001504 +A,11,708.75,0.980135,0.004555 +A,12,753.75,0.985516,0.003723 +A,13,761.25,1,0 +A,14,764.375,1,0 +A,15,767.5,1,0 +A,16,778.75,0.987718,0.00352 +A,17,865,0.986, +A,18,885,0.986569,0.00176 +A,19,900,1,0 +A,20,940,1,0 +A,21,1020,0.913161,0.008537 diff --git a/misc/olci_bands.xlsx b/misc/olci_bands.xlsx deleted file mode 100644 index 1e065d9..0000000 Binary files a/misc/olci_bands.xlsx and /dev/null differ diff --git a/sice.py b/sice.py index 46fcc87..40ecf25 100644 --- a/sice.py +++ b/sice.py @@ -1,4 +1,4 @@ -# pySICEv1.4 +# pySICEv1.5 # # from FORTRAN VERSION 5.2 # March 31, 2020 @@ -80,305 +80,347 @@ import sice_lib as sl import rasterio as rio import time -import sys +import sys, inspect, os +import pandas as pd from constants import w, bai, sol1_clean, sol2, sol3_clean, sol1_pol, sol3_pol, asol np.seterr(invalid='ignore') - -start_time = time.process_time() - -InputFolder = sys.argv[1] + '/' - -# %% ========= input tif ================ - -Oa01 = rio.open(InputFolder + 'r_TOA_01.tif') -meta = Oa01.meta - -with rio.Env(): - meta.update(compress='DEFLATE') - - -def WriteOutput(var, var_name, in_folder): - # this functions write tif files based on a model file, here "Oa01" - # opens a file for writing - - with rio.open(in_folder + var_name + 
'.tif', 'w+', **meta) as dst: - dst.write(var.astype('float32'), 1) - -toa = np.tile(Oa01.read(1).astype('float32') * np.nan, (21, 1, 1)) - -for i in range(21): - - try: - dat = rio.open((InputFolder + 'r_TOA_' + str(i + 1).zfill(2) + '.tif')) - toa[i, :, :] = dat.read(1).astype('float32') - except: - toa[i, :, :] = np.nan - -ozone = rio.open(InputFolder + 'O3.tif').read(1).astype('float32') -water = rio.open(InputFolder + 'WV.tif').read(1).astype('float32') -sza = rio.open(InputFolder + 'SZA.tif').read(1).astype('float32') -saa = rio.open(InputFolder + 'SAA.tif').read(1).astype('float32') -vza = rio.open(InputFolder + 'OZA.tif').read(1).astype('float32') -vaa = rio.open(InputFolder + 'OAA.tif').read(1).astype('float32') -height = rio.open(InputFolder + 'height.tif').read(1).astype('float32') - -sza[np.isnan(toa[0, :, :])] = np.nan -saa[np.isnan(toa[0, :, :])] = np.nan -vza[np.isnan(toa[0, :, :])] = np.nan -vaa[np.isnan(toa[0, :, :])] = np.nan - -water_vod = genfromtxt('./tg_water_vod.dat', delimiter=' ') -voda = water_vod[range(21), 1] - -ozone_vod = genfromtxt('./tg_vod.dat', delimiter=' ') -tozon = ozone_vod[range(21), 1] -aot = 0.1 - -# %% declaring variables - -BXXX, isnow, D, area, al, r0, isnow, conc, ntype, rp1, rp2, rp3, rs1, rs2, rs3 = \ - vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, \ - vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, vaa * np.nan, \ - vaa * np.nan, vaa * np.nan, vaa * np.nan - -alb_sph, rp, refl = toa * np.nan, toa * np.nan, toa * np.nan - -# %% =========== ozone scattering ==================================== - -BXXX, toa_cor_o3 = sl.ozone_scattering(ozone, tozon, sza, vza, toa) - -# Filtering pixels unsuitable for retrieval -isnow[sza > 75] = 100 -isnow[toa_cor_o3[20, :, :] < 0.1] = 102 - -for i_channel in range(21): - toa_cor_o3[i_channel, ~np.isnan(isnow)] = np.nan - -vaa[~np.isnan(isnow)] = np.nan -saa[~np.isnan(isnow)] = np.nan -sza[~np.isnan(isnow)] = np.nan -vza[~np.isnan(isnow)] = np.nan - -height[~np.isnan(isnow)] = np.nan - -# =========== view geometry and atmosphere propeties ============== - -raa, am1, am2, ak1, ak2, amf, co = sl.view_geometry(vaa, saa, sza, vza, aot, height) - -tau, p, g, gaer, taumol, tauaer = sl.aerosol_properties(aot, height, co) - -# =========== snow properties ==================================== - -D, area, al, r0, bal = sl.snow_properties(toa_cor_o3, ak1, ak2) -# filtering small D -D_thresh = 0.1 -isnow[D < D_thresh] = 104 - -for i in range(21): - toa_cor_o3[i, D < D_thresh] = np.nan - -area[D < D_thresh] = np.nan -al[D < D_thresh] = np.nan -r0[D < D_thresh] = np.nan -bal[D < D_thresh] = np.nan -am1[D < D_thresh] = np.nan -am2[D < D_thresh] = np.nan -# D[D= rs_1 -isnow[ind_clean] = 0 - -# STEP 4a: clean snow retrieval -# the spherical albedo derivation: alb_sph - - -def mult_channel(c, A): - tmp = A.T * c - return tmp.T - - -alb_sph = np.exp(-np.sqrt(1000. * 4. * np.pi - * mult_channel(bai / w, np.tile(al, (21, 1, 1))))) -alb_sph[alb_sph > 0.999] = 1 - -# ========== very dirty snow ==================================== - -ind_pol = toa_cor_o3[0, :, :] < rs_1 - -isnow[ind_pol] = 1 - -ind_very_dark = np.logical_and(toa_cor_o3[20] < 0.4, ind_pol) -isnow[ind_very_dark] = 6 - -am11 = np.sqrt(1. - am1[ind_very_dark] ** 2.) -am12 = np.sqrt(1. - am2[ind_very_dark] ** 2.) - -tz = np.arccos(-am1[ind_very_dark] * am2[ind_very_dark] + am11 * am12 - * np.cos(raa[ind_very_dark] * 3.14159 / 180.)) * 180. 
/ np.pi - -pz = 11.1 * np.exp(-0.087 * tz) + 1.1 * np.exp(-0.014 * tz) - -rclean = 1.247 + 1.186 * (am1[ind_very_dark] + am2[ind_very_dark]) \ - + 5.157 * am1[ind_very_dark] * am2[ind_very_dark] + pz - -rclean = rclean / 4. / (am1[ind_very_dark] + am2[ind_very_dark]) -r0[ind_very_dark] = rclean - -# =========== polluted snow ==================================== - -ind_pol = np.logical_or(ind_very_dark, ind_pol) - -if np.any(ind_pol): - subs_pol = np.argwhere(ind_pol) + +def pySICE(InputPath, OutputFolder, olci_gains = False, slopey = False): + os.makedirs(OutputFolder,exist_ok=True) + + pySICE_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + print(pySICE_dir) + # script directory + start_time = time.process_time() + print(InputPath) + + # %% input text file + if os.path.isfile(InputPath): + InputFolder = os.path.dirname(os.path.dirname(InputPath))+'/' + print('\nText file input') + # data_in = pd.read_csv(InputPath) + data_in = pd.read_csv(InputPath) + toa = np.expand_dims(data_in[[c for c in data_in.columns if c.find('reflec')>=0]].to_numpy().transpose(), axis=2) + + + ozone = np.expand_dims(data_in['total_ozone'], axis=1) + water = np.expand_dims(data_in['total_columnar_water_vapour'], axis=1) + sza = np.expand_dims(data_in['sza'], axis=1) + saa = np.expand_dims(data_in['saa'], axis=1) + vza = np.expand_dims(data_in['vza'], axis=1) + vaa = np.expand_dims(data_in['vaa'], axis=1) + height = np.expand_dims(data_in['altitude'], axis=1) + + sza[np.isnan(toa[0,:,:])] = np.nan + saa[np.isnan(toa[0,:,:])] = np.nan + vza[np.isnan(toa[0,:,:])] = np.nan + vaa[np.isnan(toa[0,:,:])] = np.nan - # approximation of the transcendental equation allowing closed-from solution - # alb_sph[:, ind_pol] = (toa_cor_o3[:, ind_pol] - r[:, ind_pol]) \ - # /(t1[:,ind_pol]*t2[:,ind_pol]*r0[ind_pol] + ratm[:,ind_pol]*(toa_cor_o3[:,ind_pol] - r[:,ind_pol])) + # %% input tif + elif os.path.isdir(InputPath): + InputFolder = InputPath + '/' + print("\nTiff files input") + Oa01 = rio.open(InputFolder+'r_TOA_01.tif') + meta = Oa01.meta + with rio.Env(): + meta.update(compress='DEFLATE') + + def WriteOutput(var,var_name,in_folder): + # this functions write tif files based on a model file, here "Oa01" + # opens a file for writing + + with rio.open(in_folder+var_name+'.tif', 'w+', **meta) as dst: + dst.write(var.astype('float32'),1) + + toa = np.tile(Oa01.read(1)*np.nan, (21,1,1)) + + for i in range(21): + if i in [0, 1, 5, 6, 10, 11, 16, 17, 20]: + dat = rio.open((InputFolder+'r_TOA_'+str(i+1).zfill(2)+'.tif')) + toa[i,:,:] = dat.read(1) + + ozone = rio.open(InputFolder+'O3.tif').read(1) + # water = rio.open(InputFolder+'WV.tif').read(1) + + if slopey: + sza = rio.open(InputFolder+'SZA_eff.tif').read(1) + vza = rio.open(InputFolder+'OZA_eff.tif').read(1) + toa[16,:,:] = rio.open((InputFolder+'ir_TOA_17.tif')).read(1) + toa[20,:,:] = rio.open((InputFolder+'ir_TOA_21.tif')).read(1) + else: + sza = rio.open(InputFolder+'SZA.tif').read(1) + vza = rio.open(InputFolder+'OZA.tif').read(1) + + saa = rio.open(InputFolder+'SAA.tif').read(1) + vaa = rio.open(InputFolder+'OAA.tif').read(1) + height = rio.open(InputFolder+'height.tif').read(1) + + sza[np.isnan(toa[0,:,:])] = np.nan + saa[np.isnan(toa[0,:,:])] = np.nan + vza[np.isnan(toa[0,:,:])] = np.nan + vaa[np.isnan(toa[0,:,:])] = np.nan + + else: + print("\n Input path neither file or directory" ) - # solving iteratively the transcendental equation - alb_sph[:, ind_pol] = 1 + # water and ozone spectral optical density + # water_vod = 
genfromtxt('./tg_water_vod.dat', delimiter=' ') + # voda = water_vod[range(21),1] + ozone_vod = genfromtxt(pySICE_dir+'/tg_vod.dat', delimiter=' ') + tozon = ozone_vod[range(21),1] + aot = 0.1 - def solver_wrapper(toa_cor_o3, tau, t1, t2, r0, ak1, ak2, ratm, r): - def func_solv(albedo): - return toa_cor_o3 - sl.alb2rtoa(albedo, t1, t2, r0, ak1, ak2, ratm, r) - # it is assumed that albedo is in the range 0.1-1.0 + if olci_gains: + gains = pd.read_csv(pySICE_dir+'/misc/gains_olci.csv').average_gain.values + toa = toa*gains.reshape([-1, 1, 1]) - return sl.zbrent(func_solv, 0.1, 1, 100, 1.e-6) - solver_wrapper_v = np.vectorize(solver_wrapper) - # loop over all bands except band 19, 20 - for i_channel in np.append(np.arange(18), [20]): + #%% declaring variables + BXXX, isnow, D, area, al, r0, isnow, conc, ntype, rp1, rp2, rp3, rs1, rs2, rs3 = vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan, vaa*np.nan + + alb_sph, rp, refl = toa*np.nan, toa*np.nan, toa*np.nan + + #%% ozone scattering + BXXX, toa_cor_o3 = sl.molecular_absorption(ozone,tozon, sza, vza,toa) + + # Filtering pixels unsuitable for retrieval + isnow[sza>75] = 100 + isnow[toa_cor_o3[20, :,:] < 0.1] = 102 + for i_channel in range(21): + toa_cor_o3[i_channel, ~np.isnan(isnow)] = np.nan + + vaa[ ~np.isnan(isnow)] = np.nan + saa[ ~np.isnan(isnow)] = np.nan + sza[ ~np.isnan(isnow)] = np.nan + vza[ ~np.isnan(isnow)] = np.nan + height = height.astype(float) + height[ ~np.isnan(isnow)] = np.nan + + #%% view geometry and atmosphere propeties + raa, cos_sza, cos_vza, ak1, ak2, inv_cos_za, cos_sa = sl.view_geometry(vaa, saa, sza, vza, aot, height) + + tau, p, g, gaer,taumol,tauaer = sl.aerosol_properties(aot, height, cos_sa) + + #%% snow properties + D, area, al, r0, bal = sl.snow_properties(toa_cor_o3, ak1, ak2) + # filtering small D + D_thresh = 0.01 + isnow[np.logical_and(D= rs_1 + isnow[ind_clean] = 0 + + # STEP 4a: clean snow retrieval + # the spherical albedo derivation: alb_sph + def mult_channel(c,A): + tmp = A.T*c + return tmp.T + alb_sph = np.exp(-np.sqrt(1000.*4.*np.pi* mult_channel(bai/w, np.tile(al,(21,1,1))))) + alb_sph[alb_sph>0.999]=1 + + #%% polluted snow + ind_pol = toa_cor_o3[0,:,:] < rs_1 + + isnow[ind_pol] = 1 + + # very dirty snow + ind_very_dark = np.logical_and(toa_cor_o3[20,:,:]<0.4, ind_pol) + isnow[ind_very_dark] = 6 + + am11=np.sqrt(1.-cos_sza[ind_very_dark]**2.) + am12=np.sqrt(1.-cos_vza[ind_very_dark]**2.) + + theta = np.arccos(-cos_sza[ind_very_dark] * cos_vza[ind_very_dark] + am11 * am12 * np.cos(raa[ind_very_dark]*3.14159/180.)) *180./np.pi + + pz=11.1*np.exp(-0.087*theta)+1.1*np.exp(-0.014*theta) + + rclean = 1.247 + 1.186 *(cos_sza[ind_very_dark]+cos_vza[ind_very_dark]) + 5.157 * cos_sza[ind_very_dark] * cos_vza[ind_very_dark] + pz + + rclean = rclean /4. 
/(cos_sza[ind_very_dark] + cos_vza[ind_very_dark]) + + r0[ind_very_dark] = rclean + # end very dirty snow + + ind_pol = np.logical_or(ind_very_dark, ind_pol) + if np.any(ind_pol): + subs_pol = np.argwhere(ind_pol) + + # approximation of the transcendental equation allowing closed-from solution + #alb_sph[:,ind_pol] = (toa_cor_o3[:,ind_pol] - r[:,ind_pol])/(t1[:,ind_pol]*t2[:,ind_pol]*r0[ind_pol] + ratm[:,ind_pol]*(toa_cor_o3[:,ind_pol] - r[:,ind_pol])) + + # solving iteratively the transcendental equation + alb_sph[:,ind_pol] = 1 - alb_sph[i_channel, ind_pol] = solver_wrapper_v( - toa_cor_o3[i_channel, ind_pol], tau[i_channel, ind_pol], - t1[i_channel, ind_pol], t2[i_channel, ind_pol], - r0[ind_pol], ak1[ind_pol], ak2[ind_pol], ratm[i_channel, ind_pol], - r[i_channel, ind_pol]) + def solver_wrapper(toa_cor_o3,tau, t1, t2, r0, ak1, ak2, ratm, r): + def func_solv(albedo): + return toa_cor_o3 - sl.alb2rtoa(albedo, t1, t2, r0, ak1, ak2, ratm, r) + # it is assumed that albedo is in the range 0.1-1.0 + return sl.zbrent(func_solv, 0.1, 1, 100, 1.e-6) - ind_bad = alb_sph[i_channel, :, :] == -999 - alb_sph[i_channel, ind_bad] = np.nan - isnow[ind_bad] = -i_channel + solver_wrapper_v = np.vectorize(solver_wrapper) + # loop over all bands except band 19, 20 + for i_channel in np.append(np.arange(18), [20]): + alb_sph[i_channel,ind_pol] = solver_wrapper_v( + toa_cor_o3[i_channel,ind_pol], + tau[i_channel,ind_pol], + t1[i_channel,ind_pol], + t2[i_channel,ind_pol], + r0[ind_pol], ak1[ind_pol], + ak2[ind_pol], + ratm[i_channel,ind_pol], + r[i_channel,ind_pol]) + + ind_bad = alb_sph[i_channel,:,:]==-999 + alb_sph[i_channel,ind_bad] = np.nan + isnow[ind_bad]= -i_channel + + # INTERNal CHECK FOR CLEAN PIXELS + # Are reprocessed as clean + ind_clear_pol1 = np.logical_and(ind_pol, alb_sph[0,:,:]>0.98) + ind_clear_pol2 = np.logical_and(ind_pol, alb_sph[1,:,:]>0.98) + ind_clear_pol = np.logical_or(ind_clear_pol1, ind_clear_pol2) + isnow[ind_clear_pol]= 7 + for i_channel in range(21): + alb_sph[i_channel,ind_clear_pol] = np.exp(-np.sqrt(4.*1000.*al[ind_clear_pol] * np.pi * bai[i_channel] / w[i_channel] )) + + # re-defining polluted pixels + ind_pol = np.logical_and(ind_pol, isnow!=7) + + #retrieving snow impurities + ntype, bf, conc = sl.snow_impurities(alb_sph, bal) - # INTERNal CHECK FOR CLEAN PIXELS - # Are reprocessed as clean - ind_clear_pol1 = np.logical_and(ind_pol, alb_sph[0, :, :] > 0.98) - ind_clear_pol2 = np.logical_and(ind_pol, alb_sph[1, :, :] > 0.98) - ind_clear_pol = np.logical_or(ind_clear_pol1, ind_clear_pol2) - isnow[ind_clear_pol] = 7 + # alex 09.06.2019 + # reprocessing of albedo to remove gaseous absorption using linear polynomial approximation in the range 753-778nm. + # Meaning: alb_sph[12],alb_sph[13] and alb_sph[14] are replaced by a linear interpolation between alb_sph[11] and alb_sph[15] + afirn=(alb_sph[15,ind_pol]-alb_sph[11,ind_pol])/(w[15]-w[11]) + bfirn=alb_sph[15,ind_pol]-afirn*w[15] + alb_sph[12,ind_pol] = bfirn + afirn*w[12] + alb_sph[13,ind_pol] = bfirn + afirn*w[13] + alb_sph[14,ind_pol] = bfirn + afirn*w[14] - for i_channel in range(21): - alb_sph[i_channel, ind_clear_pol] = np.exp(-np.sqrt(4. * 1000. 
- * al[ind_clear_pol] - * np.pi * bai[i_channel] - / w[i_channel])) + # BAV 09-02-2020: 0.5 to 0.35 + # pixels that are clean enough in channels 18 19 20 and 21 are not affected by pollution, the analytical equation can then be used + ind_ok = np.logical_and(ind_pol, toa_cor_o3[20,:,:]>0.35) + for i_channel in range(17,21): + alb_sph[i_channel,ind_ok] = np.exp(-np.sqrt(4.*1000.*al[ind_ok] * np.pi * bai[i_channel] / w[i_channel] )) + # Alex, SEPTEMBER 26, 2019 + # to avoid the influence of gaseous absorption (water vapor) we linearly interpolate in the range 885-1020nm for bare ice cases only (low toa[20]) + # Meaning: alb_sph[18] and alb_sph[19] are replaced by a linear interpolation between alb_sph[17] and alb_sph[20] + delx=w[20]-w[17] + bcoef=(alb_sph[20,ind_pol]-alb_sph[17,ind_pol])/delx + acoef=alb_sph[20,ind_pol]-bcoef*w[20] + alb_sph[18,ind_pol] = acoef + bcoef*w[18] + alb_sph[19,ind_pol] = acoef + bcoef*w[19] + + #%% derivation of plane albedo and reflectance + rp = np.power (alb_sph, ak1) + refl =r0* np.power(alb_sph, (ak1*ak2/r0)) + + ind_all_clean = np.logical_or(ind_clean, isnow == 7) + + ## CalCULATION OF BBA of clean snow + + # old method: integrating equation + #BBA_v = np.vectorize(sl.BBA_calc_clean) + #p1,p2,s1,s2 = BBA_v(al[ind_all_clean], ak1[ind_all_clean]) + # + ## visible(0.3-0.7micron) + #rp1[ind_all_clean]=p1/sol1_clean + #rs1[ind_all_clean]=s1/sol1_clean + ## near-infrared (0.7-2.4micron) + #rp2[ind_all_clean]=p2/sol2 + #rs2[ind_all_clean]=s2/sol2 + ## shortwave(0.3-2.4 micron) + #rp3[ind_all_clean]=(p1+p2)/sol3_clean + #rs3[ind_all_clean]=(s1+s2)/sol3_clean + + # approximation + # planar albedo + #rp1 and rp2 not derived anymore + rp3[ind_all_clean]=sl.plane_albedo_sw_approx(D[ind_all_clean],cos_sza[ind_all_clean]) + # spherical albedo + #rs1 and rs2 not derived anymore + rs3[ind_all_clean]= sl.spher_albedo_sw_approx(D[ind_all_clean]) + + # calculation of the BBA for the polluted snow + rp1[ind_pol], rp2[ind_pol], rp3[ind_pol] = sl.BBA_calc_pol( + rp[:, ind_pol], asol, sol1_pol, sol2, sol3_pol) + rs1[ind_pol], rs2[ind_pol], rs3[ind_pol] = sl.BBA_calc_pol( + alb_sph[:, ind_pol], asol, sol1_pol, sol2, sol3_pol) + + #%% Output + if os.path.isfile(InputPath): + + print('\nText file output') + # data_in = pd.read_csv(InputPath) + data_out = data_in + data_out['grain_diameter']=D + data_out['snow_specific_area']=area + data_out['al']=al + data_out['r0']=r0 + data_out['diagnostic_retrieval']=isnow + data_out['conc']=conc + data_out['albedo_bb_planar_sw']=rp3 + data_out['albedo_bb_spherical_sw']=rs3 - # re-defining polluted pixels - ind_pol = np.logical_and(ind_pol, isnow != 7) + + for i in np.append(np.arange(11), np.arange(15,21)): + # for i in np.arange(21): + data_out['albedo_spectral_spherical_'+str(i+1).zfill(2)]=alb_sph[i,:,:] + for i in np.append(np.arange(11), np.arange(15,21)): + data_out['rBRR_'+str(i+1).zfill(2)]=rp[i,:,:] - # retrieving snow impurities - ntype, bf, conc = sl.snow_impurities(alb_sph, bal) - - # alex 09.06.2019 - # reprocessing of albedo to remove gaseous absorption using linear polynomial - # approximation in the range 753-778nm. 
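+    # Worked example of this two-point interpolation (illustrative numbers only):
+    # with alb_sph[11] = 0.96 at w[11] = 0.75375 um and alb_sph[15] = 0.92 at
+    # w[15] = 0.77875 um, afirn = (0.92-0.96)/(0.77875-0.75375) = -1.6 per um and
+    # bfirn = 0.92 + 1.6*0.77875 = 2.166, so band 13 (w[12] = 0.76125 um) gets
+    # alb_sph[12] = 2.166 - 1.6*0.76125 = 0.948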
- # Meaning: alb_sph[12],alb_sph[13] and alb_sph[14] are replaced by a linear - # interpolation between alb_sph[11] and alb_sph[15] - afirn = (alb_sph[15, ind_pol] - alb_sph[11, ind_pol]) / (w[15] - w[11]) - bfirn = alb_sph[15, ind_pol] - afirn * w[15] - alb_sph[12, ind_pol] = bfirn + afirn * w[12] - alb_sph[13, ind_pol] = bfirn + afirn * w[13] - alb_sph[14, ind_pol] = bfirn + afirn * w[14] - - # BAV 09-02-2020: 0.5 to 0.35 - # pixels that are clean enough in channels 18 19 20 and 21 are not affected - # by pollution, the analytical equation can then be used - ind_ok = np.logical_and(ind_pol, toa_cor_o3[20, :, :] > 0.35) - - for i_channel in range(17, 21): - alb_sph[i_channel, ind_ok] = np.exp(-np.sqrt(4. * 1000. * al[ind_ok] - * np.pi * bai[i_channel] - / w[i_channel])) - # Alex, SEPTEMBER 26, 2019 - # to avoid the influence of gaseous absorption (water vapor) we linearly - # interpolate in the range 885-1020nm for bare ice cases only (low toa[20]) - # Meaning: alb_sph[18] and alb_sph[19] are replaced by a linear interpolation - # between alb_sph[17] and alb_sph[20] - delx = w[20] - w[17] - bcoef = (alb_sph[20, ind_pol] - alb_sph[17, ind_pol]) / delx - acoef = alb_sph[20, ind_pol] - bcoef * w[20] - alb_sph[18, ind_pol] = acoef + bcoef * w[18] - alb_sph[19, ind_pol] = acoef + bcoef * w[19] - -# ========= derivation of plane albedo and reflectance =========== - -rp = np.power(alb_sph, ak1) -refl = r0 * np.power(alb_sph, (ak1 * ak2 / r0)) - -ind_all_clean = np.logical_or(ind_clean, isnow == 7) - -# CalCULATION OF BBA of clean snow - -# old method: integrating equation -# BBA_v = np.vectorize(sl.BBA_calc_clean) -# p1, p2, s1, s2 = BBA_v(al[ind_all_clean], ak1[ind_all_clean]) - -# visible(0.3-0.7micron) -# rp1[ind_all_clean] = p1 / sol1_clean -# rs1[ind_all_clean] = s1 / sol1_clean -# near-infrared (0.7-2.4micron) -# rp2[ind_all_clean] = p2 / sol2 -# rs2[ind_all_clean] = s2 / sol2 -# shortwave(0.3-2.4 micron) -# rp3[ind_all_clean] = (p1 + p2) / sol3_clean -# rs3[ind_all_clean] = (s1 + s2) / sol3_clean - -# approximation -# planar albedo -# rp1 and rp2 not derived anymore -rp3[ind_all_clean] = sl.plane_albedo_sw_approx(D[ind_all_clean], - am1[ind_all_clean]) -# spherical albedo -# rs1 and rs2 not derived anymore -rs3[ind_all_clean] = sl.spher_albedo_sw_approx(D[ind_all_clean]) - -# calculation of the BBA for the polluted snow -rp1[ind_pol], rp2[ind_pol], rp3[ind_pol] = sl.BBA_calc_pol( - rp[:, ind_pol], asol, sol1_pol, sol2, sol3_pol) -rs1[ind_pol], rs2[ind_pol], rs3[ind_pol] = sl.BBA_calc_pol( - alb_sph[:, ind_pol], asol, sol1_pol, sol2, sol3_pol) - -# %% Output - -WriteOutput(BXXX, 'O3_SICE', InputFolder) -WriteOutput(D, 'grain_diameter', InputFolder) -WriteOutput(area, 'snow_specific_surface_area', InputFolder) -WriteOutput(al, 'al', InputFolder) -WriteOutput(r0, 'r0', InputFolder) -WriteOutput(isnow, 'diagnostic_retrieval', InputFolder) -WriteOutput(conc, 'conc', InputFolder) -WriteOutput(rp3, 'albedo_bb_planar_sw', InputFolder) -WriteOutput(rs3, 'albedo_bb_spherical_sw', InputFolder) - -for i in np.append(np.arange(11), np.arange(15, 21)): - - # for i in np.arange(21): - WriteOutput(alb_sph[i, :, :], 'albedo_spectral_spherical_' - + str(i + 1).zfill(2), InputFolder) - WriteOutput(rp[i, :, :], 'albedo_spectral_planar_' - + str(i + 1).zfill(2), InputFolder) - WriteOutput(refl[i, :, :], 'rBRR_' - + str(i + 1).zfill(2), InputFolder) + data_out.to_csv(InputPath[:-4]+'_out.csv') + # ========= input tif =============== + elif os.path.isdir(InputPath): + WriteOutput(BXXX, 'O3_SICE', OutputFolder) 
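+        # Usage sketch (hypothetical paths; pySICE dispatches on the input type):
+        #   pySICE('validation/data/S3_PROMICE.csv', 'validation/data/')  # CSV in  -> *_out.csv
+        #   pySICE('path/to/S3_scene/', 'path/to/S3_scene/out/')          # tif dir -> GeoTIFF fields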
+        WriteOutput(D, 'grain_diameter', OutputFolder)
+        WriteOutput(area, 'snow_specific_area', OutputFolder)
+        WriteOutput(al, 'al', OutputFolder)
+        WriteOutput(r0, 'r0', OutputFolder)
+        WriteOutput(isnow, 'diagnostic_retrieval', OutputFolder)
+        WriteOutput(conc, 'conc', OutputFolder)
+        WriteOutput(rp3, 'albedo_bb_planar_sw', OutputFolder)
+        WriteOutput(rs3, 'albedo_bb_spherical_sw', OutputFolder)
+
+        for i in np.append(np.arange(11), np.arange(15, 21)):
+            # for i in np.arange(21):
+            WriteOutput(alb_sph[i, :, :], 'albedo_spectral_spherical_' + str(i + 1).zfill(2), OutputFolder)
+            WriteOutput(rp[i, :, :], 'albedo_spectral_planar_' + str(i + 1).zfill(2), OutputFolder)
+            WriteOutput(refl[i, :, :], 'rBRR_' + str(i + 1).zfill(2), OutputFolder)
+
+    print("End SICE.py %s --- %s CPU seconds ---" % (OutputFolder, time.process_time() - start_time))
+
+if __name__ == '__main__':
+    # if the script is called from the command line, parse the input path
+    # and pass it to the main function
+    InputPath = sys.argv[1]
+    if len(sys.argv) > 2:  # was "> 3", which skipped a user-supplied output folder
+        OutputFolder = sys.argv[2]
+    else:
+        OutputFolder = sys.argv[1] + '/'
+    print(InputPath)
+    print(OutputFolder)
+    print('---')
+    pySICE(InputPath, OutputFolder)
-print("End SICE.py %s --- %s CPU seconds ---" %
-      (InputFolder, time.process_time() - start_time))
diff --git a/sice_f.py b/sice_f.py
index fbff5a2..ea1324f 100644
--- a/sice_f.py
+++ b/sice_f.py
@@ -34,9 +34,8 @@
 #%% input text file
 if os.path.isfile(sys.argv[1]):
-    InputFolder = os.path.dirname(os.path.dirname(sys.argv[1]))+'/'
-    print('\nText file input')
-    # data_in = pd.read_csv(sys.argv[1])
+    InputFolder = os.path.dirname(sys.argv[1])+'/'
+    # data_in = pd.read_csv('validation/data/S3_PROMICE.csv')
     data_in = pd.read_csv(sys.argv[1])
     toa = np.expand_dims(data_in[[c for c in data_in.columns if c.find('reflec')>=0]].to_numpy().transpose(), axis=2)
@@ -117,7 +116,6 @@ def WriteOutput(var,var_name,in_folder):
 # print('bash ./fortran/sice_f.sh -i '+OutputFolder)
 # os.system('bash ./fortran/sice_f.sh -i '+OutputFolder)
-
 cmnd = 'bash ./fortran/sice_f.sh -i '+OutputFolder
 print(cmnd)
 import subprocess
@@ -142,9 +140,7 @@ def WriteOutput(var,var_name,in_folder):
 # file= 'notsnow.dat') ns,ndate(3),icloud,iice
 # file='retrieved_O3.dat') ns,alat,alon,BXXX,totadu,deltak,sza,vza,amf
-if os.path.isfile(sys.argv[1]):
-    print('\nText file output')
-
+if os.path.isfile(sys.argv[1]):
     impurity = pd.read_csv(OutputFolder+'impurity.dat',
                            names=['ns','ndate','alat','alon','ntype','conc',
                                   'bf','bm','thv', 'toa(1)','isnow'],
                            sep=' ', skipinitialspace=True, header=None)
@@ -156,27 +152,41 @@ def WriteOutput(var,var_name,in_folder):
     bba = pd.read_csv(OutputFolder+'bba.dat',
                       names=['ns','ndate','alat','alon','rp3','rs3','isnow'],
                       sep=' ', skipinitialspace=True, header=None)
+    spherical_albedo = pd.read_csv(OutputFolder+'spherical_albedo.dat',
+                                   sep=' ', skipinitialspace=True, header=None).values
+    planar_albedo = pd.read_csv(OutputFolder+'planar_albedo.dat',
+                                sep=' ', skipinitialspace=True, header=None).values
+
     data_out = data_in
     data_out['grain_diameter']=np.nan
-    data_out.loc[size.ns-1, ['grain_diameter']]=size.D
     data_out['snow_specific_area']=np.nan
-    data_out.loc[size.ns-1,['snow_specific_area']]=size.area
     data_out['al']=np.nan
-    data_out.loc[size.ns-1,['al']]=size.al
     data_out['r0']=np.nan
-    data_out.loc[size.ns-1,['r0']]=size.r0
     data_out['diagnostic_retrieval']=np.nan
-    data_out.loc[size.ns-1,['diagnostic_retrieval']]=bba.isnow
     data_out['conc']=np.nan
-    data_out.loc[size.ns-1,['conc']]=impurity.conc
     data_out['albedo_bb_planar_sw']=np.nan
-    data_out.loc[size.ns-1,['albedo_bb_planar_sw']]=bba.rp3
     data_out['albedo_bb_spherical_sw']=np.nan
-    data_out.loc[size.ns-1,['albedo_bb_spherical_sw']]=bba.rs3
-    spherical_albedo = pd.read_csv(OutputFolder+'spherical_albedo.dat',
-                                   sep=' ', skipinitialspace=True, header=None).values
-    planar_albedo = pd.read_csv(OutputFolder+'planar_albedo.dat',
-                                sep=' ', skipinitialspace=True, header=None).values
+
+    # .loc is used for all of these assignments: chained indexing such as
+    # data_out['grain_diameter'][size.ns-1] = ... triggers pandas'
+    # SettingWithCopyWarning and is not guaranteed to write into data_out
+    data_out.loc[size.ns-1, ['grain_diameter']] = size.D
+    data_out.loc[size.ns-1, ['snow_specific_area']] = size.area
+    data_out.loc[size.ns-1, ['al']] = size.al
+    data_out.loc[size.ns-1, ['r0']] = size.r0
+    data_out.loc[size.ns-1, ['diagnostic_retrieval']] = bba.isnow
+    data_out.loc[size.ns-1, ['conc']] = impurity.conc
+    data_out.loc[size.ns-1, ['albedo_bb_planar_sw']] = bba.rp3
+    data_out.loc[size.ns-1, ['albedo_bb_spherical_sw']] = bba.rs3
+
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('grain_diameter')]=size.D
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('snow_specific_area')]=size.area
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('al')]=size.al
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('r0')]=size.r0
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('diagnostic_retrieval')]=bba.isnow
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('conc')]=impurity.conc
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('albedo_bb_planar_sw')]=bba.rp3
+    # data_out.iloc[size.ns-1, data_out.columns.get_loc('albedo_bb_spherical_sw')]=bba.rs3
+
     for i in np.append(np.arange(11), np.arange(15,21)):
     # for i in np.arange(21):
@@ -188,6 +198,8 @@ def WriteOutput(var,var_name,in_folder):
         data_out.loc[size.ns-1,['rBRR_'+str(i+1).zfill(2)]]=planar_albedo[:,4+i]
 
     data_out.to_csv(sys.argv[1][:-4]+'_fortran_out.csv')
+    print('\nOutput: ' + sys.argv[1][:-4] + '_fortran_out.csv')
+
 # ========= input tif ===============
 elif os.path.isdir(sys.argv[1]):
     Oa01 = rio.open(InputFolder+'r_TOA_01.tif')
diff --git a/sice_lib.py b/sice_lib.py
index a8d784c..e60e5c2 100644
--- a/sice_lib.py
+++ b/sice_lib.py
@@ -23,7 +23,7 @@
 @author: bav@geus.dk
 """
-# pySICEv1.3
+# pySICEv1.5
 #
 # from FORTRAN VERSION 5
 # March 31, 2020
@@ -80,10 +80,9 @@
 # quad_func      calculation of quadratic parameters
 # funp           snow spectral planar and spherical albedo function
-import numpy as np
 from constants import w, bai, xa, ya, f0, f1, f2, bet, gam, coef1, coef2, coef3, coef4
-
-# %% ================================================
+import numpy as np
+# ================================================
 # tozon [i_channel]  spectral ozone vertical optical depth at the fixed ozone concentration 404.59DU ( wavelength, VOD)
 # voda[i_channel]    spectral water vapour vertical optical depth at the fixed concentration 3.847e+22 molecules per square sm
@@ -127,9 +126,7 @@
                 * np.exp(amf * tozon[i] * totadu / 404.59)
     return BXXX, toa_cor_o3
-
-# %% viewing characteristics and aerosol properties
-
+# viewing characteristics and aerosol properties
 # sza                solar zenith angle
 # vza                viewing zenith angle
 # saa                solar azimuthal angle
@@ -156,34 +153,34 @@ def view_geometry(vaa, saa, sza, vza, aot, height):
     amf = 1. / am1 + 1. / am2
     co = -am1 * am2 + as1 * as2 * cofi
-    return raa, am1, am2, ak1, ak2, amf, co
-
-# %%
-
-
-def aerosol_properties(aot, height, co):
-    # Atmospheric optical thickness
-    tauaer = aot * (w / 0.5) ** (-1.3)
-
-    ad = height / 7400.
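+    # note: 7400 m is the atmospheric scale height used to damp the molecular
+    # optical thickness with altitude; the rewritten version below folds it
+    # into ak = np.exp(-height/7400)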
- ak = height * 0 + 1 - ak[ad > 1.e-6] = np.exp(-ad[ad > 1.e-6]) - - taumol = np.tile(height * np.nan, (21, 1, 1)) - tau = np.tile(height * np.nan, (21, 1, 1)) - g = np.tile(height * np.nan, (21, 1, 1)) - pa = np.tile(height * np.nan, (21, 1, 1)) - p = np.tile(height * np.nan, (21, 1, 1)) - g0 = 0.5263 - g1 = 0.4627 - wave0 = 0.4685 - gaer = g0 + g1 * np.exp(-w / wave0) - pr = 0.75 * (1. + co ** 2) + return raa, cos_sza, cos_vza, ak1, ak2, inv_cos_za, cos_sa +# +def aerosol_properties(aot, height, cos_sa): + # aerosol optical thickness + tauaer =aot*(w/0.5)**(-1.3) + + ad =height/7400. + ak = height*0+1 + #ak[ad > 1.e-6]=np.exp(-ad[ad > 1.e-6]) + ak =np.exp(-height/7400) + + taumol = np.tile(height*np.nan, (21,1,1)) + tau = np.tile(height*np.nan, (21,1,1)) + g = np.tile(height*np.nan, (21,1,1)) + pa = np.tile(height*np.nan, (21,1,1)) + p = np.tile(height*np.nan, (21,1,1)) + + # aerosol asymmetry parameter + gaer=0.5263+0.4627*np.exp(-w/0.4685) + + # phase function for molecular scattering + pmol=0.75*(1.+cos_sa**2) for i in range(21): - - taumol[i, :, :] = ak * 0.00877 / w[i] ** (4.05) - tau[i, :, :] = tauaer[i] + taumol[i, :, :] + # molecular optical thickness + taumol[i,:,:] = ak*0.00877*w[i]**(-4.05) + tau[i,:,:] = tauaer[i] + taumol[i,:,:] + # aerosol asymmetry parameter g[i, :, :] = tauaer[i] * gaer[i] / tau[i, :, :] @@ -192,32 +189,41 @@ def aerosol_properties(aot, height, co): pa[i, :, :] = (1 - g[i, :, :] ** 2) \ / (1. - 2. * g[i, :, :] * co + g[i, :, :] ** 2) ** 1.5 - p[i, :, :] = (taumol[i, :, :] * pr + tauaer[i] * pa[i, :, :]) / tau[i, :, :] - + p[i,:,:]=(taumol[i,:,:]*pmol + tauaer[i]*pa[i,:,:])/tau[i,:,:] return tau, p, g, gaer, taumol, tauaer # %% snow properties - def snow_properties(toa, ak1, ak2): # retrieval of snow properties ( R_0, size of grains from OLCI channels 865[17] and 1020nm[21] # assumed not influenced by atmospheric scattering and absorption processes) - akap2 = 2.25e-6 - alpha2 = 4. * np.pi * akap2 / 1.020 + # imaginary part of the ice refractive index at 1020nm + akap2=2.25e-6 + # bulk absoprtion coefficient of ice at 1020nm + alpha2=4.*np.pi*akap2/1.020 + # imaginary part of the ice refractive index at 865nm + # akap1=2.4e-7 + # bulk absoprtion coefficient of ice at 865nm + # alpha1=4.*np.pi*akap1/0.865 + + # eps = 1/(1-np.sqrt(alpha1/alpha2)) eps = 1.549559365010611 - + # consequently: 1-eps = 1/(1-np.sqrt(alpha2/alpha1)) + # reflectivity of nonabsorbing snow layer rr1 = toa[16, :, :] rr2 = toa[20, :, :] r0 = (rr1 ** eps) * (rr2 ** (1. - eps)) # effective absorption length(mm) - bal = np.log(rr2 / r0) * np.log(rr2 / r0) / alpha2 / (ak1 * ak2 / r0) ** 2 - al = bal / 1000. + bal = (np.log(rr2/r0)/(ak1*ak2/r0))**2 /alpha2 + al = bal/1000. # effective grain size(mm):diameter - D = al / 16.36 + # xi/(1-g) = 9.2 + D=al/(9.2*16/9) + # snow specific area ( dimension: m*m/kg) area = 6. / D / 0.917 @@ -226,11 +232,12 @@ def snow_properties(toa, ak1, ak2): # %% ================================================= -def prepare_coef(tau, g, p, am1, am2, amf, gaer, taumol, tauaer): - astra = tau * np.nan - rms = tau * np.nan - t1 = tau * np.nan - t2 = tau * np.nan +# ================================================= +def prepare_coef(tau, g, p, cos_sza, cos_vza, inv_cos_za, gaer, taumol, tauaer): + M=tau*np.nan + rms=tau*np.nan + t1=tau*np.nan + t2=tau*np.nan # SOBOLEV oskar = 4. + 3. * (1. - g) * tau @@ -245,43 +252,45 @@ def prepare_coef(tau, g, p, am1, am2, amf, gaer, taumol, tauaer): sssss = (wa1 - wa2) / (1. + bex) + wa2 for i in range(21): - - astra[i, :, :] = (1. 
- np.exp(-tau[i, :, :] * amf)) / (am1 + am2) / 4. - rms[i, :, :] = 1. - b1[i, :, :] * b2[i, :, :] / oskar[i, :, :] \ - + (3. * (1. + g[i, :, :]) * am1 * am2 - 2. * (am1 + am2)) * astra[i, :, :] - # backscattering fraction - # t1[i, :, :] = np.exp(-(1. - g[i, :, :]) * tau[i, :, :] / am1 / 2.) - # t2[i, :, :] = np.exp(-(1. - g[i, :, :]) * tau[i, :, :] / am2 / 2.) - t1[i, :, :] = np.exp(-(1. - g[i, :, :]) * tau[i, :, :] / am1 / 2. - / sssss[i, :, :]) - t2[i, :, :] = np.exp(-(1. - g[i, :, :]) * tau[i, :, :] / am2 / 2. - / sssss[i, :, :]) - - rss = p * astra + M[i,:,:]=(1.-np.exp(-tau[i,:,:]*inv_cos_za))/(cos_sza+cos_vza)/4. + + # multiple scattering contribution to the atmospheric reflectance + rms[i,:,:] = 1. \ + + (3.*(1.+g[i,:,:])*cos_sza*cos_vza - 2.*(cos_sza+cos_vza))*M[i,:,:] \ + - b1[i,:,:]*b2[i,:,:]/oskar[i,:,:] + + # atmospheric backscattering fraction + # t1*t2 is the 2-ways atmospheric transmittance (from the sun to the surface and to the satellite) + # t1[i,:,:] = np.exp(-(1.-g[i,:,:])*tau[i,:,:]/cos_sza/2.) + # t2[i,:,:] = np.exp(-(1.-g[i,:,:])*tau[i,:,:]/cos_vza/2.) + t1[i,:,:]= np.exp(-(1.-g[i,:,:])/2./sssss[i,:,:]/cos_sza*tau[i,:,:]) + t2[i,:,:]= np.exp(-(1.-g[i,:,:])/2./sssss[i,:,:]/cos_vza*tau[i,:,:]) + + # Single scattering contribution to the atmospheric reflectance + rss = p*M + # Atmospheric reflectance r = rss + rms - - # SALBED - # ratm = salbed(tau, g) - a_s = (.18016, -0.18229, 0.15535, -0.14223) - bs = (.58331, -0.50662, -0.09012, 0.0207) - cs = (0.21475, -0.1, 0.13639, -0.21948) - als = (0.16775, -0.06969, 0.08093, -0.08903) - bets = (1.09188, 0.08994, 0.49647, -0.75218) - - a_cst = a_s[0] * g ** 0 + a_s[1] * g ** 1 + a_s[2] * g ** 2 + a_s[3] * g ** 3 - b_cst = bs[0] * g ** 0 + bs[1] * g ** 1 + bs[2] * g ** 2 + bs[3] * g ** 3 - c_cst = cs[0] * g ** 0 + cs[1] * g ** 1 + cs[2] * g ** 2 + cs[3] * g ** 3 - al_cst = als[0] * g ** 0 + als[1] * g ** 1 + als[2] * g ** 2 + als[3] * g ** 3 - bet_cst = bets[0] * g ** 0 + bets[1] * g ** 1 + bets[2] * g ** 2 + bets[3] * g ** 3 + return t1, t2, r, M, rms + +def atm_spher_alb(tau, g): + # SALBED ratm = salbed(tau, g) + # parameterization of atmospheric spherical albedo by Kokhanovsky et al. 
(2005, doi: 10.1016/j.atmosres.2004.07.004) + a_s = (.18016, -0.18229, 0.15535, -0.14223) + bs = (.58331, -0.50662, -0.09012, 0.0207) + cs = (0.21475, -0.1, 0.13639, -0.21948) + als = (0.16775, -0.06969, 0.08093, -0.08903) + bets = (1.09188, 0.08994, 0.49647, -0.75218) + + a_cst = a_s[0] + a_s[1]*g**1 + a_s[2]*g**2 + a_s[3]*g**3 + b_cst = bs[0] + bs[1]*g**1 + bs[2]*g**2 + bs[3]*g**3 + c_cst = cs[0] + cs[1]*g**1 + cs[2]*g**2 + cs[3]*g**3 + al_cst= als[0] + als[1]*g**1 + als[2]*g**2 + als[3]*g**3 + bet_cst= bets[0] + bets[1]*g**1 + bets[2]*g**2 + bets[3]*g**3 - ratm = tau * (a_cst * np.exp(-tau / al_cst) + b_cst * np.exp(-tau / bet_cst) - + c_cst) - - return t1, t2, ratm, r, astra, rms - -# %% snow_imputirities - - + ratm = tau*(a_cst*np.exp(-tau/al_cst)+b_cst*np.exp(-tau/bet_cst)+c_cst) + return ratm + +# snow_imputirities def snow_impurities(alb_sph, bal): # analysis of snow impurities # ( the concentrations below 0.0001 are not reliable ) @@ -326,9 +335,7 @@ def snow_impurities(alb_sph, bal): return ntype, bf, conc -# %% =========================================================================== - - +# =========================================================================== def alb2rtoa(a, t1, t2, r0, ak1, ak2, ratm, r): # Function that calculates the theoretical reflectance from a snow spherical albedo a # This function can then be solved to find optimal snow albedo @@ -343,9 +350,7 @@ def alb2rtoa(a, t1, t2, r0, ak1, ak2, ratm, r): return rs -# %% =========================================================================== - - +# =========================================================================== def salbed(tau, g): # WARNING: NOT USED ANYMORE # SPHERICAL ALBEDO OF TERRESTRIAL ATMOSPHERE: @@ -374,6 +379,7 @@ def salbed(tau, g): # %% ===================================================================== +# ===================================================================== def zbrent(f, x0, x1, max_iter=100, tolerance=1e-6): # Equation solver using Brent's method # https://en.wikipedia.org/wiki/Brent%27s_method @@ -445,9 +451,7 @@ def zbrent(f, x0, x1, max_iter=100, tolerance=1e-6): return x1 -# %% ===================================================================== - - +# ===================================================================== def funp(x, al, sph_calc, ak1): # Spectral planar albedo # Inputs: @@ -498,19 +502,24 @@ def plane_albedo_sw_approx(D, am1): return anka + banka * np.exp(-1000 * D / diam1) + canka \ * np.exp(-1000 * D / diam2) +# Approximation functions for BBA integration +def plane_albedo_sw_approx(D,cos_sza): + anka= 0.7389 -0.1783*cos_sza +0.0484*cos_sza**2. + banka=0.0853 +0.0414*cos_sza -0.0127*cos_sza**2. + canka=0.1384 +0.0762*cos_sza -0.0268*cos_sza**2. + diam1=187.89 -69.2636*cos_sza +40.4821*cos_sza**2. + diam2=2687.25 -405.09*cos_sza +94.5*cos_sza**2. 
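+    # The coefficients above are quadratic fits in cos_sza; combined with the
+    # two exponentials in D (grain diameter, mm) below, they approximate the
+    # planar shortwave albedo directly, replacing the spectral integration of
+    # BBA_calc_clean for clean pixels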
+ return anka+banka*np.exp(-1000*D/diam1)+canka*np.exp(-1000*D/diam2) def spher_albedo_sw_approx(D): - anka = 0.6420 - banka = 0.1044 - canka = 0.1773 - diam1 = 158.62 - diam2 = 2448.18 - return anka + banka * np.exp(-1000 * D / diam1) + canka \ - * np.exp(-1000 * D / diam2) - -# %% CalCULATION OF BBA for clean pixels - - + anka= 0.6420 + banka=0.1044 + canka=0.1773 + diam1=158.62 + diam2=2448.18 + return anka+banka*np.exp(-1000*D/diam1)+canka*np.exp(-1000*D/diam2) + +# CalCULATION OF BBA for clean pixels def BBA_calc_clean(al, ak1): # for clean snow # plane albedo @@ -546,7 +555,8 @@ def func_integ(x): # %% =============================== -def qsimp(func, a, b): +# =============================== +def qsimp(func,a,b): # integrate function between a and b using simpson's method. # works as fast as scipy.integrate quad eps = 1.e-3 @@ -582,9 +592,7 @@ def qsimp(func, a, b): return s -# %% Calculation f BBA for polluted snow - - +# Calculation f BBA for polluted snow def BBA_calc_pol(alb, asol, sol1_pol, sol2, sol3_pol): # polluted snow # NEW CODE FOR BBA OF BARE ICE @@ -633,28 +641,26 @@ def BBA_calc_pol(alb, asol, sol1_pol, sol2, sol3_pol): aj2 = ajx1 + ajx2 + ajx3 # segment 2.2 # exponential approximation for the range 865- 2400 nm - z1 = 0.865 - z2 = 2.4 - rati = r7 / r8 - alasta = (alam8 - alam7) / np.log(rati) - an = 1. / alasta - p = r7 * np.exp(alam7 / alasta) - - aj31 = (1. / an) * (np.exp(-an * z2) - np.exp(-an * z1)) - aj32 = (1. / (bet + an)) * (np.exp(-(bet + an) * z2) - np.exp(-(an + bet) * z1)) - aj33 = (1. / (gam + an)) * (np.exp(-(gam + an) * z2) - np.exp(-(an + gam) * z1)) - aj3 = (-f0 * aj31 - f1 * aj32 - f2 * aj33) * p - - BBA_vis = aj1 / sol1_pol - BBA_nir = (aj2 + aj3) / sol2 # here segment 2.1 and 2.2 are summed - BBA_sw = (aj1 + aj2 + aj3) / sol3_pol - - return BBA_vis, BBA_nir, BBA_sw - -# %% ========================================================================== - - -def quad_func(x0, x1, x2, y0, y1, y2): + z1=0.865 + z2=2.4 + rati=r7/r8 + alasta = (alam8-alam7)/np.log(rati) + an=1./alasta + p = r7 * np.exp(alam7/alasta) + + aj31=(1./an)*(np.exp(-an*z2)-np.exp(-an*z1)) + aj32=(1./(bet+an))*(np.exp(-(bet+an)*z2)-np.exp(-(an+bet)*z1)) + aj33=(1./(gam+an))*(np.exp(-(gam+an)*z2)-np.exp(-(an+gam)*z1)) + aj3=(-f0*aj31-f1*aj32-f2*aj33)*p + + BBA_vis = aj1/sol1_pol + BBA_nir = (aj2+aj3)/sol2 #here segment 2.1 and 2.2 are summed + BBA_sw = (aj1+aj2+aj3)/sol3_pol + + return BBA_vis,BBA_nir, BBA_sw + +# ========================================================================== +def quad_func(x0,x1,x2,y0,y1,y2): # quadratic function used for the polluted snow BBA calculation # see BBA_calc_pol # compatible with arrays diff --git a/tg_water_vod.dat b/tg_water_vod.dat index 0dde523..3b21cdf 100644 --- a/tg_water_vod.dat +++ b/tg_water_vod.dat @@ -1,21 +1,21 @@ - 400.00000 1.378170469E-004 - 412.50000 3.048780958E-004 - 442.50000 1.645714060E-003 - 490.00000 8.935947110E-003 - 510.00000 1.750535146E-002 - 560.00000 4.347104369E-002 - 620.00000 4.487130794E-002 - 665.00000 2.101591797E-002 - 673.75000 1.716230955E-002 - 681.25000 1.466298300E-002 - 708.75000 7.983028470E-003 - 753.75000 3.879744653E-003 - 761.25000 2.923775641E-003 - 764.37500 2.792211429E-003 - 767.50000 2.729651478E-003 - 778.75000 3.255969698E-003 - 865.00000 8.956858078E-004 - 885.00000 5.188799343E-004 - 900.00000 6.715773241E-004 - 940.00000 3.127781417E-004 - 1020.00000 1.408798425E-005 \ No newline at end of file + 400.00000 3.506319806E-005 + 412.50000 6.967526062E-006 + 442.50000 3.505636418E-004 + 
490.00000 1.381931600E-004 + 510.00000 1.324224122E-003 + 560.00000 5.099066205E-005 + 620.00000 2.363042944E-005 + 665.00000 6.440468944E-004 + 673.75000 2.377942129E-005 + 681.25000 3.303735591E-004 + 708.75000 1.158079797E-002 + 753.75000 1.154852173E-004 + 761.25000 3.316322348E-006 + 764.37500 2.465775843E-007 + 767.50000 1.137753056E-007 + 778.75000 3.191069529E-004 + 865.00000 5.601462094E-004 + 885.00000 3.026113840E-003 + 900.00000 3.265557172E-001 + 940.00000 2.804599956E+000 + 1020.00000 2.858762309E-003 diff --git a/val_PROMICE.py b/val_PROMICE.py deleted file mode 100644 index 7c87a92..0000000 --- a/val_PROMICE.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Fri Jul 24 08:58:21 2020 -Data processing for the evaluation of SICE against PROMICE measurements -@author: bav -""" - -import numpy as np -import pandas as pd -import os -import errno -from pathlib import Path -import bav_lib as bl -import matplotlib.pyplot as plt - -path_to_file = 'validation/data/S3_PROMICE.csv' - -# Read merged csv -data_all = pd.read_csv(path_to_file) - -#%% Running pySICE -cmd = '%run sice_f.py '+path_to_file -# %run sice.py validation/data/data_in.csv -print(cmd) -eval(cmd) -#%% Analysis -#%matplotlib qt -#%matplotlib inline -data_out = pd.read_csv('validation/data/S3_PROMICE_fortran_out.csv') - -# Removing cloudy measurements -cloud_thd = 0.3 -data_cloud = data_out[data_out['cloud']>cloud_thd] -data_out=data_out[data_out['cloud']<=cloud_thd] -print('\nRemoving '+str(data_cloud.shape[0]) +'/'+ - str(data_cloud.shape[0]+data_out.shape[0]) + ' due to clouds') - -#%% Plot results ## -fs = 15 -ss=5 -f1, ax = plt.subplots(1,3,figsize=(15, 7)) -f1.subplots_adjust(hspace=0.2, wspace=0.1, - left = 0.08 , right = 0.95 , - bottom = 0.2 , top = 0.9) -# ax[0] = bl.density_scatter(data_out['albedo_bb_planar_sw'], data_out['PROMICE_alb'],ax[0],ss) -ax[0].scatter(data_out['albedo_bb_planar_sw'], data_out['PROMICE_alb'], c= "black",s=5) -plt.tight_layout() -ax[0].set_xlabel("Albedo from pySICEv1.4 (-)",fontsize=fs) -ax[0].set_ylabel("PROMICE albedo (-)",fontsize=fs) - -ax[1] = bl.density_scatter(data_out['BBA_emp'], data_out['PROMICE_alb'],ax[1],ss) -ax[1].scatter(data_out['BBA_emp'], data_out['PROMICE_alb'],s=ss,c="black") -plt.tight_layout() -ax[1].set_xlabel("Empirical (-)",fontsize=fs) -ax[1].set_ylabel("PROMICE albedo (-)",fontsize=fs) - -var= 'rBRR_20' -# ax[2] = bl.density_scatter(data_out[var], data_out['PROMICE_alb'],ax[2],ss) -ax[2].scatter(data_out[var], data_out['PROMICE_alb'],s=ss,c="black") -plt.tight_layout() -ax[2].set_xlabel(var,fontsize=fs) -ax[2].set_ylabel("PROMICE albedo (-)",fontsize=fs) - -f1.savefig('validation/figures/scatter.png') - -#%% -data_out['date'] = pd.to_datetime(data_out[['year','month','day','hour','minute','second']]) -data_out.set_index(['date', 'station']) -data_out.sort_values(by=['station','date'],inplace=True) -data_out.head(5) -data_out.station.unique() - -bl.multi_plot(data_out, title = 'Albedo (-)', - sites =['KAN_L','KAN_M','KAN_U','KPC_L','KPC_U','SCO_L','SCO_U','EGP'], - OutputFolder = 'validation/figures/', filename_out='PROMICE_comp_1') -bl.multi_plot(data_out, title='Albedo (-)', - sites =['QAS_L','QAS_U','TAS_L','TAS_A','THU_L','THU_U','UPE_L','UPE_U'], - OutputFolder = 'validation/figures/', filename_out='PROMICE_comp_2') - diff --git a/validation/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb b/validation/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb deleted file mode 100644 index 5d74b66..0000000 --- 
a/validation/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb +++ /dev/null @@ -1,161 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Fetch PROMICE data\n", - "This codeblock fetches down relevant weather data from https://promice.org/PromiceDataPortal/#Automaticweatherstations (unless the file is already available in the folder, then it skips). \n", - "This file has taken its source in the following thread: https://stackoverflow.com/questions/51455112/python-script-to-download-data-from-noaa\n", - "Script does take a while to run (30+ minutes)...." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "Collapsed": "false" - }, - "outputs": [], - "source": [ - "import errno\n", - "import os\n", - "import wget # conda install: https://anaconda.org/anaconda/pywget\n", - "from pathlib import Path\n", - "import pandas as pd\n", - "import numpy as np\n", - "\n", - "path = \"data/PROMICE\" \n", - "\n", - "# Array information of stations available at PROMICE official site: https://promice.org/WeatherStations.html\n", - "PROMICE_stations = [('EGP',(75.6247,-35.9748), 2660), \n", - " ('KAN_B',(67.1252,-50.1832), 350), \n", - " ('KAN_L',(67.0955,-35.9748), 670), \n", - " ('KAN_M',(67.0670,-48.8355), 1270), \n", - " ('KAN_U',(67.0003,-47.0253), 1840), \n", - " ('KPC_L',(79.9108,-24.0828), 370),\n", - " ('KPC_U',(79.8347,-25.1662), 870), \n", - " ('MIT',(65.6922,-37.8280), 440), \n", - " ('NUK_K',(64.1623,-51.3587), 710), \n", - " ('NUK_L',(64.4822,-49.5358), 530),\n", - " ('NUK_U',(64.5108,-49.2692), 1120),\n", - " ('QAS_L',(61.0308,-46.8493), 280),\n", - " ('QAS_M',(61.0998,-46.8330), 630), \n", - " ('QAS_U',(61.1753,-46.8195), 900), \n", - " ('SCO_L',(72.2230,-26.8182), 460),\n", - " ('SCO_U',(72.3933,-27.2333), 970),\n", - " ('TAS_A',(65.7790,-38.8995), 890),\n", - " ('TAS_L',(65.6402,-38.8987), 250),\n", - " ('THU_L',(76.3998,-68.2665), 570),\n", - " ('THU_U',(76.4197,-68.1463), 760),\n", - " ('UPE_L',(72.8932,-54.2955), 220), \n", - " ('UPE_U',(72.8878,-53.5783), 940)]\n", - "\n", - "# Function for making directories if they do not exists. \n", - "def mkdir_p(path):\n", - " try:\n", - " os.makedirs(path)\n", - " return 'Path created.'\n", - " except OSError as exc:\n", - " if exc.errno == errno.EEXIST and os.path.isdir(path):\n", - " return 'Path already exists!'\n", - " else:\n", - " raise\n", - " \n", - "mkdir_p(path)\n", - "\n", - "\n", - "# Goes through each station and fetch down data online. 
Necessary manipulations and sorting are made.\n", - "for ws in PROMICE_stations:\n", - " url = f'https://promice.org/PromiceDataPortal/api/download/f24019f7-d586-4465-8181-d4965421e6eb/v03/hourly/csv/{ws[0]}_hour_v03.txt'\n", - " file_x = path + f'/{ws[0]}'\n", - " if Path(file_x + '.csv').is_file():\n", - " pass\n", - " else:\n", - " filename = wget.download(url, out=file_x + '.txt')\n", - " #https://stackoverflow.com/a/19759515\n", - " with open(file_x + '.txt') as infile, open(file_x + '_fixed.txt', 'w') as outfile:\n", - " for line in infile:\n", - " outfile.write(\" \".join(line.split()).replace(' ', ','))\n", - " outfile.write(\",\") # trailing comma shouldn't matter\n", - " outfile.write(\"\\n\")\n", - " df = pd.read_csv (file_x + '_fixed.txt')\n", - " df = df[['Year','MonthOfYear','DayOfYear','HourOfDay(UTC)','CloudCover','TiltToEast(d)','TiltToNorth(d)','Albedo_theta<70d']]\n", - " df = df[df.Year > 2015]\n", - " #df = df[df.MonthOfYear > 4]\n", - " #df = df[df.MonthOfYear < 9]\n", - " #df = df[df['HourOfDay(UTC)'] > 9] #\n", - " #df = df[df['HourOfDay(UTC)'] < 21]\n", - " df = df.round({'CloudCover': 2, 'TiltToEast(d)': 2, 'TiltToNorth(d)': 2, 'Albedo_theta<70d': 2})\n", - " #df = df[df['Albedo_theta<70d'] < 1.00]\n", - " #df = df[df['Albedo_theta<70d'] > 0.00]\n", - " df['station_name'] = ws[0]\n", - " df['latitude N'] = ws[1][0]\n", - " df['longitude W'] = ws[1][1]\n", - " df['elevation'] = float(ws[2])\n", - " df['TiltToEast(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToEast(d)'] = df['TiltToEast(d)'].interpolate(method='nearest', limit_direction='both')\n", - " df['TiltToNorth(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToNorth(d)'] = df['TiltToNorth(d)'].interpolate(method='nearest', limit_direction='both')\n", - "\n", - " df.to_csv(file_x + '.csv', index=None)\n", - "\n", - "# Removes all temporary files made:\n", - "filelist = [ f for f in os.listdir(path) if f.endswith(\".txt\") ]\n", - "for f in filelist:\n", - " os.remove(os.path.join(path, f))\n", - "\n", - "# Create one big file \"PROMICE.csv\" with all the stations data. 
\n", - "if Path(f'{path}/PROMICE.csv').is_file():\n", - " pass\n", - "else: \n", - " PROMICE = pd.DataFrame()\n", - " filelist = [ f for f in os.listdir(path) if f.endswith(\".csv\") ]\n", - " for f in filelist:\n", - " PROMICE = PROMICE.append(pd.read_csv(f'{path}/{f}'))\n", - " \n", - " PROMICE['TiltToEast(d)'] = PROMICE['TiltToEast(d)'].abs()\n", - " PROMICE['TiltToNorth(d)'] = PROMICE['TiltToNorth(d)'].abs()\n", - " PROMICE['tilt'] = PROMICE[['TiltToEast(d)','TiltToNorth(d)']].values.max(1)\n", - " PROMICE.drop(['TiltToNorth(d)','TiltToEast(d)'], axis=1, inplace=True)\n", - " PROMICE.rename(columns={'Year': 'year', \n", - " 'MonthOfYear': 'month',\n", - " 'DayOfYear':'dayofyear',\n", - " 'HourOfDay(UTC)':'hour',\n", - " 'CloudCover':'cloud',\n", - " 'station_name':'station',\n", - " 'Albedo_theta<70d':'PROMICE_alb'}, inplace=True)\n", - " PROMICE = PROMICE.round({'latitude N': 4, 'longitude W': 4})\n", - " PROMICE.to_csv(f'{path}/PROMICE.csv', index=None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/validation/.ipynb_checkpoints/merge_PROMICE_and_S3_data-checkpoint.ipynb b/validation/.ipynb_checkpoints/merge_PROMICE_and_S3_data-checkpoint.ipynb deleted file mode 100644 index 5d73a25..0000000 --- a/validation/.ipynb_checkpoints/merge_PROMICE_and_S3_data-checkpoint.ipynb +++ /dev/null @@ -1,146 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Merge PROMICE with S3_extract data\n", - "Process the S3-data and merges it with PROMICE data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "import numpy as np\n", - "\n", - "s3_extract = pd.read_csv('./data/OLCI/S3_extract.csv') #Put in path to S3_extract data\n", - "full_size = s3_extract.shape[0]\n", - "s3_extract = s3_extract.drop_duplicates(keep='first') #dropping duplicates except first sample, if any.\n", - "print(f'Removed duplicates: {full_size-s3_extract.shape[0]}')\n", - "\n", - "# Specify columns to keep\n", - "s3_extract = s3_extract[['station', 'year', 'month', 'minute', 'dayofyear', 'hour', 'sza', 'vza', 'saa', 'vaa', 'slope', 'ndsi', 'ndbi', 'humidity', 'total_columnar_water_vapour', 'total_ozone',\n", - " 'Oa01_reflectance','Oa02_reflectance','Oa03_reflectance','Oa04_reflectance','Oa05_reflectance','Oa06_reflectance','Oa07_reflectance',\n", - " 'Oa08_reflectance','Oa09_reflectance', 'Oa10_reflectance','Oa11_reflectance', 'Oa12_reflectance','Oa13_reflectance','Oa14_reflectance',\n", - " 'Oa15_reflectance','Oa16_reflectance','Oa17_reflectance','Oa18_reflectance','Oa19_reflectance','Oa20_reflectance','Oa21_reflectance',\n", - " ]]\n", - "\n", - "# the PROMICE data is only given in hourly interval. 
Hence the hour have to be corrected in the s3_extract data.\n", - "#s3_extract['hour'] = s3_extract['hour'] - 1 if s3_extract['minute'].astype('int64') < 30 else (s3_extract['hour'] + 1) \n", - "\n", - "# Process data to the right format\n", - "s3_extract = s3_extract.round({\n", - " 'sza': 2,\n", - " 'vza': 2,\n", - " 'saa': 2,\n", - " 'vaa': 2,\n", - " 'slope': 2,\n", - " 'ndsi': 2, \n", - " 'ndbi': 2, \n", - " 'humidity': 2, \n", - " 'total_columnar_water_vapour': 2,\n", - " 'Oa01_reflectance': 2,\n", - " 'Oa02_reflectance': 2,\n", - " 'Oa03_reflectance': 2,\n", - " 'Oa04_reflectance': 2,\n", - " 'Oa05_reflectance': 2,\n", - " 'Oa06_reflectance': 2,\n", - " 'Oa07_reflectance': 2,\n", - " 'Oa08_reflectance': 2,\n", - " 'Oa09_reflectance': 2,\n", - " 'Oa10_reflectance': 2,\n", - " 'Oa11_reflectance': 2,\n", - " 'Oa12_reflectance': 2,\n", - " 'Oa13_reflectance': 2,\n", - " 'Oa14_reflectance': 2,\n", - " 'Oa15_reflectance': 2,\n", - " 'Oa16_reflectance': 2,\n", - " 'Oa17_reflectance': 2,\n", - " 'Oa18_reflectance': 2,\n", - " 'Oa19_reflectance': 2,\n", - " 'Oa20_reflectance': 2,\n", - " 'Oa21_reflectance': 2\n", - "})\n", - "\n", - "# Rename columns\n", - "s3_extract.rename(columns={'station':'station',\n", - " 'year': 'year', \n", - " 'month': 'month',\n", - " 'dayofyear': 'dayofyear',\n", - " 'hour': 'hour',\n", - " 'sza': 'sza',\n", - " 'vza': 'vza',\n", - " 'saa': 'saa', \n", - " 'vaa': 'vaa',\n", - " 'slope': 'slope',\n", - " 'ndsi': 'ndsi',\n", - " 'ndbi': 'ndbi',\n", - " 'humidity': 'humidity',\n", - " 'total_columnar_water_vapour': 'total_columnar_water_vapour',\n", - " 'total_ozone': 'total_ozone',\n", - " 'Oa01_reflectance': 'Oa01',\n", - " 'Oa02_reflectance': 'Oa02',\n", - " 'Oa03_reflectance': 'Oa03',\n", - " 'Oa04_reflectance': 'Oa04',\n", - " 'Oa05_reflectance': 'Oa05',\n", - " 'Oa06_reflectance': 'Oa06',\n", - " 'Oa07_reflectance': 'Oa07',\n", - " 'Oa08_reflectance': 'Oa08',\n", - " 'Oa09_reflectance': 'Oa09',\n", - " 'Oa10_reflectance': 'Oa10',\n", - " 'Oa11_reflectance': 'Oa11',\n", - " 'Oa12_reflectance': 'Oa12',\n", - " 'Oa13_reflectance': 'Oa13',\n", - " 'Oa14_reflectance': 'Oa14',\n", - " 'Oa15_reflectance': 'Oa15',\n", - " 'Oa16_reflectance': 'Oa16',\n", - " 'Oa17_reflectance': 'Oa17',\n", - " 'Oa18_reflectance': 'Oa18',\n", - " 'Oa19_reflectance': 'Oa19',\n", - " 'Oa20_reflectance': 'Oa20',\n", - " 'Oa21_reflectance': 'Oa21' \n", - " }, \n", - " inplace=True)\n", - "\n", - "# Merge images with PROMICE data\n", - "OLCI_with_PROMICE = pd.merge(left=s3_extract, right=pd.read_csv('data/PROMICE/PROMICE.csv'), \n", - " left_on=['year','month','dayofyear','hour','station'], \n", - " right_on=['year','month','dayofyear','hour', 'station'])\n", - "# Create merged csv\n", - "OLCI_with_PROMICE.to_csv(f'data/OLCI/S3_extract_with_PROMICE.csv', index=None) " - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/validation/TedstoneValidation.py b/validation/TedstoneValidation.py deleted file mode 100644 index b9a6a33..0000000 --- a/validation/TedstoneValidation.py +++ /dev/null @@ -1,270 +0,0 @@ -# 
-*- coding: utf-8 -*- -""" -Created on Thu Aug 1 18:43:46 2019 - -@author: bav -""" - -#%% preamble -# this script will read-in and plot 3-dimensional NetCDF data in python -import os -os.environ['PROJ_LIB']=r'C:\Users\bav\AppData\Local\Continuum\anaconda3\Library\share' - -from netCDF4 import Dataset -import math -import numpy as np -import matplotlib -import matplotlib.mlab as mlab -import matplotlib.pyplot as plt -import matplotlib.cm as cm -from mpl_toolkits.basemap import Basemap -import pandas as pd -import glob # for directory content -from osgeo import gdal -from osgeo import osr -from pylab import * # for diff function -from pyproj import Proj # for UTM-latlon conversion -import matplotlib.pyplot as plt -import matplotlib as mpl -from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable -from mpl_toolkits.axes_grid1.colorbar import colorbar -from osgeo import gdal -import sys - -#%% Creating readable geotiffs - # File structure: -# line number ( e.g.,time of measurements) latitude, longitude, -# solar zenith angle, viewing zenith angle, solar azimuthal angle, -# viewing azimuthal angle, height of underlying surface in m, -# 21 OLCI TOA reflectances - -# Read in NetCDF4 file. Assign directory path if necessary. -folder = ('.\\Tedstone\\UAS\\albedo_and_class') -files = [f for f in glob.glob(folder + "**\\*.nc", recursive=True)] -for f in files: -#f=files[1] - filename = f[len(folder):len(f)] - - print(filename) - - data = Dataset(f, mode='r') - albedo = data.variables['albedo'][:] - classified = data.variables['classified'][:] - #lats=data.variables['lat'][:] - #lons=data.variables['lon'][:] - x=data.variables['x'][:] - y=data.variables['y'][:] - - # find the extent and difference - xmin=min(x);xmax=max(x) - ymin=min(y);ymax=max(y) - mdx=abs(diff(x)) - mdy=abs(diff(y)) - - # determine dx and dy from the median of all the non-zero difference values - dx=median(mdx[where(mdx>0.0)[0]]) - dy=median(mdy[where(mdy>0.0)[0]]) - - # write as 32-bit GeoTIFF using GDAL - ny,nx = albedo.shape - driver = gdal.GetDriverByName("GTiff") - ds = driver.Create(filename[1:len(filename)-3]+'_alb.tif', nx, ny, 1, gdal.GDT_Float32) - - # top left x, w-e pixel resolution, rotation, top left y, rotation, n-s pixel resolution - ds.SetGeoTransform( [xmin, dx, 0, ymax, 0, dy ] ) - - # set the reference info - srs = osr.SpatialReference() - - # UTM zone 22, North=1 - srs.SetUTM(22,1) - srs.SetWellKnownGeogCS('WGS84') - ds.SetProjection( srs.ExportToWkt() ) - - # write the data to a single band - ds.GetRasterBand(1).WriteArray(albedo) - # close - ds = None - - ds = driver.Create(filename[1:len(filename)-3]+'_clas.tif', nx, ny, 1, gdal.GDT_Float32) - - # top left x, w-e pixel resolution, rotation, top left y, rotation, n-s pixel resolution - ds.SetGeoTransform( [xmin, dx, 0, ymax, 0, dy ] ) - - # set the reference info - srs = osr.SpatialReference() - - # UTM zone 22, North=1 - srs.SetUTM(22,1) - srs.SetWellKnownGeogCS('WGS84') - ds.SetProjection( srs.ExportToWkt() ) - - # write the data to a single band - ds.GetRasterBand(1).WriteArray(classified) - # close - ds = None - -# %% Creating readable geotiffs - # File structure: -# line number ( e.g.,time of measurements) latitude, longitude, -# solar zenith angle, viewing zenith angle, solar azimuthal angle, -# viewing azimuthal angle, height of underlying surface in m, -# 21 OLCI TOA reflectances - -# Read in NetCDF4 file. Assign directory path if necessary. 
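# The GDAL export above is repeated verbatim for the albedo and classified
# bands (and again in this second pass). A sketch of a helper capturing that
# pattern, under the same WGS84 / UTM zone 22N assumptions as the original:
from osgeo import gdal, osr

def write_utm22_geotiff(path, arr, xmin, ymax, dx, dy):
    """Write a 2-D array as a 32-bit float GeoTIFF in WGS84 / UTM zone 22N."""
    ny, nx = arr.shape
    ds = gdal.GetDriverByName('GTiff').Create(path, nx, ny, 1, gdal.GDT_Float32)
    # top left x, w-e pixel resolution, rotation, top left y, rotation, n-s pixel resolution
    ds.SetGeoTransform([xmin, dx, 0, ymax, 0, dy])
    srs = osr.SpatialReference()
    srs.SetUTM(22, 1)                     # UTM zone 22, northern hemisphere
    srs.SetWellKnownGeogCS('WGS84')
    ds.SetProjection(srs.ExportToWkt())
    ds.GetRasterBand(1).WriteArray(arr)
    ds = None                             # closing the dataset flushes it to disk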
-alb_2_3 = [0.62, 0.53, 0.47, 0.45 0.48, 0.39, 0.47] -alb_sicef = [nan, 0.64, 0.54, 0.56, 0.56, 0.47, 0.49] - -folder = ('.\\Tedstone\\UAS\\albedo_and_class') -files = [f for f in glob.glob(folder + "**\\*.nc", recursive=True)] -count = 0 -for f in files: -#f=files[1] - count = count +1 - filename = f[len(folder):len(f)] - - print(filename) - - data = Dataset(f, mode='r') - albedo = data.variables['albedo'][:] - albedo[albedo<=0]='nan' - -# albedo[albedo == 0] = 'nan' - classified = data.variables['classified'][:] - #lats=data.variables['lat'][:] - #lons=data.variables['lon'][:] - x=data.variables['x'][:] - y=data.variables['y'][:] - - # find the extent and difference - xmin=min(x);xmax=max(x) - ymin=min(y);ymax=max(y) - mdx= np.abs(diff(x)) - mdy= np.abs(diff(y)) - - # determine dx and dy from the median of all the non-zero difference values - dx = np.median(mdx[np.where(mdx>0.0)[0]]) - dy = np.median(mdy[np.where(mdy>0.0)[0]]) - - x_mean = np.mean(x) - y_mean = np.mean(y) - - xx, yy = np.meshgrid(x,y) - myProj_UTM22 = Proj("+proj=utm +zone=22, +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs") - myProj_3413 = Proj(" +proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs") - lon, lat = myProj_UTM22(x_mean,y_mean, inverse=True) - x_3413_center, y_3413_center = myProj_3413(lon, lat) - -# file_sice = glob.glob('.\\Tedstone v2.3_jeb\\201707' + filename[11:13] + '*', recursive=True) -# file_sice = (file_sice[0] + '\\albedo_bb_planar_sw.tif') -# src_ds=gdal.Open(file_sice) -# gt=src_ds.GetGeoTransform() -# rb=src_ds.GetRasterBand(1) -# gdal.UseExceptions() #so it doesn't print to screen everytime point is outside grid - -#import rasterio as rio -#from rasterio.plot import plotting_extent -#import geopandas as gpd -## Rasterstats contains the zonalstatistics function that you will use to extract raster values -#import rasterstats as rs -#import pandas as pd -#import earthpy as et - - -#with rio.open(file_sice) as SICE_raster: -# SICE_data = SICE_raster.read(1, masked=True) -# SICE_meta = SICE_raster.profile -# -#df = pd.DataFrame({'Location': ['CenterPoint'], -# 'Latitude': [lat], -# 'Longitude': [lon]}) -#gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude)) -#gdf.buffer(1000).to_file( filename[1:13] +'_buffer.shp') - - -# from scipy.spatial import ConvexHull -# x_all = xx.flatten() -# y_all = yy.flatten() -# a = albedo.flatten() -# -# points = np.vstack((x_all[~np.isnan(a)], y_all[~np.isnan(a)])) -# points = np.transpose(points) -# hull = ConvexHull(points.__array__()) - -# Plutting hull and barycenter -# plt.plot(points[np.arange(0,len(points),math.floor(len(points)/1000)),0], -# points[np.arange(0,len(points),math.floor(len(points)/1000)),1], -# 'o') -# for simplex in hull.simplices: -# plt.plot(points[simplex, 0], points[simplex, 1], 'k-') -# -# -# plt.plot(points[hull.vertices,0], points[hull.vertices,1], 'r--', lw=4) -# plt.plot(points[hull.vertices[0],0], points[hull.vertices[0],1], 'ro') -# plt.scatter(x_mean,y_mean,s=300,c='r',zorder=3) -# -# plt.show() - - -# Plottin - cmap = matplotlib.cm.jet - cmap.set_bad('white',1.) 
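# The Proj round-trip earlier in this block (UTM 22N -> lon/lat -> polar
# stereographic) can be collapsed into one call with a pyproj Transformer;
# a sketch assuming pyproj >= 2, where EPSG:32622 is WGS84 / UTM zone 22N
# and EPSG:3413 matches the hard-coded stereographic proj string above:
from pyproj import Transformer

utm22_to_3413 = Transformer.from_crs('EPSG:32622', 'EPSG:3413', always_xy=True)
x_3413_center, y_3413_center = utm22_to_3413.transform(x_mean, y_mean)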
- -#fig, ax = plt.subplots(figsize=(10, 10)) -#ax.imshow(SICE_data, -# # Here you must set the spatial extent or else the data will not line up with your geopandas layer -# extent=plotting_extent(SICE_raster), -# cmap=cmap, -# interpolation='nearest', -# vmin=0, vmax=1) -#gdf.plot(ax=ax, -# marker='s', -# markersize=45, -# color='purple') -#plt.show() - - - fig, (ax1, ax2) = plt.subplots(1, 2) -# ax1.imshow(SICE_data, -# # Here you must set the spatial extent or else the data will not line up with your geopandas layer -# extent=plotting_extent(SICE_raster), -# cmap=cmap, -# interpolation='nearest', -# vmin=0, vmax=1) - im1 = ax1.imshow(albedo, - cmap=cmap, - interpolation='nearest', - vmin=0, vmax=1, - extent=(np.amin(x), np.amax(x), np.amin(y), np.amax(y))) - ax1_divider = make_axes_locatable(ax1) - # add an axes above the main axes. - cax1 = ax1_divider.append_axes("top", size="7%", pad="10%") - cb1 = colorbar(im1, cax=cax1, orientation="horizontal") - # change tick position to top. Tick position defaults to bottom and overlaps - # the image. - cax1.xaxis.set_ticks_position("top") - cax1.set_xlabel("Albedo (-)") - cax1.xaxis.set_label_position('top') - ax1.ticklabel_format(useOffset=False) - ax1.set_xlabel('x (m East, UTM22)') - ax1.set_ylabel('y (m North, UTM22)') - - ax2 = plt.subplot(1,2,2) - a=albedo.flatten() - hist, bin_edges = np.histogram(a[~np.isnan(a)], bins=50) - plt.step(bin_edges[0:len(hist)],hist*0.05*0.05,color='black') - plt.axvline(x=np.mean(a[~np.isnan(a)]),color='red',label='mean') - plt.axvline(x=np.median(a[~np.isnan(a)]),color='blue',label='median') - plt.axvline(x=alb_2_3[count],color='magenta',label='SICEv2.3') - plt.axvline(x=alb_sicef[count],color='green',label='SICE.f') - - ax2.legend(loc='upper left') - plt.title(filename[1:13]) - ax2.yaxis.tick_right() - ax2.yaxis.set_label_position('right') - ax2.set_xlabel("Albedo (-)") - ax2.set_ylabel("Area (m^2)") - plt.autoscale(enable=True, axis='both', tight=True) - plt.gcf().subplots_adjust(right=0.0015) - plt.savefig(filename[1:13] + "_plot.png", dpi=150) - plt.show() \ No newline at end of file diff --git a/validation/comp_cook_OLCI.py b/validation/comp_cook_OLCI.py deleted file mode 100644 index fbf2f63..0000000 --- a/validation/comp_cook_OLCI.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -""" - -""" -ly='p' # x for plot window, p for .png -do_gif=0 - - -import os -import numpy as np -import matplotlib.pyplot as plt -import pandas as pd - -plt.rcParams['font.sans-serif'] = ['Georgia'] -plt.rcParams['axes.facecolor'] = 'w' -plt.rcParams['axes.edgecolor'] = 'w' -plt.rcParams['axes.grid'] = True -plt.rcParams['grid.alpha'] = 1 -plt.rcParams['grid.color'] = "grey" -plt.rcParams["font.size"] = 16 - -version='20190807' -SICE_version_name='2.0' - -#version='20190906' -#SICE_version_name='3.0' - -version='20190830' -SICE_version_name='2.1' - -version='20190926' -SICE_version_name='2.2' - -figpath="/Users/jason/Dropbox/S3/validation_source_data/Cook_data/Figs/" - -fn='/Users/jason/Dropbox/S3/ancil/band_centers.txt' -df = pd.read_csv(fn,header=None) -central_wavelengths=df.iloc[:,0] - -print(central_wavelengths) -central_wavelengths=central_wavelengths.to_numpy().flatten() - -SICE_path="/Users/jason/Dropbox/S3/AK_FORTRAN/"+version+"_SICE/output/" - -inv= np.where((central_wavelengths < 1020) & (central_wavelengths >= 900)) -print(central_wavelengths[inv]) - -#for i in range(10,40): -# for i in range(10,11): -for i in range(11,12): - - if ly == 'p' : - plt.close("all") - - idx="{:02d}".format(i+1) - - 
print(idx) - - figname=idx+"_comp_Cook_OLCI_"+version - - fn=SICE_path+idx+".albedo_spectral_planar.csv" - print(fn) - df = pd.read_csv(fn) - SICE=df.loc[0,'r1':'r21'] - SICE=SICE.to_numpy().flatten() -# SICE[inv]=np.nan - - fn=SICE_path+idx+".BOAR.csv" - print(fn) - df = pd.read_csv(fn) - BOAR=df.loc[0,'r1':'r21'] - BOAR=BOAR.to_numpy().flatten() - BOAR[inv]=np.nan - - fn=SICE_path+idx+".TOAR.csv" - df = pd.read_csv(fn) - TOAR=df.loc[0,'r1':'r21'] - TOAR=TOAR.to_numpy().flatten() - - data_fn = "/Users/jason/Dropbox/S3/validation_source_data/Cook_data/stats_lo.csv" - df = pd.read_csv(data_fn) - - fn="/Users/jason/Dropbox/S3/validation_source_data/Cook_data/output_S3_extract/"+idx+".csv" - df_OLCI=pd.read_csv(fn) - #OLCI = df2["albedo_spectral_planar_400"] - #albedo_spectral_planar_400 albedo_spectral_planar_412 albedo_spectral_planar_442 albedo_spectral_planar_490 albedo_spectral_planar_510 albedo_spectral_planar_560 albedo_spectral_planar_620 albedo_spectral_planar_665 albedo_spectral_planar_673 albedo_spectral_planar_681 albedo_spectral_planar_708 albedo_spectral_planar_753 albedo_spectral_planar_761 albedo_spectral_planar_764 albedo_spectral_planar_767 albedo_spectral_planar_778 albedo_spectral_planar_865 albedo_spectral_planar_885 albedo_spectral_planar_900 albedo_spectral_planar_940 albedo_spectral_planar_1020 - v= np.where(df_OLCI["day"] == 22) - - -# print(v) - - OLCI=df_OLCI.loc[v[0],'albedo_spectral_planar_400':'albedo_spectral_planar_1020'] - OLCI=OLCI.to_numpy().flatten() - - # Assign variables - X = df["lamda"].values - Y = df["mean"].values - Y_std = df["std"] .values - - # Convert to numpy arrays and reshape X vector (to comply with the convetion of the 'linear_model' function) - # .values takes input from a "series" to an array - - - #print("X min: {:.2f}".format(X.min())) - #print("X max: {:.2f}".format(X.max())) - #print("Y min: {:.2f}".format(Y.min())) - #print("Y max: {:.2f}".format(Y.max())) - - v= np.where( (df["lamda"] < 1500)) - - X=X[v] - Y=Y[v] - Y_std=Y_std[v] - - th=1 # line thickness - plt.plot(X, Y, color='navy', linewidth=th, label='obs. J. Cook',linestyle='-') - plt.plot(X, Y+Y_std*1.96, color='navy', linewidth=th, label='obs. 
95% interval',linestyle='--') - plt.plot(X, Y-Y_std*1.96, color='navy', linewidth=th,linestyle='--') - - ss=10 - th=0.5 - # plt.plot(central_wavelengths,OLCI,marker="x",markersize=ss/2.,color='m',label='planar alb, S3Snow v2.3',linewidth=th) - #plt.plot(central_wavelengths,OLCI,':',color='m') - plt.plot(central_wavelengths,SICE,marker="+",markersize=ss,color='r', label='planar alb, SICE '+SICE_version_name,linewidth=th) - #plt.plot(central_wavelengths,SICE,'-',color='r') - plt.plot(central_wavelengths,TOAR,marker="|",markersize=ss,color='g', label='rTOA',linewidth=th) - plt.plot(central_wavelengths,TOAR,'-',color='g') - plt.plot(central_wavelengths,BOAR,marker="|",markersize=ss,color='b', label='rBRR',linewidth=th) - plt.plot(central_wavelengths,BOAR,'-',color='b') - - plt.xlabel('wavelength, nm') ; plt.ylabel('spectral albedo') - plt.title('SW Greenland bare ice, SICE v'+SICE_version_name+', case '+idx) - - plt.legend(loc='upper right') - - plt.ylim(-0.035,0.7) - - if ly == 'x': - plt.show() - if ly == 'p': - DPI=300 - DPI=150 - fignam=figpath+figname - plt.savefig(fignam+'.png', bbox_inches = 'tight',figsize = (13,8), dpi = DPI) - - # do_gif plt.savefig(fignam+'.eps') - -if do_gif == 1: - os.system(opt+'/local/bin/convert -delay 10 -loop 0 '+figpath+'*'+version+'*.png '+figpath+'anim'+version+'.gif') diff --git a/validation/figures/PROMICE_comp_1.png b/validation/figures/PROMICE_comp_1.png deleted file mode 100644 index 1412ee1..0000000 Binary files a/validation/figures/PROMICE_comp_1.png and /dev/null differ diff --git a/validation/figures/PROMICE_comp_2.png b/validation/figures/PROMICE_comp_2.png deleted file mode 100644 index 5548a32..0000000 Binary files a/validation/figures/PROMICE_comp_2.png and /dev/null differ diff --git a/validation/figures/scatter.png b/validation/figures/scatter.png deleted file mode 100644 index 8e1be38..0000000 Binary files a/validation/figures/scatter.png and /dev/null differ diff --git a/validation/merge_S3_PROMICE.py b/validation/merge_S3_PROMICE.py deleted file mode 100644 index a640990..0000000 --- a/validation/merge_S3_PROMICE.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Wed Jul 29 07:01:37 2020 - -@author: bav -""" - -# -*- coding: utf-8 -*- -""" -Created on Fri Jul 24 08:58:21 2020 -Data processing for the evaluation of SICE against PROMICE measurements -@author: bav -""" - -import numpy as np -import pandas as pd -import os -import errno -from pathlib import Path -import wget # conda install: https://anaconda.org/anaconda/pywget - -#%% preparing input file -path = './data/' -filename = 'S3_28072020' -data = pd.read_csv(path+filename+'.csv'); - -cols = data.columns -cols = [c for c in cols if c.find('radiance')<0] -cols = [c for c in cols if c.find('solar')<0] -cols = [c for c in cols if c.find('temperature')<0] -cols = [c for c in cols if c.find('spectral')<0] -cols = [c for c in cols if c.find('rBRR')<0] -cols = [c for c in cols if c.find('albedo')<0] -cols = [c for c in cols if c.find('variance')<0] -cols = [c for c in cols if c.find('grain')<0] -cols = [c for c in cols if c.find('snow')<0] -cols = [c for c in cols if c.find('ndsi')<0] -cols = [c for c in cols if c.find('ndbi')<0] -cols = [c for c in cols if c.find('wind')<0] -cols = [c for c in cols if c.find('OAA')<0] -cols = [c for c in cols if c.find('SAA')<0] -cols = [c for c in cols if c.find('OZA')<0] -cols = [c for c in cols if c.find('SZA')<0] -cols = [c for c in cols if c.find('plat')<0] -cols = [c for c in cols if c.find('aspect')<0] -cols = [c for c in 
cols if c.find('_y')<0] -cols = [c for c in cols if c.find('Unname')<0] -data=data[cols] - -for c in cols: - if c.find('_x')>0: - data.rename(columns={c:c.replace('_x', '')}, inplace=True) - - -#%% Adding PROMICE observations -# Array information of stations available at PROMICE official site: https://promice.org/WeatherStations.html -PROMICE_stations = [('EGP',(75.6247,-35.9748), 2660), - ('KAN_B',(67.1252,-50.1832), 350), - ('KAN_L',(67.0955,-35.9748), 670), - ('KAN_M',(67.0670,-48.8355), 1270), - ('KAN_U',(67.0003,-47.0253), 1840), - ('KPC_L',(79.9108,-24.0828), 370), - ('KPC_U',(79.8347,-25.1662), 870), - ('MIT',(65.6922,-37.8280), 440), - ('NUK_K',(64.1623,-51.3587), 710), - ('NUK_L',(64.4822,-49.5358), 530), - ('NUK_U',(64.5108,-49.2692), 1120), - ('QAS_L',(61.0308,-46.8493), 280), - ('QAS_M',(61.0998,-46.8330), 630), - ('QAS_U',(61.1753,-46.8195), 900), - ('SCO_L',(72.2230,-26.8182), 460), - ('SCO_U',(72.3933,-27.2333), 970), - ('TAS_A',(65.7790,-38.8995), 890), - ('TAS_L',(65.6402,-38.8987), 250), - ('THU_L',(76.3998,-68.2665), 570), - ('THU_U',(76.4197,-68.1463), 760), - ('UPE_L',(72.8932,-54.2955), 220), - ('UPE_U',(72.8878,-53.5783), 940)] - -path_to_PROMICE = "./data/PROMICE" - -# Function for making directories if they do not exists. -def mkdir_p(path): - try: - os.makedirs(path) - return 'Path created.' - except OSError as exc: - if exc.errno == errno.EEXIST and os.path.isdir(path): - return 'Path already exists!' - else: - raise - -mkdir_p(path_to_PROMICE) - -# Goes through each station and fetch down data online. Necessary manipulations and sorting are made. -for ws in PROMICE_stations: - if Path(f'{path_to_PROMICE}/{ws[0]}_hour_v03.txt').is_file(): - print('\nPROMICE.csv file already exists.') - pass - else: - print(ws) - url = f'https://promice.org/PromiceDataPortal/api/download/f24019f7-d586-4465-8181-d4965421e6eb/v03/hourly/csv/{ws[0]}_hour_v03.txt' - filename = wget.download(url, out= path_to_PROMICE + f'/{ws[0]}_hour_v03.txt') - -if Path(f'{path_to_PROMICE}/PROMICE.csv').is_file(): - print('\nPROMICE.csv file already exists.') - pass -else: - # Create one big file "PROMICE.csv" with all the stations data. 
- # reading PROMICE file and joining them together - for ws in PROMICE_stations: - filepath = path_to_PROMICE + '/' + ws[0] + '_hour_v03.txt' - - df = pd.read_csv (filepath, delim_whitespace=True) - df = df[['Year','MonthOfYear','DayOfYear','HourOfDay(UTC)','CloudCover', - 'TiltToEast(d)','TiltToNorth(d)','Albedo_theta<70d']] - df = df[df.Year > 2015] - df['station_name'] = ws[0] - df['latitude N'] = ws[1][0] - df['longitude W'] = ws[1][1] - df['elevation'] = float(ws[2]) - df['TiltToEast(d)'].replace(-999.0, np.nan, inplace=True) - try : - df['TiltToEast(d)'] = df['TiltToEast(d)'].interpolate(method='nearest', limit_direction='both') - except: - print('no tilt at '+ws[0]) - df['TiltToNorth(d)'].replace(-999.0, np.nan, inplace=True) - try: - df['TiltToNorth(d)'] = df['TiltToNorth(d)'].interpolate(method='nearest', limit_direction='both') - except: - print('no tilt at '+ws[0]) - - df.to_csv(path_to_PROMICE + '/' + ws[0] + '.csv', index=None) - - PROMICE = pd.DataFrame() - filelist = [ f for f in os.listdir(path_to_PROMICE) if f.endswith(".csv") ] - for f in filelist: - print(f) - PROMICE = PROMICE.append(pd.read_csv(f'{path_to_PROMICE}/{f}')) - - PROMICE['TiltToEast(d)'] = PROMICE['TiltToEast(d)'].abs() - PROMICE['TiltToNorth(d)'] = PROMICE['TiltToNorth(d)'].abs() - PROMICE['tilt'] = PROMICE[['TiltToEast(d)','TiltToNorth(d)']].values.max(1) - PROMICE.drop(['TiltToNorth(d)','TiltToEast(d)'], axis=1, inplace=True) - PROMICE.rename(columns={'Year': 'year', - 'MonthOfYear': 'month', - 'DayOfYear':'dayofyear', - 'HourOfDay(UTC)':'hour', - 'CloudCover':'cloud', - 'station_name':'station', - 'Albedo_theta<70d':'PROMICE_alb'}, inplace=True) - PROMICE.to_csv(f'{path_to_PROMICE}/PROMICE.csv', index=None) - -#%% Merging -data_PROMICE = pd.read_csv(path_to_PROMICE+'/PROMICE.csv') -data.rename(columns={'site': 'station'}, inplace=True) - -full_size = data.shape[0] -data = data.drop_duplicates(keep='first') #dropping duplicates except first sample, if any. -print(f'Removed duplicates: {full_size-data.shape[0]}') - -# the PROMICE data is only given in hourly interval. Hence the hour have to be corrected in the s3_extract data. 
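# The commented-out one-liner below applies a Python conditional to whole
# pandas Series, which raises "truth value of a Series is ambiguous". A
# vectorized sketch of nearest-hour rounding (assuming the frame still has
# integer 'hour' and 'minute' columns; the day-of-year is not advanced here
# when the hour wraps past 23):
import numpy as np
minute = data['minute'].astype('int64')
data['hour'] = np.where(minute < 30, data['hour'], data['hour'] + 1) % 24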
-#s3_extract['hour'] = s3_extract['hour'] - 1 if s3_extract['minute'].astype('int64') < 30 else (s3_extract['hour'] + 1) - -# Merge images with PROMICE data -data_all = pd.merge(left=data, right=data_PROMICE, how='inner', - left_on=['year','month','dayofyear','hour','station'], - right_on=['year','month','dayofyear','hour', 'station']) -# data_all.drop(columns=['Unnamed: 0','Unnamed: 0.1'],inplace=True) -# keeping only good data -print(data_all.shape[0]) -data_all = data_all.replace(to_replace=-999,value=np.nan) -data_all = data_all[data_all['PROMICE_alb'].notnull()] -data_all = data_all[data_all['PROMICE_alb']<1] -data_all = data_all[data_all['PROMICE_alb']>0] -data_all = data_all[data_all['sza']<70] -print(data_all.shape[0]) -tmp =data_all.shape[0] - -data_all['BBA_emp'] = 0.945*(data_all["Oa01_reflectance"] + data_all["Oa06_reflectance"] + data_all["Oa17_reflectance"] + data_all["Oa21_reflectance"]) / 4.0+ 0.055 - -# Create merged csv -data_all.to_csv('./data/S3_PROMICE.csv', index=None) diff --git a/validation/old/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb b/validation/old/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb deleted file mode 100644 index 3d760b4..0000000 --- a/validation/old/.ipynb_checkpoints/fetch_PROMICE_data-checkpoint.ipynb +++ /dev/null @@ -1,173 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Fetch PROMICE data\n", - "This codeblock fetches down relevant weather data from https://promice.org/PromiceDataPortal/#Automaticweatherstations (unless the file is already available in the folder, then it skips). \n", - "This file has taken its source in the following thread: https://stackoverflow.com/questions/51455112/python-script-to-download-data-from-noaa\n", - "Script does take a while to run (30+ minutes)...." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'wget'", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0merrno\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mos\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m \u001b[1;32mimport\u001b[0m \u001b[0mwget\u001b[0m \u001b[1;31m# conda install: https://anaconda.org/anaconda/pywget\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 4\u001b[0m \u001b[1;32mfrom\u001b[0m \u001b[0mpathlib\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mPath\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mpandas\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mpd\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'wget'" - ] - } - ], - "source": [ - "import errno\n", - "import os\n", - "import wget # conda install: https://anaconda.org/anaconda/pywget\n", - "from pathlib import Path\n", - "import pandas as pd\n", - "import numpy as np\n", - "\n", - "path = \"data/PROMICE\" \n", - "\n", - "# Array information of stations available at PROMICE official site: https://promice.org/WeatherStations.html\n", - "PROMICE_stations = [('EGP',(75.6247,-35.9748), 2660), \n", - " ('KAN_B',(67.1252,-50.1832), 350), \n", - " ('KAN_L',(67.0955,-35.9748), 670), \n", - " ('KAN_M',(67.0670,-48.8355), 1270), \n", - " ('KAN_U',(67.0003,-47.0253), 1840), \n", - " ('KPC_L',(79.9108,-24.0828), 370),\n", - " ('KPC_U',(79.8347,-25.1662), 870), \n", - " ('MIT',(65.6922,-37.8280), 440), \n", - " ('NUK_K',(64.1623,-51.3587), 710), \n", - " ('NUK_L',(64.4822,-49.5358), 530),\n", - " ('NUK_U',(64.5108,-49.2692), 1120),\n", - " ('QAS_L',(61.0308,-46.8493), 280),\n", - " ('QAS_M',(61.0998,-46.8330), 630), \n", - " ('QAS_U',(61.1753,-46.8195), 900), \n", - " ('SCO_L',(72.2230,-26.8182), 460),\n", - " ('SCO_U',(72.3933,-27.2333), 970),\n", - " ('TAS_A',(65.7790,-38.8995), 890),\n", - " ('TAS_L',(65.6402,-38.8987), 250),\n", - " ('THU_L',(76.3998,-68.2665), 570),\n", - " ('THU_U',(76.4197,-68.1463), 760),\n", - " ('UPE_L',(72.8932,-54.2955), 220), \n", - " ('UPE_U',(72.8878,-53.5783), 940)]\n", - "\n", - "# Function for making directories if they do not exists. \n", - "def mkdir_p(path):\n", - " try:\n", - " os.makedirs(path)\n", - " return 'Path created.'\n", - " except OSError as exc:\n", - " if exc.errno == errno.EEXIST and os.path.isdir(path):\n", - " return 'Path already exists!'\n", - " else:\n", - " raise\n", - " \n", - "mkdir_p(path)\n", - "\n", - "\n", - "# Goes through each station and fetch down data online. 
Necessary manipulations and sorting are made.\n", - "for ws in PROMICE_stations:\n", - " url = f'https://promice.org/PromiceDataPortal/api/download/f24019f7-d586-4465-8181-d4965421e6eb/v03/hourly/csv/{ws[0]}_hour_v03.txt'\n", - " file_x = path + f'/{ws[0]}'\n", - " if Path(file_x + '.csv').is_file():\n", - " pass\n", - " else:\n", - " filename = wget.download(url, out=file_x + '.txt')\n", - " #https://stackoverflow.com/a/19759515\n", - " with open(file_x + '.txt') as infile, open(file_x + '_fixed.txt', 'w') as outfile:\n", - " for line in infile:\n", - " outfile.write(\" \".join(line.split()).replace(' ', ','))\n", - " outfile.write(\",\") # trailing comma shouldn't matter\n", - " outfile.write(\"\\n\")\n", - " df = pd.read_csv (file_x + '_fixed.txt')\n", - " df = df[['Year','MonthOfYear','DayOfYear','HourOfDay(UTC)','CloudCover','TiltToEast(d)','TiltToNorth(d)','Albedo_theta<70d']]\n", - " df = df[df.Year > 2015]\n", - " #df = df[df.MonthOfYear > 4]\n", - " #df = df[df.MonthOfYear < 9]\n", - " #df = df[df['HourOfDay(UTC)'] > 9] #\n", - " #df = df[df['HourOfDay(UTC)'] < 21]\n", - " df = df.round({'CloudCover': 2, 'TiltToEast(d)': 2, 'TiltToNorth(d)': 2, 'Albedo_theta<70d': 2})\n", - " #df = df[df['Albedo_theta<70d'] < 1.00]\n", - " #df = df[df['Albedo_theta<70d'] > 0.00]\n", - " df['station_name'] = ws[0]\n", - " df['latitude N'] = ws[1][0]\n", - " df['longitude W'] = ws[1][1]\n", - " df['elevation'] = float(ws[2])\n", - " df['TiltToEast(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToEast(d)'] = df['TiltToEast(d)'].interpolate(method='nearest', limit_direction='both')\n", - " df['TiltToNorth(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToNorth(d)'] = df['TiltToNorth(d)'].interpolate(method='nearest', limit_direction='both')\n", - "\n", - " df.to_csv(file_x + '.csv', index=None)\n", - "\n", - "# Removes all temporary files made:\n", - "filelist = [ f for f in os.listdir(path) if f.endswith(\".txt\") ]\n", - "for f in filelist:\n", - " os.remove(os.path.join(path, f))\n", - "\n", - "# Create one big file \"PROMICE.csv\" with all the stations data. 
\n", - "if Path(f'{path}/PROMICE.csv').is_file():\n", - " pass\n", - "else: \n", - " PROMICE = pd.DataFrame()\n", - " filelist = [ f for f in os.listdir(path) if f.endswith(\".csv\") ]\n", - " for f in filelist:\n", - " PROMICE = PROMICE.append(pd.read_csv(f'{path}/{f}'))\n", - " \n", - " PROMICE['TiltToEast(d)'] = PROMICE['TiltToEast(d)'].abs()\n", - " PROMICE['TiltToNorth(d)'] = PROMICE['TiltToNorth(d)'].abs()\n", - " PROMICE['tilt'] = PROMICE[['TiltToEast(d)','TiltToNorth(d)']].values.max(1)\n", - " PROMICE.drop(['TiltToNorth(d)','TiltToEast(d)'], axis=1, inplace=True)\n", - " PROMICE.rename(columns={'Year': 'year', \n", - " 'MonthOfYear': 'month',\n", - " 'DayOfYear':'dayofyear',\n", - " 'HourOfDay(UTC)':'hour',\n", - " 'CloudCover':'cloud',\n", - " 'station_name':'station',\n", - " 'Albedo_theta<70d':'PROMICE_alb'}, inplace=True)\n", - " PROMICE = PROMICE.round({'latitude N': 4, 'longitude W': 4})\n", - " PROMICE.to_csv(f'{path}/PROMICE.csv', index=None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/validation/old/fetch_PROMICE_data.ipynb b/validation/old/fetch_PROMICE_data.ipynb deleted file mode 100644 index a128ee2..0000000 --- a/validation/old/fetch_PROMICE_data.ipynb +++ /dev/null @@ -1,180 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Fetch PROMICE data\n", - "This codeblock fetches down relevant weather data from https://promice.org/PromiceDataPortal/#Automaticweatherstations (unless the file is already available in the folder, then it skips). \n", - "This file has taken its source in the following thread: https://stackoverflow.com/questions/51455112/python-script-to-download-data-from-noaa\n", - "Script does take a while to run (30+ minutes)...." 
- ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "Collapsed": "false" - }, - "outputs": [ - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 53\u001b[0m \u001b[1;32mpass\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 54\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 55\u001b[1;33m \u001b[0mfilename\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mwget\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdownload\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0murl\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mout\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfile_x\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m'.txt'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 56\u001b[0m \u001b[1;31m#https://stackoverflow.com/a/19759515\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfile_x\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m'.txt'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0minfile\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfile_x\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m'_fixed.txt'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'w'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0moutfile\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\site-packages\\wget.py\u001b[0m in \u001b[0;36mdownload\u001b[1;34m(url, out, bar)\u001b[0m\n\u001b[0;32m 524\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 525\u001b[0m \u001b[0mbinurl\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0murl\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 526\u001b[1;33m \u001b[1;33m(\u001b[0m\u001b[0mtmpfile\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mulib\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0murlretrieve\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mbinurl\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtmpfile\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcallback\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 527\u001b[0m \u001b[0mfilename\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdetect_filename\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0murl\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mout\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 528\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0moutdir\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\urllib\\request.py\u001b[0m in \u001b[0;36murlretrieve\u001b[1;34m(url, filename, reporthook, data)\u001b[0m\n\u001b[0;32m 274\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 275\u001b[0m \u001b[1;32mwhile\u001b[0m 
\u001b[1;32mTrue\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 276\u001b[1;33m \u001b[0mblock\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mfp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mbs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 277\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mblock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 278\u001b[0m \u001b[1;32mbreak\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\http\\client.py\u001b[0m in \u001b[0;36mread\u001b[1;34m(self, amt)\u001b[0m\n\u001b[0;32m 455\u001b[0m \u001b[1;31m# Amount is given, implement using readinto\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 456\u001b[0m \u001b[0mb\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbytearray\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mamt\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 457\u001b[1;33m \u001b[0mn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreadinto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 458\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mmemoryview\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mn\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtobytes\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 459\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\http\\client.py\u001b[0m in \u001b[0;36mreadinto\u001b[1;34m(self, b)\u001b[0m\n\u001b[0;32m 499\u001b[0m \u001b[1;31m# connection, and the user is reading more bytes than will be provided\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 500\u001b[0m \u001b[1;31m# (for example, reading in 1k chunks)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 501\u001b[1;33m \u001b[0mn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreadinto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 502\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mn\u001b[0m \u001b[1;32mand\u001b[0m \u001b[0mb\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 503\u001b[0m \u001b[1;31m# Ideally, we would raise IncompleteRead if the content-length\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\socket.py\u001b[0m in \u001b[0;36mreadinto\u001b[1;34m(self, b)\u001b[0m\n\u001b[0;32m 587\u001b[0m \u001b[1;32mwhile\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 588\u001b[0m 
\u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 589\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sock\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrecv_into\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 590\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 591\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_timeout_occurred\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\ssl.py\u001b[0m in \u001b[0;36mrecv_into\u001b[1;34m(self, buffer, nbytes, flags)\u001b[0m\n\u001b[0;32m 1069\u001b[0m \u001b[1;34m\"non-zero flags not allowed in calls to recv_into() on %s\"\u001b[0m \u001b[1;33m%\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1070\u001b[0m self.__class__)\n\u001b[1;32m-> 1071\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mnbytes\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1072\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1073\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrecv_into\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mbuffer\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mnbytes\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mflags\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m~\\AppData\\Local\\Continuum\\anaconda3\\lib\\ssl.py\u001b[0m in \u001b[0;36mread\u001b[1;34m(self, len, buffer)\u001b[0m\n\u001b[0;32m 927\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 928\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mbuffer\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 929\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sslobj\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 930\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 931\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sslobj\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], - "source": [ - "import errno\n", - "import os\n", - "import wget # conda install: https://anaconda.org/anaconda/pywget\n", - "from pathlib import Path\n", - "import pandas as pd\n", - "import numpy as np\n", - "\n", - "path = \"data/PROMICE\" \n", - "\n", - "# Array information of 
stations available at PROMICE official site: https://promice.org/WeatherStations.html\n", - "PROMICE_stations = [('EGP',(75.6247,-35.9748), 2660), \n", - " ('KAN_B',(67.1252,-50.1832), 350), \n", - " ('KAN_L',(67.0955,-35.9748), 670), \n", - " ('KAN_M',(67.0670,-48.8355), 1270), \n", - " ('KAN_U',(67.0003,-47.0253), 1840), \n", - " ('KPC_L',(79.9108,-24.0828), 370),\n", - " ('KPC_U',(79.8347,-25.1662), 870), \n", - " ('MIT',(65.6922,-37.8280), 440), \n", - " ('NUK_K',(64.1623,-51.3587), 710), \n", - " ('NUK_L',(64.4822,-49.5358), 530),\n", - " ('NUK_U',(64.5108,-49.2692), 1120),\n", - " ('QAS_L',(61.0308,-46.8493), 280),\n", - " ('QAS_M',(61.0998,-46.8330), 630), \n", - " ('QAS_U',(61.1753,-46.8195), 900), \n", - " ('SCO_L',(72.2230,-26.8182), 460),\n", - " ('SCO_U',(72.3933,-27.2333), 970),\n", - " ('TAS_A',(65.7790,-38.8995), 890),\n", - " ('TAS_L',(65.6402,-38.8987), 250),\n", - " ('THU_L',(76.3998,-68.2665), 570),\n", - " ('THU_U',(76.4197,-68.1463), 760),\n", - " ('UPE_L',(72.8932,-54.2955), 220), \n", - " ('UPE_U',(72.8878,-53.5783), 940)]\n", - "\n", - "# Function for making directories if they do not exists. \n", - "def mkdir_p(path):\n", - " try:\n", - " os.makedirs(path)\n", - " return 'Path created.'\n", - " except OSError as exc:\n", - " if exc.errno == errno.EEXIST and os.path.isdir(path):\n", - " return 'Path already exists!'\n", - " else:\n", - " raise\n", - " \n", - "mkdir_p(path)\n", - "\n", - "\n", - "# Goes through each station and fetch down data online. Necessary manipulations and sorting are made.\n", - "for ws in PROMICE_stations:\n", - " url = f'https://promice.org/PromiceDataPortal/api/download/f24019f7-d586-4465-8181-d4965421e6eb/v03/hourly/csv/{ws[0]}_hour_v03.txt'\n", - " file_x = path + f'/{ws[0]}'\n", - " if Path(file_x + '.csv').is_file():\n", - " pass\n", - " else:\n", - " filename = wget.download(url, out=file_x + '.txt')\n", - " #https://stackoverflow.com/a/19759515\n", - " with open(file_x + '.txt') as infile, open(file_x + '_fixed.txt', 'w') as outfile:\n", - " for line in infile:\n", - " outfile.write(\" \".join(line.split()).replace(' ', ','))\n", - " outfile.write(\",\") # trailing comma shouldn't matter\n", - " outfile.write(\"\\n\")\n", - " df = pd.read_csv (file_x + '_fixed.txt')\n", - " df = df[['Year','MonthOfYear','DayOfYear','HourOfDay(UTC)','CloudCover','TiltToEast(d)','TiltToNorth(d)','Albedo_theta<70d']]\n", - " df = df[df.Year > 2015]\n", - " #df = df[df.MonthOfYear > 4]\n", - " #df = df[df.MonthOfYear < 9]\n", - " #df = df[df['HourOfDay(UTC)'] > 9] #\n", - " #df = df[df['HourOfDay(UTC)'] < 21]\n", - " df = df.round({'CloudCover': 2, 'TiltToEast(d)': 2, 'TiltToNorth(d)': 2, 'Albedo_theta<70d': 2})\n", - " #df = df[df['Albedo_theta<70d'] < 1.00]\n", - " #df = df[df['Albedo_theta<70d'] > 0.00]\n", - " df['station_name'] = ws[0]\n", - " df['latitude N'] = ws[1][0]\n", - " df['longitude W'] = ws[1][1]\n", - " df['elevation'] = float(ws[2])\n", - " df['TiltToEast(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToEast(d)'] = df['TiltToEast(d)'].interpolate(method='nearest', limit_direction='both')\n", - " df['TiltToNorth(d)'].replace(-999.0, np.nan, inplace=True)\n", - " df['TiltToNorth(d)'] = df['TiltToNorth(d)'].interpolate(method='nearest', limit_direction='both')\n", - "\n", - " df.to_csv(file_x + '.csv', index=None)\n", - "\n", - "# Removes all temporary files made:\n", - "filelist = [ f for f in os.listdir(path) if f.endswith(\".txt\") ]\n", - "for f in filelist:\n", - " os.remove(os.path.join(path, f))\n", - "\n", - "# Create 
one big file \"PROMICE.csv\" with all the stations data. \n", - "if Path(f'{path}/PROMICE.csv').is_file():\n", - " pass\n", - "else: \n", - " PROMICE = pd.DataFrame()\n", - " filelist = [ f for f in os.listdir(path) if f.endswith(\".csv\") ]\n", - " for f in filelist:\n", - " PROMICE = PROMICE.append(pd.read_csv(f'{path}/{f}'))\n", - " \n", - " PROMICE['TiltToEast(d)'] = PROMICE['TiltToEast(d)'].abs()\n", - " PROMICE['TiltToNorth(d)'] = PROMICE['TiltToNorth(d)'].abs()\n", - " PROMICE['tilt'] = PROMICE[['TiltToEast(d)','TiltToNorth(d)']].values.max(1)\n", - " PROMICE.drop(['TiltToNorth(d)','TiltToEast(d)'], axis=1, inplace=True)\n", - " PROMICE.rename(columns={'Year': 'year', \n", - " 'MonthOfYear': 'month',\n", - " 'DayOfYear':'dayofyear',\n", - " 'HourOfDay(UTC)':'hour',\n", - " 'CloudCover':'cloud',\n", - " 'station_name':'station',\n", - " 'Albedo_theta<70d':'PROMICE_alb'}, inplace=True)\n", - " PROMICE = PROMICE.round({'latitude N': 4, 'longitude W': 4})\n", - " PROMICE.to_csv(f'{path}/PROMICE.csv', index=None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/validation/old/merge_PROMICE_and_S3_data.ipynb b/validation/old/merge_PROMICE_and_S3_data.ipynb deleted file mode 100644 index 516b7cd..0000000 --- a/validation/old/merge_PROMICE_and_S3_data.ipynb +++ /dev/null @@ -1,146 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Merge PROMICE with S3_extract data\n", - "Process the S3-data and merges it with PROMICE data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "import numpy as np\n", - "\n", - "s3_extract = pd.read_csv('./data/OLCI/S3_extract.csv') #Put in path to S3_extract data\n", - "full_size = s3_extract.shape[0]\n", - "s3_extract = s3_extract.drop_duplicates(keep='first') #dropping duplicates except first sample, if any.\n", - "print(f'Removed duplicates: {full_size-s3_extract.shape[0]}')\n", - "\n", - "# Specify columns to keep\n", - "s3_extract = s3_extract[['station', 'year', 'month', 'minute', 'dayofyear', 'hour', 'sza', 'vza', 'saa', 'vaa', 'slope', 'ndsi', 'ndbi', 'humidity', 'total_columnar_water_vapour', 'total_ozone',\n", - " 'Oa01_reflectance','Oa02_reflectance','Oa03_reflectance','Oa04_reflectance','Oa05_reflectance','Oa06_reflectance','Oa07_reflectance',\n", - " 'Oa08_reflectance','Oa09_reflectance', 'Oa10_reflectance','Oa11_reflectance', 'Oa12_reflectance','Oa13_reflectance','Oa14_reflectance',\n", - " 'Oa15_reflectance','Oa16_reflectance','Oa17_reflectance','Oa18_reflectance','Oa19_reflectance','Oa20_reflectance','Oa21_reflectance',\n", - " ]]\n", - "\n", - "# the PROMICE data is only given in hourly interval. 
Hence the hour have to be corrected in the s3_extract data.\n", - "#s3_extract['hour'] = s3_extract['hour'] - 1 if s3_extract['minute'].astype('int64') < 30 else (s3_extract['hour'] + 1) \n", - "\n", - "# Process data to the right format\n", - "s3_extract = s3_extract.round({\n", - " 'sza': 2,\n", - " 'vza': 2,\n", - " 'saa': 2,\n", - " 'vaa': 2,\n", - " 'slope': 2,\n", - " 'ndsi': 2, \n", - " 'ndbi': 2, \n", - " 'humidity': 2, \n", - " 'total_columnar_water_vapour': 2,\n", - " 'Oa01_reflectance': 2,\n", - " 'Oa02_reflectance': 2,\n", - " 'Oa03_reflectance': 2,\n", - " 'Oa04_reflectance': 2,\n", - " 'Oa05_reflectance': 2,\n", - " 'Oa06_reflectance': 2,\n", - " 'Oa07_reflectance': 2,\n", - " 'Oa08_reflectance': 2,\n", - " 'Oa09_reflectance': 2,\n", - " 'Oa10_reflectance': 2,\n", - " 'Oa11_reflectance': 2,\n", - " 'Oa12_reflectance': 2,\n", - " 'Oa13_reflectance': 2,\n", - " 'Oa14_reflectance': 2,\n", - " 'Oa15_reflectance': 2,\n", - " 'Oa16_reflectance': 2,\n", - " 'Oa17_reflectance': 2,\n", - " 'Oa18_reflectance': 2,\n", - " 'Oa19_reflectance': 2,\n", - " 'Oa20_reflectance': 2,\n", - " 'Oa21_reflectance': 2\n", - "})\n", - "\n", - "# Rename columns\n", - "s3_extract.rename(columns={'station':'station',\n", - " 'year': 'year', \n", - " 'month': 'month',\n", - " 'dayofyear': 'dayofyear',\n", - " 'hour': 'hour',\n", - " 'sza': 'sza',\n", - " 'vza': 'vza',\n", - " 'saa': 'saa', \n", - " 'vaa': 'vaa',\n", - " 'slope': 'slope',\n", - " 'ndsi': 'ndsi',\n", - " 'ndbi': 'ndbi',\n", - " 'humidity': 'humidity',\n", - " 'total_columnar_water_vapour': 'total_columnar_water_vapour',\n", - " 'total_ozone': 'total_ozone',\n", - " 'Oa01_reflectance': 'Oa01',\n", - " 'Oa02_reflectance': 'Oa02',\n", - " 'Oa03_reflectance': 'Oa03',\n", - " 'Oa04_reflectance': 'Oa04',\n", - " 'Oa05_reflectance': 'Oa05',\n", - " 'Oa06_reflectance': 'Oa06',\n", - " 'Oa07_reflectance': 'Oa07',\n", - " 'Oa08_reflectance': 'Oa08',\n", - " 'Oa09_reflectance': 'Oa09',\n", - " 'Oa10_reflectance': 'Oa10',\n", - " 'Oa11_reflectance': 'Oa11',\n", - " 'Oa12_reflectance': 'Oa12',\n", - " 'Oa13_reflectance': 'Oa13',\n", - " 'Oa14_reflectance': 'Oa14',\n", - " 'Oa15_reflectance': 'Oa15',\n", - " 'Oa16_reflectance': 'Oa16',\n", - " 'Oa17_reflectance': 'Oa17',\n", - " 'Oa18_reflectance': 'Oa18',\n", - " 'Oa19_reflectance': 'Oa19',\n", - " 'Oa20_reflectance': 'Oa20',\n", - " 'Oa21_reflectance': 'Oa21' \n", - " }, \n", - " inplace=True)\n", - "\n", - "# Merge images with PROMICE data\n", - "OLCI_with_PROMICE = pd.merge(left=s3_extract, right=pd.read_csv('data/PROMICE/PROMICE.csv'), \n", - " left_on=['year','month','dayofyear','hour','station'], \n", - " right_on=['year','month','dayofyear','hour', 'station'])\n", - "# Create merged csv\n", - "OLCI_with_PROMICE.to_csv(f'data/OLCI/S3_extract_with_PROMICE.csv', index=None) " - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.7" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/wrapper.py b/wrapper.py new file mode 100644 index 0000000..faa5cdf --- /dev/null +++ b/wrapper.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" +Created on Sun Aug 16 09:01:00 2020 + 
+@author: bav
+"""
+filepath = 'NUK_K_hour_v03.txt'  # PROMICE hourly file to convert; not passed to main() as written
+
+from jaws import main
+main()
\ No newline at end of file
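# A sketch of actually handing 'filepath' to jaws: jaws is driven from the
# command line, so one way is to stage sys.argv before calling its entry
# point. This invocation is an assumption, not a documented API; check the
# jaws documentation for the supported call signature.
import sys
from jaws import main

filepath = 'NUK_K_hour_v03.txt'
sys.argv = ['jaws', filepath]   # mimics: jaws NUK_K_hour_v03.txt
main()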